void setupStreams() {
      ALOG(TX_LOG_INFO, TAG,"setup Streams\n");
   static MediaSubsessionIterator* setupIter = NULL;
   if(setupIter == NULL)
   {
       ALOG(TX_LOG_INFO, TAG,"setuplter == NULL\n");
   }else{
       ALOG(TX_LOG_INFO, TAG,"setuplter !!!!== NULL\n");
   }
   if (setupIter == NULL) setupIter = new MediaSubsessionIterator(*session);
   ALOG(TX_LOG_INFO, TAG,"1111\n");
   while ((subsession = setupIter->next()) != NULL) {
     // We have another subsession left to set up:
     if (subsession->clientPortNum() == 0) continue; // port # was not set

     setupSubsession(subsession, streamUsingTCP, forceMulticastOnUnspecified, continueAfterSETUP);
     return;
   }

   // We're done setting up subsessions.
   delete setupIter;
   if (!madeProgress) shutdown();

   // Create output files:
   if (createReceivers) {
     if (fileOutputInterval > 0) {
       createPeriodicOutputFiles();
     } else {
       createOutputFiles("");
     }
   }

   // Finally, start playing each subsession, to start the data flow:
   if (duration == 0) {
     if (scale > 0) duration = session->playEndTime() - initialSeekTime; // use SDP end time
     else if (scale < 0) duration = initialSeekTime;
   }
   if (duration < 0) duration = 0.0;

   endTime = initialSeekTime;
   if (scale > 0) {
     if (duration <= 0) endTime = -1.0f;
     else endTime = initialSeekTime + duration;
   } else {
     endTime = initialSeekTime - duration;
     if (endTime < 0) endTime = 0.0f;
   }

   char const* absStartTime = initialAbsoluteSeekTime != NULL ? initialAbsoluteSeekTime : session->absStartTime();
   if (absStartTime != NULL) {
     // Either we or the server have specified that seeking should be done by 'absolute' time:
     startPlayingSession(session, absStartTime, session->absEndTime(), scale, continueAfterPLAY);
   } else {
     // Normal case: Seek by relative time (NPT):
     startPlayingSession(session, initialSeekTime, endTime, scale, continueAfterPLAY);
   }
 }
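Note that setupStreams() returns as soon as it has issued a single SETUP; the response handler passed to setupSubsession() is expected to call setupStreams() again, so the subsessions are set up one at a time until the iterator is exhausted. Below is a minimal sketch of such a handler, loosely modelled on openRTSP's playCommon.cpp and reusing the example's globals (env, subsession, madeProgress); the real handler also prints the port numbers and installs the RTCP "BYE" handler.

// Sketch only: drives the one-subsession-at-a-time SETUP loop above.
void continueAfterSETUP(RTSPClient* /*client*/, int resultCode, char* resultString) {
  if (resultCode == 0) {
    *env << "Set up \"" << subsession->mediumName() << "/"
         << subsession->codecName() << "\" subsession\n";
    madeProgress = True;
  } else {
    *env << "Failed to set up subsession: " << resultString << "\n";
  }
  delete[] resultString;

  // Set up the next subsession, if any; once the iterator is exhausted,
  // setupStreams() falls through to creating sinks and sending PLAY:
  setupStreams();
}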
Example #2
void sessionAfterPlaying(void* /*clientData*/) {
  if (!playContinuously) {
    shutdown(0);
  } else {
    // We've been asked to play the stream(s) over again:
    startPlayingSession(session, initialSeekTime, endTime, scale, continueAfterPLAY);
  }
}
Example #3
void sessionAfterPlaying(void* /*clientData*/) {
  if (!playContinuously) {
    shutdown(0);
  } else {
    // We've been asked to play the stream(s) over again.
    // First, reset state from the current session:
    if (env != NULL) {
      env->taskScheduler().unscheduleDelayedTask(sessionTimerTask);
      env->taskScheduler().unscheduleDelayedTask(arrivalCheckTimerTask);
      env->taskScheduler().unscheduleDelayedTask(interPacketGapCheckTimerTask);
      env->taskScheduler().unscheduleDelayedTask(qosMeasurementTimerTask);
    }
    totNumPacketsReceived = ~0;

    startPlayingSession(session, initialSeekTime, endTime, scale, continueAfterPLAY);
  }
}
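The unscheduleDelayedTask() calls matter because each of those task tokens was returned by an earlier scheduleDelayedTask() call; cancelling them keeps a stale timer from the finished run from firing into the restarted session. A hedged sketch of how such a token is typically created with live555's TaskScheduler API follows (the helper name scheduleSessionTimer and the handler body are assumptions for illustration):

// Sketch only: sessionTimerTask is a TaskToken returned by scheduleDelayedTask(),
// which is exactly what unscheduleDelayedTask() above cancels.
void sessionTimerHandler(void* /*clientData*/) {
  sessionTimerTask = NULL;
  sessionAfterPlaying(NULL); // treat the duration timeout like end-of-stream
}

void scheduleSessionTimer(double durationSeconds) {
  int64_t uSecsToDelay = (int64_t)(durationSeconds * 1000000.0);
  sessionTimerTask = env->taskScheduler()
      .scheduleDelayedTask(uSecsToDelay, (TaskFunc*)sessionTimerHandler, NULL);
}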
Example #4
void setupStreams() {
  static MediaSubsessionIterator* setupIter = NULL;
  if (setupIter == NULL) setupIter = new MediaSubsessionIterator(*session);
  while ((subsession = setupIter->next()) != NULL) {
    // We have another subsession left to set up:
    if (subsession->clientPortNum() == 0) continue; // port # was not set

    setupSubsession(subsession, streamUsingTCP, continueAfterSETUP);
    return;
  }

  // We're done setting up subsessions.
  delete setupIter;
  if (!madeProgress) shutdown();

  // Create output files:
  if (createReceivers) {
#if 0 /*wayde*/
    if (outputQuickTimeFile) {
      // Create a "QuickTimeFileSink", to write to 'stdout':
      qtOut = QuickTimeFileSink::createNew(*env, *session, "stdout",
					   fileSinkBufferSize,
					   movieWidth, movieHeight,
					   movieFPS,
					   packetLossCompensate,
					   syncStreams,
					   generateHintTracks,
					   generateMP4Format);
      if (qtOut == NULL) {
	*env << "Failed to create QuickTime file sink for stdout: " << env->getResultMsg();
	shutdown();
      }

      qtOut->startPlaying(sessionAfterPlaying, NULL);
    } else if (outputAVIFile) {
      // Create an "AVIFileSink", to write to 'stdout':
      aviOut = AVIFileSink::createNew(*env, *session, "stdout",
				      fileSinkBufferSize,
				      movieWidth, movieHeight,
				      movieFPS,
				      packetLossCompensate);
      if (aviOut == NULL) {
	*env << "Failed to create AVI file sink for stdout: " << env->getResultMsg();
	shutdown();
      }

      aviOut->startPlaying(sessionAfterPlaying, NULL);
    } else
#endif /*wayde*/
    {
      // Create and start "FileSink"s for each subsession:
      madeProgress = False;
      MediaSubsessionIterator iter(*session);
      while ((subsession = iter.next()) != NULL) {
	if (subsession->readSource() == NULL) continue; // was not initiated

	// Create an output file for each desired stream:
	char outFileName[1000];
	if (singleMedium == NULL) {
	  // Output file name is
	  //     "<filename-prefix><medium_name>-<codec_name>-<counter>"
	  static unsigned streamCounter = 0;
	  snprintf(outFileName, sizeof outFileName, "%s%s-%s-%d",
		   fileNamePrefix, subsession->mediumName(),
		   subsession->codecName(), ++streamCounter);
	} else {
	  sprintf(outFileName, "stdout");
	}
	FileSink* fileSink;
	if (strcmp(subsession->mediumName(), "audio") == 0 &&
	    (strcmp(subsession->codecName(), "AMR") == 0 ||
	     strcmp(subsession->codecName(), "AMR-WB") == 0)) {
	  // For AMR audio streams, we use a special sink that inserts AMR frame hdrs:
	  fileSink = AMRAudioFileSink::createNew(*env, outFileName,
						 fileSinkBufferSize, oneFilePerFrame);
	} else if (strcmp(subsession->mediumName(), "video") == 0 &&
	    (strcmp(subsession->codecName(), "H264") == 0)) {
	  // For H.264 video streams, we use a special sink that inserts start codes:
	  fileSink = H264VideoFileSink::createNew(*env, outFileName,
						 fileSinkBufferSize, oneFilePerFrame);
	} else {
	  // Normal case:
	  fileSink = FileSink::createNew(*env, outFileName,
					 fileSinkBufferSize, oneFilePerFrame);
	}
	subsession->sink = fileSink;
	if (subsession->sink == NULL) {
	  *env << "Failed to create FileSink for \"" << outFileName
		  << "\": " << env->getResultMsg() << "\n";
	} else {
	  if (singleMedium == NULL) {
	    *env << "Created output file: \"" << outFileName << "\"\n";
	  } else {
	    *env << "Outputting data from the \"" << subsession->mediumName()
			<< "/" << subsession->codecName()
			<< "\" subsession to 'stdout'\n";
	  }

	  if (strcmp(subsession->mediumName(), "video") == 0 &&
	      strcmp(subsession->codecName(), "MP4V-ES") == 0 &&
	      subsession->fmtp_config() != NULL) {
	    // For MPEG-4 video RTP streams, the 'config' information
	    // from the SDP description contains useful VOL etc. headers.
	    // Insert this data at the front of the output file:
	    unsigned configLen;
	    unsigned char* configData
	      = parseGeneralConfigStr(subsession->fmtp_config(), configLen);
	    struct timeval timeNow;
	    gettimeofday(&timeNow, NULL);
	    fileSink->addData(configData, configLen, timeNow);
	    delete[] configData;
	  }

	  subsession->sink->startPlaying(*(subsession->readSource()),
					 subsessionAfterPlaying,
					 subsession);

	  // Also set a handler to be called if a RTCP "BYE" arrives
	  // for this subsession:
	  if (subsession->rtcpInstance() != NULL) {
	    subsession->rtcpInstance()->setByeHandler(subsessionByeHandler,
						      subsession);
	  }

	  madeProgress = True;
	}
      }
      if (!madeProgress) shutdown();
    }
  }

  // Finally, start playing each subsession, to start the data flow:
  if (duration == 0) {
    if (scale > 0) duration = session->playEndTime() - initialSeekTime; // use SDP end time
    else if (scale < 0) duration = initialSeekTime;
  }
  if (duration < 0) duration = 0.0;

  endTime = initialSeekTime;
  if (scale > 0) {
    if (duration <= 0) endTime = -1.0f;
    else endTime = initialSeekTime + duration;
  } else {
    endTime = initialSeekTime - duration;
    if (endTime < 0) endTime = 0.0f;
  }

  startPlayingSession(session, initialSeekTime, endTime, scale, continueAfterPLAY);
}
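The FileSink branch above registers two callbacks that none of these examples show: subsessionAfterPlaying (called when a subsession's data stops) and subsessionByeHandler (called when the server sends an RTCP "BYE"). A simplified sketch of what they usually look like in openRTSP's playCommon.cpp follows; the real versions also print diagnostics and interact with the arrival-check timers.

void subsessionAfterPlaying(void* clientData) {
  // One subsession has finished; close its sink:
  MediaSubsession* subsession = (MediaSubsession*)clientData;
  Medium::close(subsession->sink);
  subsession->sink = NULL;

  // If every subsession has now finished, the whole session is done:
  MediaSession& parentSession = subsession->parentSession();
  MediaSubsessionIterator iter(parentSession);
  MediaSubsession* sub;
  while ((sub = iter.next()) != NULL) {
    if (sub->sink != NULL) return; // this subsession is still active
  }
  sessionAfterPlaying(NULL);
}

void subsessionByeHandler(void* clientData) {
  // Treat an RTCP "BYE" the same as the subsession ending normally:
  subsessionAfterPlaying(clientData);
}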
Example #5
void startPlayingSession(MediaSession* /*session*/, const char* /*start*/, const char* /*end*/, float /*scale*/, RTSPClient::responseHandler* afterFunc) {
	// This overload ignores the absolute ("clock=") start/end times and forwards to the numeric overload:
	startPlayingSession(NULL, (double)0, (double)0, 0, afterFunc);
}
void setupStreams() 
{
	static MediaSubsessionIterator* setupIter = NULL;

	if (setupIter == NULL) setupIter = new MediaSubsessionIterator(*session);

	while ((subsession = setupIter->next()) != NULL) {

		// We have another subsession left to set up:
		if (subsession->clientPortNum() == 0) continue; // port # was not set

		setupSubsession(subsession, streamUsingTCP, continueAfterSETUP);

		return;
	}

	// We're done setting up subsessions.
	delete setupIter;
	if (!madeProgress) shutdown();

	// Create output files:
	if (createReceivers) 
	{
		// Create and start "FileSink"s for each subsession:
		madeProgress = False;
		MediaSubsessionIterator iter(*session);

		while ((subsession = iter.next()) != NULL) 
		{
			if (subsession->readSource() == NULL) continue; // was not initiated

			// Create an output file for each desired stream:
			char outFileName[1000];
			if (singleMedium == NULL) 
			{					
				sprintf(outFileName,"C:\\msys\\1.0\\home\\admin\\ffmpeg\\live.264");
			} 
			else 
			{
				sprintf(outFileName, "stdout");
			}

			FileSink* fileSink;

			if (strcmp(subsession->mediumName(), "video") == 0 &&(strcmp(subsession->codecName(), "H264") == 0)) 
			{
				// For H.264 video stream, we use a special sink that insert start_codes:
				fileSink = H264VideoFileSink::createNew(*env,outFileName,subsession->fmtp_spropparametersets(),fileSinkBufferSize,oneFilePerFrame);
			} 
			else 
			{
				// Normal case:
				fileSink = FileSink::createNew(*env, outFileName,fileSinkBufferSize, oneFilePerFrame);
			}

			subsession->sink = fileSink;

			if (subsession->sink == NULL) {
				fprintf(stderr,"Failed to create FileSink for \"%s" ,outFileName);
			} 
			else 
			{
				subsession->sink->startPlaying(*(subsession->readSource()),subsessionAfterPlaying,subsession);

				// Also set a handler to be called if a RTCP "BYE" arrives
				// for this subsession:
				if (subsession->rtcpInstance() != NULL) {
					subsession->rtcpInstance()->setByeHandler(subsessionByeHandler, subsession);
				}
				madeProgress = True;
			}
		}
		if (!madeProgress) shutdown();
	}

	// Finally, start playing each subsession, to start the data flow:
	if (duration == 0) {
		if (scale > 0) duration = session->playEndTime() - initialSeekTime; // use SDP end time
		else if (scale < 0) duration = initialSeekTime;
	}

	if (duration < 0) duration = 0.0;

	endTime = initialSeekTime;
	if (scale > 0) {
		if (duration <= 0) endTime = -1.0f;
		else endTime = initialSeekTime + duration;
	} else {
		endTime = initialSeekTime - duration;
		if (endTime < 0) endTime = 0.0f;
	}

	startPlayingSession(session, initialSeekTime, endTime, scale, continueAfterPLAY);
}
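In openRTSP's playCommon.cpp the numeric startPlayingSession() overload is a thin wrapper around RTSPClient::sendPlayCommand(); the stub overload at the top of this example (which forwards NULL and zeroes) suggests this project swapped that body out, so the following is only a sketch of the stock version for comparison. ourRTSPClient is assumed to be the example's global RTSPClient pointer.

void startPlayingSession(MediaSession* session, double start, double end, float scale,
                         RTSPClient::responseHandler* afterFunc) {
  // sendPlayCommand() treats a negative 'end' as "play to the end of the stream",
  // which is why the endTime computation above falls back to -1.0f:
  ourRTSPClient->sendPlayCommand(*session, afterFunc, start, end, scale);
}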