reactor::MediaFrame reactor::ColorSpaceReaderFilter::readFrame(void)
{
  // Until the conversion context has been set up, act as a plain pass-through.
  if (!m_initialized)
    return ReaderFilter::readFrame();

  // Pull the next frame from the upstream reader, then convert its color
  // space into the pre-allocated m_convertedFrame via libswscale.
  MediaFrame source = ReaderFilter::readFrame();

  sws_scale(m_conversionContext,
            source.getBuffer(), source.getLineSize(),
            0, source.getHeight(),
            m_convertedFrame->data, m_convertedFrame->linesize);

  // Wrap the converted pixels in a MediaFrame tagged with the target format.
  return MediaFrame(m_convertedFrame, m_format);
}
// Example #2
// 0
static void
VDADecoderCallback (void *decompressionOutputRefCon, CFDictionaryRef frameInfo, OSStatus status, uint32_t infoFlags, CVImageBufferRef imageBuffer)
{
	// Completion callback invoked by the VDA hardware decoder for each decoded
	// frame. Recovers the MediaFrame stashed in frameInfo, points it at the
	// decoded CVPixelBuffer pixels, and reports the decode as completed.
	MoonVDADecoder *decoder = (MoonVDADecoder *) decompressionOutputRefCon;
	VideoStream *vs = (VideoStream *) decoder->GetStream ();

	// FIXME: Is this always 1 thread?  Can we optimize this
	decoder->GetDeployment ()->RegisterThread ();

	Deployment::SetCurrent (decoder->GetDeployment ());

	// NOTE(review): on a NULL buffer (dropped/failed decode) we return without
	// touching the MediaFrame stored in frameInfo — verify that frame is
	// released elsewhere in this case, otherwise its ref leaks. Same applies
	// to the format-mismatch early return below.
	if (imageBuffer == NULL) {
		return;
	}

	// Only 8-bit 4:2:2 ('2vuy') output is handled.
	OSType format_type = CVPixelBufferGetPixelFormatType (imageBuffer);
	if (format_type != kCVPixelFormatType_422YpCbCr8) {
		g_warning ("Mismatched format in VDA");
		return;
	}

	// The MediaFrame was attached to the decode request through frameInfo.
	MediaFrame *mf = (MediaFrame *) CFDictionaryGetValue (frameInfo, CFSTR ("MoonMediaFrame"));

	// Drop the compressed input buffer; the decoded pixels live in imageBuffer.
	mf->AddState (MediaFrameVUY2);
	mf->FreeBuffer ();
	mf->SetBufLen (0);

	mf->srcSlideY = 0;
	mf->srcSlideH = vs->GetHeight ();

	mf->width = vs->GetWidth ();
	mf->height = vs->GetHeight ();

	// NOTE(review): the base address is locked here but never unlocked in this
	// callback — presumably unlocked when the frame is rendered/destroyed;
	// confirm against the consumer of decoder_specific_data.
	CVPixelBufferLockBaseAddress (imageBuffer, 0);

	// Point the frame directly at the pixel buffer's plane 0.
	mf->data_stride [0] = (uint8_t *) CVPixelBufferGetBaseAddress (imageBuffer);
	mf->srcStride [0] = CVPixelBufferGetBytesPerRow (imageBuffer);

	mf->AddState (MediaFrameDecoded);

	// Keep the image buffer alive for as long as the MediaFrame references it.
	mf->decoder_specific_data = imageBuffer;
	CVPixelBufferRetain (imageBuffer);

	decoder->ReportDecodeFrameCompleted (mf);

	// Balance the ref held across the asynchronous decode.
	mf->unref ();
}
// Example #3
// 0
void
Mp3FrameReader::ReadFrame ()
{
	/* Parse the next MPEG audio frame from the demuxer's current source and
	 * report it via ReportGetFrameCompleted. If there is not enough buffered
	 * data, more is requested asynchronously and this method returns;
	 * ReadFrameCallback re-enters it once data arrives. End of stream is
	 * reported as a NULL frame. */
	MpegFrameHeader mpeg;
	guint64 duration;
	guint32 len;
	MediaFrame *frame;
	MemoryBuffer *current_source = demuxer->GetCurrentSource ();
	/* Remember where we started so we can rewind if the frame is incomplete. */
	guint64 start_position = current_source->GetPosition ();
	
	if (!FindMpegHeader (&mpeg, NULL, current_source)) {
		LOG_MP3 ("Mp3FrameReader::ReadFrame (): Not enough data (mpeg header not found or not enough data for entire frame) - requesting more\n");
		if (!demuxer->RequestMoreData (ReadFrameCallback)) {
			/* No more data */
			LOG_MP3 ("Mp3FrameReader::ReadFrame (): reached end of stream.\n");
			demuxer->ReportGetFrameCompleted (NULL);
		}
		return;
	}

	//printf ("Mp3FrameReader::ReadFrame():\n");
	//mpeg_print_info (&mpeg);
	
	if (mpeg.bit_rate == 0) {
		// use the most recently specified bit rate
		mpeg.bit_rate = bit_rate;
	}
	
	/* Remember this bit rate for future headers that omit it. */
	bit_rate = mpeg.bit_rate;
	
	duration = mpeg_frame_duration (&mpeg);
	
	/* Index the frame's absolute stream offset and pts (used for seeking). */
	AddFrameIndex (demuxer->GetCurrentPosition () + current_source->GetPosition (), cur_pts, duration, bit_rate);
	
	len = (guint32) mpeg_frame_length (&mpeg);

	/* Check if we have enough data */
	if (current_source->GetRemainingSize () < len) {
		/* We need to seek back to where we started reading this frame so that the next time we're called
		 * we start parsing from the beginning again */
		current_source->SeekSet (start_position);
		
		if (!demuxer->RequestMoreData (ReadFrameCallback, MAX (len, 1024))) {
			/* No more data */
			demuxer->ReportGetFrameCompleted (NULL);
			return;
		}
		
		return;
	}

	frame = new MediaFrame (stream);
	if (!frame->AllocateBuffer (len)) {
		/* NOTE(review): allocation failure returns silently — nothing is
		 * reported to the demuxer. Verify callers recover from a request
		 * that never completes. */
		frame->unref ();
		return;
	}

	if (!current_source->Read (frame->GetBuffer (), len)) {
		/* This shouldn't happen, we've already checked that we have enough data */
		demuxer->ReportErrorOccurred ("Mp3Demuxer could not read from stream.");
		frame->unref ();
		return;
	}
	
	frame->pts = cur_pts;
	frame->duration = duration;
	
	frame->AddState (MediaFrameDemuxed);
	
	/* Advance the running pts for the next frame. */
	cur_pts += duration;
	
	/* The demuxer takes its own ref in ReportGetFrameCompleted; drop ours. */
	demuxer->ReportGetFrameCompleted (frame);
	frame->unref ();
}
void H264RealTimeStreamFramer::doGetNextFrame() {
    // Deliver the next H.264 NALU (or a fragment of one) from the capture
    // source into fTo. When USE_H264_VIDEO_RTP_SINK is not defined, NALUs
    // larger than the packet budget are emitted as FU-A fragments
    // (RFC 6184: FU indicator type 28, S/E bits in the FU header).
    struct timespec TimeSpec = {0, 0};

    if (fNeedNextFrame) {
        if (fFirstFrame) {
            // Very first invocation: just schedule the capture poll.
            nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc *)&tryGetNextFrame, this);
            fFirstFrame = False;
            PRINT_LOG(ASSERT, "First H264 frame");
            return;
        }

        MediaFrame * nextFrame = fFrameCapture->GetNextFrame(fCurrentFrame);
        if (NULL == nextFrame) {
            // No frame ready yet: poll again in 10 ms.
            nextTask() = envir().taskScheduler().scheduleDelayedTask(10000, (TaskFunc *)&tryGetNextFrame, this);
            return;
        }

        // ASSERT(nextFrame->IsH264Frame(), "nextFrame MUST be H264Frame");

        PRINT_LOG(ASSERT, "Get %c frame, size = %u", nextFrame->IsKeyFrame() ? 'I' : 'P', nextFrame->Length());

        clock_gettime(CLOCK_MONOTONIC, &TimeSpec);
        PRINT_LOG(VERBOSE, "Time to get frame :  %12ld.%9ld", TimeSpec.tv_sec, TimeSpec.tv_nsec);

        // Start at the frame's first NALU; fOffset == 1 means "fragment
        // payload begins right after the one-byte NAL header".
        fCurrentFrame = dynamic_cast<H264Frame *>(nextFrame);
        fNeedNextFrame = False;
        fCurrentNaluUnitIndex = 0;
        fOffset = 1;

        // ASSERT(fCurrentFrame->NaluCount() > 0, "H264 frame MUST have at least 1 nalu unit");
    }

    const H264Frame::NaluUnit & Nalu     = fCurrentFrame->GetNaluUnit(fCurrentNaluUnitIndex);
    const uint8_t             * addr     = Nalu.s_Addr;
    uint32_t                    size     = Nalu.s_Length;
    uint8_t                     naluType = (addr[0] & 0x1F);

    if (naluType == NALU_TYPE_SPS && fNeedSPS) {
        // Save SPS(Sequence Parameter Set)
        saveCopyOfSPS((u_int8_t *)addr, size);
        fNeedSPS = False;
    } else if (naluType == NALU_TYPE_PPS && fNeedPPS) {
        // Save PPS(Picture Parameter Set)
        saveCopyOfPPS((u_int8_t *)addr, size);
        fNeedPPS = False;
    }

#ifdef USE_H264_VIDEO_RTP_SINK

    // The RTP sink does its own fragmentation: hand over the whole NALU,
    // truncating if it exceeds the sink's buffer.
    fFrameSize = (size > fMaxSize) ? fMaxSize : size;
    fNumTruncatedBytes = size - fFrameSize;

    memmove(fTo, addr, fFrameSize);

    fPresentationTime = fCurrentFrame->TimeStamp();

    fCurrentNaluUnitIndex ++;

#else // USE_H264_VIDEO_RTP_SINK

    // Make sure max size of the data MUST NOT be greater then (MAX_BYTES_PER_UDP_PACKET - RTP_HEADER_SIZE)
    if (fMaxSize > MAX_BYTES_PER_UDP_PACKET - RTP_HEADER_SIZE) {
        fMaxSize = MAX_BYTES_PER_UDP_PACKET - RTP_HEADER_SIZE;
    }

    if (size > fMaxSize) {
        // NALU does not fit in one packet: emit one FU-A fragment.
        // Packet = 2-byte FU indicator/header + NALU bytes starting at fOffset.
        fFrameSize = size - fOffset + 2;
        fFrameSize = (fFrameSize > fMaxSize) ? fMaxSize : fFrameSize;
        memmove(fTo + 2, addr + fOffset, fFrameSize - 2);

        // FU indicator: original F/NRI bits | type 28 (FU-A).
        fTo[0] = (addr[0] & 0xE0) | 0x1C;
        if (fOffset == 1) {
            // First fragment: set the S (start) bit and advance into the NALU.
            // (A first fragment can never also be the last one here, because
            // size > fMaxSize guarantees at least two fragments.)
            fTo[1] = (addr[0] & 0x1F) | 0x80;
            fOffset += fFrameSize - 2;
        } else if (fOffset + fFrameSize - 2 >= size) {
            // Last fragment: set the E (end) bit, move on to the next NALU.
            // BUG FIX: the offset reset used to be clobbered by an
            // unconditional "fOffset += fFrameSize - 2" after this if/else,
            // so the next oversized NALU started at a stale offset without
            // its S bit. The advance now only happens on non-final fragments.
            fTo[1] = (addr[0] & 0x1F) | 0x40;
            fCurrentNaluUnitIndex ++;
            fOffset = 1;
        } else {
            // Middle fragment: neither S nor E bit.
            fTo[1] = (addr[0] & 0x1F);
            fOffset += fFrameSize - 2;
        }

    } else {
        // NALU fits in a single packet: send it unfragmented.
        fFrameSize = size;
        memmove(fTo, addr, fFrameSize);
        fCurrentNaluUnitIndex ++;
    }

    fPresentationTime = fCurrentFrame->TimeStamp();
    fNumTruncatedBytes = 0;

#endif // USE_H264_VIDEO_RTP_SINK

    if (fCurrentNaluUnitIndex >= fCurrentFrame->NaluCount()) {
        // Frame exhausted: flag end-of-picture and fetch a new frame next call.
        fPictureEndMarker = True;
        fNeedNextFrame = True;
        fDurationInMicroseconds = fCurrentFrame->Duration();
    } else {
        fPictureEndMarker = False;
        fDurationInMicroseconds = 0;
    }

    afterGetting(this);

    if (fNeedNextFrame)
    {
        clock_gettime(CLOCK_MONOTONIC, &TimeSpec);
        PRINT_LOG(VERBOSE, "Time to sent frame : %12ld.%9ld", TimeSpec.tv_sec, TimeSpec.tv_nsec);
    }
}
void RTMPMP4Stream::onMediaFrame(MediaFrame &media)
{
	//Transcode/repacketize a decoded media frame and play it over RTMP.
	//PlayMediaFrame does not take ownership of the frames it is given (it is
	//also called with a stack-allocated frame below), so every RTMP frame
	//allocated here must be freed on every exit path.
	//Depending on the media type
	switch (media.GetType())
	{
		case MediaFrame::Audio:
		{
			//Get audio frame
			AudioFrame& audio = (AudioFrame&)media;
			//Check codec
			switch(audio.GetCodec())
			{
				case AudioCodec::PCMA:
				case AudioCodec::PCMU:
				{
					//Create rtmp frame only for codecs we can handle
					//(BUG FIX: it used to be allocated before the codec check
					//and leaked on the unsupported-codec return)
					RTMPAudioFrame *frame = new RTMPAudioFrame(0,512);
					WORD raw[512];
					DWORD rawsize = 512;
					//Decode audio frame to raw PCM
					DWORD rawlen = decoder->Decode(audio.GetData(),audio.GetLength(),raw,rawsize);
					//Encode PCM into the RTMP payload
					DWORD len = encoder->Encode(raw,rawlen,frame->GetMediaData(),frame->GetMaxMediaSize());
					//Set length
					frame->SetMediaSize(len);
					//Set type
					frame->SetAudioCodec(RTMPAudioFrame::SPEEX);
					frame->SetSoundRate(RTMPAudioFrame::RATE11khz);
					frame->SetSamples16Bits(1);
					frame->SetStereo(0);
					//Set timestamp (presumably 8 kHz clock -> ms, mirroring /90 for video)
					frame->SetTimestamp(audio.GetTimeStamp()/8);
					//Send it
					PlayMediaFrame(frame);
					//Free it (BUG FIX: the audio frame was never deleted before)
					delete(frame);
					break;
				}
				default:
					//Not supported
					return;
			}
		}
		break;
		case MediaFrame::Video:
		{
			//Get video frame
			VideoFrame& video = (VideoFrame&)media;
			//Create rtmp frame
			RTMPVideoFrame *frame = new RTMPVideoFrame(video.GetTimeStamp()/90,video.GetLength());
			//Check codec
			switch(video.GetCodec())
			{
				case VideoCodec::H263_1996:
				case VideoCodec::H263_1998:
				#ifdef FLV1PARSER
				{
					//Create FLV1parser in case we need it
					flv1Parser *parser = new flv1Parser(frame->GetMediaData(),frame->GetMaxMediaSize());
					//Proccess
					if (!parser->FrameFromH263(video.GetData(),video.GetLength()))
					{
						//Clean up before propagating the error
						//(BUG FIX: parser and frame were leaked on this path)
						delete(parser);
						delete(frame);
						throw new std::exception();
					}
					//Set lengtht
					frame->SetMediaSize(parser->GetSize());
					//Parser no longer needed (BUG FIX: it was leaked before)
					delete(parser);
					//If it is intra
					if (video.IsIntra())
						//Set type
						frame->SetFrameType(RTMPVideoFrame::INTRA);
					else
						//Set type
						frame->SetFrameType(RTMPVideoFrame::INTER);
					//Set type
					frame->SetVideoCodec(RTMPVideoFrame::FLV1);
				}
				#endif
					break;
				case VideoCodec::H264:
				{
					//Set Codec
					frame->SetVideoCodec(RTMPVideoFrame::AVC);
					//If it is intra
					if (video.IsIntra())
					{
						//Set type
						frame->SetFrameType(RTMPVideoFrame::INTRA);
						//If we have an AVC sequence description
						if (desc)
						{
							//Create the description frame on the stack
							RTMPVideoFrame fdesc(frame->GetTimestamp(),desc);
							//Play it before the key frame
							PlayMediaFrame(&fdesc);
						}
					} else {
						//Set type
						frame->SetFrameType(RTMPVideoFrame::INTER);
					}
					//Set NALU type
					frame->SetAVCType(1);
					//Set no delay
					frame->SetAVCTS(0);
					//Set Data
					frame->SetVideoFrame(video.GetData(),video.GetLength());
					break;
				}
				default:
					//Not supported; free the frame
					//(BUG FIX: it was leaked on this path before)
					delete(frame);
					return;
			}
			//Send it
			PlayMediaFrame(frame);
			//Delete it
			delete(frame);
		}
		break;
	}
}