void SkypekitTestEngine::TransportSendFrame(VideoData &frame) {
	Sid::Binary binary((char *)frame.Data(), frame.Size());
	m_pCb->SendFrame(binary, frame.GetTimestamp());

	m_SendFcnt++;
	m_SendBStat.Update(frame.Size());
	if (m_SendBStat.IsValid())
		TI_DBG("Send bitrate: %s", m_SendBStat.GetStatStr());

	PACKET_DBG("TransportSendFrame: packet len %d bytes, count: %d", frame.Size(), m_SendFcnt);
}
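The Update()/IsValid()/GetStatStr() calls above imply a simple windowed byte-rate tracker. A minimal sketch of such a helper with the same interface, assuming a one-second window (hypothetical; the real m_SendBStat implementation is not part of this listing):

#include <cstdint>
#include <cstdio>
#include <ctime>

// Hypothetical windowed bitrate tracker matching the Update()/IsValid()/
// GetStatStr() interface used by m_SendBStat above.
class BitrateStat {
public:
	BitrateStat() : m_Bytes(0), m_WindowStart(time(0)) {}

	void Update(uint32_t bytes) { m_Bytes += bytes; }

	// Stats are meaningful once a full window has elapsed.
	bool IsValid() const { return time(0) - m_WindowStart >= 1; }

	const char *GetStatStr() {
		double elapsed = (double)(time(0) - m_WindowStart);
		if (elapsed <= 0)
			elapsed = 1;
		snprintf(m_Str, sizeof(m_Str), "%.1f kbit/s",
		         m_Bytes * 8.0 / elapsed / 1000.0);
		m_Bytes = 0;			// restart the window
		m_WindowStart = time(0);
		return m_Str;
	}

private:
	uint64_t m_Bytes;
	time_t m_WindowStart;
	char m_Str[64];
};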
void ReceiveVideoWrapper::SubmitFrame(VideoData &frame) {
	PACKET_DBG("To decoder: frame len %d bytes", frame.Size());

	if (Configuration::GetCfg()->GetIntVal("DumpReceiveAnxbPackets")) {
		m_pReceiveAnxbDump->StorePacket(frame.Data(), frame.Size());
	}

#ifndef MINIMAL_MODE
	if (Configuration::GetCfg()->GetIntVal("RenderVideo")) {
		m_pDecoder->DecodeFrame(frame);
	}
#endif
}
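m_pReceiveAnxbDump appears only through StorePacket(). A minimal sketch of such a dump writer, assuming it simply appends each packet verbatim to a file (hypothetical; the real class is not shown here):

#include <cstdint>
#include <cstdio>

// Hypothetical raw packet dumper with the StorePacket() interface used
// above; appends packets back-to-back so the file is a playable
// Annex-B elementary stream.
class PacketDump {
public:
	explicit PacketDump(const char *path) { m_pFile = fopen(path, "wb"); }
	~PacketDump() { if (m_pFile) fclose(m_pFile); }

	void StorePacket(const uint8_t *data, uint32_t len) {
		if (!m_pFile)
			return;
		fwrite(data, 1, len, m_pFile);
		fflush(m_pFile);	// keep the dump usable after a crash
	}

private:
	FILE *m_pFile;
};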
Example #3
void X11Renderer::RenderFrame(VideoData & frame) {
	VideoData *frameBuf;
	// The renderer can be too slow when software rendering is used, so
	// drop frames rather than let a slow renderer stall everything.
	if (m_pFrameQueue->GetCnt() == FRAME_QUEUE_LEN) {
		REND_DBG("%s: Dropping frame", m_pTitle);
	} else {
		frameBuf = m_pFrameQueue->GetWritePtr();
		memcpy((uint8_t *)frameBuf->Data(), frame.Data(), m_VPar.FrameSize());
		frameBuf->SetTimestamp(frame.GetTimestamp());
		m_pFrameQueue->WriteDone();
	}
}
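GetCnt(), GetWritePtr() and WriteDone() suggest a fixed-size single-producer frame queue; RenderFrame simply drops the frame when the queue is full instead of blocking. A minimal ring-buffer sketch of that interface, assuming one producer and one consumer (hypothetical; the real queue is not shown):

// Hypothetical fixed-size frame ring buffer behind GetCnt()/
// GetWritePtr()/WriteDone(). The counter would need a lock or atomics
// when producer and consumer run on different threads.
template <typename T, int LEN>
class FrameQueue {
public:
	FrameQueue() : m_Rd(0), m_Wr(0), m_Cnt(0) {}

	int GetCnt() const { return m_Cnt; }

	// Producer: fill the returned slot, then call WriteDone().
	T *GetWritePtr() { return &m_Buf[m_Wr]; }
	void WriteDone() { m_Wr = (m_Wr + 1) % LEN; m_Cnt++; }

	// Consumer: read the returned slot, then call ReadDone().
	T *GetReadPtr() { return &m_Buf[m_Rd]; }
	void ReadDone() { m_Rd = (m_Rd + 1) % LEN; m_Cnt--; }

private:
	T m_Buf[LEN];
	int m_Rd, m_Wr, m_Cnt;
};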
void DummyCapturer::CaptureFrame(VideoData & frame)
{	
	uint32_t sleepTimeUs;
	m_TStat.Update();
	TIMING_DBG("DummyCapturer: %s", m_TStat.GetStatStr());
	uint8_t *frameptr = (uint8_t *)frame.Data();
	curr_Timestamp += 1000 / m_VPar.framerate;	// advance one frame period, in ms
	frame.m_Timestamp = curr_Timestamp;

	// Fill the luma plane with near-black noise.
	for (int i = 0; i < m_VPar.width * m_VPar.height; i++) {
		*frameptr = 0x13 + (int)(5 * ((float)rand() / RAND_MAX));
		frameptr++;
	}

	// Fill the U and V planes (together half the luma size in 4:2:0)
	// with near-neutral chroma noise.
	for (int i = 0; i < m_VPar.width * m_VPar.height / 2; i++) {
		*frameptr = 0x80 + (int)(5 * ((float)rand() / RAND_MAX));
		frameptr++;
	}

	m_pPreviewRenderer->RenderFrame(frame);
	
	sleepTimeUs = (uint32_t)(1 / (float)m_VPar.framerate * 1000000);
	CAP_DBG("usleep %d us", sleepTimeUs);
	usleep(sleepTimeUs);
}
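The two fill loops assume a planar YUV 4:2:0 layout: a full-resolution luma plane followed by U and V planes that are each subsampled by two in both dimensions, so the chroma data together is half the luma size. The size arithmetic as a self-contained helper (illustrative only):

#include <cstdint>

// Frame-size arithmetic behind the two fill loops above, assuming an
// I420 (planar YUV 4:2:0) frame.
static uint32_t I420FrameSize(uint32_t width, uint32_t height)
{
	uint32_t ySize  = width * height;			// luma plane
	uint32_t uvSize = 2 * (width / 2) * (height / 2);	// U + V = ySize / 2
	return ySize + uvSize;					// width * height * 3 / 2
}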
void H264RtpToAnxbCb::TransmitAnxbFrame(const uint8_t *data, uint32_t len, int timestamp_ms) {
	m_FrameCnt++;
	PACKET_DBG("To decoder: frame len %d bytes,  received_frame_cnt: %d, timestamp_ms: %d", len, m_FrameCnt, timestamp_ms);

	m_pReceive.m_ReceiveFStat.Update();
	if (m_pReceive.m_ReceiveFStat.IsValid())
		TI_DBG("Receive Framerate: %s", m_pReceive.m_ReceiveFStat.GetStatStr());

	if (Configuration::GetCfg()->GetIntVal("DumpReceiveAnxbPackets")) {
		m_pReceive.m_pReceiveAnxbDump->StorePacket(data, len);
	}

#ifndef MINIMAL_MODE
	if (Configuration::GetCfg()->GetIntVal("RenderVideo")) {
		VideoData frame;
		frame.SetBuf(data, len);
		frame.SetTimestamp(timestamp_ms);
		m_pReceive.m_pDecoder->DecodeFrame(frame);
	}
#endif
}
void iMX27RtpInterface::FrameReceived(const Sid::Binary & data, int timestamp_ms) {
	PACKET_DBG("Received frame, len %d, count", data.size(), m_ReceiveFcnt);
	m_ReceiveFcnt++;

	m_ReceiveBStat.Update(data.size());
	if (m_ReceiveBStat.IsValid())
		RTP_DBG("Receive bitrate: %s", m_ReceiveBStat.GetStatStr());

	if (Configuration::GetCfg()->GetIntVal("UseLoopback")) {
		if (m_RecordingStarted) {
			VideoData frame;
			frame.SetBuf((const uint8_t*)data.data(), data.size());
			frame.SetTimestamp(timestamp_ms);
			TransportSendFrame(frame);
		}
		return;
	}

	if (m_pReceiveWrapper) {
		VideoData frame;
		frame.SetBuf((uint8_t *)data.data(), data.size());
		frame.SetTimestamp(timestamp_ms);
		m_pReceiveWrapper->SubmitFrame(frame);
	}
	return;
}
Example #7
void
DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHandle)
{
  AssertOwnerThread();

  if (!mInfo.HasVideo()) {
    return;
  }

  VideoSegment output;
  TrackID videoTrackId = mInfo.mVideo.mTrackId;
  AutoTArray<RefPtr<MediaData>, 10> video;
  SourceMediaStream* sourceStream = mData->mStream;

  // It's OK to hold references to the VideoData because VideoData
  // is ref-counted.
  mVideoQueue.GetElementsAfter(mData->mNextVideoTime, &video);

  // tracksStartTimeStamp might be null when the SourceMediaStream has not
  // yet been added to the MediaStreamGraph.
  TimeStamp tracksStartTimeStamp = sourceStream->GetStreamTracksStrartTimeStamp();
  if (tracksStartTimeStamp.IsNull()) {
    tracksStartTimeStamp = TimeStamp::Now();
  }

  for (uint32_t i = 0; i < video.Length(); ++i) {
    VideoData* v = video[i]->As<VideoData>();

    if (mData->mNextVideoTime < v->mTime) {
      // Write last video frame to catch up. mLastVideoImage can be null here
      // which is fine, it just means there's no video.

      // TODO: |mLastVideoImage| should come from the last image rendered
      // by the state machine. This will avoid the black frame when capture
      // happens in the middle of playback (especially in the middle of a
      // video frame). E.g. if we have a video frame that is 30 sec long
      // and capture happens at 15 sec, we'll have to append a black frame
      // that is 15 sec long.
      WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage, v->mTime,
          mData->mNextVideoTime, mData->mLastVideoImageDisplaySize,
          tracksStartTimeStamp + TimeDuration::FromMicroseconds(v->mTime),
          &output, aPrincipalHandle);
      mData->mNextVideoTime = v->mTime;
    }

    if (mData->mNextVideoTime < v->GetEndTime()) {
      WriteVideoToMediaStream(sourceStream, v->mImage, v->GetEndTime(),
          mData->mNextVideoTime, v->mDisplay,
          tracksStartTimeStamp + TimeDuration::FromMicroseconds(v->GetEndTime()),
          &output, aPrincipalHandle);
      mData->mNextVideoTime = v->GetEndTime();
      mData->mLastVideoImage = v->mImage;
      mData->mLastVideoImageDisplaySize = v->mDisplay;
    }
  }

  // Check the output is not empty.
  if (output.GetLastFrame()) {
    mData->mEOSVideoCompensation = ZeroDurationAtLastChunk(output);
  }

  if (!aIsSameOrigin) {
    output.ReplaceWithDisabled();
  }

  if (output.GetDuration() > 0) {
    sourceStream->AppendToTrack(videoTrackId, &output);
  }

  if (mVideoQueue.IsFinished() && !mData->mHaveSentFinishVideo) {
    if (mData->mEOSVideoCompensation) {
      VideoSegment endSegment;
      // Calculate the deviation clock time from DecodedStream.
      int64_t deviation_usec = sourceStream->StreamTimeToMicroseconds(1);
      WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage,
          mData->mNextVideoTime + deviation_usec, mData->mNextVideoTime,
          mData->mLastVideoImageDisplaySize,
          tracksStartTimeStamp + TimeDuration::FromMicroseconds(mData->mNextVideoTime + deviation_usec),
          &endSegment, aPrincipalHandle);
      mData->mNextVideoTime += deviation_usec;
      MOZ_ASSERT(endSegment.GetDuration() > 0);
      if (!aIsSameOrigin) {
        endSegment.ReplaceWithDisabled();
      }
      sourceStream->AppendToTrack(videoTrackId, &endSegment);
    }
    sourceStream->EndTrack(videoTrackId);
    mData->mHaveSentFinishVideo = true;
  }
}
Status VideoDepthConversion::GetFrame(MediaData *input, MediaData *output)
{
    VideoData *in    = DynamicCast<VideoData, MediaData>(input);
    VideoData *out   = DynamicCast<VideoData, MediaData>(output);
    VideoData *inter = DynamicCast<VideoData, MediaData>(m_pInterData);
    bool   bConvert = false;
    Ipp32u i;

    UMC_CHECK(in,    UMC_ERR_NULL_PTR);
    UMC_CHECK(out,   UMC_ERR_NULL_PTR);
    UMC_CHECK(inter, UMC_ERR_NOT_INITIALIZED);

    // nothing to do
    if(in == out)
        return UMC_OK;
    for(i = 0; i < in->GetPlanesNumber(); i++)
    {
        if(in->GetPlaneBitDepth(i) != out->GetPlaneBitDepth(i) || in->GetPlaneSampleSize(i) != out->GetPlaneSampleSize(i))
        {
            bConvert = true;
            break;
        }
    }
    if(!bConvert)
    {
        if(!out->GetPlaneDataPtr(0))
            return UMC_WRN_SKIP;
        else
            in->Copy(out);
        return UMC_OK;
    }

    // unsupported configurations
    if(in->GetMaxSampleSize() != 1 && in->GetMaxSampleSize() != 2)
        return UMC_ERR_INVALID_PARAMS;

    if(!out->GetDataPointer())
    {
        m_bStateInitialized = !InvalidData(in, inter);
        m_bUseIntermediate = true;
    }
    else
    {
        m_bUseIntermediate = false;
        if(InvalidData(in, out))
            return UMC_ERR_INVALID_PARAMS;
    }

    if(m_srcFormat != in->m_colorFormat || m_dstFormat != out->m_colorFormat)
        m_bStateInitialized = false;

    if(!m_bStateInitialized)
    {
        m_size.width  = in->m_iWidth;
        m_size.height = in->m_iHeight;
        m_srcFormat   = in->m_colorFormat;
        m_dstFormat   = out->m_colorFormat;

        if(m_bUseIntermediate)
        {
            inter->Close();
            inter->Init(in);
            for(Ipp32u i = 0; i < inter->GetPlanesNumber(); i++)
            {
                inter->SetPlaneSampleSize(1, i);
                inter->SetPlaneBitDepth(8, i);
            }
            inter->Alloc();
        }

        m_bStateInitialized = true;
    }

    if(m_bUseIntermediate)
        *out = *inter;

    VideoData::PlaneData *pSrcPlane;
    VideoData::PlaneData *pDstPlane;
    IppiSize size;
    Ipp32u iPlanes;
    IppStatus sts = ippStsNoErr;

    iPlanes = in->GetPlanesNumber();
    if (iPlanes > out->GetPlanesNumber())
        iPlanes = out->GetPlanesNumber();

    for (i = 0; i < iPlanes; i++)
    {
        pSrcPlane = in->GetPtrToPlane(i);
        pDstPlane = out->GetPtrToPlane(i);

        size.width  = pSrcPlane->m_size.width * pSrcPlane->m_iSamples;
        size.height = pSrcPlane->m_size.height;

        if(pSrcPlane->m_iSampleSize == pDstPlane->m_iSampleSize)
        {
            size.width *= pSrcPlane->m_iSampleSize;
            sts = ippiCopy_8u_C1R(pSrcPlane->m_pPlane, (Ipp32s)pSrcPlane->m_iPitch, pDstPlane->m_pPlane, (Ipp32s)pDstPlane->m_iPitch, size);
        }
        else if (pSrcPlane->m_iSampleSize == 2 && pDstPlane->m_iSampleSize == 1)
            cppiConvert_16s8u_C1R((const Ipp16s*)pSrcPlane->m_pPlane, (Ipp32s)pSrcPlane->m_iPitch, pSrcPlane->m_iBitDepth, pDstPlane->m_pPlane, (Ipp32s)pDstPlane->m_iPitch, size);
        else if (pSrcPlane->m_iSampleSize == 1 && pDstPlane->m_iSampleSize == 2)
            cppiConvert_8u16s_C1R((const Ipp8u*)pSrcPlane->m_pPlane, (Ipp32s)pSrcPlane->m_iPitch, pDstPlane->m_iBitDepth, (Ipp16s*)pDstPlane->m_pPlane, (Ipp32s)pDstPlane->m_iPitch, size);
        else
            return UMC_ERR_UNSUPPORTED;
    }

    return (ippStsNoErr == sts) ? UMC_OK : UMC_ERR_FAILED;
}
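The per-plane loop defers to ippiCopy_8u_C1R when the sample sizes match and to the cppiConvert_* helpers otherwise. As a rough illustration of what the 16-bit to 8-bit path plausibly does (an assumption for clarity, not the actual IPP/UMC implementation), a scalar version keeping the top 8 of the bitDepth valid bits might look like:

#include <cstdint>

// Hedged scalar sketch of a 16s -> 8u plane conversion: shift away the
// low (bitDepth - 8) bits of each sample. Pitches are in bytes, as in
// the UMC code above.
static void Convert16to8(const int16_t *src, int srcPitch, int bitDepth,
                         uint8_t *dst, int dstPitch, int width, int height)
{
    int shift = bitDepth - 8;   // e.g. 10-bit content -> shift by 2
    for (int y = 0; y < height; y++) {
        const int16_t *s = (const int16_t *)((const uint8_t *)src + y * srcPitch);
        uint8_t *d = dst + y * dstPitch;
        for (int x = 0; x < width; x++)
            d[x] = (uint8_t)(s[x] >> shift);
    }
}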
Example #9
void VideoEventAction::start()
{
    VideoData *videoData = qobject_cast<VideoData*>(m_shape->userData());
    Q_ASSERT(videoData);
    m_player = new FullScreenPlayer(videoData->playableUrl());
}
Example #10
void VideoSink::RenderVideoFrames(int32_t aMaxFrames, int64_t aClockTime,
                                  const TimeStamp& aClockTimeStamp) {
  AssertOwnerThread();

  AutoTArray<RefPtr<VideoData>, 16> frames;
  VideoQueue().GetFirstElements(aMaxFrames, &frames);
  if (frames.IsEmpty() || !mContainer) {
    return;
  }

  AutoTArray<ImageContainer::NonOwningImage, 16> images;
  TimeStamp lastFrameTime;
  MediaSink::PlaybackParams params = mAudioSink->GetPlaybackParams();
  for (uint32_t i = 0; i < frames.Length(); ++i) {
    VideoData* frame = frames[i];

    frame->MarkSentToCompositor();

    if (!frame->mImage || !frame->mImage->IsValid() ||
        !frame->mImage->GetSize().width || !frame->mImage->GetSize().height) {
      continue;
    }

    if (frame->mTime.IsNegative()) {
      // Frame times before the start time are invalid; drop such frames
      continue;
    }

    TimeStamp t;
    if (aMaxFrames > 1) {
      MOZ_ASSERT(!aClockTimeStamp.IsNull());
      int64_t delta = frame->mTime.ToMicroseconds() - aClockTime;
      t = aClockTimeStamp +
          TimeDuration::FromMicroseconds(delta / params.mPlaybackRate);
      if (!lastFrameTime.IsNull() && t <= lastFrameTime) {
        // Timestamps out of order; drop the new frame. In theory we should
        // probably replace the previous frame with the new frame if the
        // timestamps are equal, but this is a corrupt video file already so
        // never mind.
        continue;
      }
      lastFrameTime = t;
    }

    ImageContainer::NonOwningImage* img = images.AppendElement();
    img->mTimeStamp = t;
    img->mImage = frame->mImage;
    if (mBlankImage) {
      img->mImage = mBlankImage;
    }
    img->mFrameID = frame->mFrameID;
    img->mProducerID = mProducerID;

    VSINK_LOG_V("playing video frame %" PRId64 " (id=%x) (vq-queued=%zu)",
                frame->mTime.ToMicroseconds(), frame->mFrameID,
                VideoQueue().GetSize());
  }

  if (images.Length() > 0) {
    mContainer->SetCurrentFrames(frames[0]->mDisplay, images);

    if (mSecondaryContainer) {
      mSecondaryContainer->SetCurrentFrames(frames[0]->mDisplay, images);
    }
  }
}
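The timestamp mapping inside the loop is the core of this function: a frame stamped frame->mTime in media time is presented at aClockTimeStamp plus the media-time delta divided by the playback rate. A worked micro-example with made-up numbers:

#include <cstdint>
#include <cstdio>

// At 2x playback, a frame stamped 100 ms ahead of the media clock is
// due only 50 ms from now in wall-clock time. Numbers are hypothetical.
int main()
{
  int64_t clockTimeUs  = 1000000;  // media clock: 1 s into the stream
  int64_t frameTimeUs  = 1100000;  // frame timestamp: 1.1 s
  double  playbackRate = 2.0;

  double wallDeltaUs = (frameTimeUs - clockTimeUs) / playbackRate;
  // Real code: t = aClockTimeStamp +
  //                TimeDuration::FromMicroseconds(delta / params.mPlaybackRate);
  printf("present in %.0f us\n", wallDeltaUs);  // prints 50000
  return 0;
}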
Example #11
bool AppIdle(void)
{
    XSurface* pSurface = g_pView->GetSurface();
    if (!pSurface)
        return true;

    static int rgb=0; 
    rgb += 0x050000;
    if (rgb & 0xff000000)
        rgb = 0;
    if (pSurface->GetXPitch() != 2)
        return false;

    if (!g_pVideoCapture) {
        uchar* pDstLine = pSurface->GetImage();
        for (uint i=0; i < pSurface->GetHeight(); ++i) {
            ushort* pDst = (ushort*) pDstLine;  pDstLine += pSurface->GetYPitch();
            ulong color = rgb | ((i*255/(pSurface->GetHeight()-1))<<8);
            for (uint j=0;j<pSurface->GetWidth();++j) {
                color = (color&0x00FFFF00) | (j*255/(pSurface->GetWidth()-1));
                *pDst ++ = (ushort) pSurface->RgbToPixel( color);
            }
        }
        g_pDisplay->Flush();
        return true;
    }

    int iFrameWidth, iFrameHeight;
    if (g_pVideoCapture->GetSize(iFrameWidth, iFrameHeight) != S_OK)
        return false;

    if (iFrameWidth < 0)
        iFrameWidth = - iFrameWidth;

    if (iFrameHeight < 0)
        iFrameHeight = - iFrameHeight;

    g_pVideoCapture->Run();

//    DWORD dwFrameSize = g_pVideoCapture->GetFrameSize();
//    PBYTE  pYUV420C = g_pVideoCapture->GetFrame();
//    if (!dwFrameSize || !pYUV420C) 
//        return true;

    MediaData outH263(4*iFrameWidth*iFrameHeight);
    
    if (!g_pCaptureFrame)
        return true;

    DWORD size = g_pCaptureFrame->GetBufferSize();

    if (size != g_pVideoCapture->GetFrame((PBYTE)g_pCaptureFrame->GetBufferPointer(), size))
        return true;

    Draw(*g_pCaptureFrame, pSurface, 0);
#if 0
    {
    __int64 avrg = 0;
    int size = iFrameWidth*iFrameHeight;   
    for (int j=0;j<iFrameHeight;++j) {
        ushort* pImage = (ushort*) (pSurface->GetImage() + pSurface->GetYPitch()*j);
        for (int i=0;i<iFrameWidth;++i) 
            avrg += __int64 (pImage[i]);
    }
    avrg /= size;
    __int64 d2 = 0;
    for (int j=0;j<iFrameHeight;++j) {
        ushort* pImage = (ushort*) (pSurface->GetImage() + pSurface->GetYPitch()*j);
        for (int i=0;i<iFrameWidth;++i)  {
            __int64 d = (avrg- __int64(pImage[i]));
            d2 += d*d;
        }
    }
    
    if (int(d2/size) > 2) 
        Draw(in, pSurface, iFrameWidth);
    WCHAR buff[5000];
    swprintf(buff,L"avrg=%d,d2=%d\n",int(avrg),int(d2/size));
    OutputDebugString(buff);
    }
#endif
//  g_pDisplay->Flush();
//  return true;

    g_pCaptureFrame->SetTime(++g_nFramesEncoded);

    UMC::Status ret = g_pEncoder->GetFrame(g_pCaptureFrame, &outH263);

    if (ret != UMC_OK && ret != UMC_ERR_NOT_ENOUGH_DATA && ret != UMC_ERR_END_OF_STREAM) {
//      vm_string_fprintf(vm_stderr, VM_STRING("Error: encoding failed at %d source frame (exit with %d)!\n"), mFramesEncoded, ret);
      return false;
    }


    if (!g_pDecoder && !g_pDecoderParam) {
        g_pDecoder = new H263VideoDecoder();
        g_pDecoderParam = new VideoDecoderParams();

//      g_pDecoderParam->pPostProcessing  = new VideoProcessing();
        g_pDecoderParam->info.stream_type = H263_VIDEO;
        g_pDecoderParam->numThreads = 1;
        g_pDecoderParam->lFlags = 0;
        g_pDecoderParam->m_pData = &outH263;
        g_pDecoderParam->info.clip_info.width  = iFrameWidth;
        g_pDecoderParam->info.clip_info.height = iFrameHeight;

        if (g_pDecoder->Init(g_pDecoderParam)!=0) {
            delete g_pDecoder;
            g_pDecoder = NULL;
        }
        else {
            BaseCodecParams params;
            if (g_pDecoder->GetInfo(&params) != UMC_OK) {
                delete g_pDecoder;
                g_pDecoder = NULL;
            }
        }
    }

    if (g_pDecoder) {
        VideoData out;
        out.SetAlignment(16);
//          out->Init(params.info.clip_info.width, params.info.clip_info.height, cf, bitDepth);
        out.Init(iFrameWidth, iFrameHeight, YUV420, 8);
        out.Alloc();
        g_pDecoder->GetFrame(&outH263, &out);
//#if 0
        Draw(out, pSurface, iFrameWidth);
#if 0
        int nDiff = 0;
        for (int i=0;i<iFrameHeight;++i) {
            ushort* p1 = (ushort*)(pSurface->GetImage() + pSurface->GetYPitch()*i);
            ushort* p2 = p1 + iFrameWidth;
            ushort* pout = (ushort*)((uchar*)p1 + pSurface->GetYPitch()*iFrameHeight);
            for (int k=0;k<iFrameWidth;++k) {
/*
#define R(a)  (((a)>>7)  & 0x0F8)
#define G(a)  (((a)>>2)  & 0x0F8)
#define B(a)  (((a)<<3)  & 0x0F8)
                ushort r = (R(p1[k]) - R(p2[k]) + 64) & 0xf8;
                ushort g = (G(p1[k]) - G(p2[k]) + 64) & 0xf8;
                ushort b = (B(p1[k]) - B(p2[k]) + 64) & 0xf8;
                pout[k] = (r << 7)| (g<<2) | (b>>3);
                
                ushort a1 = p1[k] | 0x04210;
                ushort a2 = p2[k] & ~0x04210;
                a1 -= a2;
                a1 &= ~0x04210;
//                a1 += 0x01ce7;
//                a1 += 0x03def;
                a1 += 0x02108;
                a1 &= ~0x04210;
                if (pout[k] != a1) {
                    ++ nDiff;
                    pout[k] = 0xffff;
                }
*/
                pout[k] = (((p1[k]|0x04210) - (p2[k]&~0x04210)) + 0x02108) & ~0x04210;
            }
        }
#endif
    }

    g_pDisplay->Flush();
    return true;
}
Example #12
void Draw(VideoData& video, XSurface* pSurface,int Xoffset)
{
    assert(Xoffset >= 0);
    if (Xoffset < 0)
        return;

    int iWidth = pSurface->GetWidth() - Xoffset;
    if (iWidth <= 0)
        return;

    int iHeight = pSurface->GetHeight();

    if (video.GetWidth() < iWidth)
        iWidth = video.GetWidth();

    if (video.GetHeight() < iHeight)
        iHeight = video.GetHeight();

    if (pSurface->GetPixelFormat() == eRGB555)
        yuv420_to_rgb555(iWidth, iHeight,               
            (const uchar*) video.GetPlanePointer(0), video.GetPlanePitch(0), 
            (const uchar*) video.GetPlanePointer(1), video.GetPlanePitch(1), 
            (const uchar*) video.GetPlanePointer(2), video.GetPlanePitch(2),
            pSurface->GetImage()+ pSurface->GetXPitch()*Xoffset, 
            pSurface->GetYPitch());
    else if (pSurface->GetPixelFormat() == eRGB565)
        yuv420_to_rgb565(iWidth, iHeight,               
            (const uchar*) video.GetPlanePointer(0), video.GetPlanePitch(0), 
            (const uchar*) video.GetPlanePointer(1), video.GetPlanePitch(1), 
            (const uchar*) video.GetPlanePointer(2), video.GetPlanePitch(2),
            pSurface->GetImage()+ pSurface->GetXPitch()*Xoffset, 
            pSurface->GetYPitch());
}