NS_IMETHODIMP
MediaEngineDefaultVideoSource::Notify(nsITimer* aTimer)
{
  // Timer callback: advance the target chroma one step and publish a new
  // solid-color frame. The (mCb, mCr) pair walks the perimeter of the
  // valid YCbCr chroma square [16, 240] x [16, 240], so successive frames
  // cycle smoothly through colors.
  if (mCr <= 16) {
    if (mCb < 240) {
      mCb++;
    } else {
      mCr++;
    }
  } else if (mCb >= 240) {
    if (mCr < 240) {
      mCr++;
    } else {
      mCb--;
    }
  } else if (mCr >= 240) {
    if (mCb > 16) {
      mCb--;
    } else {
      mCr--;
    }
  } else {
    mCr--;
  }

  // Allocate a single solid color image (luma fixed at 0x80).
  RefPtr<layers::PlanarYCbCrImage> ycbcr_image =
    mImageContainer->CreatePlanarYCbCrImage();
  layers::PlanarYCbCrData data;
  AllocateSolidColorFrame(data, mOpts.mWidth, mOpts.mHeight, 0x80, mCb, mCr);

#ifdef MOZ_WEBRTC
  // Stamp the current wall-clock time into the Y plane so a receiver can
  // decode it and measure end-to-end latency.
  uint64_t timestamp = PR_Now();
  YuvStamper::Encode(mOpts.mWidth, mOpts.mHeight, mOpts.mWidth,
                     data.mYChannel,
                     // BUG FIX: the original read
                     // "reinterpret_cast<unsigned char*>(×tamp)" — a
                     // mis-encoded "&timestamp" (HTML-entity "&times;"
                     // corruption). Pass the address of the local variable.
                     reinterpret_cast<unsigned char*>(&timestamp),
                     sizeof(timestamp), 0, 0);
#endif

  bool setData = ycbcr_image->SetData(data);
  MOZ_ASSERT(setData);

  // SetData copies data, so we can free the frame
  ReleaseFrame(data);

  if (!setData) {
    return NS_ERROR_FAILURE;
  }

  MonitorAutoLock lock(mMonitor);
  // implicitly releases last image
  mImage = ycbcr_image.forget();
  return NS_OK;
}
nsresult
MediaEngineDefaultVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
{
  // Only a source in the kAllocated state may be started.
  if (mState != kAllocated) {
    return NS_ERROR_FAILURE;
  }

  mTimer = do_CreateInstance(NS_TIMER_CONTRACTID);
  if (!mTimer) {
    return NS_ERROR_FAILURE;
  }

  mSource = aStream;

  // Create the container plus one planar YCbCr image for the initial
  // frame (Cb = Cr = 16, luma 0x80).
  ImageFormat format = PLANAR_YCBCR;
  mImageContainer = layers::LayerManager::CreateImageContainer();
  nsRefPtr<layers::Image> initialImage =
    mImageContainer->CreateImage(&format, 1);
  mImage = static_cast<layers::PlanarYCbCrImage*>(initialImage.get());

  mCb = 16;
  mCr = 16;
  layers::PlanarYCbCrImage::Data frameData;
  AllocateSolidColorFrame(frameData, mOpts.mWidth, mOpts.mHeight,
                          0x80, mCb, mCr);
  // SetData copies the pixel data, so the scratch frame is released
  // immediately afterwards.
  mImage->SetData(frameData);
  ReleaseFrame(frameData);

  // Hand the first frame to a brand-new video track; AddTrack takes
  // ownership of the segment.
  VideoSegment* firstSegment = new VideoSegment();
  firstSegment->AppendFrame(initialImage.forget(), USECS_PER_S / mOpts.mFPS,
                            gfxIntSize(mOpts.mWidth, mOpts.mHeight));
  mSource->AddTrack(aID, VIDEO_RATE, 0, firstSegment);

  // This source never adds another track.
  mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);

  // Remember the TrackID so the track can be ended later.
  mTrackID = aID;

  // Drive subsequent frames from a repeating timer at the configured FPS.
  mTimer->InitWithCallback(this, 1000 / mOpts.mFPS,
                           nsITimer::TYPE_REPEATING_SLACK);
  mState = kStarted;

  return NS_OK;
}
NS_IMETHODIMP
MediaEngineDefaultVideoSource::Notify(nsITimer* aTimer)
{
  // Step the (mCb, mCr) pair one unit along the edge of the valid chroma
  // square [16, 240] x [16, 240] so each tick produces a new color.
  if (mCr <= 16) {
    if (mCb >= 240) {
      mCr++;
    } else {
      mCb++;
    }
  } else if (mCb >= 240) {
    if (mCr >= 240) {
      mCb--;
    } else {
      mCr++;
    }
  } else if (mCr >= 240) {
    if (mCb <= 16) {
      mCr--;
    } else {
      mCb--;
    }
  } else {
    mCr--;
  }

  // Build a fresh solid-color frame for this tick (luma fixed at 0x80).
  ImageFormat format = PLANAR_YCBCR;
  nsRefPtr<layers::Image> genericImage =
    mImageContainer->CreateImage(&format, 1);
  nsRefPtr<layers::PlanarYCbCrImage> colorFrame =
    static_cast<layers::PlanarYCbCrImage*>(genericImage.get());
  layers::PlanarYCbCrImage::Data frameData;
  AllocateSolidColorFrame(frameData, mOpts.mWidth, mOpts.mHeight,
                          0x80, mCb, mCr);
  colorFrame->SetData(frameData);
  // SetData copies the pixel data, so the scratch frame can go now.
  ReleaseFrame(frameData);

  // Append one frame's worth of video to the already-created track.
  VideoSegment tickSegment;
  tickSegment.AppendFrame(colorFrame.forget(), USECS_PER_S / mOpts.mFPS,
                          gfxIntSize(mOpts.mWidth, mOpts.mHeight));
  mSource->AppendToTrack(mTrackID, &tickSegment);
  return NS_OK;
}
NS_IMETHODIMP
MediaEngineDefaultVideoSource::Notify(nsITimer* aTimer)
{
  // Advance the chroma target one step around the perimeter of the
  // valid chroma square [16, 240] x [16, 240].
  if (mCr <= 16) {
    if (mCb >= 240) {
      mCr++;
    } else {
      mCb++;
    }
  } else if (mCb >= 240) {
    if (mCr >= 240) {
      mCb--;
    } else {
      mCr++;
    }
  } else if (mCr >= 240) {
    if (mCb <= 16) {
      mCr--;
    } else {
      mCb--;
    }
  } else {
    mCr--;
  }

  // Produce a single solid-color frame at the new chroma (luma 0x80).
  ImageFormat format = PLANAR_YCBCR;
  nsRefPtr<layers::Image> genericImage =
    mImageContainer->CreateImage(&format, 1);
  nsRefPtr<layers::PlanarYCbCrImage> colorFrame =
    static_cast<layers::PlanarYCbCrImage*>(genericImage.get());
  layers::PlanarYCbCrData frameData;
  AllocateSolidColorFrame(frameData, mOpts.mWidth, mOpts.mHeight,
                          0x80, mCb, mCr);
  colorFrame->SetData(frameData);
  // SetData copies the pixel data, so the scratch frame can be freed now.
  ReleaseFrame(frameData);

  // Publish the new frame; assigning mImage under the monitor implicitly
  // releases the previous image.
  MonitorAutoLock lock(mMonitor);
  mImage = colorFrame.forget();
  return NS_OK;
}