Code example #1
nsresult
GonkVideoDecoderManager::QueueFrameTimeOut(int64_t aPTS, int64_t& aDuration)
{
  MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());

  // Default aDuration to 1.
  // During seeking, frames can still be inside MediaCodec, and mFrameTimeInfo
  // may be cleared before those frames come out. This is OK because they are
  // stale frames from before the seek.
  aDuration = 1;
  for (uint32_t i = 0; i < mFrameTimeInfo.Length(); i++) {
    const FrameTimeInfo& entry = mFrameTimeInfo.ElementAt(i);
    if (i == 0) {
      if (entry.pts > aPTS) {
        // The codec sent a frame whose PTS rolled backwards; this is most
        // likely a codec bug.
        ReleaseVideoBuffer();
        return NS_ERROR_NOT_AVAILABLE;
      }
    }

    // Ideally, the first entry in mFrameTimeInfo is the one we are looking
    // for. However, MediaCodec may have dropped frames, in which case the
    // first entry does not match the current decoded frame's PTS.
    if (entry.pts == aPTS) {
      aDuration = entry.duration;
      if (i > 0) {
        LOG("Frame could be dropped by MediaCodec, %d dropped frames.", i);
      }
      mFrameTimeInfo.RemoveElementsAt(0, i+1);
      break;
    }
  }
  return NS_OK;
}
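
For context, a minimal sketch of the record that mFrameTimeInfo holds. The field names pts and duration are taken from the accessors used above; the exact declaration and the queueing side are assumptions.

struct FrameTimeInfo {
  int64_t pts;      // presentation timestamp of the queued input sample
  int64_t duration; // duration reported by the demuxer for that sample
};

Entries would be appended in presentation order as input samples are fed to the codec, which is why QueueFrameTimeOut can drop everything up to and including the entry that matches the output PTS.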
Code example #2
// Start the tracker.
void MainWindow::start_tracking()
{
	cvNamedWindow("binary");
	cvNamedWindow("vid");
	IplImage* image=NULL;
	IplImage* binary_image = NULL;

	clock_t start_time = clock();

	while (tracker_state == STATE_STARTED_TRACKING &&
		((image = video_loader.next_frame()) != NULL))
	{
		tracker->next_image(image);
		binary_image = tracker->get_binary_model();
		draw_targets(image);

		// Record when there is substantial motion
		if (config->record_activity) {
			cout << "RECORDING" << endl;
			record_activity(image);
		}

		cvShowImage("binary", binary_image);
		cvShowImage("vid", image);
		cvWaitKey(1);
	}

	clock_t elapsed_time = clock() - start_time;

	cout << "clock ticks = " << elapsed_time << endl;
	cout << "seconds = " << (float)elapsed_time/(float)CLOCKS_PER_SEC << endl;


	if (tracker_state == STATE_STARTED_TRACKING && !image)
	{
		// Video is over; terminate tracking.
		tracker->end_tracking();
		ReleaseVideoWriter();
		ReleaseVideoBuffer();
		tracker_state = STATE_NOT_STARTED;

		// Update the trace table
		update_trace_list();
		// Update the target table
		update_target_list();
		// Update the event list table
		update_event_list();

		update_gui();
	}

}
Code example #3
void MainWindow::on_exit_clicked()
{
	QMessageBox msgBox;
	msgBox.setText("Would you like to exit the application?");
	msgBox.setStandardButtons(QMessageBox::Yes | QMessageBox::No);
	msgBox.setDefaultButton(QMessageBox::No);
	if (msgBox.exec() == QMessageBox::Yes) {
		ReleaseVideoWriter();
		ReleaseVideoBuffer();
		exit(0);
	}
}
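
A hedged side note on this pattern: the same confirm-then-quit flow can be written with the static QMessageBox::question helper, and exiting through QApplication::quit() instead of exit(0) lets Qt unwind its event loop normally. A sketch; the cleanup calls are the ones from the snippet:

void MainWindow::on_exit_clicked()
{
	// The static helper builds the dialog, sets the buttons and the
	// default, and returns the user's choice in one call.
	if (QMessageBox::question(this, "Exit",
	                          "Would you like to exit the application?",
	                          QMessageBox::Yes | QMessageBox::No,
	                          QMessageBox::No) == QMessageBox::Yes) {
		ReleaseVideoWriter();
		ReleaseVideoBuffer();
		QApplication::quit(); // unwinds the event loop instead of exit(0)
	}
}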
Code example #4
File: OmxPlugin.cpp Project: gecko-dev
OmxDecoder::~OmxDecoder()
{
  ReleaseVideoBuffer();
  ReleaseAudioBuffer();

  if (mVideoSource.get()) {
    mVideoSource->stop();
  }

  if (mAudioSource.get()) {
    mAudioSource->stop();
  }

#ifndef MOZ_ANDROID_HC
  if (mColorConverter) {
    delete mColorConverter;
  }
#endif
}
Code example #5
void CRendererMediaCodecSurface::RenderUpdate(int index, int index2, bool clear, unsigned int flags, unsigned int alpha)
{
  m_bConfigured = true;

  // this hack is needed to get the 2D mode of a 3D movie going
  RENDER_STEREO_MODE stereo_mode = CServiceBroker::GetWinSystem()->GetGfxContext().GetStereoMode();
  if (stereo_mode != RENDER_STEREO_MODE_OFF)
    CServiceBroker::GetWinSystem()->GetGfxContext().SetStereoView(RENDER_STEREO_VIEW_LEFT);

  ManageRenderArea();

  if (stereo_mode != RENDER_STEREO_MODE_OFF)
    CServiceBroker::GetWinSystem()->GetGfxContext().SetStereoView(RENDER_STEREO_VIEW_OFF);

  m_surfDestRect = m_destRect;
  switch (stereo_mode)
  {
    case RENDER_STEREO_MODE_SPLIT_HORIZONTAL:
      m_surfDestRect.y2 *= 2.0;
      break;
    case RENDER_STEREO_MODE_SPLIT_VERTICAL:
      m_surfDestRect.x2 *= 2.0;
      break;
    case RENDER_STEREO_MODE_MONO:
      if (CONF_FLAGS_STEREO_MODE_MASK(m_iFlags) == CONF_FLAGS_STEREO_MODE_TAB)
        m_surfDestRect.y2 *= 2.0;
      else
        m_surfDestRect.x2 *= 2.0;
      break;
    default:
      break;
  }

  if (index != m_lastIndex)
  {
    ReleaseVideoBuffer(index, true);
    m_lastIndex = index;
  }
}
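
To make the rect arithmetic concrete, a small standalone sketch follows. The Rect struct and the 1280x720 geometry are illustrative assumptions; only the doubling of x2/y2 mirrors the switch above.

struct Rect { float x1, y1, x2, y2; };

Rect dest{0.0f, 0.0f, 1280.0f, 720.0f}; // assumed destination rect

// SPLIT_HORIZONTAL (top-and-bottom frame packing): the surface must span
// both stacked eye views, so the bottom edge doubles -> 1280x1440.
Rect surfTab = dest;
surfTab.y2 *= 2.0f;

// SPLIT_VERTICAL (side-by-side frame packing): the right edge doubles -> 2560x720.
Rect surfSbs = dest;
surfSbs.x2 *= 2.0f;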
Code example #6
nsresult
GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
{
  *v = nullptr;
  nsRefPtr<VideoData> data;
  int64_t timeUs;
  int32_t keyFrame;

  if (mVideoBuffer == nullptr) {
    GVDM_LOG("Video Buffer is not valid!");
    return NS_ERROR_UNEXPECTED;
  }

  if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
    GVDM_LOG("Decoder did not return frame time");
    return NS_ERROR_UNEXPECTED;
  }

  int64_t duration;
  nsresult rv = QueueFrameTimeOut(timeUs, duration);
  NS_ENSURE_SUCCESS(rv, rv);

  if (mVideoBuffer->range_length() == 0) {
    // Some decoders may return spurious empty buffers that we just want to
    // ignore (quoted from Android's AwesomePlayer.cpp).
    ReleaseVideoBuffer();
    return NS_ERROR_NOT_AVAILABLE;
  }

  if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
    keyFrame = 0;
  }

  gfx::IntRect picture = ToIntRect(mPicture);
  if (mFrameInfo.mWidth != mInitialFrame.width ||
      mFrameInfo.mHeight != mInitialFrame.height) {

    // Frame size is different from what the container reports. This is legal,
    // and we will preserve the ratio of the crop rectangle as it
    // was reported relative to the picture size reported by the container.
    picture.x = (mPicture.x * mFrameInfo.mWidth) / mInitialFrame.width;
    picture.y = (mPicture.y * mFrameInfo.mHeight) / mInitialFrame.height;
    picture.width = (mFrameInfo.mWidth * mPicture.width) / mInitialFrame.width;
    picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
  }

  RefPtr<mozilla::layers::TextureClient> textureClient;

  if ((mVideoBuffer->graphicBuffer().get())) {
    textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
  }

  if (textureClient) {
    GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
    grallocClient->SetMediaBuffer(mVideoBuffer);
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);

    data = VideoData::Create(mInfo.mVideo,
                             mImageContainer,
                             aStreamOffset,
                             timeUs,
                             duration,
                             textureClient,
                             keyFrame,
                             -1,
                             picture);
  } else {
    if (!mVideoBuffer->data()) {
      GVDM_LOG("No data in Video Buffer!");
      return NS_ERROR_UNEXPECTED;
    }
    uint8_t *yuv420p_buffer = (uint8_t *)mVideoBuffer->data();
    int32_t stride = mFrameInfo.mStride;
    int32_t slice_height = mFrameInfo.mSliceHeight;

    // Convert to OMX_COLOR_FormatYUV420Planar if the output is in another format.
    if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
      ARect crop;
      crop.top = 0;
      crop.bottom = mFrameInfo.mHeight;
      crop.left = 0;
      crop.right = mFrameInfo.mWidth;
      yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
      if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
          mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
        ReleaseVideoBuffer();
        GVDM_LOG("Color conversion failed!");
        return NS_ERROR_UNEXPECTED;
      }
      stride = mFrameInfo.mWidth;
      slice_height = mFrameInfo.mHeight;
    }

    size_t yuv420p_y_size = stride * slice_height;
    size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
    uint8_t *yuv420p_y = yuv420p_buffer;
    uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
    uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;

    // This is the approximate byte position in the stream.
    int64_t pos = aStreamOffset;

    VideoData::YCbCrBuffer b;
    b.mPlanes[0].mData = yuv420p_y;
    b.mPlanes[0].mWidth = mFrameInfo.mWidth;
    b.mPlanes[0].mHeight = mFrameInfo.mHeight;
    b.mPlanes[0].mStride = stride;
    b.mPlanes[0].mOffset = 0;
    b.mPlanes[0].mSkip = 0;

    b.mPlanes[1].mData = yuv420p_u;
    b.mPlanes[1].mWidth = (mFrameInfo.mWidth + 1) / 2;
    b.mPlanes[1].mHeight = (mFrameInfo.mHeight + 1) / 2;
    b.mPlanes[1].mStride = (stride + 1) / 2;
    b.mPlanes[1].mOffset = 0;
    b.mPlanes[1].mSkip = 0;

    b.mPlanes[2].mData = yuv420p_v;
    b.mPlanes[2].mWidth = (mFrameInfo.mWidth + 1) / 2;
    b.mPlanes[2].mHeight = (mFrameInfo.mHeight + 1) / 2;
    b.mPlanes[2].mStride = (stride + 1) / 2;
    b.mPlanes[2].mOffset = 0;
    b.mPlanes[2].mSkip = 0;

    data = VideoData::Create(
        mInfo.mVideo,
        mImageContainer,
        pos,
        timeUs,
        duration,
        b,
        keyFrame,
        -1,
        picture);
    ReleaseVideoBuffer();
  }

  data.forget(v);
  return NS_OK;
}
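
The plane pointers above follow the standard I420 (planar YUV 4:2:0) layout. A short worked sketch of the same arithmetic, using an assumed 640x480 frame with stride == width and slice_height == height:

const int32_t stride = 640, slice_height = 480;                // assumed geometry
size_t y_size = stride * slice_height;                         // 307200 bytes
size_t u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2); //  76800 bytes
// V is the same size as U and follows it, so one frame occupies
// y_size + 2 * u_size = 460800 bytes, i.e. width * height * 3 / 2.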
Code example #7
void CRendererMediaCodecSurface::Reset()
{
  for (int i = 0; i < 4; ++i)
    ReleaseVideoBuffer(i, false);
  m_lastIndex = -1;
}
Code example #8
void CRendererMediaCodecSurface::ReleaseBuffer(int idx)
{
  ReleaseVideoBuffer(idx, false);
}
Code example #9
File: OmxPlugin.cpp Project: gecko-dev
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs,
                           BufferCallback *aBufferCallback)
{
  MOZ_ASSERT(aSeekTimeUs >= -1);

  if (!mVideoSource.get())
    return false;

  ReleaseVideoBuffer();

  status_t err;

  if (aSeekTimeUs != -1) {
    ReadOptions options;
    options.setSeekTo(aSeekTimeUs);
    err = mVideoSource->read(&mVideoBuffer, &options);
  } else {
    err = mVideoSource->read(&mVideoBuffer);
  }

  aFrame->mSize = 0;

  if (err == OK && mVideoBuffer->range_length() > 0) {
    int64_t timeUs;
    int32_t keyFrame;

    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
      LOG("no frame time");
      return false;
    }

    if (timeUs < 0) {
      LOG("frame time %lld must be nonnegative", timeUs);
      return false;
    }

    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
      keyFrame = 0;
    }

    char *data = reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
    size_t length = mVideoBuffer->range_length();

    if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame, aBufferCallback)) {
      return false;
    }
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    LOG("mVideoSource INFO_FORMAT_CHANGED");
    if (!SetVideoFormat())
      return false;
    else
      return ReadVideo(aFrame, aSeekTimeUs, aBufferCallback);
  }
  else if (err == ERROR_END_OF_STREAM) {
    LOG("mVideoSource END_OF_STREAM");
  }
  else if (err != OK) {
    LOG("mVideoSource ERROR %#x", err);
  }

  return err == OK;
}
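
Finally, a hedged sketch of how a caller might drive ReadVideo. The -1 "no seek" sentinel and the false-on-EOS/error behavior come from the snippet itself; the driver loop is hypothetical.

void DecodeFrom(OmxDecoder* aDecoder, int64_t aSeekTimeUs,
                BufferCallback* aBufferCallback)
{
  VideoFrame frame;
  // Seek only on the first read; pass -1 afterwards to continue sequentially.
  while (aDecoder->ReadVideo(&frame, aSeekTimeUs, aBufferCallback)) {
    aSeekTimeUs = -1;
    // ... hand the decoded frame to the rendering pipeline ...
  }
  // ReadVideo returns false on end of stream or on a read error.
}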