Example #1
bool OpenGLComposite::Start()
{
	mTotalPlayoutFrames = 0;

	// Preroll frames
	for (unsigned i = 0; i < 5; i++)
	{
		// Take each video frame from the front of the queue and move it to the back
		IDeckLinkMutableVideoFrame* outputVideoFrame = mDLOutputVideoFrameQueue.front();
		mDLOutputVideoFrameQueue.push_back(outputVideoFrame);
		mDLOutputVideoFrameQueue.pop_front();

		// Start with a black frame for playout
		void*	pFrame;
		outputVideoFrame->GetBytes((void**)&pFrame);
		memset(pFrame, 0, outputVideoFrame->GetRowBytes() * mFrameHeight);		// 0 is black in RGBA format

		if (mDLOutput->ScheduleVideoFrame(outputVideoFrame, (mTotalPlayoutFrames * mFrameDuration), mFrameDuration, mFrameTimescale) != S_OK)
			return false;

		mTotalPlayoutFrames++;
	}

	mDLInput->StartStreams();
	mDLOutput->StartScheduledPlayback(0, mFrameTimescale, 1.0);

	return true;
}
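Note: the five frames rotated through mDLOutputVideoFrameQueue above must already exist before Start() runs. A minimal sketch of that setup, assuming a std::deque member and an 8-bit BGRA pixel format (both assumptions, not shown in this example):

bool OpenGLComposite::InitFrameQueue()
{
	for (unsigned i = 0; i < 5; i++)
	{
		IDeckLinkMutableVideoFrame* frame = NULL;
		// Row bytes for 8-bit BGRA is width * 4 (assumed pixel format)
		if (mDLOutput->CreateVideoFrame(mFrameWidth, mFrameHeight, mFrameWidth * 4, bmdFormat8BitBGRA, bmdFrameFlagDefault, &frame) != S_OK)
			return false;
		mDLOutputVideoFrameQueue.push_back(frame);
	}
	return true;
}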
Example #2
HRESULT TestPattern::CreateFrame(IDeckLinkVideoFrame** frame, void (*fillFunc)(IDeckLinkVideoFrame*))
{
	HRESULT						result;
	int							bytesPerPixel = GetBytesPerPixel(m_config->m_pixelFormat);
	IDeckLinkMutableVideoFrame*	newFrame = NULL;
	IDeckLinkMutableVideoFrame*	referenceFrame = NULL;
	IDeckLinkVideoConversion*	frameConverter = NULL;

	*frame = NULL;

	result = m_deckLinkOutput->CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth * bytesPerPixel, m_config->m_pixelFormat, bmdFrameFlagDefault, &newFrame);
	if (result != S_OK)
	{
		fprintf(stderr, "Failed to create video frame\n");
		goto bail;
	}

	if (m_config->m_pixelFormat == bmdFormat8BitYUV)
	{
		fillFunc(newFrame);
	}
	else
	{
		// Fill a reference frame in 8-bit YUV, then convert to the desired format
		result = m_deckLinkOutput->CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth * 2, bmdFormat8BitYUV, bmdFrameFlagDefault, &referenceFrame);
		if (result != S_OK)
		{
			fprintf(stderr, "Failed to create reference video frame\n");
			goto bail;
		}

		fillFunc(referenceFrame);

		frameConverter = CreateVideoConversionInstance();
		if (frameConverter == NULL)
		{
			fprintf(stderr, "Failed to create frame converter\n");
			result = E_FAIL;
			goto bail;
		}

		result = frameConverter->ConvertFrame(referenceFrame, newFrame);
		if (result != S_OK)
		{
			fprintf(stderr, "Failed to convert frame\n");
			goto bail;
		}
	}

	*frame = newFrame;
	newFrame = NULL;

bail:
	if (referenceFrame != NULL)
		referenceFrame->Release();

	if (frameConverter != NULL)
		frameConverter->Release();

	if (newFrame != NULL)
		newFrame->Release();

	return result;
}
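One possible fill function to pass as fillFunc above is a black fill for the 8-bit YUV (UYVY) reference frame. In UYVY, black is U = V = 0x80 and Y = 0x10, so each 32-bit word (two pixels) is 0x10801080. A minimal sketch:

static void FillBlack(IDeckLinkVideoFrame* theFrame)
{
	uint32_t* nextWord;
	unsigned long wordsRemaining;

	theFrame->GetBytes((void**)&nextWord);
	// Two pixels per 32-bit word in UYVY
	wordsRemaining = (theFrame->GetRowBytes() * theFrame->GetHeight()) / 4;

	while (wordsRemaining-- > 0)
		*(nextWord++) = 0x10801080;
}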
Example #3
		explicit DecklinkVideoFrame(DecklinkFrameManager* pFactory) : factoryID_(pFactory->ID()) {
			IDeckLinkMutableVideoFrame* pFrame = NULL;
			const FrameFormatDescription& fmtDesc = pFactory->pConsumerImpl_->GetFrameFormatDescription();
			if(pFactory->pConsumerImpl_->pDecklinkOutput_->CreateVideoFrame(fmtDesc.width, fmtDesc.height, fmtDesc.size/fmtDesc.height, bmdFormat8BitBGRA, bmdFrameFlagDefault, &pFrame) != S_OK) {
				throw std::runtime_error("DECKLINK: Failed to create frame");
			}
			// pDecklinkFrame_ is assumed to be a COM smart pointer (e.g. CComPtr)
			// that AddRefs on assignment, so the raw pointer's reference is dropped here.
			pDecklinkFrame_ = pFrame;
			pFrame->Release();

			if(pDecklinkFrame_->GetBytes((void**)&pBytes_) != S_OK)
				throw std::runtime_error("DECKLINK: Failed to get bytes to frame");
		}
Example #4
SignalGenerator3DVideoFrame* CSignalGeneratorDlg::CreateBlackFrame ()
{
	IDeckLinkMutableVideoFrame*		referenceBlack = NULL;
	IDeckLinkMutableVideoFrame*		scheduleBlack = NULL;
	HRESULT							hr;
	BMDPixelFormat					pixelFormat;
	int								bytesPerPixel;
	IDeckLinkVideoConversion*		frameConverter = NULL;
	SignalGenerator3DVideoFrame*	ret = NULL;

	pixelFormat = (BMDPixelFormat)m_pixelFormatCombo.GetItemData(m_pixelFormatCombo.GetCurSel());
	bytesPerPixel = GetBytesPerPixel(pixelFormat);

	hr = m_deckLinkOutput->CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth*bytesPerPixel, pixelFormat, bmdFrameFlagDefault, &scheduleBlack);
	if (hr != S_OK)
		goto bail;

	if (pixelFormat == bmdFormat8BitYUV)
	{
		FillBlack(scheduleBlack);
	}
	else
	{
		hr = m_deckLinkOutput->CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth*2, bmdFormat8BitYUV, bmdFrameFlagDefault, &referenceBlack);
		if (hr != S_OK)
			goto bail;
		FillBlack(referenceBlack);

		hr = CoCreateInstance(CLSID_CDeckLinkVideoConversion, NULL, CLSCTX_ALL, IID_IDeckLinkVideoConversion, (void**)&frameConverter);
		if (hr != S_OK)
			goto bail;

		hr = frameConverter->ConvertFrame(referenceBlack, scheduleBlack);
		if (hr != S_OK)
			goto bail;
	}

	ret = new SignalGenerator3DVideoFrame(scheduleBlack);

bail:
	if (referenceBlack)
		referenceBlack->Release();
	if (scheduleBlack)
		scheduleBlack->Release();
	if (frameConverter)
		frameConverter->Release();

	return ret;
}
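A sketch of the GetBytesPerPixel helper used above, assuming only the common packed 8-bit formats are selectable (10-bit YUV rows are really padded to 48-pixel groups, so width * bytesPerPixel is only exact for the 8-bit formats):

static int GetBytesPerPixel(BMDPixelFormat pixelFormat)
{
	switch (pixelFormat)
	{
		case bmdFormat8BitYUV:
			return 2;
		case bmdFormat8BitARGB:
		case bmdFormat8BitBGRA:
		default:
			return 4;
	}
}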
Example #5
	void createFrame()
	{
		m_videoFrame = 0;
		// Generate a DeckLink video frame
		if ( S_OK != m_deckLinkOutput->CreateVideoFrame( m_width, m_height,
			m_width * 2, bmdFormat8BitYUV, bmdFrameFlagDefault, &m_videoFrame ) )
		{
			mlt_log_error( &m_consumer, "Failed to create video frame\n" );
			stop();
			return;
		}
		
		// Make the first line black for field order correction.
		uint8_t *buffer = 0;
		if ( S_OK == m_videoFrame->GetBytes( (void**) &buffer ) && buffer )
		{
			for ( int i = 0; i < m_width; i++ )
			{
				*buffer++ = 128;
				*buffer++ = 16;
			}
		}
		mlt_log_debug( &m_consumer, "created video frame\n" );
		mlt_deque_push_back( m_videoFrameQ, m_videoFrame );
	}
Example #6
void Player::ScheduleNextFrame(bool prerolling)
{
    AVPacket pkt;
    AVPicture picture;

    if (serial_fd > 0 && packet_queue_get(&dataqueue, &pkt, 0)) {
        if (pkt.data[0] != ' '){
            fprintf(stderr,"written %.*s  \n", pkt.size, pkt.data);
            write(serial_fd, pkt.data, pkt.size);
        }
        av_free_packet(&pkt);
    }

    if (packet_queue_get(&videoqueue, &pkt, 1) < 0)
        return;

    IDeckLinkMutableVideoFrame *videoFrame;
    if (m_deckLinkOutput->CreateVideoFrame(m_frameWidth,
                                           m_frameHeight,
                                           m_frameWidth * 2,
                                           pix,
                                           bmdFrameFlagDefault,
                                           &videoFrame) != S_OK) {
        fprintf(stderr, "Error creating video frame\n");
        av_free_packet(&pkt);
        return;
    }
    void *frame;
    int got_picture;
    videoFrame->GetBytes(&frame);

    avcodec_decode_video2(video_st->codec, avframe, &got_picture, &pkt);
    if (got_picture) {
        avpicture_fill(&picture, (uint8_t *)frame, pix_fmt,
                       m_frameWidth, m_frameHeight);

        sws_scale(sws, avframe->data, avframe->linesize, 0, avframe->height,
                  picture.data, picture.linesize);

        if (m_deckLinkOutput->ScheduleVideoFrame(videoFrame,
                                                 pkt.pts *
                                                 video_st->time_base.num,
                                                 pkt.duration *
                                                 video_st->time_base.num,
                                                 video_st->time_base.den) !=
            S_OK)
            fprintf(stderr, "Error scheduling frame\n");
    }
    videoFrame->Release();
    av_free_packet(&pkt);
}
Example #7
void BMDOutputDelegate::StopRunning ()
{
	// Stop the scheduled playback and disable the video output immediately
	m_deckLinkOutput->StopScheduledPlayback(0, NULL, 0);
	//m_deckLinkOutput->DisableAudioOutput();
	m_deckLinkOutput->DisableVideoOutput();
	
	if (m_yuvFrame != NULL)
		m_yuvFrame->Release();
	m_yuvFrame = NULL;
	
	if (m_rgbFrame != NULL)
		m_rgbFrame->Release();
	m_rgbFrame = NULL;
	
	// Playback has stopped; clear the running flag
	m_running = false;
}
Example #8
void SignalGenerator::scheduleNextFrame(bool prerolling)
{
	IDeckLinkMutableVideoFrame *currentFrame;
	if (prerolling == false)
	{
		// If not prerolling, make sure that playback is still active
		if (running == false)
			return;
	}
	
	if (outputSignal == kOutputSignalPip)
	{
		if ((totalFramesScheduled % framesPerSecond) == 0)
			currentFrame = videoFrameBars;
		else
			currentFrame = videoFrameBlack;
	}
	else
	{
		if ((totalFramesScheduled % framesPerSecond) == 0)
			currentFrame = videoFrameBlack;
		else
			currentFrame = videoFrameBars;
	}
	
	printf("frames: %d\n", timeCode->frames());
	currentFrame->SetTimecodeFromComponents(timeCodeFormat, 
		timeCode->hours(), 
		timeCode->minutes(), 
		timeCode->seconds(), 
		timeCode->frames(), 
		bmdTimecodeFlagDefault);

	if (deckLinkOutput->ScheduleVideoFrame(currentFrame, (totalFramesScheduled * frameDuration), frameDuration, frameTimescale) != S_OK)
		goto out;
	
	totalFramesScheduled += 1;
out:	
	timeCode->update();
	
}
Example #9
        void preroll_video_frames(unsigned int n_frames) {
            IDeckLinkMutableVideoFrame *frame;
            IDeckLinkVideoFrameAncillary *anc;
            for (unsigned int i = 0; i < n_frames; i++) {
                if (deckLinkOutput->CreateVideoFrame(norms[norm].w, 
                        norms[norm].h, 2*norms[norm].w, bpf,
                        bmdFrameFlagDefault, &frame) != S_OK) {

                    throw std::runtime_error("Failed to create frame"); 
                }

                if (deckLinkOutput->CreateAncillaryData(bpf, &anc) != S_OK) {
                    throw std::runtime_error("failed to create frame ancillary data");
                }

                if (frame->SetAncillaryData(anc) != S_OK) {
                    throw std::runtime_error("failed to set frame ancillary data");
                }

                // SetAncillaryData takes its own reference, so release ours to
                // avoid leaking one IDeckLinkVideoFrameAncillary per frame.
                anc->Release();

                schedule_frame(frame);
            }
        }
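schedule_frame() is defined elsewhere in this class; a plausible sketch, assuming counters named frames_scheduled, frame_duration and time_scale (names invented here for illustration):

            void schedule_frame(IDeckLinkMutableVideoFrame *frame) {
                if (deckLinkOutput->ScheduleVideoFrame(frame,
                        frames_scheduled * frame_duration, frame_duration,
                        time_scale) != S_OK) {
                    throw std::runtime_error("failed to schedule frame");
                }
                frames_scheduled++;
                // The driver keeps its own reference until the frame completes,
                // so ours can be dropped (the GStreamer examples below do the same).
                frame->Release();
            }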
Example #10
	void stop()
	{
		// Stop the audio and video output streams immediately
		if ( m_deckLinkOutput )
		{
			m_deckLinkOutput->StopScheduledPlayback( 0, 0, 0 );
			m_deckLinkOutput->DisableAudioOutput();
			m_deckLinkOutput->DisableVideoOutput();
		}
		while ( mlt_deque_count( m_videoFrameQ ) )
		{
			m_videoFrame = (IDeckLinkMutableVideoFrame*) mlt_deque_pop_back( m_videoFrameQ );
			m_videoFrame->Release();
		}
		m_videoFrame = 0;
		if ( m_fifo ) sample_fifo_close( m_fifo );
	}
Example #11
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  gint i;
  GstDecklinkVideoFormat caps_format;
  BMDPixelFormat format;
  gint bpp;
  GstVideoTimeCodeMeta *tc_meta;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
  format = gst_decklink_pixel_format_from_type (caps_format);
  bpp = gst_decklink_bpp_from_type (caps_format);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  /* See gst_base_sink_adjust_time() */
  latency = gst_base_sink_get_latency (bsink);
  render_delay = gst_base_sink_get_render_delay (bsink);
  ts_offset = gst_base_sink_get_ts_offset (bsink);

  running_time += latency;

  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
      &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * bpp);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta) {
    BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
    gchar *tc_str;

    if (((GstVideoTimeCodeFlags) (tc_meta->tc.
                config.flags)) & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
    else
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
    if (tc_meta->tc.field_count == 2)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);

    tc_str = gst_video_time_code_to_string (&tc_meta->tc);
    ret = frame->SetTimecodeFromComponents (self->timecode_format,
        (uint8_t) tc_meta->tc.hours,
        (uint8_t) tc_meta->tc.minutes,
        (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set timecode %s to video frame: 0x%08x", tc_str, ret);
      flow_ret = GST_FLOW_ERROR;
      g_free (tc_str);
      goto out;
    }
    GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
    g_free (tc_str);
  }

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
Example #12
static GstFlowReturn
gst_decklink_sink_videosink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstDecklinkSink *decklinksink;
  IDeckLinkMutableVideoFrame *frame;
  void *data;
  GstFlowReturn ret;
  const GstDecklinkMode *mode;

  decklinksink = GST_DECKLINK_SINK (parent);

#if 0
  if (!decklinksink->video_enabled) {
    HRESULT ret;
    ret = decklinksink->output->EnableVideoOutput (decklinksink->display_mode,
        bmdVideoOutputFlagDefault);
    if (ret != S_OK) {
      GST_WARNING ("failed to enable video output");
      //return FALSE;
    }
    decklinksink->video_enabled = TRUE;
  }
#endif

  mode = gst_decklink_get_mode (decklinksink->mode);

  if (decklinksink->output->CreateVideoFrame (mode->width,
          mode->height, mode->width * 2, decklinksink->pixel_format,
          bmdFrameFlagDefault, &frame) != S_OK) {
    GST_ERROR_OBJECT (decklinksink, "Failed to create video frame");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  frame->GetBytes (&data);
  gst_buffer_extract (buffer, 0, data, gst_buffer_get_size (buffer));
  gst_buffer_unref (buffer);

  g_mutex_lock (&decklinksink->mutex);
  while (decklinksink->queued_frames > 2 && !decklinksink->stop) {
    g_cond_wait (&decklinksink->cond, &decklinksink->mutex);
  }
  if (!decklinksink->stop) {
    decklinksink->queued_frames++;
  }
  g_mutex_unlock (&decklinksink->mutex);

  if (!decklinksink->stop) {
    decklinksink->output->ScheduleVideoFrame (frame,
        decklinksink->num_frames * mode->fps_d, mode->fps_d, mode->fps_n);
    decklinksink->num_frames++;

    if (!decklinksink->sched_started) {
      decklinksink->output->StartScheduledPlayback (0, mode->fps_d, 1.0);
      decklinksink->sched_started = TRUE;
    }

    ret = GST_FLOW_OK;
  } else {
    ret = GST_FLOW_FLUSHING;
  }

  frame->Release ();

  return ret;
}
Example #13
static void DisplayVideo(vout_display_t *vd, picture_t *picture, subpicture_t *)
{
    vout_display_sys_t *sys = vd->sys;
    struct decklink_sys_t *decklink_sys = GetDLSys(VLC_OBJECT(vd));
    mtime_t now = mdate();

    if (!picture)
        return;

    picture_t *orig_picture = picture;

    if (now - picture->date > sys->nosignal_delay * CLOCK_FREQ) {
        msg_Dbg(vd, "no signal");
        if (sys->pic_nosignal) {
            picture = sys->pic_nosignal;
        } else {
            if (sys->tenbits) { // I422_10L
                plane_t *y = &picture->p[0];
                memset(y->p_pixels, 0x0, y->i_lines * y->i_pitch);
                for (int i = 1; i < picture->i_planes; i++) {
                    plane_t *p = &picture->p[i];
                    size_t len = p->i_lines * p->i_pitch / 2;
                    int16_t *data = (int16_t*)p->p_pixels;
                    for (size_t j = 0; j < len; j++) // XXX: SIMD
                        data[j] = 0x200;
                }
            } else { // UYVY
                size_t len = picture->p[0].i_lines * picture->p[0].i_pitch;
                for (size_t i = 0; i < len; i+= 2) { // XXX: SIMD
                    picture->p[0].p_pixels[i+0] = 0x80;
                    picture->p[0].p_pixels[i+1] = 0;
                }
            }
        }
        picture->date = now;
    }

    HRESULT result;
    int w, h, stride, length;
    w = decklink_sys->i_width;
    h = decklink_sys->i_height;

    IDeckLinkMutableVideoFrame *pDLVideoFrame;
    result = decklink_sys->p_output->CreateVideoFrame(w, h, w*3,
        sys->tenbits ? bmdFormat10BitYUV : bmdFormat8BitYUV,
        bmdFrameFlagDefault, &pDLVideoFrame);

    if (result != S_OK) {
        msg_Err(vd, "Failed to create video frame: 0x%X", result);
        pDLVideoFrame = NULL;
        goto end;
    }

    void *frame_bytes;
    pDLVideoFrame->GetBytes((void**)&frame_bytes);
    stride = pDLVideoFrame->GetRowBytes();

    if (sys->tenbits)
        v210_convert(frame_bytes, picture, stride);
    else for(int y = 0; y < h; ++y) {
        uint8_t *dst = (uint8_t *)frame_bytes + stride * y;
        const uint8_t *src = (const uint8_t *)picture->p[0].p_pixels +
            picture->p[0].i_pitch * y;
        memcpy(dst, src, w * 2 /* bpp */);
    }


    // compute frame duration in CLOCK_FREQ units
    length = (decklink_sys->frameduration * CLOCK_FREQ) / decklink_sys->timescale;

    picture->date -= decklink_sys->offset;
    result = decklink_sys->p_output->ScheduleVideoFrame(pDLVideoFrame,
        picture->date, length, CLOCK_FREQ);

    if (result != S_OK) {
        msg_Err(vd, "Dropped Video frame %"PRId64 ": 0x%x",
            picture->date, result);
        goto end;
    }

    now = mdate() - decklink_sys->offset;

    BMDTimeValue decklink_now;
    double speed;
    decklink_sys->p_output->GetScheduledStreamTime (CLOCK_FREQ, &decklink_now, &speed);

    if ((now - decklink_now) > 400000) {
        /* XXX: workaround card clock drift */
        decklink_sys->offset += 50000;
        msg_Err(vd, "Delaying: offset now %"PRId64"", decklink_sys->offset);
    }

end:
    if (pDLVideoFrame)
        pDLVideoFrame->Release();
    picture_Release(orig_picture);
}
Example #14
// Draw the captured video frame texture onto a box, rendering to the off-screen frame buffer.
// Read the rendered scene back from the frame buffer and schedule it for playout.
void OpenGLComposite::PlayoutFrameCompleted(IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult completionResult)
{
	EnterCriticalSection(&pMutex);

	// Get the first frame from the queue
	IDeckLinkMutableVideoFrame* outputVideoFrame = mDLOutputVideoFrameQueue.front();
	mDLOutputVideoFrameQueue.push_back(outputVideoFrame);
	mDLOutputVideoFrameQueue.pop_front();

	void*	pFrame;
	outputVideoFrame->GetBytes(&pFrame);

	long rowbytes = outputVideoFrame->GetRowBytes();
	long height = outputVideoFrame->GetHeight();
	long memSize = rowbytes * height;

	// make GL context current in this thread
	wglMakeCurrent( hGLDC, hGLRC );

	// Draw OpenGL scene to the off-screen frame buffer
	glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, mIdFrameBuf);

	// Setup view and projection
	GLfloat aspectRatio = (GLfloat)mFrameWidth / (GLfloat)mFrameHeight;
	glViewport (0, 0, mFrameWidth, mFrameHeight);
	glMatrixMode( GL_PROJECTION );
	glLoadIdentity();
	gluPerspective( 45.0f, aspectRatio, 0.1f, 100.0f );
	glMatrixMode( GL_MODELVIEW );
	glLoadIdentity();

	glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
	glScalef( aspectRatio, 1.0f, 1.0f );			// Scale x for correct aspect ratio
	glTranslatef( 0.0f, 0.0f, -4.0f );				// Move into screen
	glRotatef( mRotateAngle, 1.0f, 1.0f, 1.0f );	// Rotate model around a vector
	mRotateAngle -= mRotateAngleRate;				// update the rotation angle for next iteration
	glFinish();										// Ensure changes to GL state are complete

	// Draw a colourful frame around the front face of the box
	// (provides a pleasing nesting effect when you connect the playout output to the capture input)
	glBegin(GL_QUAD_STRIP);
	glColor3f( 1.0f, 0.0f, 0.0f );
	glVertex3f( 1.2f,  1.2f, 1.0f);
	glVertex3f( 1.0f,  1.0f, 1.0f);
	glColor3f( 0.0f, 0.0f, 1.0f );
	glVertex3f( 1.2f, -1.2f, 1.0f);
	glVertex3f( 1.0f, -1.0f, 1.0f);
	glColor3f( 0.0f, 1.0f, 0.0f );
	glVertex3f(-1.2f, -1.2f, 1.0f);
	glVertex3f(-1.0f, -1.0f, 1.0f);
	glColor3f( 1.0f, 1.0f, 0.0f );
	glVertex3f(-1.2f,  1.2f, 1.0f);
	glVertex3f(-1.0f,  1.0f, 1.0f);
	glColor3f( 1.0f, 0.0f, 0.0f );
	glVertex3f( 1.2f,  1.2f, 1.0f);
	glVertex3f( 1.0f,  1.0f, 1.0f);
	glEnd();

	if (mHasNoInputSource)
	{
		// Draw a big X when no input is available on capture
		glBegin( GL_QUADS );
		glColor3f( 1.0f, 0.0f, 1.0f );
		glVertex3f(  0.8f,  0.9f,  1.0f );
		glVertex3f(  0.9f,  0.8f,  1.0f );
		glColor3f( 1.0f, 1.0f, 0.0f );
		glVertex3f( -0.8f, -0.9f,  1.0f );
		glVertex3f( -0.9f, -0.8f,  1.0f );
		glColor3f( 1.0f, 0.0f, 1.0f );
		glVertex3f( -0.8f,  0.9f,  1.0f );
		glVertex3f( -0.9f,  0.8f,  1.0f );
		glColor3f( 1.0f, 1.0f, 0.0f );
		glVertex3f(  0.8f, -0.9f,  1.0f );
		glVertex3f(  0.9f, -0.8f,  1.0f );
		glEnd();
	}
	else
	{
		if (mFastTransferExtensionAvailable)
		{
			// Signal that we're about to draw using mCaptureTexture onto mFBOTexture
			mCaptureAllocator->beginTextureInUse();
		}

		// Pass texture unit 0 to the fragment shader as a uniform variable
		glEnable(GL_TEXTURE_2D);
		glBindTexture(GL_TEXTURE_2D, mCaptureTexture);
		glUseProgram(mProgram);
		GLint locUYVYtex = glGetUniformLocation(mProgram, "UYVYtex");
		glUniform1i(locUYVYtex, 0);		// Bind texture unit 0

		// Draw front and back faces of box applying video texture to each face
		glBegin(GL_QUADS);
		glTexCoord2f(1.0f, 0.0f);	glVertex3f(  1.0f,  1.0f,  1.0f );		// Top right of front side
		glTexCoord2f(0.0f, 0.0f);	glVertex3f( -1.0f,  1.0f,  1.0f );		// Top left of front side
		glTexCoord2f(0.0f, 1.0f);	glVertex3f( -1.0f, -1.0f,  1.0f );		// Bottom left of front side
		glTexCoord2f(1.0f, 1.0f);	glVertex3f(  1.0f, -1.0f,  1.0f );		// Bottom right of front side

		glTexCoord2f(1.0f, 1.0f);	glVertex3f(  1.0f, -1.0f, -1.0f );		// Top right of back side
		glTexCoord2f(0.0f, 1.0f);	glVertex3f( -1.0f, -1.0f, -1.0f );		// Top left of back side
		glTexCoord2f(0.0f, 0.0f);	glVertex3f( -1.0f,  1.0f, -1.0f );		// Bottom left of back side
		glTexCoord2f(1.0f, 0.0f);	glVertex3f(  1.0f,  1.0f, -1.0f );		// Bottom right of back side
		glEnd();

		// Draw left and right sides of box with partially transparent video texture
		glEnable(GL_BLEND);
		glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
		glBegin(GL_QUADS);
		glTexCoord2f(0.1f, 0.0f);	glVertex3f( -1.0f,  1.0f,  1.0f );		// Top right of left side
		glTexCoord2f(1.0f, 0.0f);	glVertex3f( -1.0f,  1.0f, -1.0f );		// Top left of left side
		glTexCoord2f(1.0f, 1.0f);	glVertex3f( -1.0f, -1.0f, -1.0f );		// Bottom left of left side
		glTexCoord2f(0.1f, 1.0f);	glVertex3f( -1.0f, -1.0f,  1.0f );		// Bottom right of left side

		glTexCoord2f(1.0f, 0.0f);	glVertex3f(  1.0f,  1.0f, -1.0f );		// Top right of right side
		glTexCoord2f(0.0f, 0.0f);	glVertex3f(  1.0f,  1.0f,  1.0f );		// Top left of right side
		glTexCoord2f(0.0f, 1.0f);	glVertex3f(  1.0f, -1.0f,  1.0f );		// Bottom left of right side
		glTexCoord2f(1.0f, 1.0f);	glVertex3f(  1.0f, -1.0f, -1.0f );		// Bottom right of right side
		glEnd();
		glDisable(GL_BLEND);

		glUseProgram(0);
		glDisable(GL_TEXTURE_2D);
		glBindTexture(GL_TEXTURE_2D, 0);
	}

	if (mFastTransferExtensionAvailable)
	{
		// Finished with mCaptureTexture
		mCaptureAllocator->endTextureInUse();

		if (! mPlayoutAllocator->transferFrame(pFrame, mFBOTexture))
			OutputDebugStringA("Playback: transferFrame() failed\n");

		paintGL();

		// Wait for transfer to system memory to complete
		mPlayoutAllocator->waitForTransferComplete(pFrame);
	}
	else
	{
		glReadPixels(0, 0, mFrameWidth, mFrameHeight, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, pFrame);
		paintGL();
	}

	// If the last completed frame was late or dropped, bump the scheduled time further into the future
	if (completionResult == bmdOutputFrameDisplayedLate || completionResult == bmdOutputFrameDropped)
		mTotalPlayoutFrames += 2;

	// Schedule the next frame for playout
	HRESULT hr = mDLOutput->ScheduleVideoFrame(outputVideoFrame, (mTotalPlayoutFrames * mFrameDuration), mFrameDuration, mFrameTimescale);
	if (SUCCEEDED(hr))
		mTotalPlayoutFrames++;

	wglMakeCurrent( NULL, NULL );

	LeaveCriticalSection(&pMutex);
}
Example #15
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  gint i;
  GstClock *clock;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  // FIXME: https://bugzilla.gnome.org/show_bug.cgi?id=742916
  // We need to drop late buffers here immediately instead of
  // potentially overflowing the internal queue of the hardware
  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock) {
    GstClockTime clock_running_time, base_time, clock_time, latency,
        max_lateness;

    base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
    clock_time = gst_clock_get_time (clock);
    if (base_time != GST_CLOCK_TIME_NONE && clock_time != GST_CLOCK_TIME_NONE) {
      clock_running_time = clock_time - base_time;
      latency = gst_base_sink_get_latency (GST_BASE_SINK_CAST (self));
      max_lateness = gst_base_sink_get_max_lateness (GST_BASE_SINK_CAST (self));

      if (clock_running_time >
          running_time + running_time_duration + latency + max_lateness) {
        GST_DEBUG_OBJECT (self,
            "Late buffer: %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
            GST_TIME_ARGS (clock_running_time),
            GST_TIME_ARGS (running_time + running_time_duration));

        if (self->last_render_time == GST_CLOCK_TIME_NONE
            || (self->last_render_time < clock_running_time
                && clock_running_time - self->last_render_time >= GST_SECOND)) {
          GST_DEBUG_OBJECT (self,
              "Rendering frame nonetheless because we had none for more than 1s");
          running_time = clock_running_time;
          running_time_duration = 0;
        } else {
          GST_WARNING_OBJECT (self, "Dropping frame");
          gst_object_unref (clock);
          return GST_FLOW_OK;
        }
      }
    }

    gst_object_unref (clock);
  }
  self->last_render_time = running_time;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], bmdFormat8BitYUV,
      bmdFrameFlagDefault, &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * 2);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
Example #16
static GstFlowReturn
gst_decklink_sink_videosink_chain (GstPad * pad, GstBuffer * buffer)
{
  GstDecklinkSink *decklinksink;
  IDeckLinkMutableVideoFrame *frame;
  void *data;
  GstFlowReturn ret;
  const GstDecklinkMode *mode;

  decklinksink = GST_DECKLINK_SINK (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (decklinksink, "chain");

#if 0
  if (!decklinksink->video_enabled) {
    HRESULT ret;
    ret = decklinksink->output->EnableVideoOutput (decklinksink->display_mode,
        bmdVideoOutputFlagDefault);
    if (ret != S_OK) {
      GST_ERROR ("failed to enable video output");
      //return FALSE;
    }
    decklinksink->video_enabled = TRUE;
  }
#endif

  mode = gst_decklink_get_mode (decklinksink->mode);

  if (decklinksink->output->CreateVideoFrame (mode->width,
          mode->height, mode->width * 2, bmdFormat8BitYUV,
          bmdFrameFlagDefault, &frame) != S_OK) {
    GST_ERROR_OBJECT (decklinksink, "Failed to create video frame");
    gst_buffer_unref (buffer);
    gst_object_unref (decklinksink);
    return GST_FLOW_ERROR;
  }

  frame->GetBytes (&data);
  memcpy (data, GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
  gst_buffer_unref (buffer);

  g_mutex_lock (decklinksink->mutex);
  while (decklinksink->queued_frames > 2 && !decklinksink->stop) {
    g_cond_wait (decklinksink->cond, decklinksink->mutex);
  }
  if (!decklinksink->stop) {
    decklinksink->queued_frames++;
  }
  g_mutex_unlock (decklinksink->mutex);

  if (!decklinksink->stop) {
    decklinksink->output->ScheduleVideoFrame (frame,
        decklinksink->num_frames * mode->fps_d, mode->fps_d, mode->fps_n);
    decklinksink->num_frames++;

    if (!decklinksink->sched_started) {
      decklinksink->output->StartScheduledPlayback (0, mode->fps_d, 1.0);
      decklinksink->sched_started = TRUE;
    }

    ret = GST_FLOW_OK;
  } else {
    ret = GST_FLOW_WRONG_STATE;
  }

  frame->Release ();

  gst_object_unref (decklinksink);
  return ret;
}
Example #17
	HRESULT render( mlt_frame frame )
	{
		HRESULT result = S_OK;
		// Get the audio		
		double speed = mlt_properties_get_double( MLT_FRAME_PROPERTIES(frame), "_speed" );
		if ( speed == 1.0 )
		{
			mlt_audio_format format = mlt_audio_s16;
			int frequency = bmdAudioSampleRate48kHz;
			int samples = mlt_sample_calculator( m_fps, frequency, m_count );
			int16_t *pcm = 0;
			
			if ( !mlt_frame_get_audio( frame, (void**) &pcm, &format, &frequency, &m_channels, &samples ) )
			{
				int count = samples;
				
				if ( !m_isPrerolling )
				{
					uint32_t audioCount = 0;
					uint32_t videoCount = 0;
					
					// Check for resync
					m_deckLinkOutput->GetBufferedAudioSampleFrameCount( &audioCount );
					m_deckLinkOutput->GetBufferedVideoFrameCount( &videoCount );
					
					// Underflow typically occurs during non-normal speed playback.
					if ( audioCount < 1 || videoCount < 1 )
					{
						// Upon switching to normal playback, buffer some frames faster than realtime.
						mlt_log_info( &m_consumer, "buffer underrun: audio buf %u video buf %u frames\n", audioCount, videoCount );
						m_prerollCounter = 0;
					}
					
					// While rebuffering
					if ( isBuffering() )
					{
						// Only append audio to reach the ideal level and not overbuffer.
						int ideal = ( m_preroll - 1 ) * bmdAudioSampleRate48kHz / m_fps;
						int actual = m_fifo->used / m_channels + audioCount;
						int diff = ideal / 2 - actual;
						count = diff < 0 ? 0 : diff < count ? diff : count;
					}
				}
				if ( count > 0 )
					sample_fifo_append( m_fifo, pcm, count * m_channels );
			}
		}
		
		// Create video frames while pre-rolling
		if ( m_isPrerolling )
		{
			createFrame();
			if ( !m_videoFrame )
			{
				mlt_log_error( &m_consumer, "failed to create video frame\n" );
				return S_FALSE;
			}
		}
		
		// Get the video
		if ( mlt_properties_get_int( MLT_FRAME_PROPERTIES( frame ), "rendered") )
		{
			mlt_image_format format = mlt_image_yuv422;
			uint8_t* image = 0;
			uint8_t* buffer = 0;

			if ( !mlt_frame_get_image( frame, &image, &format, &m_width, &m_height, 0 ) )
			{
				m_videoFrame = (IDeckLinkMutableVideoFrame*) mlt_deque_pop_back( m_videoFrameQ );
				m_videoFrame->GetBytes( (void**) &buffer );
				if ( m_displayMode->GetFieldDominance() == bmdUpperFieldFirst )
					// convert lower field first to top field first
					swab( image, buffer + m_width * 2, m_width * ( m_height - 1 ) * 2 );
				else
					swab( image, buffer, m_width * m_height * 2 );
				m_deckLinkOutput->ScheduleVideoFrame( m_videoFrame, m_count * m_duration, m_duration, m_timescale );
				mlt_deque_push_front( m_videoFrameQ, m_videoFrame );
			}
		}
		else
		{
			mlt_log_verbose( &m_consumer, "dropped video frame\n" );
		}
		++m_count;

		// Check for end of pre-roll
		if ( ++m_prerollCounter > m_preroll && m_isPrerolling )
		{
			// Start audio and video output
			m_deckLinkOutput->EndAudioPreroll();
			m_deckLinkOutput->StartScheduledPlayback( 0, m_timescale, 1.0 );
			m_isPrerolling = false;
		}

		return result;
	}
Example #18
void BMDOutputDelegate::ScheduleNextFrame(bool preroll)
{
	if(!preroll)
	{
		// If not prerolling, make sure that playback is still active
		if (m_running == false)
			return;
	}
	
	if(m_frameSet)
	{
		//qDebug() << "m_frameSet: not setting";
		return;
	}
	
	m_frameSet = true;
	
	QTime t;
	t.start();
// 		if ((m_totalFramesScheduled % m_framesPerSecond) == 0)
// 		{
// 			// On each second, schedule a frame of black
// 			if (m_deckLinkOutput->ScheduleVideoFrame(m_videoFrameBlack, (m_totalFramesScheduled * m_frameDuration), m_frameDuration, m_frameTimescale) != S_OK)
// 				return;
// 		}

	//qDebug() << "BMDOutputDelegate::ScheduleNextFrame: [3] Frame Repaint: "<<t.restart()<<" ms";
	
	if(!m_image.isNull())
	{
		void *frameBytes;
		m_rgbFrame->GetBytes(&frameBytes);
		
		int maxBytes = m_frameWidth * m_frameHeight * 4;
		memcpy(frameBytes, (const uchar*)m_image.bits(), m_image.byteCount() > maxBytes ? maxBytes : m_image.byteCount());
		
		//qDebug() << "BMDOutputDelegate::ScheduleNextFrame: [4] Load BMD Frame with RGB: "<<t.restart()<<" ms";
	
		
		// 	Pixel conversions
		//  Source frame      Target frame
		//  bmdFormat8BitRGBA bmdFormat8BitYUV
		//                    bmdFormat8BitARGB
		//  bmdFormat8BitBGRA bmdFormat8BitYUV
		//  bmdFormat8BitARGB bmdFormat8BitYUV
		if(m_deckLinkConverter)
		{
			m_deckLinkConverter->ConvertFrame(m_rgbFrame, m_yuvFrame);
			
			//qDebug() << "BMDOutputDelegate::ScheduleNextFrame: [5] RGB->YUV: "<<t.restart()<<" ms";
			
			if (m_deckLinkOutput->ScheduleVideoFrame(m_yuvFrame, 
				(m_totalFramesScheduled * m_frameDuration), 
				m_frameDuration, 
				m_frameTimescale) != S_OK)
				return;
				
			//qDebug() << "BMDOutputDelegate::ScheduleNextFrame: [6] ScheduleVideoFrame(): "<<t.restart()<<" ms";
		}
		else
		{
			qDebug() << "BMDOutputDelegate::ScheduleNextFrame: No m_deckLinkConverter available, unable to convert frame.";
		}
	}
	
		
	m_totalFramesScheduled += 1;
}
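The m_rgbFrame/m_yuvFrame pair used above is assumed to be created during setup, following the pixel-conversion table in the comments; a minimal sketch (member names from the example, function name and creation site assumed):

void BMDOutputDelegate::CreateFrames()
{
	// RGB staging frame, filled from the QImage (4 bytes per pixel)
	if (m_deckLinkOutput->CreateVideoFrame(m_frameWidth, m_frameHeight,
			m_frameWidth * 4, bmdFormat8BitBGRA, bmdFrameFlagDefault, &m_rgbFrame) != S_OK)
		m_rgbFrame = NULL;
	// YUV frame actually scheduled on the output (2 bytes per pixel)
	if (m_deckLinkOutput->CreateVideoFrame(m_frameWidth, m_frameHeight,
			m_frameWidth * 2, bmdFormat8BitYUV, bmdFrameFlagDefault, &m_yuvFrame) != S_OK)
		m_yuvFrame = NULL;
}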