// Capture callback invoked by the DeckLink driver for every incoming frame.
// Converts the frame to RGB, optionally deinterlaces, and swaps the
// front/back pixel buffers under a mutex for the consumer thread.
// Audio packets are ignored (see the #if 0 block below).
HRESULT ofxBlackmagicGrabber::VideoInputFrameArrived(IDeckLinkVideoInputFrame * videoFrame, IDeckLinkAudioInputPacket * audioFrame){
	IDeckLinkVideoFrame*	                rightEyeFrame = NULL;
	IDeckLinkVideoFrame3DExtensions*        threeDExtensions = NULL;

	// Handle Video Frame
	if(videoFrame)
	{
		// If 3D mode is enabled we retrieve the 3D extensions interface which
		// gives us access to the right eye frame by calling GetFrameForRightEye().
		// Short-circuit evaluation guarantees threeDExtensions is only
		// dereferenced when QueryInterface succeeded.
		if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
			(threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
		{
			rightEyeFrame = NULL;
		}

		if (threeDExtensions)
			threeDExtensions->Release();

		if (videoFrame->GetFlags() & bmdFrameHasNoInputSource){
			// BUGFIX: the log message was missing the closing parenthesis.
			ofLogError(LOG_NAME) <<  "Frame received (#" << frameCount << ") - No input signal detected";
		}
		/*else
		{*/
			const char *timecodeString = NULL;
			if (g_timecodeFormat != 0)
			{
				IDeckLinkTimecode *timecode;
				if (videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK)
				{
					// BUGFIX: the original declared a shadowing local
					// (CFStringRef timecodeString) here, so the outer pointer
					// stayed NULL, the returned string leaked, and the free()
					// below never ran. Fill the outer pointer instead, matching
					// the Linux DeckLink API signature used elsewhere in this file.
					timecode->GetString(&timecodeString);
					// GetTimecode returns an AddRef'd interface; release it.
					timecode->Release();
				}
			}

//			ofLogVerbose(LOG_NAME) << "Frame received (#" <<  frameCount
//					<< ") [" << (timecodeString != NULL ? timecodeString : "No timecode")
//					<< "] -" << (rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame")
//					<< "- Size: " << (videoFrame->GetRowBytes() * videoFrame->GetHeight()) << "bytes";

			// GetString allocates the string on our behalf; we own it.
			if (timecodeString)
				free((void*)timecodeString);

			yuvToRGB(videoFrame);
			if(bDeinterlace) deinterlace();
			// Swap front/back pixel buffers under the lock so the consumer
			// thread never observes a half-written frame.
			pixelsMutex.lock();
			bNewFrameArrived = true;
			ofPixels * aux = currentPixels;
			currentPixels = backPixels;
			backPixels = aux;
			pixelsMutex.unlock();
		//}

		if (rightEyeFrame)
			rightEyeFrame->Release();

		frameCount++;
	}

#if 0	//No audio
	// Handle Audio Frame
	void*	audioFrameBytes;
	if (audioFrame)
	{
		if (audioOutputFile != -1)
		{
			audioFrame->GetBytes(&audioFrameBytes);
			write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
		}
	}
#endif
	return S_OK;
}
// Capture callback: streams each raw video frame over g_video_sock and each
// audio packet over g_audio_sock. Frames/packets are dropped when the kernel
// send queue of the corresponding socket grows past a threshold, so a slow
// receiver cannot stall capture.
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
  IDeckLinkVideoFrame*        rightEyeFrame = NULL;
  IDeckLinkVideoFrame3DExtensions*  threeDExtensions = NULL;
  void*                frameBytes;
  void*                audioFrameBytes;

  // Sample the number of unsent bytes queued on each socket (TIOCOUTQ) so we
  // can drop data instead of letting the send queues grow without bound.
  int video_buffer_value;
  int audio_buffer_value;
  ioctl(g_video_sock, TIOCOUTQ, &video_buffer_value);
  ioctl(g_audio_sock, TIOCOUTQ, &audio_buffer_value);

  // Handle Video Frame
  if (videoFrame)
  {
    // If 3D mode is enabled we retrieve the 3D extensions interface which
    // gives us access to the right eye frame by calling GetFrameForRightEye().
    // Short-circuit evaluation keeps threeDExtensions from being dereferenced
    // when QueryInterface failed.
    if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
      (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
    {
      rightEyeFrame = NULL;
    }

    if (threeDExtensions)
      threeDExtensions->Release();

    if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
    {
      printf("Frame received (#%lu) - No input signal detected\n", g_frameCount);
    }
    else
    {
      const char *timecodeString = NULL;
      if (g_config.m_timecodeFormat != 0)
      {
        IDeckLinkTimecode *timecode;
        if (videoFrame->GetTimecode(g_config.m_timecodeFormat, &timecode) == S_OK)
        {
          timecode->GetString(&timecodeString);
          // GetTimecode returns an AddRef'd interface; release it.
          timecode->Release();
        }
      }

      // Drop the whole callback (video AND audio) when the video socket is
      // backed up past ~2 MB of unsent data.
      if (video_buffer_value > 2000000)
      {
        printf("Video Drop!\n");
        // BUGFIX: the original jumped straight to bail here, leaking the
        // timecode string and the right-eye frame reference.
        if (timecodeString)
          free((void*)timecodeString);
        if (rightEyeFrame)
          rightEyeFrame->Release();
        goto bail;
      }

      printf("Frame received (#%lu) %lux%lu [%s] - %s - Size: %li bytes - Video buffer: %d bytes - Audio buffer: %d bytes\n",
        g_frameCount,
        videoFrame->GetWidth(),
        videoFrame->GetHeight(),
        timecodeString != NULL ? timecodeString : "No timecode",
        rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame",
        videoFrame->GetRowBytes() * videoFrame->GetHeight(),
        video_buffer_value,
        audio_buffer_value);

      videoFrame->GetBytes(&frameBytes);

      // Stream the raw frame bytes over the video socket in one send().
      ssize_t video_send_res = send(g_video_sock, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight(), 0);
      printf("Video Sent: %zd\n", video_send_res);

      // The timecode string is allocated on our behalf; free it.
      if (timecodeString)
        free((void*)timecodeString);
    }

    if (rightEyeFrame)
      rightEyeFrame->Release();

    g_frameCount++;
  }

  // Handle Audio Frame
  if (audioFrame)
  {
    audioFrame->GetBytes(&audioFrameBytes);

    // Drop audio when the audio socket is backed up past ~60 KB.
    if (audio_buffer_value > 60000) {
      printf("Audio Drop!\n");
      goto bail;
    }

    // BUGFIX: variable was misspelled "audo_send_res" in the original.
    ssize_t audio_send_res = send(g_audio_sock, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_config.m_audioChannels * (g_config.m_audioSampleDepth / 8), 0);
    printf("Audio Sent: %zd\n", audio_send_res);
  } else {
    // The driver can deliver video without audio when input timing is off;
    // note it but keep streaming.
    printf("Audio frame missing! Maybe video frame delaying?\n");
  }

bail:
  return S_OK;
}
// ---- Example #3 ----
// Capture callback: converts each incoming frame to 8-bit BGRA and hands it
// to the LCM frame consumer queue (when an LCM channel is configured),
// optionally appends raw frame/audio bytes to output files, and prints the
// measured capture rate once per second.
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
    IDeckLinkVideoFrame*				rightEyeFrame = NULL;
    IDeckLinkVideoFrame3DExtensions*	threeDExtensions = NULL;
    void*								frameBytes;
    void*								audioFrameBytes;

    // Handle Video Frame
    if (videoFrame)
    {
        // If 3D mode is enabled we retrieve the 3D extensions interface which
        // gives us access to the right eye frame by calling GetFrameForRightEye().
        // Short-circuit evaluation keeps threeDExtensions from being
        // dereferenced when QueryInterface failed.
        if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
             (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
        {
            rightEyeFrame = NULL;
        }

        if (threeDExtensions)
            threeDExtensions->Release();

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
        {
            printf("Frame received (#%lu) - No input signal detected\n", g_frameCount);
        }
        else
        {
            const char *timecodeString = NULL;
            if (g_config.m_timecodeFormat != 0)
            {
                IDeckLinkTimecode *timecode;
                if (videoFrame->GetTimecode(g_config.m_timecodeFormat, &timecode) == S_OK)
                {
                    timecode->GetString(&timecodeString);
                    // GetTimecode returns an AddRef'd interface; release it.
                    timecode->Release();
                }
            }

            int64_t timestampNow = bot_timestamp_now();

            if (g_config.m_lcmChannelName)
            {
                // BUGFIX: the original never checked CreateVideoFrame's result
                // (outputFrame could be passed to ConvertFrame uninitialized)
                // and ignored convertResult, leaking outputFrame on a failed
                // conversion. Enqueue only a successfully converted frame;
                // the consumer takes ownership of it.
                IDeckLinkMutableVideoFrame* outputFrame = NULL;
                // Row bytes = width * 4 for 8-bit BGRA.
                HRESULT createResult = g_deckLinkOutput->CreateVideoFrame(videoFrame->GetWidth(), videoFrame->GetHeight(), videoFrame->GetWidth()*4, bmdFormat8BitBGRA, bmdFrameFlagDefault, &outputFrame);
                if (createResult == S_OK)
                {
                    HRESULT convertResult = g_conversionInst->ConvertFrame(videoFrame, outputFrame);
                    if (convertResult == S_OK)
                        frameConsumer.Queue.enqueue(FrameData(outputFrame, timestampNow));
                    else
                        outputFrame->Release();   // drop this frame, don't leak it
                }
            }

            // Report the achieved capture frame rate once per second.
            static int64_t baseTime = timestampNow;
            static uint64_t frameCount = g_frameCount;
            double elapsedTime = (timestampNow - baseTime) * 1e-6;
            if (elapsedTime > 1.0)
            {
                printf("capturing at %.2f fps.\n", (g_frameCount - frameCount)/elapsedTime);
                baseTime = timestampNow;
                frameCount = g_frameCount;
            }

            // The timecode string is allocated on our behalf; free it.
            if (timecodeString)
                free((void*)timecodeString);

            if (g_videoOutputFile != -1)
            {
                videoFrame->GetBytes(&frameBytes);
                write(g_videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());

                if (rightEyeFrame)
                {
                    rightEyeFrame->GetBytes(&frameBytes);
                    write(g_videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
                }
            }
        }

        if (rightEyeFrame)
            rightEyeFrame->Release();

        g_frameCount++;
    }

    // Handle Audio Frame
    if (audioFrame)
    {
        if (g_audioOutputFile != -1)
        {
            audioFrame->GetBytes(&audioFrameBytes);
            write(g_audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_config.m_audioChannels * (g_config.m_audioSampleDepth / 8));
        }
    }

    // Stop capturing once the configured frame budget is reached.
    if (g_config.m_maxFrames > 0 && videoFrame && g_frameCount >= g_config.m_maxFrames)
    {
        g_do_exit = true;
        pthread_cond_signal(&g_sleepCond);
    }

    return S_OK;
}
// Capture callback: logs each incoming frame and appends the raw frame bytes
// (plus a fixed-size 256-byte timecode record) to videoOutputFile when one is
// open; otherwise performs a dry run. Emits sig_CaptureEnd once maxFrames
// frames have been captured. Audio handling is currently disabled.
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
    //TODO
    IDeckLinkVideoFrame*	                rightEyeFrame = NULL;
    IDeckLinkVideoFrame3DExtensions*        threeDExtensions = NULL;
    void*					frameBytes;

    // Handle Video Frame
    if(videoFrame)
    {
        // If 3D mode is enabled we retrieve the 3D extensions interface which
        // gives us access to the right eye frame by calling GetFrameForRightEye().
        // Short-circuit evaluation keeps threeDExtensions from being
        // dereferenced when QueryInterface failed.
        if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
             (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
        {
            rightEyeFrame = NULL;
        }

        if (threeDExtensions)
            threeDExtensions->Release();

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
        {
            fprintf(stderr, "Frame received (#%lu) - No input signal detected\n", frameCount);
        }
        else
        {
            const char *timecodeString = NULL;
            if (g_timecodeFormat != 0)
            {
                IDeckLinkTimecode *timecode;
                if (videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK)
                {
                    timecode->GetString(&timecodeString);
                    // GetTimecode returns an AddRef'd interface; release it.
                    timecode->Release();
                }
            }

            // Fixed-size copy of the timecode, written alongside each frame so
            // the output records have a constant layout.
            // BUGFIX: passing a NULL pointer to "%s" is undefined behavior;
            // substitute an empty string when no timecode was retrieved.
            char timecodeStaticString[256];
            snprintf(timecodeStaticString, sizeof(timecodeStaticString), "%s", timecodeString != NULL ? timecodeString : "");

            fprintf(stderr, "Frame received (#%lu) [%s] - %s - Size: %li bytes\n",
                    frameCount,
                    timecodeString != NULL ? timecodeString : "No timecode",
                    rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame",
                    videoFrame->GetRowBytes() * videoFrame->GetHeight());

            fprintf(stderr, "Time Code length: %lu\n", strlen(timecodeStaticString));

            // The timecode string is allocated on our behalf; free it.
            if (timecodeString)
                free((void*)timecodeString);

            if (videoOutputFile != -1)
            {
                videoFrame->GetBytes(&frameBytes);
                write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());

                if (rightEyeFrame)
                {
                    rightEyeFrame->GetBytes(&frameBytes);
                    write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
                }
                // Append the full 256-byte timecode record after the frame data.
                write(videoOutputFile, timecodeStaticString, sizeof(timecodeStaticString));
            }
            else
            {
                // Dry run: still fetch the buffers so the capture path is
                // exercised, but write nothing.
                fprintf(stderr, "No video output file. Dry run.\n");
                videoFrame->GetBytes(&frameBytes);
                if(rightEyeFrame)
                {
                    rightEyeFrame->GetBytes(&frameBytes);
                }
            }
        }

        if (rightEyeFrame)
            rightEyeFrame->Release();

        frameCount++;

        if (maxFrames > 0 && frameCount >= maxFrames)
        {
            sig_CaptureEnd(m_CamID);
        }
    }

    // Handle Audio Frame (disabled; re-declare audioFrameBytes if re-enabled)
//    if (audioFrame)
//    {
//        if (audioOutputFile != -1)
//        {
//            audioFrame->GetBytes(&audioFrameBytes);
//            write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
//        }
//    }
    return S_OK;
}
void TestPattern::StartRunning()
{
	HRESULT					result;
	unsigned long			audioSamplesPerFrame;
	IDeckLinkVideoFrame*	rightFrame;
	VideoFrame3D*			frame3D;

	m_frameWidth = m_displayMode->GetWidth();
	m_frameHeight = m_displayMode->GetHeight();
	m_displayMode->GetFrameRate(&m_frameDuration, &m_frameTimescale);

	// Calculate the number of frames per second, rounded up to the nearest integer.  For example, for NTSC (29.97 FPS), framesPerSecond == 30.
	m_framesPerSecond = (unsigned long)((m_frameTimescale + (m_frameDuration-1))  /  m_frameDuration);

	// Set the video output mode
	result = m_deckLinkOutput->EnableVideoOutput(m_displayMode->GetDisplayMode(), m_config->m_outputFlags);
	if (result != S_OK)
	{
		fprintf(stderr, "Failed to enable video output. Is another application using the card?\n");
		goto bail;
	}

	// Set the audio output mode
	result = m_deckLinkOutput->EnableAudioOutput(bmdAudioSampleRate48kHz, m_config->m_audioSampleDepth, m_config->m_audioChannels, bmdAudioOutputStreamContinuous);
	if (result != S_OK)
	{
		fprintf(stderr, "Failed to enable audio output\n");
		goto bail;
	}

	// Generate one second of audio
	m_audioBufferSampleLength = (unsigned long)((m_framesPerSecond * m_audioSampleRate * m_frameDuration) / m_frameTimescale);
	m_audioBuffer = valloc(m_audioBufferSampleLength * m_config->m_audioChannels * (m_config->m_audioSampleDepth / 8));

	if (m_audioBuffer == NULL)
	{
		fprintf(stderr, "Failed to allocate audio buffer memory\n");
		goto bail;
	}

	// Zero the buffer (interpreted as audio silence)
	memset(m_audioBuffer, 0x0, (m_audioBufferSampleLength * m_config->m_audioChannels * m_config->m_audioSampleDepth / 8));
	audioSamplesPerFrame = (unsigned long)((m_audioSampleRate * m_frameDuration) / m_frameTimescale);

	if (m_outputSignal == kOutputSignalPip)
		FillSine(m_audioBuffer, audioSamplesPerFrame, m_config->m_audioChannels, m_config->m_audioSampleDepth);
	else
		FillSine((void*)((unsigned long)m_audioBuffer + (audioSamplesPerFrame * m_config->m_audioChannels * m_config->m_audioSampleDepth / 8)), (m_audioBufferSampleLength - audioSamplesPerFrame), m_config->m_audioChannels, m_config->m_audioSampleDepth);

	// Generate a frame of black
	if (CreateFrame(&m_videoFrameBlack, FillBlack) != S_OK)
		goto bail;

	if (m_config->m_outputFlags & bmdVideoOutputDualStream3D)
	{
		frame3D = new VideoFrame3D(m_videoFrameBlack);
		m_videoFrameBlack->Release();
		m_videoFrameBlack = frame3D;
		frame3D = NULL;
	}

	// Generate a frame of colour bars
	if (CreateFrame(&m_videoFrameBars, FillForwardColourBars) != S_OK)
		goto bail;

	if (m_config->m_outputFlags & bmdVideoOutputDualStream3D)
	{
		if (CreateFrame(&rightFrame, FillReverseColourBars) != S_OK)
			goto bail;

		frame3D = new VideoFrame3D(m_videoFrameBars, rightFrame);
		m_videoFrameBars->Release();
		rightFrame->Release();
		m_videoFrameBars = frame3D;
		frame3D = NULL;
	}

	// Begin video preroll by scheduling a second of frames in hardware
	m_totalFramesScheduled = 0;
	m_totalFramesDropped = 0;
	m_totalFramesCompleted = 0;
	for (unsigned i = 0; i < m_framesPerSecond; i++)
		ScheduleNextFrame(true);

	// Begin audio preroll.  This will begin calling our audio callback, which will start the DeckLink output stream.
	m_audioBufferOffset = 0;
	if (m_deckLinkOutput->BeginAudioPreroll() != S_OK)
	{
		fprintf(stderr, "Failed to begin audio preroll\n");
		goto bail;
	}

	m_running = true;

	return;

bail:
	// *** Error-handling code.  Cleanup any resources that were allocated. *** //
	StopRunning();
}