Example #1
  virtual HRESULT STDMETHODCALLTYPE VideoInputFrameArrived(
    IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
  {
    IDeckLinkVideoFrame*             rightEyeFrame = NULL;
    IDeckLinkVideoFrame3DExtensions* threeDExtensions = NULL;
    void* frameBytes;
    void* audioFrameBytes; // unused: this delegate ignores the audio packet

    // Handle Video Frame
    if (videoFrame) {
#if 0
      // If 3D mode is enabled we retrieve the 3D extensions interface, which
      // gives us access to the right-eye frame by calling GetFrameForRightEye().
      if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions,
                                       (void **) &threeDExtensions) != S_OK) ||
           (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK)) {
        rightEyeFrame = NULL;
      }
#endif
      // With the block above disabled, threeDExtensions stays NULL and this
      // Release() is a no-op; it is kept for when 3D support is re-enabled.
      if (threeDExtensions) {
        threeDExtensions->Release();
      }

      if (!(videoFrame->GetFlags() & bmdFrameHasNoInputSource)) {
        long w = videoFrame->GetWidth();
        long h = videoFrame->GetHeight();
        videoFrame->GetBytes(&frameBytes);
        m_priv->setFrame(w, h,
                         GEM_YUV,
                         (unsigned char*)frameBytes);
      }

      if (rightEyeFrame) {
        rightEyeFrame->Release();
      }

      m_frameCount++;
    }

    return S_OK;
  }
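This callback only fires once the delegate has been registered with a DeckLink input. Below is a minimal sketch of that setup, which is not part of the example above: the helper name, display mode, and pixel format are assumptions; SetCallback/EnableVideoInput/StartStreams are the standard IDeckLinkInput API.

#include "DeckLinkAPI.h"

// Hypothetical helper: wire a delegate to an input and start streaming.
HRESULT startCapture(IDeckLinkInput* deckLinkInput, IDeckLinkInputCallback* delegate)
{
    // The driver calls delegate->VideoInputFrameArrived() from its own
    // capture thread for every completed frame / audio packet.
    HRESULT result = deckLinkInput->SetCallback(delegate);
    if (result != S_OK)
        return result;

    // Fixed mode and format for brevity; real code enumerates display modes
    // or passes bmdVideoInputEnableFormatDetection.
    result = deckLinkInput->EnableVideoInput(bmdModeHD1080i50, bmdFormat8BitYUV,
                                             bmdVideoInputFlagDefault);
    if (result != S_OK)
        return result;

    return deckLinkInput->StartStreams();
}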
Example #2
HRESULT ofxBlackmagicGrabber::VideoInputFrameArrived(IDeckLinkVideoInputFrame * videoFrame, IDeckLinkAudioInputPacket * audioFrame){
	IDeckLinkVideoFrame*	                rightEyeFrame = NULL;
	IDeckLinkVideoFrame3DExtensions*        threeDExtensions = NULL;

	// Handle Video Frame
	if(videoFrame)
	{
		// If 3D mode is enabled we retrieve the 3D extensions interface, which
		// gives us access to the right-eye frame by calling GetFrameForRightEye().
		if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
			(threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
		{
			rightEyeFrame = NULL;
		}

		if (threeDExtensions)
			threeDExtensions->Release();

		if (videoFrame->GetFlags() & bmdFrameHasNoInputSource){
			ofLogError(LOG_NAME) <<  "Frame received (#" << frameCount << ") - No input signal detected";
		}
		/*else
		{*/
			const char *timecodeString = NULL;
			if (g_timecodeFormat != 0)
			{
				IDeckLinkTimecode *timecode;
				if (videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK)
				{
					timecode->GetString(&timecodeString);
				}
			}

//			ofLogVerbose(LOG_NAME) << "Frame received (#" <<  frameCount
//					<< ") [" << (timecodeString != NULL ? timecodeString : "No timecode")
//					<< "] -" << (rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame")
//					<< "- Size: " << (videoFrame->GetRowBytes() * videoFrame->GetHeight()) << "bytes";

			if (timecodeString)
				free((void*)timecodeString);

			yuvToRGB(videoFrame);
			if(bDeinterlace) deinterlace();
			// Publish the frame: swap back/current buffers under the lock so
			// the render thread never sees a half-written frame.
			pixelsMutex.lock();
			bNewFrameArrived = true;
			ofPixels * aux = currentPixels;
			currentPixels = backPixels;
			backPixels = aux;
			pixelsMutex.unlock();
		//}

		if (rightEyeFrame)
			rightEyeFrame->Release();

		frameCount++;
	}

#if 0	//No audio
	// Handle Audio Frame
	void*	audioFrameBytes;
	if (audioFrame)
	{
		if (audioOutputFile != -1)
		{
			audioFrame->GetBytes(&audioFrameBytes);
			write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
		}
	}
#endif
	return S_OK;
}
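The mutex-guarded pointer swap at the end of the video path above is a double-buffer handoff: the capture thread writes only into backPixels, and publishing a frame is a cheap pointer exchange. A hedged sketch of the reader side, which is not shown in this example (update() and the tex member are assumptions):

// Assumed consumer, running on the app/render thread:
void ofxBlackmagicGrabber::update()
{
	pixelsMutex.lock();
	if (bNewFrameArrived) {
		// currentPixels was swapped in by VideoInputFrameArrived(); upload it
		// while holding the lock so the capture thread cannot swap again
		// mid-read.
		tex.loadData(*currentPixels);
		bNewFrameArrived = false;
	}
	pixelsMutex.unlock();
}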
Example #3
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
    //TODO
    IDeckLinkVideoFrame*	                rightEyeFrame = NULL;
    IDeckLinkVideoFrame3DExtensions*        threeDExtensions = NULL;
    void*					frameBytes;
    void*                   audioFrameBytes; // unused: the audio path below is commented out

    // Handle Video Frame
    if(videoFrame)
    {
        // If 3D mode is enabled we retrieve the 3D extensions interface, which
        // gives us access to the right-eye frame by calling GetFrameForRightEye().
        if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
             (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
        {
            rightEyeFrame = NULL;
        }

        if (threeDExtensions)
            threeDExtensions->Release();

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
        {
            fprintf(stderr, "Frame received (#%lu) - No input signal detected\n", frameCount);
        }
        else
        {
            const char *timecodeString = NULL;
            if (g_timecodeFormat != 0)
            {
                IDeckLinkTimecode *timecode;
                if (videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK)
                {
                    timecode->GetString(&timecodeString);
                }
            }

            char timecodeStaticString[256];
            snprintf(timecodeStaticString, sizeof(timecodeStaticString), "%s",
                     timecodeString != NULL ? timecodeString : "No timecode");

            fprintf(stderr, "Frame received (#%lu) [%s] - %s - Size: %li bytes\n",
                    frameCount,
                    timecodeString != NULL ? timecodeString : "No timecode",
                    rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame",
                    videoFrame->GetRowBytes() * videoFrame->GetHeight());

            fprintf(stderr, "Time Code length: %lu\n", strlen(timecodeStaticString));

            if (timecodeString)
                free((void*)timecodeString);

            if (videoOutputFile != -1)
            {
                videoFrame->GetBytes(&frameBytes);
                write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());

                if (rightEyeFrame)
                {
                    rightEyeFrame->GetBytes(&frameBytes);
                    write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
                }
                write(videoOutputFile, timecodeStaticString, sizeof(timecodeStaticString));
            }
            else
            {
                fprintf(stderr, "No video output file. Dry run.\n");
                videoFrame->GetBytes(&frameBytes);
                if(rightEyeFrame)
                {
                    rightEyeFrame->GetBytes(&frameBytes);
                }
            }
        }

        if (rightEyeFrame)
            rightEyeFrame->Release();

        frameCount++;

        if (maxFrames > 0 && frameCount >= maxFrames)
        {
            sig_CaptureEnd(m_CamID);
        }
    }

    // Handle Audio Frame
//    if (audioFrame)
//    {
//        if (audioOutputFile != -1)
//        {
//            audioFrame->GetBytes(&audioFrameBytes);
//            write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
//        }
//    }
    return S_OK;
}
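Each record this example writes to videoOutputFile has a fixed layout: one left-eye frame of GetRowBytes() * GetHeight() bytes, an optional right-eye frame of the same size, then the full 256-byte timecodeStaticString buffer. A sketch of reading one record back; the function is an assumption, and since the file carries no header, the geometry and the stereo flag must be known out-of-band:

#include <unistd.h>
#include <vector>

// Hypothetical reader for the raw dump produced above.
bool readOneRecord(int fd, long rowBytes, long height, bool stereo,
                   std::vector<char>& pixels, char timecode[256])
{
    const size_t frameSize = (size_t)(rowBytes * height);
    pixels.resize(stereo ? frameSize * 2 : frameSize);  // left (+ right) eye
    if (read(fd, pixels.data(), pixels.size()) != (ssize_t)pixels.size())
        return false;                        // EOF or truncated record
    return read(fd, timecode, 256) == 256;   // fixed-size timecode block
}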
Example #4
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
  IDeckLinkVideoFrame*        rightEyeFrame = NULL;
  IDeckLinkVideoFrame3DExtensions*  threeDExtensions = NULL;
  void*                frameBytes;
  void*                audioFrameBytes;

  int video_buffer_value;
  int audio_buffer_value;
  ioctl(g_video_sock, TIOCOUTQ, &video_buffer_value);
  ioctl(g_audio_sock, TIOCOUTQ, &audio_buffer_value);

  // void* streamFrameBuffer;
  // void* audioStreamFrameBuffer = ;

  // Handle Video Frame
  if (videoFrame)
  {
    // If 3D mode is enabled we retrieve the 3D extensions interface, which
    // gives us access to the right-eye frame by calling GetFrameForRightEye().
    if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
      (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
    {
      rightEyeFrame = NULL;
    }

    if (threeDExtensions)
      threeDExtensions->Release();

    if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
    {
      printf("Frame received (#%lu) - No input signal detected\n", g_frameCount);
    }
    else
    {
      const char *timecodeString = NULL;
      if (g_config.m_timecodeFormat != 0)
      {
        IDeckLinkTimecode *timecode;
        if (videoFrame->GetTimecode(g_config.m_timecodeFormat, &timecode) == S_OK)
        {
          timecode->GetString(&timecodeString);
        }
      }

      if (video_buffer_value > 2000000)
      {
        printf("Video Drop!\n");
        goto bail;
      }

      printf("Frame received (#%lu) %ldx%ld [%s] - %s - Size: %li bytes - Video buffer: %d bytes - Audio buffer: %d bytes\n",
        g_frameCount,
        videoFrame->GetWidth(),
        videoFrame->GetHeight(),
        timecodeString != NULL ? timecodeString : "No timecode",
        rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame",
        videoFrame->GetRowBytes() * videoFrame->GetHeight(),
        video_buffer_value,
        audio_buffer_value);

      videoFrame->GetBytes(&frameBytes);

      // struct stream_info info = { T_STREAM_VIDEO, videoFrame->GetRowBytes() * videoFrame->GetHeight() };
      // send(g_sock, &info, sizeof(stream_info), 0);
      ssize_t video_send_res = send(g_video_sock, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight(), 0);
      printf("Video Sent: %zd\n", video_send_res);
      // write(g_sock, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());

      // for (uint32_t i = 0; i < videoFrame->GetHeight(); i++) {
      //   for (uint32_t chunk = 0; chunk < chunk_num; chunk++) {
      //     struct stream_header header = { i, chunk };
      //     int chunk_size = videoFrame->GetRowBytes() / chunk_num;
      //     char buf[sizeof(stream_header) + chunk_size];
      //     memcpy(buf, &header, sizeof(stream_header));
      //     memcpy(buf + sizeof(stream_header),
      //            frameBytes + (videoFrame->GetRowBytes() * i) + (chunk_size * chunk),
      //            chunk_size);
      //     int sent_length = sendto(g_sock, buf, sizeof(buf), MSG_DONTWAIT, (struct sockaddr *) &g_addr, sizeof(g_addr));
      //     // printf("Send: [%u:%u] %ibytes\n", i, chunk, sent_length);
      //   }
      //   // printf("Frame send (#%4u)\n", i);
      //   // sendto(g_sock, buf, sizeof(buf), 0, (struct sockaddr *) &g_addr, sizeof(g_addr));
      // }

      if (timecodeString)
        free((void*)timecodeString);

      // if (g_videoOutputFile != -1)
      // {
      //   videoFrame->GetBytes(&frameBytes);
      //   write(g_videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
      //
      //   if (rightEyeFrame)
      //   {
      //     rightEyeFrame->GetBytes(&frameBytes);
      //     write(g_videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
      //   }
      // }
    }

    if (rightEyeFrame)
      rightEyeFrame->Release();

    g_frameCount++;
  }

  // Handle Audio Frame
  if (audioFrame)
  {
    audioFrame->GetBytes(&audioFrameBytes);
    // printf("Audio frame received Sample: %lu, Channel: %d, Depth: %d - Size: %li bytes\n",
    //   audioFrame->GetSampleFrameCount(),
    //   g_config.m_audioChannels,
    //   g_config.m_audioSampleDepth,
    //   audioFrame->GetSampleFrameCount() * g_config.m_audioChannels * (g_config.m_audioSampleDepth / 8));

    if (audio_buffer_value > 60000) {
      printf("Audio Drop!\n");
      goto bail;
    }

    // struct stream_info info = { T_STREAM_AUDIO, audioFrame->GetSampleFrameCount() * g_config.m_audioChannels * (g_config.m_audioSampleDepth / 8) };
    // send(g_audio_sock, &info, sizeof(stream_info), 0);
    ssize_t audio_send_res = send(g_audio_sock, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_config.m_audioChannels * (g_config.m_audioSampleDepth / 8), 0);
    printf("Audio Sent: %zd\n", audio_send_res);

    // printf("Sample Frame Count: %ld\n", audioFrame->GetSampleFrameCount());
    // if (g_audioOutputFile != -1)
    // {
    //   audioFrame->GetBytes(&audioFrameBytes);
    //   write(g_audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_config.m_audioChannels * (g_config.m_audioSampleDepth / 8));
    // }
  } else {
    printf("Audio frame missing! The matching video frame may be arriving late.\n");

    // g_deckLinkInput->StopStreams();
    //
    // result = g_deckLinkInput->EnableVideoInput(mode->GetDisplayMode(), pixelFormat, g_config.m_inputFlags);
    // if (result != S_OK)
    // {
    //   fprintf(stderr, "Failed to switch video mode\n");
    //   goto bail;
    // }
    //
    // g_deckLinkInput->StartStreams();
    // g_do_exit = true;
    // pthread_cond_signal(&g_sleepCond);
  }

  // if (g_config.m_maxFrames > 0 && videoFrame && g_frameCount >= g_config.m_maxFrames)
  // {
  //   g_do_exit = true;
  //   pthread_cond_signal(&g_sleepCond);
  // }

  // sleep(1);
bail:
  return S_OK;
}
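The TIOCOUTQ ioctls above give this delegate a crude backpressure policy: when the kernel still holds too many unsent bytes for a socket, the new frame or packet is dropped instead of queued, keeping latency bounded on a slow link. The same check as a standalone helper; the helper name is an assumption, and TIOCOUTQ on a socket is Linux-specific:

#include <sys/ioctl.h>

// Returns true when more than `limit` bytes are still queued for sending on
// `sock`, i.e. the receiver is not keeping up.
static bool sendQueueExceeds(int sock, int limit)
{
    int queued = 0;
    if (ioctl(sock, TIOCOUTQ, &queued) != 0)
        return false;   // ioctl failed; assume no known backlog
    return queued > limit;
}

With this, the drop branches above reduce to: if (sendQueueExceeds(g_video_sock, 2000000)) goto bail;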
Example #5
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
    IDeckLinkVideoFrame*				rightEyeFrame = NULL;
    IDeckLinkVideoFrame3DExtensions*	threeDExtensions = NULL;
    void*								frameBytes;
    void*								audioFrameBytes;

    // Handle Video Frame
    if (videoFrame)
    {
        // If 3D mode is enabled we retrieve the 3D extensions interface, which
        // gives us access to the right-eye frame by calling GetFrameForRightEye().
        if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
             (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
        {
            rightEyeFrame = NULL;
        }

        if (threeDExtensions)
            threeDExtensions->Release();

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
        {
            printf("Frame received (#%lu) - No input signal detected\n", g_frameCount);
        }
        else
        {
            const char *timecodeString = NULL;
            if (g_config.m_timecodeFormat != 0)
            {
                IDeckLinkTimecode *timecode;
                if (videoFrame->GetTimecode(g_config.m_timecodeFormat, &timecode) == S_OK)
                {
                    timecode->GetString(&timecodeString);
                }
            }

            int64_t timestampNow = bot_timestamp_now();

            if (g_config.m_lcmChannelName)
            {
                IDeckLinkMutableVideoFrame* outputFrame;
                g_deckLinkOutput->CreateVideoFrame(videoFrame->GetWidth(), videoFrame->GetHeight(),
                                                   videoFrame->GetWidth()*4, bmdFormat8BitBGRA,
                                                   bmdFrameFlagDefault, &outputFrame);
                HRESULT convertResult = g_conversionInst->ConvertFrame(videoFrame, outputFrame);
                if (convertResult == S_OK)
                    frameConsumer.Queue.enqueue(FrameData(outputFrame, timestampNow));
                else
                    outputFrame->Release(); // drop the frame rather than queue a failed conversion
            }

            static int64_t baseTime = timestampNow;
            static uint64_t frameCount = g_frameCount;
            double elapsedTime = (timestampNow - baseTime) * 1e-6;
            if (elapsedTime > 1.0)
            {
                printf("capturing at %.2f fps.\n", (g_frameCount - frameCount)/elapsedTime);
                baseTime = timestampNow;
                frameCount = g_frameCount;
            }

            if (timecodeString)
                free((void*)timecodeString);

            if (g_videoOutputFile != -1)
            {
                videoFrame->GetBytes(&frameBytes);
                write(g_videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());

                if (rightEyeFrame)
                {
                    rightEyeFrame->GetBytes(&frameBytes);
                    write(g_videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
                }
            }
        }

        if (rightEyeFrame)
            rightEyeFrame->Release();

        g_frameCount++;
    }

    // Handle Audio Frame
    if (audioFrame)
    {
        if (g_audioOutputFile != -1)
        {
            audioFrame->GetBytes(&audioFrameBytes);
            write(g_audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_config.m_audioChannels * (g_config.m_audioSampleDepth / 8));
        }
    }

    if (g_config.m_maxFrames > 0 && videoFrame && g_frameCount >= g_config.m_maxFrames)
    {
        g_do_exit = true;
        pthread_cond_signal(&g_sleepCond);
    }

    return S_OK;
}
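The once-per-second fps log in this example relies on function-local statics seeded on the first frame. The same pattern extracted as a reusable sketch; the helper name is an assumption, and any monotonic microsecond clock can stand in for bot_timestamp_now():

#include <cstdint>
#include <cstdio>

// Prints the average capture rate roughly once per second.
static void logCaptureRate(uint64_t totalFrames, int64_t nowUsec)
{
    static int64_t  baseTime  = nowUsec;      // seeded on the first call
    static uint64_t baseCount = totalFrames;
    double elapsed = (nowUsec - baseTime) * 1e-6;
    if (elapsed > 1.0) {
        printf("capturing at %.2f fps.\n", (totalFrames - baseCount) / elapsed);
        baseTime  = nowUsec;
        baseCount = totalFrames;
    }
}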