コード例 #1
0
ファイル: webcam.cpp プロジェクト: mhozza/HandControl
/**
 * Grab one frame from the V4L2 device and store it in @p image.
 *
 * Dequeues a filled buffer, converts it (YUYV only — MJPEG is not
 * implemented), requeues the buffer, and re-enables the image notifier.
 *
 * @param image destination for the decoded RGB frame.
 * @return EXIT_SUCCESS on success, EXIT_FAILURE on ioctl error or
 *         unsupported (MJPEG) pixel format.
 */
int Webcam::getFrame(ColorImage &image)
{
        int ret = 0;

        // Dequeue a filled buffer from the driver's outgoing queue.
        ret = ioctl(dev, VIDIOC_DQBUF, &buf);
        if (ret < 0)
        {
                KError("Unable to dequeue buffer", errno);
                return EXIT_FAILURE;
        }

        // MJPEG decoding is not implemented. Requeue the buffer before
        // bailing out — returning without VIDIOC_QBUF would permanently
        // remove this buffer from the driver's pool and eventually stall
        // capture after a few failed calls.
        if (fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG)
        {
                if (ioctl(dev, VIDIOC_QBUF, &buf) < 0)
                        KError("Unable to requeue buffer", errno);
                return EXIT_FAILURE;
        }

        // YUYV: convert to RGB and hand the pixels to the caller's image.
        if (fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
        {
                image.setImage(yuvToRGB(mem[buf.index], currentWidth(), currentHeight()), currentWidth(), currentHeight());
        }

        // Give the buffer back to the driver so it can be filled again.
        ret = ioctl(dev, VIDIOC_QBUF, &buf);
        if (ret < 0)
        {
                KError("Unable to requeue buffer", errno);
                return EXIT_FAILURE;
        }

        // Re-arm the notifier so the next frame triggers a callback.
        if (!imageNotifier->isEnabled())
                imageNotifier->setEnabled(true);

        return EXIT_SUCCESS;
}
コード例 #2
0
/// DeckLink capture callback: invoked by the driver for every completed
/// video (and audio) frame.
///
/// Converts the incoming YUV frame to RGB, optionally deinterlaces it,
/// and swaps the front/back pixel buffers under pixelsMutex so the app
/// thread can pick up the new frame.
///
/// @param videoFrame incoming video frame; may be NULL.
/// @param audioFrame incoming audio packet; currently ignored.
/// @return S_OK always (errors are logged, not propagated).
HRESULT ofxBlackmagicGrabber::VideoInputFrameArrived(IDeckLinkVideoFrame * videoFrame, IDeckLinkAudioInputPacket * audioFrame){
	IDeckLinkVideoFrame*	                rightEyeFrame = NULL;
	IDeckLinkVideoFrame3DExtensions*        threeDExtensions = NULL;

	// Handle Video Frame
	if(videoFrame)
	{
		// If 3D mode is enabled we retrieve the 3D extensions interface,
		// which gives us access to the right eye frame via
		// GetFrameForRightEye(). On failure rightEyeFrame stays NULL.
		if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
			(threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
		{
			rightEyeFrame = NULL;
		}

		if (threeDExtensions)
			threeDExtensions->Release();

		if (videoFrame->GetFlags() & bmdFrameHasNoInputSource){
			// FIX: message previously had an unbalanced '(' — "(#N-".
			ofLogError(LOG_NAME) <<  "Frame received (#" << frameCount << ") - No input signal detected";
		}
		/*else
		{*/
			const char *timecodeString = NULL;
			if (g_timecodeFormat != 0)
			{
				IDeckLinkTimecode *timecode = NULL;
				if (videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK)
				{
					// FIX: a local CFStringRef used to shadow the outer
					// timecodeString, leaking the string and leaving the
					// outer pointer NULL so the free() below never ran.
					// Write into the outer variable instead.
					timecode->GetString(&timecodeString);
					// FIX: the timecode COM object was never released,
					// leaking one object per frame.
					timecode->Release();
				}
			}

//			ofLogVerbose(LOG_NAME) << "Frame received (#" <<  frameCount
//					<< ") [" << (timecodeString != NULL ? timecodeString : "No timecode")
//					<< "] -" << (rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame")
//					<< "- Size: " << (videoFrame->GetRowBytes() * videoFrame->GetHeight()) << "bytes";

			// GetString() on Linux returns a heap-allocated string the
			// caller must free(). (On macOS the SDK returns a CFStringRef
			// instead — TODO confirm platform before building there.)
			if (timecodeString)
				free((void*)timecodeString);

			// Convert into the back buffer, then publish it by swapping
			// the front/back pointers under the pixels mutex.
			yuvToRGB(videoFrame);
			if(bDeinterlace) deinterlace();
			pixelsMutex.lock();
			bNewFrameArrived = true;
			ofPixels * aux = currentPixels;
			currentPixels = backPixels;
			backPixels = aux;
			pixelsMutex.unlock();
		//}

		if (rightEyeFrame)
			rightEyeFrame->Release();

		frameCount++;
	}

#if 0	//No audio
	// Handle Audio Frame
	void*	audioFrameBytes;
	if (audioFrame)
	{
		if (audioOutputFile != -1)
		{
			audioFrame->GetBytes(&audioFrameBytes);
			write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
		}
	}
#endif
	return S_OK;
}