/**
 * Stores the most recent frame for the requested side and notifies listeners.
 *
 * @param isFront  true -> the frame belongs to the front camera, false -> rear
 * @param image    freshly captured frame (QImage is implicitly shared, so the
 *                 by-value parameter is a cheap shallow copy)
 */
void ImageProvider::newImage(bool isFront, QImage image)
{
    (isFront ? frontImage : rearImage) = image;
    emit newFrameReady(isFront);
}
/**
 * DirectShow frame-arrival callback, invoked on the capture driver's thread.
 *
 * Matches the device handle against the two managed cameras, decodes the
 * incoming frame (YUYV, MJPEG or bottom-up B,G,R) into an RGB24Buffer or a
 * G12Buffer depending on mIsRgb, and fires newFrameReady(). Once frames from
 * BOTH cameras have arrived, capture statistics (desync, decode time,
 * interframe delay) are emitted as well. The whole body runs under
 * mProtectFrame to guard the shared camera descriptors.
 *
 * @param dev   device handle identifying which camera produced the frame
 * @param data  frame payload: pixel data, format descriptor and timestamp
 */
ALIGN_STACK_SSE void DirectShowCaptureInterface::memberCallback(DSCapDeviceId dev, FrameData data)
{
    //SYNC_PRINT(("Received new frame in a member %d\n", dev));
    mProtectFrame.lock();
    DirectShowCameraDescriptor *camera = NULL;
    if (mCameras[0].deviceHandle == dev)
        camera = &mCameras[0];
    else if (mCameras[1].deviceHandle == dev)
        camera = &mCameras[1];
    else
        goto exit;      // frame from a device we don't own: drop it

    {
        PreciseTimer timer = PreciseTimer::currentTime();

        camera->gotBuffer = true;
        // DirectShow timestamps come in 100 ns units; convert to microseconds
        // with rounding (+5 before /10).
        camera->timestamp = (data.timestamp + 5) / 10;

        // Release the previous frame's buffers before decoding the new one.
        delete_safe (camera->buffer);
        delete_safe (camera->buffer24);

        if (data.format.type == CAP_YUV)
        {
            if (mIsRgb) {
                camera->buffer24 = new RGB24Buffer(data.format.height, data.format.width, false);
                camera->buffer24->fillWithYUYV((uint8_t *)data.data);
            } else {
                camera->buffer = new G12Buffer(data.format.height, data.format.width, false);
                camera->buffer->fillWithYUYV((uint16_t *)data.data);
            }
        }
        else if (data.format.type == CAP_MJPEG)
        {
            MjpegDecoderLazy *lazyDecoder = new MjpegDecoderLazy;   // don't place it at stack, it's too huge!
            if (mIsRgb)
                camera->buffer24 = lazyDecoder->decodeRGB24((uchar *)data.data);
            else
                camera->buffer = lazyDecoder->decode((uchar *)data.data);
            delete lazyDecoder;
        }
        else if (data.format.type == CAP_RGB)
        {
            // The source rows are stored bottom-up, so read row (h - i - 1)
            // while writing row i to flip the image vertically.
            if (mIsRgb) {
                camera->buffer24 = new RGB24Buffer(data.format.height, data.format.width, true);
                int w = camera->buffer24->w;
                int h = camera->buffer24->h;
                for (int i = 0; i < h; i++) {
                    uint8_t *rgbData = ((uint8_t *)data.data) + 3 * (h - i - 1) * w;
                    RGBColor *rgb24Data = &(camera->buffer24->element(i, 0));
                    for (int j = 0; j < w; j++) {
                        RGBColor rgb(rgbData[2], rgbData[1], rgbData[0]);   // the given data format has B,G,R order
                        *rgb24Data++ = rgb;
                        rgbData += 3;
                    }
                }
            } else {
                camera->buffer = new G12Buffer(data.format.height, data.format.width, false);
                int w = camera->buffer->w;
                int h = camera->buffer->h;
                for (int i = 0; i < h; i++) {
                    uint8_t *rgbData = ((uint8_t *)data.data) + 3 * (h - i - 1) * w;
                    uint16_t *greyData = &(camera->buffer->element(i, 0));
                    for (int j = 0; j < w; j++) {
                        RGBColor rgb(rgbData[2], rgbData[1], rgbData[0]);   // the given data format has B,G,R order
                        *greyData++ = rgb.luma12();                         // 12-bit grey from RGB
                        rgbData += 3;
                    }
                }
            }
        }
        else
        {
            // Unknown pixel format: provide an (uninitialized) grey buffer so
            // downstream consumers still receive a valid object.
            camera->buffer = new G12Buffer(data.format.height, data.format.width, false);
        }

        camera->decodeTime = timer.usecsToNow();

        /* If both frames are in place */
        if (mCameras[0].gotBuffer && mCameras[1].gotBuffer)
        {
            mCameras[0].gotBuffer = false;
            mCameras[1].gotBuffer = false;

            CaptureStatistics stats;
            int64_t desync = mCameras[0].timestamp - mCameras[1].timestamp;
            stats.values[CaptureStatistics::DESYNC_TIME]   = desync > 0 ? desync : -desync;
            stats.values[CaptureStatistics::DECODING_TIME] = mCameras[0].decodeTime + mCameras[1].decodeTime;

            // Skip the interframe stat for the very first pair, while
            // lastFrameTime still holds its default value.
            // NOTE(review): this relies on PreciseTimer() default-constructing
            // to the "zero" time so usecsTo() returns 0 — confirm.
            if (lastFrameTime.usecsTo(PreciseTimer()) != 0)
            {
                stats.values[CaptureStatistics::INTERFRAME_DELAY] = lastFrameTime.usecsToNow();
            }
            lastFrameTime = PreciseTimer::currentTime();

            frame_data_t frameData;
            // Average of both timestamps, halved before summing to avoid overflow.
            frameData.timestamp = mCameras[0].timestamp / 2 + mCameras[1].timestamp / 2;
            newFrameReady(frameData);
            newStatisticsReady(stats);
        }
        else
        {
            frame_data_t frameData;
            // Bugfix: report the timestamp of the camera that actually produced
            // this frame. The old code always used mCameras[0].timestamp, which
            // is stale (or zero) when the frame came from camera 1.
            frameData.timestamp = camera->timestamp;
            newFrameReady(frameData);
            //newStatisticsReady(stats);
            skippedCount++;
        }
    }
exit:
    mProtectFrame.unlock();
}