void UpdateTime(float dt) { // проверяем наличие кадра if( VI.isFrameNew(dev) ) { // копируем содержимое кадра VI.getPixels(dev, CamImg, true); } }
// Initialize camera input bool CvCaptureCAM_DShow::open( int _index ) { close(); VI.setupDevice(_index); if( !VI.isDeviceSetup(_index) ) return false; index = _index; return true; }
// Return the most recent camera frame as an 8-bit, 3-channel IplImage.
// The image is cached on the object and reallocated only when the device
// resolution changes.
IplImage* CvCaptureCAM_DShow::retrieveFrame()
{
    const int w = VI.getWidth(index);
    const int h = VI.getHeight(index);

    // (Re)allocate the cached image on first use or on a size change.
    if( !frame || frame->width != w || frame->height != h )
    {
        cvReleaseImage( &frame );   // no-op when frame is NULL
        frame = cvCreateImage( cvSize(w, h), 8, 3 );
    }

    // Arguments: flipRedAndBlue = false, flipImage = true
    // — NOTE(review): per videoInput's getPixels convention; confirm.
    VI.getPixels( index, (uchar*)frame->imageData, false, true );
    return frame;
}
// Query a capture property. Only frame width/height are exposed; every
// other property (including FOURCC) reports 0.
double CvCaptureCAM_DShow::getProperty( int property_id )
{
    if( property_id == CV_CAP_PROP_FRAME_WIDTH )
        return VI.getWidth(index);

    if( property_id == CV_CAP_PROP_FRAME_HEIGHT )
        return VI.getHeight(index);

    // CV_CAP_PROP_FOURCC and all unsupported properties.
    return 0;
}
// Stop the device (if one is open), release the cached frame, and mark
// this object as closed (index == -1).
void CvCaptureCAM_DShow::close()
{
    if( index < 0 )
        return;                 // nothing is open

    VI.stopDevice(index);
    index = -1;
    cvReleaseImage(&frame);
}
// Initialize camera input bool CvCaptureCAM_DShow::open( int _index ) { int try_index = _index; int devices = 0; close(); devices = VI.listDevices(true); if (devices == 0) return false; try_index = try_index < 0 ? 0 : (try_index > devices-1 ? devices-1 : try_index); if (OPENCV_FPS) { VI.setIdealFramerate(try_index, OPENCV_FPS); } VI.setupDevice(try_index); if( !VI.isDeviceSetup(try_index) ) return false; index = try_index; return true; }
// Set a capture property. Only frame width/height are supported; the
// other dimension is derived assuming a 4:3 aspect ratio. The device is
// restarted only when the requested size differs from the current one.
// Returns true when the device is set up after the request.
bool CvCaptureCAM_DShow::setProperty( int property_id, double value )
{
    int width = 0, height = 0;

    switch( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
        width = cvRound(value);
        height = width*3/4;
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        height = cvRound(value);
        width = height*4/3;
        break;   // BUGFIX: missing break let height requests fall into default and always fail
    default:
        return false;
    }

    if( width != VI.getWidth(index) || height != VI.getHeight(index) )
    {
        VI.stopDevice(index);
        VI.setupDevice(index, width, height);
    }
    return VI.isDeviceSetup(index);
}
//-------------------------------------------------------------------------------------- // Render a frame //-------------------------------------------------------------------------------------- void Render() { static DWORD dwTimeStart = GetTickCount(); gCbOneFrame.time = ( GetTickCount() - dwTimeStart ) / 1000.0f; if (gIsCameraDevice && gVideoInput.isFrameNew(kDeviceId)) { V(updateTextureFromCamera(0, kDeviceId)); } gContext->ClearRenderTargetView( PingPong::RTVs[PingPong::frontBufferIdx], kBlackColor ); ID3D11RenderTargetView* pRTVs[] = {PingPong::RTVs[PingPong::frontBufferIdx]}; gContext->OMSetRenderTargets( _countof(pRTVs), pRTVs, NULL ); gContext->UpdateSubresource( gCBOneFrame, 0, NULL, &gCbOneFrame, 0, 0 ); ID3D11Buffer* pCBuffers[] = {gCBOneFrame}; gContext->PSSetConstantBuffers( 0, _countof(pCBuffers), pCBuffers ); if (!gTextureSRVs.empty()) gContext->PSSetShaderResources( 0, gTextureSRVs.size(), &gTextureSRVs[0] ); ID3D11ShaderResourceView* pSRVs[] = {PingPong::SRVs[PingPong::backBufferIdx]}; gContext->PSSetShaderResources( 1, _countof(pSRVs), pSRVs ); ID3D11SamplerState* pSamplers[] = {gSamplerSmooth, gSamplerBlocky, gSamplerMirror}; gContext->PSSetSamplers( 0, _countof(pSamplers), pSamplers ); gContext->Draw( 3, 0 ); ID3D11ShaderResourceView* pZeroSRVs[8] = {NULL}; gContext->PSSetShaderResources( 1, _countof(pZeroSRVs), pZeroSRVs ); gContext->CopyResource(gBackbuffer, PingPong::TEXs[PingPong::frontBufferIdx]); gSwapChain->Present( 0, 0 ); std::swap(PingPong::frontBufferIdx, PingPong::backBufferIdx); }
// запуск получения кадров с видеокамеры void InitVideo() { // int numDevices = videoInput::listDevices(); VI.setUseCallback(true); VI.setupDevice(dev, ImageWidth, ImageHeight, VI_COMPOSITE); }
// Copy the current camera frame into the shader-resource texture at
// textureIdx, creating the texture (single mip, dynamic, CPU-writable
// RGBA8) on first use. The RGB frame is mirrored horizontally and padded
// with alpha = 255 during the copy.
// Returns S_OK, or the failing HRESULT via V_RETURN.
HRESULT updateTextureFromCamera( int textureIdx, int deviceId )
{
    hr = S_OK;

    const bool isCreateTexture = (gTextureSRVs[textureIdx] == NULL);

    // On first use, block until the camera has produced at least one frame
    // so the texture is never filled from an empty buffer.
    if (isCreateTexture)
    {
        // BUGFIX: honor the deviceId parameter — the original queried the
        // global kDeviceId here and below, ignoring the argument.
        while (!gVideoInput.isFrameNew(deviceId))
        {
            ::Sleep(30);
        }
    }

    int srcWidth = gVideoInput.getWidth(deviceId);
    int srcHeight = gVideoInput.getHeight(deviceId);
    BYTE* srcPixels = gVideoInput.getPixels(deviceId, true, true);  // BUGFIX: was kDeviceId

    // texture size is smaller than camera frame size in order to fix the black border issue
    int dstWidth = srcWidth - 1;
    int dstHeight = srcHeight - 1;

    CComPtr<ID3D11Texture2D> tex;
    if (isCreateTexture)
    {
        CD3D11_TEXTURE2D_DESC desc(DXGI_FORMAT_R8G8B8A8_UNORM, dstWidth, dstHeight);
        desc.MipLevels = 1;
        desc.Usage = D3D11_USAGE_DYNAMIC;
        desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
        V_RETURN(gDevice->CreateTexture2D( &desc, NULL, &tex ));
        V_RETURN(gDevice->CreateShaderResourceView(tex, NULL, &gTextureSRVs[textureIdx]));
    }
    else
    {
        gTextureSRVs[textureIdx]->GetResource(reinterpret_cast<ID3D11Resource**>(&tex));
    }

    struct ColorRGB { BYTE r,g,b; };
    struct ColorRGBA { ColorRGB rgb; BYTE a; };

    D3D11_MAPPED_SUBRESOURCE mappedRes;
    V_RETURN(gContext->Map(tex, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedRes));

    ColorRGB* src = reinterpret_cast<ColorRGB*>(srcPixels);
    ColorRGBA* dest = reinterpret_cast<ColorRGBA*>(mappedRes.pData);
    UINT pitch = mappedRes.RowPitch / sizeof(ColorRGBA);   // row pitch in pixels, not bytes
    for (int y = 0; y < dstHeight; y++)
    {
        for (int x = 0; x < dstWidth; x++)
        {
            // Horizontal mirror of the current row.
            // BUGFIX: was `srcWidth - x`, which at x == 0 indexed the first
            // pixel of the NEXT row instead of this row's last pixel.
            dest[y * pitch + x].rgb = src[y * srcWidth + (srcWidth - 1 - x)];
            dest[y * pitch + x].a = 255;
        }
    }
    gContext->Unmap(tex, 0);

    return hr;
}
// Report whether the device has produced a frame since the last retrieval.
bool CvCaptureCAM_DShow::grabFrame()
{
    const bool frameReady = VI.isFrameNew(index);
    return frameReady;
}