/////////////////////////////////////////////
// IStream methods

STDMETHODIMP KinectAudioStream::Read(void *pBuffer, ULONG cbBuffer, ULONG *pcbRead)
{
    HRESULT hr = S_OK;

    if (pcbRead == NULL)
    {
        return E_INVALIDARG;
    }

    ULONG bytesPendingToRead = cbBuffer;

    while (bytesPendingToRead > 0 && IsCapturing())
    {
        ReadOneBuffer((BYTE**)&pBuffer, &bytesPendingToRead);

        if (NULL == m_CurrentReadBuffer) // no data, wait ...
        {
            WaitForSingleObject(m_hDataReady, INFINITE);
        }
    }

    ULONG bytesRead = cbBuffer - bytesPendingToRead;
    m_BytesRead += bytesRead;
    *pcbRead = bytesRead;

    return hr;
}
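// A minimal consumer sketch (an assumption, not part of the original source): it treats
// the KinectAudioStream object as a plain IStream* and pulls audio bytes in fixed-size
// chunks until the stream stops delivering full reads. The chunk size and the
// ProcessAudioChunk callback are hypothetical.
#include <windows.h>
#include <objidl.h>   // IStream

HRESULT DrainAudioStream(IStream *pStream, void (*ProcessAudioChunk)(const BYTE*, ULONG))
{
    BYTE chunk[4096];
    ULONG cbRead = 0;
    HRESULT hr = S_OK;

    // Read() blocks on m_hDataReady while capturing, so this loop only spins on real
    // data; it exits once capture stops and a short (or zero-byte) read comes back.
    do
    {
        hr = pStream->Read(chunk, sizeof(chunk), &cbRead);
        if (SUCCEEDED(hr) && cbRead > 0)
        {
            ProcessAudioChunk(chunk, cbRead);
        }
    } while (SUCCEEDED(hr) && cbRead == sizeof(chunk));

    return hr;
}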
void GSplitter::OnMouseClick(GMouse &m)
{
    if (m.Down())
    {
        if (OverSplit(m.x, m.y))
        {
            // Start dragging: remember the bar rectangle between the two panes
            // and the grab offset within it.
            Capture(true);

            if (d->Vertical)
            {
                d->Bar.x1 = d->PosA.x2;
                d->Bar.y1 = d->PosA.y1;
                d->Bar.x2 = d->PosB.x1;
                d->Bar.y2 = d->PosA.y2;
                d->Offset = m.x - d->Bar.x1;
            }
            else
            {
                d->Bar.x1 = d->PosA.x1;
                d->Bar.y1 = d->PosA.y2;
                d->Bar.x2 = d->PosA.x2;
                d->Bar.y2 = d->PosB.y1;
                d->Offset = m.y - d->Bar.y1;
            }
        }
    }
    else if (IsCapturing())
    {
        Capture(false);
    }
}
/**
 * Handles "Binning" property.
 */
int CGigECamera::OnBinning( MM::PropertyBase* pProp, MM::ActionType eAct )
{
    int ret = DEVICE_OK;
    switch( eAct )
    {
    case MM::AfterSet:
        {
            if( IsCapturing() )
                return DEVICE_CAMERA_BUSY_ACQUIRING;

            // try to set the vertical and horizontal binning
            long binFactor;
            pProp->Get( binFactor );
            if( nodes->isAvailable( BINNING_VERTICAL ) )
                ret |= SetProperty( MM::g_Keyword_Binning_Vertical, CDeviceUtils::ConvertToString( binFactor ) );
            if( nodes->isAvailable( BINNING_HORIZONTAL ) )
                ret |= SetProperty( MM::g_Keyword_Binning_Horizontal, CDeviceUtils::ConvertToString( binFactor ) );
        }
        break;
    case MM::BeforeGet:
        {
            // the user is requesting the current value for the property, so
            // either ask the 'hardware' or let the system return the value
            // cached in the property.
            ret = DEVICE_OK;
        }
        break;
    }
    return ret;
} // OnBinning
void GSplitter::OnMouseMove(GMouse &m)
{
    if (IsCapturing())
    {
        // Move the bar with the cursor, then clamp the new split position
        // to the usable bounds of the control.
        if (d->Vertical)
        {
            d->Bar.Offset((m.x - d->Offset) - d->Bar.x1, 0);
        }
        else
        {
            d->Bar.Offset(0, (m.y - d->Offset) - d->Bar.y1);
        }

        int NewPos;
        if (d->Vertical)
        {
            NewPos = limit(d->Bar.x1 - 4, 4, X() - 18);
        }
        else
        {
            NewPos = limit(d->Bar.y1, 4, Y() - 18);
        }

        if (NewPos != d->SplitPos)
        {
            d->SplitPos = NewPos;
            CalcRegions();
            Invalidate((GRect*)0, true);
        }
    }
}
void AviVideoCapturing::RenderFrame()
{
    if (IsCapturing())
    {
        if (!aviGenerator->readOpenglPixelDataThreaded())
        {
            StopCapturing();
        }
        // LOG("Saved avi frame size %i %i", ih->biWidth, ih->biHeight);
    }
}
void AviVideoCapturing::StopCapturing()
{
    if (IsCapturing())
    {
        capturing = false;
        globalRendering->isVideoCapturing = false;
        spring::SafeDelete(aviGenerator);
        //delete aviGenerator;
        //aviGenerator = NULL;
    }
}
void GSlider::OnMouseMove(GMouse &m)
{
    if (IsCapturing())
    {
        // Map the cursor position across the usable track width to a value in [Min, Max].
        int Rx = X() - 6;
        if (Rx > 0 && Max >= Min)
        {
            int x = m.x - Tx;
            int v = x * (Max - Min) / Rx;
            Value(v);
        }
    }
}
void HP_IOCycleTelemetry::StopCapturing()
{
    if(IsCapturing())
    {
        CAGuard::Locker theItemGuard(mItemGuard);

        // toss the buffers (array delete to match the array new in StartCapturing)
        for(UInt32 theBufferIndex = 0; theBufferIndex < kNumberBuffers; ++theBufferIndex)
        {
            delete[] mItemBuffers[theBufferIndex];
            mItemBuffers[theBufferIndex] = NULL;
        }

        // set up the counters
        mCurrentWriteBuffer = 0;
        mCurrentWriteItem = 0;
        mCurrentReadBuffer = 0;
    }
}
void HP_IOCycleTelemetry::StartCapturing()
{
    if(!IsCapturing())
    {
        CAGuard::Locker theItemGuard(mItemGuard);

        // allocate the buffers
        for(UInt32 theBufferIndex = 0; theBufferIndex < kNumberBuffers; ++theBufferIndex)
        {
            mItemBuffers[theBufferIndex] = new CAHALIOCycleRawTelemetryEvent[kNumberItemsPerBuffer];
            memset(mItemBuffers[theBufferIndex], 0, kNumberItemsPerBuffer * sizeof(CAHALIOCycleRawTelemetryEvent));
        }

        // set up the counters
        mCurrentWriteBuffer = 0;
        mCurrentWriteItem = 0;
        mCurrentReadBuffer = 0;
    }
}
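// Hypothetical sketch (not from the original source) of how a write into the buffers set
// up above could work: append one event into the current buffer, advance the item counter,
// and wrap to the next buffer when full. The method name CaptureEvent and the locking
// granularity are assumptions; only the member names come from the code shown here.
void HP_IOCycleTelemetry::CaptureEvent(const CAHALIOCycleRawTelemetryEvent& inEvent)
{
    if(IsCapturing())
    {
        CAGuard::Locker theItemGuard(mItemGuard);

        // store the event at the current write position
        mItemBuffers[mCurrentWriteBuffer][mCurrentWriteItem] = inEvent;

        // advance, wrapping into the next buffer when this one fills up
        if(++mCurrentWriteItem >= kNumberItemsPerBuffer)
        {
            mCurrentWriteItem = 0;
            mCurrentWriteBuffer = (mCurrentWriteBuffer + 1) % kNumberBuffers;
        }
    }
}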
HRESULT CCapture::CheckDeviceLost(DEV_BROADCAST_HDR *pHdr, BOOL *pbDeviceLost)
{
    if (pbDeviceLost == NULL)
    {
        return E_POINTER;
    }

    EnterCriticalSection(&m_critsec);

    DEV_BROADCAST_DEVICEINTERFACE *pDi = NULL;

    *pbDeviceLost = FALSE;

    if (!IsCapturing())
    {
        goto done;
    }
    if (pHdr == NULL)
    {
        goto done;
    }
    if (pHdr->dbch_devicetype != DBT_DEVTYP_DEVICEINTERFACE)
    {
        goto done;
    }

    // Compare the device name with the symbolic link.
    pDi = (DEV_BROADCAST_DEVICEINTERFACE*)pHdr;

    if (m_pwszSymbolicLink)
    {
        if (_wcsicmp(m_pwszSymbolicLink, pDi->dbcc_name) == 0)
        {
            *pbDeviceLost = TRUE;
        }
    }

done:
    LeaveCriticalSection(&m_critsec);
    return S_OK;
}
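// Hedged usage sketch (an assumption, not taken verbatim from the sample): the kind of
// WM_DEVICECHANGE handler a window procedure might delegate to. g_pCapture is a
// hypothetical global owned by the application; what to do when the device is lost
// (tear down the session, show a message) is left to the caller.
#include <windows.h>
#include <dbt.h>

extern CCapture *g_pCapture;   // hypothetical: the active capture session, if any

BOOL OnDeviceChange(LPARAM lParam)
{
    BOOL bDeviceLost = FALSE;
    if (g_pCapture != NULL && lParam != 0)
    {
        // CheckDeviceLost compares the broadcast's symbolic link against the device in use.
        g_pCapture->CheckDeviceLost((DEV_BROADCAST_HDR*)lParam, &bDeviceLost);
    }
    return bDeviceLost;   // TRUE => the capture device was unplugged
}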
int CGigECamera::OnBinningV( MM::PropertyBase* pProp, MM::ActionType eAct )
{
    int ret = DEVICE_OK;
    switch( eAct )
    {
    case MM::AfterSet:
        {
            if( IsCapturing() )
                return DEVICE_CAMERA_BUSY_ACQUIRING;

            long binFactor;
            pProp->Get( binFactor );
            int64_t oldBin, oldh;
            nodes->get( oldBin, BINNING_VERTICAL );
            nodes->get( oldh, HEIGHT );
            if( binFactor != (long) oldBin )
            {
                bool retval = nodes->set( binFactor, BINNING_VERTICAL );
                if( retval == false )
                {
                    // set it back
                    nodes->set( oldBin, BINNING_VERTICAL );
                    pProp->Set( (long) oldBin );
                    ret = DEVICE_INVALID_PROPERTY_VALUE;
                }
                else
                {
                    // new limits
                    int64_t high, low;
                    high = nodes->getMax( HEIGHT );
                    low = nodes->getMin( HEIGHT );
                    SetPropertyLimits( MM::g_Keyword_Image_Height, (double) low, (double) high );

                    // new height
                    int64_t dim;
                    nodes->get( dim, HEIGHT );
                    if( dim == oldh ) // this camera doesn't auto-adjust its height w/ binning change
                    {
                        dim = dim * oldBin / binFactor;
                    }
                    SetProperty( MM::g_Keyword_Image_Height, CDeviceUtils::ConvertToString( (long) dim ) );
                    UpdateProperty( MM::g_Keyword_Image_Height );
                    LogMessage( (std::string) "setting v bin to " + boost::lexical_cast<std::string>( binFactor )
                                + " and height to " + boost::lexical_cast<std::string>( dim )
                                + " (oldBin: " + boost::lexical_cast<std::string>( oldBin ) + ")"
                                + " new limits (" + boost::lexical_cast<std::string>( low )
                                + " " + boost::lexical_cast<std::string>( high ) + ")", true );
                    if( nodes->isAvailable( HEIGHT_MAX ) )
                    {
                        UpdateProperty( MM::g_Keyword_Image_Height_Max );
                    }
                    OnPropertiesChanged();
                    ret = DEVICE_OK;
                }
                ResizeImageBuffer();
            }
        }
        break;
    case MM::BeforeGet:
        {
            int64_t vBin;
            nodes->get( vBin, BINNING_VERTICAL );
            pProp->Set( (long) vBin );
            ret = DEVICE_OK;
        }
        break;
    }
    return ret;
} // OnBinningV
/**
 * Handles "PixelType" property.
 */
int CGigECamera::OnPixelType( MM::PropertyBase* pProp, MM::ActionType eAct )
{
    int ret = DEVICE_OK;
    switch( eAct )
    {
    case MM::AfterSet:
        {
            if( IsCapturing() )
                return DEVICE_CAMERA_BUSY_ACQUIRING;

            std::string displayName, pixelType;
            pProp->Get( displayName );
            std::map<std::string, std::string>::iterator it = pixelFormatMap.find( displayName );
            if( it == pixelFormatMap.end() )
            {
                LogMessage( (std::string) "internal error: inconsistency in pixel type map (" + displayName + ")", false );
                return DEVICE_INTERNAL_INCONSISTENCY;
            }
            pixelType = it->second;

            if( pixelType.compare( g_PixelType_8bit ) == 0
                // || pixelType.compare( g_PixelType_8bitSigned ) == 0
              )
            {
                if( nodes->set( pixelType, PIXEL_FORMAT ) )
                {
                    img_.Resize( img_.Width(), img_.Height(), 1 );
                    bitDepth_ = 8;
                    ret = DEVICE_OK;
                }
                else
                    ret = DEVICE_INVALID_PROPERTY_VALUE;
            }
            else if( pixelType.compare( g_PixelType_10bit ) == 0
                // || pixelType.compare( g_PixelType_10bitPacked ) == 0
              )
            {
                if( nodes->set( pixelType, PIXEL_FORMAT ) )
                {
                    img_.Resize( img_.Width(), img_.Height(), 2 );
                    bitDepth_ = 10;
                    ret = DEVICE_OK;
                }
                else
                    ret = DEVICE_INVALID_PROPERTY_VALUE;
            }
            else if( pixelType.compare( g_PixelType_12bit ) == 0
                // || pixelType.compare( g_PixelType_12bitPacked ) == 0
              )
            {
                if( nodes->set( pixelType, PIXEL_FORMAT ) )
                {
                    img_.Resize( img_.Width(), img_.Height(), 2 );
                    bitDepth_ = 12;
                    ret = DEVICE_OK;
                }
                else
                    ret = DEVICE_INVALID_PROPERTY_VALUE;
            }
            else if( pixelType.compare( g_PixelType_14bit ) == 0 )
            {
                if( nodes->set( pixelType, PIXEL_FORMAT ) )
                {
                    img_.Resize( img_.Width(), img_.Height(), 2 );
                    bitDepth_ = 14;
                    ret = DEVICE_OK;
                }
                else
                    ret = DEVICE_INVALID_PROPERTY_VALUE;
            }
            else if( pixelType.compare( g_PixelType_16bit ) == 0 )
            {
                if( nodes->set( pixelType, PIXEL_FORMAT ) )
                {
                    img_.Resize( img_.Width(), img_.Height(), 2 );
                    bitDepth_ = 16;
                    ret = DEVICE_OK;
                }
                else
                    ret = DEVICE_INVALID_PROPERTY_VALUE;
            }
            else
            {
                ret = ERR_UNKNOWN_MODE;
            }

            if( ret != DEVICE_OK )
            {
                // on error switch to default pixel type
                nodes->set( g_PixelType_8bit, PIXEL_FORMAT );
                bitDepth_ = 8;
                img_.Resize( img_.Width(), img_.Height(), 1 );
                pProp->Set( g_PixelType_8bit );
                OnPropertiesChanged();
            }
        }
        break;
    case MM::BeforeGet:
        {
            std::string s;
            nodes->get( s, PIXEL_FORMAT );
            std::map<std::string, std::string>::iterator it = pixelFormatMap.find( s );
            if( it != pixelFormatMap.end() )
                pProp->Set( it->second.c_str() );
            else
                pProp->Set( s.c_str() );
            ret = DEVICE_OK;
        }
        break;
    }
    return ret;
}
int CGigECamera::OnBinningH( MM::PropertyBase* pProp, MM::ActionType eAct )
{
    int ret = DEVICE_OK;
    switch( eAct )
    {
    case MM::AfterSet:
        {
            if( IsCapturing() )
                return DEVICE_CAMERA_BUSY_ACQUIRING;

            long binFactor;
            pProp->Get( binFactor );
            int64_t oldBin, oldw;
            nodes->get( oldBin, BINNING_HORIZONTAL );
            nodes->get( oldw, WIDTH );
            if( binFactor != (long) oldBin )
            {
                bool retval = nodes->set( (int64_t) binFactor, BINNING_HORIZONTAL );
                if( retval == false )
                {
                    // set it back
                    nodes->set( oldBin, BINNING_HORIZONTAL );
                    pProp->Set( (long) oldBin );
                    ret = DEVICE_INVALID_PROPERTY_VALUE;
                }
                else
                {
                    // new width
                    int64_t dim;
                    nodes->get( dim, WIDTH );
                    if( dim == oldw ) // this camera doesn't auto-adjust its width w/ binning change
                    {
                        dim = dim * oldBin / binFactor;
                    }
                    SetProperty( MM::g_Keyword_Image_Width, CDeviceUtils::ConvertToString( (long) dim ) );

                    // new limits
                    int64_t high, low;
                    high = nodes->getMax( WIDTH );
                    low = nodes->getMin( WIDTH );
                    SetPropertyLimits( MM::g_Keyword_Image_Width, (double) low, (double) high );
                    UpdateProperty( MM::g_Keyword_Image_Width );
                    LogMessage( (std::string) "setting h bin to " + boost::lexical_cast<std::string>( binFactor )
                                + " and width to " + boost::lexical_cast<std::string>( dim )
                                + " (oldBin: " + boost::lexical_cast<std::string>( oldBin ) + ")"
                                + " new limits (" + boost::lexical_cast<std::string>( low )
                                + " " + boost::lexical_cast<std::string>( high ) + ")", true );
                    if( nodes->isAvailable( WIDTH_MAX ) )
                    {
                        UpdateProperty( MM::g_Keyword_Image_Width_Max );
                    }
                    OnPropertiesChanged();
                    ret = DEVICE_OK;
                }
                ResizeImageBuffer();
            }
        }
        break;
    case MM::BeforeGet:
        {
            int64_t hBin;
            nodes->get( hBin, BINNING_HORIZONTAL );
            pProp->Set( (long) hBin );
            ret = DEVICE_OK;
        }
        break;
    }
    return ret;
} // OnBinningH
HRESULT CCapture::OnReadSample(
    HRESULT hrStatus,
    DWORD /*dwStreamIndex*/,
    DWORD /*dwStreamFlags*/,
    LONGLONG llTimeStamp,
    IMFSample *pSample      // Can be NULL
    )
{
    EnterCriticalSection(&m_critsec);

    if (!IsCapturing())
    {
        LeaveCriticalSection(&m_critsec);
        return S_OK;
    }

    HRESULT hr = S_OK;

    if (FAILED(hrStatus))
    {
        hr = hrStatus;
        goto done;
    }

    if (pSample)
    {
        if (m_bFirstSample)
        {
            m_llBaseTime = llTimeStamp;
            m_bFirstSample = FALSE;
        }

        // rebase the time stamp
        llTimeStamp -= m_llBaseTime;

        hr = pSample->SetSampleTime(llTimeStamp);
        if (FAILED(hr))
        {
            goto done;
        }

        hr = m_pWriter->WriteSample(0, pSample);
        if (FAILED(hr))
        {
            goto done;
        }
    }

    // Read another sample.
    hr = m_pReader->ReadSample(
        (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        0,
        NULL,   // actual
        NULL,   // flags
        NULL,   // timestamp
        NULL    // sample
        );

done:
    if (FAILED(hr))
    {
        NotifyError(hr);
    }

    LeaveCriticalSection(&m_critsec);
    return hr;
}
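// Hedged sketch (assumed wiring, not the sample verbatim): once the source reader and sink
// writer are configured, a single asynchronous ReadSample call starts the chain above; each
// completed sample re-enters OnReadSample, which writes it out and requests the next one.
// StartCaptureLoop is a hypothetical helper; only the members it touches come from the
// code shown here.
HRESULT CCapture::StartCaptureLoop()
{
    // reset the timestamp rebasing state used by OnReadSample
    m_bFirstSample = TRUE;
    m_llBaseTime = 0;

    // Request the first sample; completions arrive on OnReadSample until capture
    // is stopped or an error is reported through NotifyError.
    return m_pReader->ReadSample(
        (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        0, NULL, NULL, NULL, NULL);
}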