HRESULT STDMETHODCALLTYPE XnVideoSource::GetMode( IPin *pPin, __out long *Mode )
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pPin);
	XN_METHOD_CHECK_POINTER(Mode);

	HRESULT hr = S_OK;

	// we have only 1 pin, make sure this is it
	XnVideoStream* pVideoStream = dynamic_cast<XnVideoStream*>(GetPin(0));
	if (pPin != static_cast<IPin*>(pVideoStream))
	{
		XN_METHOD_RETURN(E_FAIL);
	}

	*Mode = 0;

	if (pVideoStream->GetMirror())
		*Mode |= VideoControlFlag_FlipHorizontal;

	if (pVideoStream->GetVerticalFlip())
		*Mode |= VideoControlFlag_FlipVertical;

	XN_METHOD_RETURN(S_OK);
}
HRESULT STDMETHODCALLTYPE XnVideoSource::GetMaxAvailableFrameRate( IPin *pPin, long iIndex, SIZE Dimensions, __out LONGLONG *MaxAvailableFrameRate )
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pPin);
	XN_METHOD_CHECK_POINTER(MaxAvailableFrameRate);

	HRESULT hr = S_OK;

	// we have only 1 pin, make sure this is it
	XnVideoStream* pVideoStream = dynamic_cast<XnVideoStream*>(GetPin(0));
	if (pPin != static_cast<IPin*>(pVideoStream))
	{
		XN_METHOD_RETURN(E_FAIL);
	}

	AM_MEDIA_TYPE* pMediaType;
	VIDEO_STREAM_CONFIG_CAPS vscc;
	hr = pVideoStream->GetStreamCaps(iIndex, &pMediaType, (BYTE*)&vscc);
	if (FAILED(hr))
		XN_METHOD_RETURN(hr);

	// free the media type, including its format block
	DeleteMediaType(pMediaType);

	if (Dimensions.cx != vscc.MaxOutputSize.cx || Dimensions.cy != vscc.MaxOutputSize.cy)
		XN_METHOD_RETURN(E_FAIL);

	*MaxAvailableFrameRate = vscc.MaxFrameInterval;

	XN_METHOD_RETURN(S_OK);
}
HRESULT XnVideoSource::GetCapRange(const XnChar* strCap, long *pMin, long *pMax, long *pSteppingDelta, long *pDefault, long *pCapsFlags)
{
	XN_METHOD_START;

	if (strCap == NULL || !m_image.IsCapabilitySupported(strCap))
	{
		XN_METHOD_RETURN(E_PROP_ID_UNSUPPORTED);
	}

	XN_METHOD_CHECK_POINTER(pMin);
	XN_METHOD_CHECK_POINTER(pMax);
	XN_METHOD_CHECK_POINTER(pSteppingDelta);
	XN_METHOD_CHECK_POINTER(pDefault);
	XN_METHOD_CHECK_POINTER(pCapsFlags);

	xn::GeneralIntCapability cap = m_image.GetGeneralIntCap(strCap);

	XnInt32 nMin, nMax, nStep, nDefault;
	XnBool bIsAutoSupported;
	cap.GetRange(nMin, nMax, nStep, nDefault, bIsAutoSupported);

	*pMin = nMin;
	*pMax = nMax;
	*pSteppingDelta = nStep;
	*pDefault = nDefault;
	// 0x01 is VideoProcAmp_Flags_Auto, 0x02 is VideoProcAmp_Flags_Manual
	*pCapsFlags = bIsAutoSupported ? 0x01 : 0x02;

	XN_METHOD_RETURN(S_OK);
}
HRESULT XnVideoSource::GetCap(const XnChar* strCap, long *lValue, long *Flags)
{
	XN_METHOD_START;

	if (strCap == NULL || !m_image.IsCapabilitySupported(strCap))
	{
		XN_METHOD_RETURN(E_PROP_ID_UNSUPPORTED);
	}

	XN_METHOD_CHECK_POINTER(lValue);
	XN_METHOD_CHECK_POINTER(Flags);

	xn::GeneralIntCapability cap = m_image.GetGeneralIntCap(strCap);

	XnInt32 nVal = cap.Get();
	if (nVal == XN_AUTO_CONTROL)
	{
		// capability is in automatic mode - report the default value with the Auto flag
		XnInt32 nMin, nMax, nStep, nDefault;
		XnBool bIsAutoSupported;
		cap.GetRange(nMin, nMax, nStep, nDefault, bIsAutoSupported);
		*Flags = 0x01; // VideoProcAmp_Flags_Auto
		*lValue = nDefault;
	}
	else
	{
		*Flags = 0x02; // VideoProcAmp_Flags_Manual
		*lValue = nVal;
	}

	XN_METHOD_RETURN(S_OK);
}
HRESULT STDMETHODCALLTYPE XnVideoStream::GetNumberOfCapabilities(int *piCount, int *piSize)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(piCount);
	XN_METHOD_CHECK_POINTER(piSize);

	*piCount = m_aSupportedModes.GetSize();
	*piSize = sizeof(VIDEO_STREAM_CONFIG_CAPS);

	XN_METHOD_RETURN(S_OK);
}
//IUnknown
STDMETHODIMP XnVideoStream::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(ppv);

	HRESULT hr = S_OK;

	// Standard OLE stuff
	if (riid == IID_IAMStreamConfig)
	{
		xnDumpFileWriteString(m_Dump, "\tPin query interface to IAMStreamConfig\n");
		hr = GetInterface(static_cast<IAMStreamConfig*>(this), ppv);
	}
	else if (riid == IID_IKsPropertySet)
	{
		xnDumpFileWriteString(m_Dump, "\tPin query interface to IKsPropertySet\n");
		hr = GetInterface(static_cast<IKsPropertySet*>(this), ppv);
	}
	else if (riid == IID_ISpecifyPropertyPages)
	{
		xnDumpFileWriteString(m_Dump, "\tPin query interface to ISpecifyPropertyPages\n");
		hr = GetInterface(static_cast<ISpecifyPropertyPages*>(this), ppv);
	}
	else
	{
		OLECHAR strGuid[40];
		StringFromGUID2(riid, strGuid, 40);
		xnDumpFileWriteString(m_Dump, "\tPin query interface to %S\n", strGuid);
		hr = CSourceStream::NonDelegatingQueryInterface(riid, ppv);
	}

	XN_METHOD_RETURN(hr);
}
HRESULT STDMETHODCALLTYPE XnVideoSource::GetCaps(IPin *pPin, long *pCapsFlags)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pPin);
	XN_METHOD_CHECK_POINTER(pCapsFlags);

	// we have only 1 pin, make sure this is it
	if (pPin != static_cast<IPin*>(GetPin(0)))
	{
		XN_METHOD_RETURN(E_FAIL);
	}

	*pCapsFlags = VideoControlFlag_FlipHorizontal | VideoControlFlag_FlipVertical;

	XN_METHOD_RETURN(S_OK);
}
HRESULT STDMETHODCALLTYPE XnVideoSource::SetMode( IPin *pPin, long Mode )
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pPin);

	HRESULT hr = S_OK;

	// we have only 1 pin, make sure this is it
	XnVideoStream* pVideoStream = dynamic_cast<XnVideoStream*>(GetPin(0));
	if (pPin != static_cast<IPin*>(pVideoStream))
	{
		XN_METHOD_RETURN(E_FAIL);
	}

	xnLogVerbose(XN_MASK_FILTER, "Setting flip mode to %d", Mode);

	hr = pVideoStream->SetMirror(Mode & VideoControlFlag_FlipHorizontal);
	if (FAILED(hr))
		XN_METHOD_RETURN(hr);

	hr = pVideoStream->SetVerticalFlip(Mode & VideoControlFlag_FlipVertical);
	if (FAILED(hr))
		XN_METHOD_RETURN(hr);

	XN_METHOD_RETURN(S_OK);
}
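// ------------------------------------------------------------------------
// Illustrative sketch only (not compiled into the filter): roughly how a
// client application might use the IAMVideoControl implementation above to
// flip the image. 'pVideoControl' and 'pPin' are assumed to have been
// obtained from the filter and its output pin beforehand; only documented
// DirectShow calls are used.
#if 0
static HRESULT ExampleFlipHorizontally(IAMVideoControl* pVideoControl, IPin* pPin)
{
	long nCaps = 0;
	HRESULT hr = pVideoControl->GetCaps(pPin, &nCaps);
	if (FAILED(hr))
		return hr;

	// only request the flip if the pin reports support for it
	if ((nCaps & VideoControlFlag_FlipHorizontal) == 0)
		return E_NOTIMPL;

	long nMode = 0;
	hr = pVideoControl->GetMode(pPin, &nMode);
	if (FAILED(hr))
		return hr;

	return pVideoControl->SetMode(pPin, nMode | VideoControlFlag_FlipHorizontal);
}
#endif
// ------------------------------------------------------------------------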
HRESULT STDMETHODCALLTYPE XnVideoSource::GetCurrentActualFrameRate( IPin *pPin, __out LONGLONG *ActualFrameRate )
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pPin);
	XN_METHOD_CHECK_POINTER(ActualFrameRate);

	HRESULT hr = S_OK;

	// we have only 1 pin, make sure this is it
	XnVideoStream* pVideoStream = dynamic_cast<XnVideoStream*>(GetPin(0));
	if (pPin != static_cast<IPin*>(pVideoStream))
	{
		XN_METHOD_RETURN(E_FAIL);
	}

	// the frame rate is reported as a frame interval in 100-nanosecond units (rounded)
	*ActualFrameRate = (LONGLONG)(10000000.0 / pVideoStream->GetCurrentFPS() + 0.5);

	XN_METHOD_RETURN(S_OK);
}
HRESULT STDMETHODCALLTYPE XnVideoStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pmt);
	XN_METHOD_CHECK_POINTER(pSCC);

	xnDumpFileWriteString(m_Dump, "\tCalling %s for %d\n", __FUNCTION__, iIndex);

	CMediaType mediaType;
	VIDEO_STREAM_CONFIG_CAPS* pvscc = (VIDEO_STREAM_CONFIG_CAPS*)pSCC;

	HRESULT hr = GetStreamCapability(iIndex, mediaType, *pvscc);
	if (FAILED(hr))
		XN_METHOD_RETURN(hr);

	xnDumpFileWriteString(m_Dump, "\tReturning %dx%d@%d using %s\n",
		m_aSupportedModes[iIndex].OutputMode.nXRes,
		m_aSupportedModes[iIndex].OutputMode.nYRes,
		m_aSupportedModes[iIndex].OutputMode.nFPS,
		xnPixelFormatToString(m_aSupportedModes[iIndex].Format));

	*pmt = CreateMediaType(&mediaType);

	XN_METHOD_RETURN(S_OK);
}
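// ------------------------------------------------------------------------
// Illustrative sketch only (not compiled into the filter): roughly how a
// DirectShow client would enumerate the modes exposed by this pin through
// IAMStreamConfig. 'pConfig' is assumed to have been obtained from the output
// pin via QueryInterface; only documented DirectShow calls are used.
#if 0
static void ExampleEnumerateStreamCaps(IAMStreamConfig* pConfig)
{
	int nCount = 0, nSize = 0;
	if (FAILED(pConfig->GetNumberOfCapabilities(&nCount, &nSize)) ||
		nSize != sizeof(VIDEO_STREAM_CONFIG_CAPS))
	{
		return;
	}

	for (int i = 0; i < nCount; ++i)
	{
		AM_MEDIA_TYPE* pmt = NULL;
		VIDEO_STREAM_CONFIG_CAPS vscc;
		if (SUCCEEDED(pConfig->GetStreamCaps(i, &pmt, (BYTE*)&vscc)))
		{
			// inspect pmt / vscc here, then free the media type (and its format block)
			DeleteMediaType(pmt);
		}
	}
}
#endif
// ------------------------------------------------------------------------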
STDMETHODIMP XnVideoSource::GetGainRange(XnInt32 *pnMin, XnInt32* pnMax, XnInt32* pnStep, XnInt32* pnDefault, XnBool* pbAutoSupported)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pnMin);
	XN_METHOD_CHECK_POINTER(pnMax);
	XN_METHOD_CHECK_POINTER(pnStep);
	XN_METHOD_CHECK_POINTER(pnDefault);
	XN_METHOD_CHECK_POINTER(pbAutoSupported);

	if (!m_image.IsCapabilitySupported(XN_CAPABILITY_GAIN))
	{
		XN_METHOD_RETURN(E_PROP_ID_UNSUPPORTED);
	}

	xn::GeneralIntCapability cap = m_image.GetGainCap();
	cap.GetRange(*pnMin, *pnMax, *pnStep, *pnDefault, *pbAutoSupported);

	XN_METHOD_RETURN(S_OK);
}
STDMETHODIMP XnVideoSource::GetPowerLineFrequency(XnPowerLineFrequency *pnValue)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pnValue);

	if (!m_image.IsCapabilitySupported(XN_CAPABILITY_ANTI_FLICKER))
	{
		XN_METHOD_RETURN(E_PROP_ID_UNSUPPORTED);
	}

	*pnValue = m_image.GetAntiFlickerCap().GetPowerLineFrequency();

	XN_METHOD_RETURN(S_OK);
}
//
// DecideBufferSize
//
// This will always be called after the format has been successfully
// negotiated. So we have a look at m_mt to see what size image we agreed on.
// Then we can ask for buffers of the correct size to contain them.
//
HRESULT XnVideoStream::DecideBufferSize(IMemAllocator *pIMemAlloc, ALLOCATOR_PROPERTIES *pProperties)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pIMemAlloc);
	XN_METHOD_CHECK_POINTER(pProperties);

	CAutoLock cAutoLock(m_pFilter->pStateLock());

	HRESULT hr = S_OK;

	VIDEOINFO *pvi = (VIDEOINFO*)m_mt.Format();
	pProperties->cBuffers = 3;
	pProperties->cbBuffer = pvi->bmiHeader.biSizeImage;

	ASSERT(pProperties->cbBuffer);

	// Ask the allocator to reserve us some sample memory. NOTE: the function
	// can succeed (that is return NOERROR) but still not have allocated the
	// memory that we requested, so we must check we got whatever we wanted.
	ALLOCATOR_PROPERTIES Actual;
	hr = pIMemAlloc->SetProperties(pProperties, &Actual);
	if (FAILED(hr))
	{
		XN_METHOD_RETURN(hr);
	}

	// Is this allocator unsuitable?
	if (Actual.cbBuffer < pProperties->cbBuffer)
	{
		XN_METHOD_RETURN(E_FAIL);
	}

	// Make sure that we have only 3 buffers
	ASSERT(Actual.cBuffers == 3);

	XN_METHOD_RETURN(NOERROR);
}
HRESULT STDMETHODCALLTYPE XnVideoSource::GetFrameRateList( IPin *pPin, long iIndex, SIZE Dimensions, __out long *ListSize, __out LONGLONG **FrameRates )
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pPin);
	XN_METHOD_CHECK_POINTER(ListSize);

	HRESULT hr = S_OK;

	// we have only 1 pin, make sure this is it
	XnVideoStream* pVideoStream = dynamic_cast<XnVideoStream*>(GetPin(0));
	if (pPin != static_cast<IPin*>(pVideoStream))
	{
		XN_METHOD_RETURN(E_FAIL);
	}

	AM_MEDIA_TYPE* pMediaType;
	VIDEO_STREAM_CONFIG_CAPS vscc;
	hr = pVideoStream->GetStreamCaps(iIndex, &pMediaType, (BYTE*)&vscc);
	if (FAILED(hr))
		XN_METHOD_RETURN(hr);

	// free the media type, including its format block
	DeleteMediaType(pMediaType);

	if (Dimensions.cx != vscc.MaxOutputSize.cx || Dimensions.cy != vscc.MaxOutputSize.cy)
		XN_METHOD_RETURN(E_FAIL);

	// we return 1 frame rate for each mode (this is the OpenNI way...)
	*ListSize = 1;

	if (FrameRates != NULL)
	{
		*FrameRates = (LONGLONG*)CoTaskMemAlloc(sizeof(LONGLONG) * (*ListSize));
		if (*FrameRates == NULL)
		{
			XN_METHOD_RETURN(E_OUTOFMEMORY);
		}
		(*FrameRates)[0] = vscc.MaxFrameInterval;
	}

	XN_METHOD_RETURN(S_OK);
}
//
// CheckMediaType
//
// Returns E_INVALIDARG if the mediatype is not acceptable
//
HRESULT XnVideoStream::CheckMediaType(const CMediaType *pMediaType)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pMediaType);

	int index = FindCapability(*pMediaType);
	if (index == -1 || // not found
		(m_nPreferredMode >= 0 && index != m_nPreferredMode)) // not the mode selected via SetFormat
	{
		XN_METHOD_RETURN(E_INVALIDARG);
	}

	XN_METHOD_RETURN(S_OK);
}
STDMETHODIMP XnVideoStream::GetPages(CAUUID *pPages)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pPages);

	pPages->cElems = 1;
	pPages->pElems = reinterpret_cast<GUID*>(CoTaskMemAlloc(sizeof(GUID) * pPages->cElems));
	if (pPages->pElems == NULL)
	{
		XN_METHOD_RETURN(E_OUTOFMEMORY);
	}

	pPages->pElems[0] = CLSID_VideoStreamConfigPropertyPage;

	XN_METHOD_RETURN(S_OK);
}
STDMETHODIMP XnVideoSource::GetLowLightCompensation(XnBool *pbValue)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pbValue);

	if (!m_image.IsCapabilitySupported(XN_CAPABILITY_LOW_LIGHT_COMPENSATION))
	{
		XN_METHOD_RETURN(E_PROP_ID_UNSUPPORTED);
	}

	xn::GeneralIntCapability cap = m_image.GetLowLightCompensationCap();
	*pbValue = (XnBool)cap.Get();

	XN_METHOD_RETURN(S_OK);
}
STDMETHODIMP XnVideoSource::GetGain(XnInt32 *pnValue)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pnValue);

	if (!m_image.IsCapabilitySupported(XN_CAPABILITY_GAIN))
	{
		XN_METHOD_RETURN(E_PROP_ID_UNSUPPORTED);
	}

	xn::GeneralIntCapability cap = m_image.GetGainCap();
	*pnValue = cap.Get();

	XN_METHOD_RETURN(S_OK);
}
STDMETHODIMP XnVideoSource::GetPages(CAUUID *pPages)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pPages);

	pPages->cElems = 3;
	pPages->pElems = reinterpret_cast<GUID*>(CoTaskMemAlloc(sizeof(GUID) * pPages->cElems));
	if (pPages->pElems == NULL)
	{
		XN_METHOD_RETURN(E_OUTOFMEMORY);
	}

	pPages->pElems[0] = CLSID_VideoProcAmpPropertyPage;
	pPages->pElems[1] = CLSID_CameraControlPropertyPage;
	pPages->pElems[2] = CLSID_AdditionalOpenNIControlsPropertyPage;

	XN_METHOD_RETURN(S_OK);
}
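// ------------------------------------------------------------------------
// Illustrative sketch only (not compiled into the filter): roughly how a host
// application would show the property pages advertised above. 'pFilter' is
// assumed to be the filter's IBaseFilter pointer; only documented COM /
// DirectShow calls are used.
#if 0
static HRESULT ExampleShowPropertyPages(IBaseFilter* pFilter, HWND hwndOwner)
{
	ISpecifyPropertyPages* pSpecify = NULL;
	HRESULT hr = pFilter->QueryInterface(IID_ISpecifyPropertyPages, (void**)&pSpecify);
	if (FAILED(hr))
		return hr;

	CAUUID pages = { 0 };
	hr = pSpecify->GetPages(&pages);
	pSpecify->Release();
	if (FAILED(hr))
		return hr;

	// display a modal property frame hosting the advertised pages
	IUnknown* pUnk = pFilter;
	hr = OleCreatePropertyFrame(hwndOwner, 0, 0, L"Filter Properties", 1, &pUnk,
		pages.cElems, pages.pElems, 0, 0, NULL);

	CoTaskMemFree(pages.pElems);
	return hr;
}
#endif
// ------------------------------------------------------------------------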
STDMETHODIMP XnVideoSource::GetLowLightCompensationDefault(XnBool* pbValue)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pbValue);

	if (!m_image.IsCapabilitySupported(XN_CAPABILITY_LOW_LIGHT_COMPENSATION))
	{
		XN_METHOD_RETURN(E_PROP_ID_UNSUPPORTED);
	}

	XnInt32 nMin, nMax, nStep, nDefault;
	XnBool bAutoSupported;
	m_image.GetLowLightCompensationCap().GetRange(nMin, nMax, nStep, nDefault, bAutoSupported);

	*pbValue = nDefault;

	XN_METHOD_RETURN(S_OK);
}
HRESULT STDMETHODCALLTYPE XnVideoStream::SetFormat(AM_MEDIA_TYPE *pmt)
{
	XN_METHOD_START;

	// a NULL media type is rejected by the pointer check (resetting the format is not supported)
	XN_METHOD_CHECK_POINTER(pmt);

	xnLogVerbose(XN_MASK_FILTER, "SetFormat was called");

	// check if this format is supported
	CMediaType mediaType(*pmt);
	int index = FindCapability(mediaType);
	if (index == -1)
	{
		XN_METHOD_RETURN(VFW_E_INVALIDMEDIATYPE);
	}

	// keep previous one (so we can rollback)
	int prevPreferred = m_nPreferredMode;

	// set the preferred mode
	m_nPreferredMode = index;

	// try to reconnect (if needed)
	IPin* pin = NULL;
	ConnectedTo(&pin);
	if (pin != NULL)
	{
		pin->Release(); // we only needed to know whether we're connected

		IFilterGraph* pGraph = ((XnVideoSource*)m_pFilter)->GetGraph();
		HRESULT hr = pGraph->Reconnect(this);
		if (FAILED(hr))
		{
			// rollback
			m_nPreferredMode = prevPreferred;
			XN_METHOD_RETURN(hr);
		}
	}

	XN_METHOD_RETURN(S_OK);
}
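// ------------------------------------------------------------------------
// Illustrative sketch only (not compiled into the filter): roughly how a
// client would select one of the advertised modes through IAMStreamConfig
// before connecting the pin. 'pConfig' is assumed to have been obtained from
// the output pin via QueryInterface; 'iDesired' is a hypothetical mode index.
#if 0
static HRESULT ExampleSelectMode(IAMStreamConfig* pConfig, int iDesired)
{
	AM_MEDIA_TYPE* pmt = NULL;
	VIDEO_STREAM_CONFIG_CAPS vscc;
	HRESULT hr = pConfig->GetStreamCaps(iDesired, &pmt, (BYTE*)&vscc);
	if (FAILED(hr))
		return hr;

	// ask the pin to use this mode from now on
	hr = pConfig->SetFormat(pmt);

	DeleteMediaType(pmt);
	return hr;
}
#endif
// ------------------------------------------------------------------------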
HRESULT STDMETHODCALLTYPE XnVideoStream::GetFormat(AM_MEDIA_TYPE **ppmt)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(ppmt);

	if (IsConnected())
	{
		// return the media type we're currently connected with
		*ppmt = CreateMediaType(&m_mt);
	}
	else
	{
		// not connected yet - return the preferred mode (or the first one)
		int iIndex = (m_nPreferredMode >= 0) ? m_nPreferredMode : 0;

		CMediaType mediaType;
		VIDEO_STREAM_CONFIG_CAPS vscc;
		HRESULT hr = GetStreamCapability(iIndex, mediaType, vscc);
		if (FAILED(hr))
			XN_METHOD_RETURN(hr);

		*ppmt = CreateMediaType(&mediaType);
	}

	XN_METHOD_RETURN(S_OK);
}
//
// GetMediaType
//
// Preferred types should be ordered by quality, with zero as highest quality.
//
HRESULT XnVideoStream::GetMediaType(int iPosition, __inout CMediaType *pMediaType)
{
	XN_METHOD_START;

	HRESULT hr = S_OK;

	XN_METHOD_CHECK_POINTER(pMediaType);

	if (iPosition < 0)
	{
		XN_METHOD_RETURN(E_INVALIDARG);
	}

	if (m_nPreferredMode >= 0)
	{
		// Once a mode was set using IAMStreamConfig::SetFormat, this should be the only mode
		if (iPosition == 0)
		{
			VIDEO_STREAM_CONFIG_CAPS vscc;
			hr = GetStreamCapability(m_nPreferredMode, *pMediaType, vscc);
			XN_METHOD_RETURN(hr);
		}
		else
		{
			XN_METHOD_RETURN(VFW_S_NO_MORE_ITEMS);
		}
	}
	else
	{
		// Have we run off the end of types? (valid positions are 0..size-1)
		if (iPosition >= int(m_aSupportedModes.GetSize()))
		{
			XN_METHOD_RETURN(VFW_S_NO_MORE_ITEMS);
		}

		VIDEO_STREAM_CONFIG_CAPS vscc;
		hr = GetStreamCapability(iPosition, *pMediaType, vscc);
		XN_METHOD_RETURN(hr);
	}
}
HRESULT XnVideoStream::SetMediaType(const CMediaType* pMediaType)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pMediaType);

	Mode mode = MediaTypeToMode(*pMediaType);

	XnStatus nRetVal = m_imageGen.SetMapOutputMode(mode.OutputMode);
	if (nRetVal != XN_STATUS_OK)
	{
		XN_METHOD_RETURN(E_FAIL);
	}

	nRetVal = m_imageGen.SetPixelFormat(mode.Format);
	if (nRetVal != XN_STATUS_OK)
	{
		XN_METHOD_RETURN(E_FAIL);
	}

	HRESULT hr = CSourceStream::SetMediaType(pMediaType);

	XN_METHOD_RETURN(hr);
}
STDMETHODIMP XnVideoSource::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(ppv);

	HRESULT hr = S_OK;

	if (riid == IID_ISpecifyPropertyPages)
	{
		xnDumpFileWriteString(m_Dump, "Filter query interface to ISpecifyPropertyPages\n");
		hr = GetInterface(static_cast<ISpecifyPropertyPages*>(this), ppv);
	}
	else if (riid == IID_IAMVideoControl)
	{
		xnDumpFileWriteString(m_Dump, "Filter query interface to IAMVideoControl\n");
		hr = GetInterface(static_cast<IAMVideoControl*>(this), ppv);
	}
	else if (riid == IID_IAMVideoProcAmp)
	{
		xnDumpFileWriteString(m_Dump, "Filter query interface to IAMVideoProcAmp\n");

		// created lazily on first request
		if (m_pVideoProcAmp == NULL)
		{
			m_pVideoProcAmp = new VideoProcAmp(this);
			if (m_pVideoProcAmp == NULL)
			{
				XN_METHOD_RETURN(E_OUTOFMEMORY);
			}
		}

		hr = GetInterface(static_cast<IAMVideoProcAmp*>(m_pVideoProcAmp), ppv);
	}
	else if (riid == IID_IAMCameraControl)
	{
		xnDumpFileWriteString(m_Dump, "Filter query interface to IAMCameraControl\n");

		// created lazily on first request
		if (m_pCameraControl == NULL)
		{
			m_pCameraControl = new CameraControl(this);
			if (m_pCameraControl == NULL)
			{
				XN_METHOD_RETURN(E_OUTOFMEMORY);
			}
		}

		hr = GetInterface(static_cast<IAMCameraControl*>(m_pCameraControl), ppv);
	}
	else if (riid == IID_IAdditionalOpenNIControls)
	{
		xnDumpFileWriteString(m_Dump, "Filter query interface to IAdditionalControls\n");
		hr = GetInterface(static_cast<IAdditionalControls*>(this), ppv);
	}
	else
	{
		OLECHAR strGuid[40];
		StringFromGUID2(riid, strGuid, 40);
		xnDumpFileWriteString(m_Dump, "Filter query interface to %S\n", strGuid);
		hr = CSource::NonDelegatingQueryInterface(riid, ppv);
	}

	XN_METHOD_RETURN(hr);
}
HRESULT XnVideoStream::FillBuffer(IMediaSample *pms)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pms);

	if (!m_imageGen.IsGenerating())
	{
		XN_METHOD_RETURN(E_UNEXPECTED);
	}

	VIDEOINFOHEADER* videoInfo = (VIDEOINFOHEADER*)m_mt.Format();

	// a positive biHeight means the output DIB is bottom-up, so the top-down OpenNI image
	// has to be flipped vertically
	bool bUpsideDown = videoInfo->bmiHeader.biHeight > 0;
	if (m_bFlipVertically)
	{
		bUpsideDown = !bUpsideDown;
	}

	BYTE *pData;
	long lDataLen;

	pms->GetPointer(&pData);
	lDataLen = pms->GetSize();

	{
		CAutoLock cAutoLock(m_pFilter->pStateLock());

		XnStatus nRetVal = XN_STATUS_OK;

		// ignore timeouts
		for(;;)
		{
			nRetVal = m_imageGen.WaitAndUpdateData();
			if (nRetVal != XN_STATUS_WAIT_DATA_TIMEOUT)
			{
				break;
			}
			else
			{
				xnDumpFileWriteString(m_Dump, "\tTimeout during FillBuffer\n");
			}
		}

		if (nRetVal != XN_STATUS_OK)
			XN_METHOD_RETURN(E_UNEXPECTED);
	}

	xn::ImageMetaData imageMD;
	m_imageGen.GetMetaData(imageMD);

	if (imageMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24)
	{
		const XnRGB24Pixel* pImage = imageMD.RGB24Data();

		if (bUpsideDown)
		{
			// convert from left-to-right top-to-bottom RGB to left-to-right bottom-to-top BGR:
			// walk the source rows in reverse order, keeping the pixels within each row in order
			for (XnUInt32 y = 0; y < imageMD.YRes(); ++y)
			{
				const XnRGB24Pixel* pImageRow = pImage + (imageMD.YRes() - 1 - y) * imageMD.XRes();
				for (XnUInt32 x = 0; x < imageMD.XRes(); ++x, pImageRow += 1, pData += 3)
				{
					// translate RGB to BGR
					pData[0] = pImageRow->nBlue;
					pData[1] = pImageRow->nGreen;
					pData[2] = pImageRow->nRed;
				}
			}
		}
		else
		{
			for (XnUInt32 y = 0; y < imageMD.YRes(); ++y)
			{
				for (XnUInt32 x = 0; x < imageMD.XRes(); ++x, pImage += 1, pData += 3)
				{
					// translate RGB to BGR
					pData[0] = pImage->nBlue;
					pData[1] = pImage->nGreen;
					pData[2] = pImage->nRed;
				}
			}
		}
	}
	else if (imageMD.PixelFormat() == XN_PIXEL_FORMAT_MJPEG)
	{
		memcpy(pData, imageMD.Data(), imageMD.DataSize());
		pms->SetActualDataLength(imageMD.DataSize());
	}
	else
	{
		xnLogError(XN_MASK_FILTER, "Unsupported pixel format!");
		XN_METHOD_RETURN(E_UNEXPECTED);
	}

	// The current time is the sample's start
	// CRefTime rtStart = m_rtSampleTime;

	// Increment to find the finish time
	// m_rtSampleTime += (LONG)m_iRepeatTime;

	// pms->SetTime((REFERENCE_TIME *) &rtStart,(REFERENCE_TIME *) &m_rtSampleTime);

	pms->SetSyncPoint(TRUE);

	xnFPSMarkFrame(&m_FPS);

	XN_METHOD_RETURN(NOERROR);
}