// Effect-setting callback: pushes the world-view-projection matrix, sampler mode,
// input texture and threshold value into the D3DX11 effect before rendering.
//
// @param pEffect  Target effect whose named variables are filled in.
// @param self     Opaque pointer to the owning filter, passed through the base
//                 class as GSDXFilterBase* (recovered by the two-step cast below).
// @return S_OK on success; E_FAIL on any missing dependency; otherwise the
//         HRESULT from _SetEffectPara.
HRESULT MyThresholdDxFilter::OnEffectSetting(ID3DX11Effect* pEffect, void* self)
{
    if (pEffect == NULL || self == NULL)
        return E_FAIL;

    HRESULT hr = S_OK;
    // 'self' was handed out as the base-class pointer; cast through the base
    // first so the derived-class adjustment is applied correctly.
    MyThresholdDxFilter* pSelf = (MyThresholdDxFilter*)(GSDXFilterBase*)self;
    if (pSelf->m_pD3DDisplay == NULL || pSelf->m_pInTextureList.size() <= 0)
        return E_FAIL;

    // Hold the texture lock first, then the render-parameter lock (lock order
    // preserved from the original code to avoid lock-inversion with other paths).
    GSAutoLock lck2(pSelf->m_pInTextureList[0]->GetGSCritSec());

    IGSCamera* pCamera = pSelf->m_pD3DDisplay->GetCamera();
    if (pCamera == NULL)
        return E_FAIL;

    CAutoLock lck1(&pSelf->m_csRenderPara);

    ID3D11ShaderResourceView* pInTex = NULL;
    hr = pSelf->m_pInTextureList[0]->GetShaderResourceView(pInTex);
    // BUG FIX: previously only pInTex was tested and the HRESULT was ignored;
    // a failing call that leaves a stale non-NULL pointer would slip through.
    if (FAILED(hr) || pInTex == NULL)
        return E_FAIL;

    int _sampleType = 1; // linear sampler
    float threshold = pSelf->m_fThreshold;

    // WVP = identity world * camera view * camera projection.
    D3DXMATRIX matView = pCamera->GetViewMatrix();
    D3DXMATRIX matProj = pCamera->GetProjMatrix();
    D3DXMATRIX matWorld;
    D3DXMatrixIdentity(&matWorld);
    D3DXMATRIX matWorldViewProj = matWorld * matView * matProj;

    // Names must match the effect (.fx) variable names exactly.
    GSEffectSettingEntry fxEntries[] = {
        {"WorldViewProj", MATRIX,   (void*)&matWorldViewProj, 0, sizeof(matWorldViewProj)},
        {"g_sampleType",  SCALAR,   (void*)&_sampleType,      0, sizeof(_sampleType)},
        {"g_Texture",     RESOURCE, (void*)pInTex,            0, 0},
        {"g_threshold",   SCALAR,   (void*)&threshold,        0, sizeof(threshold)}
    };
    hr = pSelf->_SetEffectPara(pEffect, fxEntries, ARRAYSIZE(fxEntries));
    return hr;
}
// Transition the filter to State_Stopped.
//
// Deactivates both pins, stops streaming, and clears the EOS-delivered flag.
// Returns NOERROR when already stopped or not fully connected; otherwise the
// result of StopStreaming().
//
// Lock ordering here is deliberate and must not change: the filter lock is
// taken first, the input pin is decommitted BEFORE acquiring m_csReceive, and
// only then is the streaming lock taken — decommitting unblocks any thread
// stuck in GetBuffer/Receive that would otherwise hold m_csReceive forever
// (deadlock).
STDMETHODIMP CTransformFilter::Stop()
{
    CAutoLock lck1(&m_csFilter);
    if (m_State == State_Stopped)
    {
        return NOERROR;
    }

    // Succeed the Stop if we are not completely connected
    // (invariant: an output pin exists whenever an input pin does).
    ASSERT(m_pInput == NULL || m_pOutput != NULL);
    if (m_pInput == NULL || m_pInput->IsConnected() == FALSE ||
        m_pOutput->IsConnected() == FALSE)
    {
        m_State = State_Stopped;
        m_bEOSDelivered = FALSE;
        return NOERROR;
    }

    ASSERT(m_pInput);
    ASSERT(m_pOutput);

    // decommit the input pin before locking or we can deadlock
    m_pInput->Inactive();

    // synchronize with Receive calls
    CAutoLock lck2(&m_csReceive);
    m_pOutput->Inactive();

    // allow a class derived from CTransformFilter
    // to know about starting and stopping streaming
    HRESULT hr = StopStreaming();
    if (SUCCEEDED(hr))
    {
        // complete the state transition
        m_State = State_Stopped;
        m_bEOSDelivered = FALSE;
    }
    return hr;
}
// Transition the CUDA decode filter to State_Stopped.
//
// Mirrors CTransformFilter::Stop but additionally flushes the media
// controller's pending queue and runs a Begin/EndFlush bracket around
// deactivating the output pin.
// Returns NOERROR when already stopped or not fully connected; otherwise the
// result of StopStreaming().
STDMETHODIMP CudaDecodeFilter::Stop()
{
    CAutoLock lck1(&m_cStateLock);
    if (m_State == State_Stopped)
    {
        return NOERROR;
    }

    // Succeed the Stop if we are not completely connected.
    ASSERT(m_CudaDecodeInputPin == NULL || m_paStreams[0] != NULL);
    // BUG FIX: m_paStreams[0] was dereferenced guarded only by the debug-build
    // ASSERT above; in a release build a NULL output stream with a connected
    // input pin crashed here. Check it explicitly before IsConnected().
    if (m_CudaDecodeInputPin == NULL ||
        m_CudaDecodeInputPin->IsConnected() == FALSE ||
        m_paStreams[0] == NULL ||
        m_paStreams[0]->IsConnected() == FALSE)
    {
        m_State = State_Stopped;
        m_EOSDelivered = FALSE;
        return NOERROR;
    }

    // Important!!! Refuse to receive any more samples
    m_MediaController->FlushAllPending();

    // decommit the input pin before locking or we can deadlock
    m_CudaDecodeInputPin->Inactive();

    // synchronize with Receive calls
    CAutoLock lck2(&m_csReceive);
    OutputPin()->BeginFlush();
    OutputPin()->Inactive();
    OutputPin()->EndFlush();

    // allow a class derived from CTransformFilter
    // to know about starting and stopping streaming
    HRESULT hr = StopStreaming();
    if (SUCCEEDED(hr))
    {
        // complete the state transition
        m_State = State_Stopped;
        m_EOSDelivered = FALSE;
    }
    return hr;
}
// Transform callback: copies the input video frame to the output sample and,
// when the save flags are set, writes the frame to C:\SaveFrame as a BMP
// (optionally stamping a touch marker first).
//
// @param self        Owning filter, passed through the base as GSMuxFilter*.
// @param pInSample   Source media sample (RGB24/RGB32/ARGB32 video).
// @param pInMT       Media type of the input sample (FORMAT_VideoInfo).
// @param pOutSample  Destination media sample; must match input dimensions.
// @param pOutMT      Media type of the output sample.
// @return S_OK on success; E_FAIL on NULL arguments, media-type mismatch,
//         unsupported subtype, or missing buffers.
HRESULT SaveFrameFilter::myTransform(void* self, IMediaSample *pInSample, CMediaType* pInMT,
                                     IMediaSample *pOutSample, CMediaType* pOutMT)
{
    if (self == NULL || pInSample == NULL || pInMT == NULL ||
        pOutSample == NULL || pOutMT == NULL)
    {
        return E_FAIL;
    }
    // 'self' was handed out as the base-class pointer; recover the derived type.
    SaveFrameFilter* pSelf = (SaveFrameFilter*)(GSMuxFilter*)self;

    // Input and output must agree on major type, subtype and compression.
    if (!IsEqualGUID(*pInMT->Type(), *pOutMT->Type()) ||
        !IsEqualGUID(*pInMT->Subtype(), *pOutMT->Subtype()) ||
        pInMT->IsTemporalCompressed() != pOutMT->IsTemporalCompressed())
    {
        return E_FAIL;
    }

    if (pInMT->FormatType() == NULL || pOutMT->FormatType() == NULL ||
        !IsEqualGUID(*pInMT->FormatType(), *pOutMT->FormatType()))
    {
        return E_FAIL;
    }

    // Only FORMAT_VideoInfo is supported, and dimensions must match.
    if (!IsEqualGUID(*pInMT->FormatType(), FORMAT_VideoInfo))
    {
        return E_FAIL;
    }
    VIDEOINFOHEADER* pInFormat  = (VIDEOINFOHEADER*)pInMT->Format();
    VIDEOINFOHEADER* pOutFormat = (VIDEOINFOHEADER*)pOutMT->Format();
    if (pInFormat == NULL || pOutFormat == NULL)
        return E_FAIL;
    if (pInFormat->bmiHeader.biWidth  != pOutFormat->bmiHeader.biWidth ||
        pInFormat->bmiHeader.biHeight != pOutFormat->bmiHeader.biHeight)
    {
        return E_FAIL;
    }

    // Map the subtype to a channel count for the OpenCV image headers.
    int camChannel;
    GUID guidSubType = pInMT->subtype;
    if (IsEqualGUID(guidSubType, MEDIASUBTYPE_RGB24))
    {
        camChannel = 3;
    }
    else if (IsEqualGUID(guidSubType, MEDIASUBTYPE_RGB32) ||
             IsEqualGUID(guidSubType, MEDIASUBTYPE_ARGB32))
    {
        camChannel = 4;
    }
    else
    {
        // BUG FIX: camChannel was left uninitialized for any other subtype and
        // then passed to cvCreateImageHeader — undefined behavior. Reject
        // unsupported subtypes explicitly.
        return E_FAIL;
    }

    BYTE* pInBuffer = NULL;
    BYTE* pOutBuffer = NULL;
    pInSample->GetPointer(&pInBuffer);
    pOutSample->GetPointer(&pOutBuffer);
    if (pInBuffer == NULL || pOutBuffer == NULL)
        return E_FAIL;

    // Wrap both sample buffers in IplImage headers (no pixel copy on create)
    // and copy input -> output.
    IplImage* cvImgSrc = cvCreateImageHeader(
        cvSize(pInFormat->bmiHeader.biWidth, pInFormat->bmiHeader.biHeight), 8, camChannel);
    cvImgSrc->imageData = (char*)pInBuffer;
    IplImage* cvImgDst = cvCreateImageHeader(
        cvSize(pInFormat->bmiHeader.biWidth, pInFormat->bmiHeader.biHeight), 8, camChannel);
    cvImgDst->imageData = (char*)pOutBuffer;
    cvCopy(cvImgSrc, cvImgDst);

    CAutoLock lck1(&pSelf->m_csRenderPara);
    if (pSelf->m_bIsSave)
    {
        // Continuous save: one BMP per frame, optional circle marking a touch.
        char filename[MAX_PATH];
        // BUG FIX: unbounded sprintf replaced by snprintf to avoid overflowing
        // the MAX_PATH buffer with a long m_saveFrameName.
        snprintf(filename, sizeof(filename), "C://SaveFrame//%s_%d.bmp",
                 pSelf->m_saveFrameName, pSelf->m_nFrameCnt);
        if (pSelf->m_bIsTouch)
        {
            cvCircle(cvImgSrc, cvPoint(cvImgSrc->width / 2, cvImgSrc->height / 2),
                     20, CV_RGB(255, 255, 255), 5);
        }
        cvSaveImage(filename, cvImgSrc);
    }
    if (pSelf->m_bIsOneTouch)
    {
        // One-shot save: single BMP, optional rectangle marking a touch,
        // then the flag is cleared.
        char filename2[MAX_PATH];
        snprintf(filename2, sizeof(filename2), "C://SaveFrame//One_%s_%d.bmp",
                 pSelf->m_saveFrameName, pSelf->m_nFrameCnt);
        if (pSelf->m_bIsTouch)
        {
            int rectSize = 20;
            cvRectangle(cvImgSrc,
                        cvPoint(cvImgSrc->width / 2, cvImgSrc->height / 2),
                        cvPoint(cvImgSrc->width / 2 + rectSize, cvImgSrc->height / 2 + rectSize),
                        CV_RGB(255, 255, 255), 5);
        }
        cvSaveImage(filename2, cvImgSrc);
        pSelf->m_bIsOneTouch = false;
    }

    // Release the headers only — the pixel buffers belong to the samples.
    cvReleaseImageHeader(&cvImgSrc);
    cvReleaseImageHeader(&cvImgDst);
    pSelf->m_nFrameCnt++;
    return S_OK;
}