STDMETHODIMP CInternalPropertyPage::Show(UINT nCmdShow)
{
    AFX_MANAGE_STATE(AfxGetStaticModuleState());
    CAutoLock cAutoLock(this);

    // The page window must exist before it can be shown or hidden.
    if (!m_pWnd) {
        return E_UNEXPECTED;
    }

    // Only plain show/hide requests are supported by this page.
    const bool validCmd = (nCmdShow == SW_SHOW) || (nCmdShow == SW_SHOWNORMAL) || (nCmdShow == SW_HIDE);
    if (!validCmd) {
        return E_INVALIDARG;
    }

    m_pWnd->ShowWindow(nCmdShow);
    m_pWnd->Invalidate();

    return S_OK;
}
// IAMStreamSelect::Info for the stream at lIndex. Every out-parameter is
// optional: each is filled only when the caller supplied a non-NULL pointer.
STDMETHODIMP CStreamSwitcherFilter::Info(long lIndex, AM_MEDIA_TYPE** ppmt, DWORD* pdwFlags, LCID* plcid, DWORD* pdwGroup, WCHAR** ppszName, IUnknown** ppObject, IUnknown** ppUnk)
{
    CAutoLock cAutoLock(&m_csPins);

    CBasePin* pPin = GetConnectedInputPin(lIndex);
    if (!pPin) {
        return E_INVALIDARG;
    }

    if (ppmt) {
        // Caller frees the copy with DeleteMediaType().
        *ppmt = CreateMediaType(&m_pOutput->CurrentMediaType());
    }

    if (pdwFlags) {
        // The currently selected input pin is reported as the exclusive stream.
        *pdwFlags = (m_pInput == pPin) ? AMSTREAMSELECTINFO_EXCLUSIVE : 0;
    }

    if (plcid) {
        *plcid = 0;
    }

    if (pdwGroup) {
        *pdwGroup = 0;
    }

    if (ppszName) {
        // Fix: compute the name length once instead of three separate wcslen() calls.
        const size_t cchName = wcslen(pPin->Name()) + 1; // includes terminating NUL
        *ppszName = (WCHAR*)CoTaskMemAlloc(cchName * sizeof(WCHAR)); // caller frees with CoTaskMemFree()
        if (*ppszName) {
            wcscpy_s(*ppszName, cchName, pPin->Name());
        }
    }

    if (ppObject) {
        *ppObject = NULL;
    }

    if (ppUnk) {
        *ppUnk = NULL;
    }

    return S_OK;
}
// Ask for buffers of the size appropriate to the agreed media type HRESULT my12doomSourceStream::DecideBufferSize(IMemAllocator *pIMemAlloc, ALLOCATOR_PROPERTIES *pProperties) { if (pIMemAlloc == NULL) return E_POINTER; if (pProperties == NULL) return E_POINTER; CAutoLock cAutoLock(m_pFilter->pStateLock()); pProperties->cBuffers = 1; if (m_stream->CBR) pProperties->cbBuffer = m_stream->CBR; else pProperties->cbBuffer = m_stream->max_packet_size; ASSERT(pProperties->cbBuffer); // Ask the allocator to reserve us some sample memory, NOTE the function // can succeed (that is return NOERROR) but still not have allocated the // memory that we requested, so we must check we got whatever we wanted ALLOCATOR_PROPERTIES Actual; HRESULT hr = pIMemAlloc->SetProperties(pProperties,&Actual); if(FAILED(hr)) { return hr; } // Is this allocator unsuitable if(Actual.cbBuffer < pProperties->cbBuffer) { return E_FAIL; } // Make sure that we have only 1 buffer (we erase the ball in the // old buffer to save having to zero a 200k+ buffer every time // we draw a frame) ASSERT(Actual.cBuffers == 1); return NOERROR; }
// // GetMediaType // // Returns the supported media types in order of preferred types (starting with iPosition=0) // HRESULT CPushPinAudio::GetMediaType(int iPosition, CMediaType *pmt) { CheckPointer(pmt,E_POINTER); CAutoLock cAutoLock(m_pFilter->pStateLock()); if(iPosition < 0) return E_INVALIDARG; // Do we have more items to offer if (iPosition > 0) return VFW_S_NO_MORE_ITEMS; //WAVEFORMATEX *pwfxin = (WAVEFORMATEX *)m_mt.pbFormat; WAVEFORMATEXTENSIBLE *pwfx = (WAVEFORMATEXTENSIBLE *)pmt->AllocFormatBuffer(sizeof(WAVEFORMATEXTENSIBLE)); pwfx->Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE; pwfx->Format.cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX); pwfx->Format.nChannels = m_Wf.nChannels; pwfx->Format.nSamplesPerSec = m_Wf.nSamplesPerSec; pwfx->Format.wBitsPerSample = m_Wf.wBitsPerSample; pwfx->Format.nAvgBytesPerSec = pwfx->Format.nSamplesPerSec * pwfx->Format.wBitsPerSample * pwfx->Format.nChannels / 8; pwfx->Format.nBlockAlign = pwfx->Format.wBitsPerSample * pwfx->Format.nChannels / 8; pwfx->dwChannelMask = (1 << pwfx->Format.nChannels) - 1; pwfx->Samples.wValidBitsPerSample = pwfx->Format.wBitsPerSample; pwfx->SubFormat = MEDIASUBTYPE_PCM; pmt->SetFormat((BYTE*)pwfx, sizeof(WAVEFORMATEXTENSIBLE)); pmt->SetType(&MEDIATYPE_Audio); pmt->SetFormatType(&FORMAT_WaveFormatEx); pmt->SetTemporalCompression(FALSE); // Work out the GUID for the subtype from the header info. GUID SubTypeGUID = MEDIASUBTYPE_PCM; pmt->SetSubtype(&SubTypeGUID); pmt->SetSampleSize(1); return NOERROR; } // GetMediaType
STDMETHODIMP CDirectVobSub::get_TextSettings2(void* lf, int lflen, COLORREF* color1, COLORREF* color2, COLORREF* color3, COLORREF* color4, COLORREF* alpha1, COLORREF* alpha2, COLORREF* alpha3, COLORREF* alpha4, int* shadowDepthX, int* shadowDepthY, int* outlineWidthX, int* outlineWidthY, int* borderStyle)
{
    CAutoLock cAutoLock(&m_propsLock);

    if (lf) {
        // lflen selects whether the caller passed the narrow or wide LOGFONT variant.
        switch (lflen) {
            case sizeof(LOGFONTA):
                strncpy_s(((LOGFONTA*)lf)->lfFaceName, LF_FACESIZE, CStringA(m_defStyle.fontName), _TRUNCATE);
                break;
            case sizeof(LOGFONTW):
                wcsncpy_s(((LOGFONTW*)lf)->lfFaceName, LF_FACESIZE, CStringW(m_defStyle.fontName), _TRUNCATE);
                break;
            default:
                return E_INVALIDARG;
        }

        // The remaining fields sit at the same offsets in both A/W variants.
        LOGFONT* plf = (LOGFONT*)lf;
        plf->lfCharSet = m_defStyle.charSet;
        plf->lfItalic = m_defStyle.fItalic;
        plf->lfHeight = (LONG)m_defStyle.fontSize;
        plf->lfWeight = m_defStyle.fontWeight;
        plf->lfStrikeOut = m_defStyle.fStrikeOut;
        plf->lfUnderline = m_defStyle.fUnderline;
    }

    // Colors/alphas for the four style layers, then outline geometry.
    *color1 = m_defStyle.colors[0];
    *color2 = m_defStyle.colors[1];
    *color3 = m_defStyle.colors[2];
    *color4 = m_defStyle.colors[3];
    *alpha1 = m_defStyle.alpha[0];
    *alpha2 = m_defStyle.alpha[1];
    *alpha3 = m_defStyle.alpha[2];
    *alpha4 = m_defStyle.alpha[3];
    *shadowDepthX = m_defStyle.shadowDepthX;
    *shadowDepthY = m_defStyle.shadowDepthY;
    *outlineWidthX = m_defStyle.outlineWidthX;
    *outlineWidthY = m_defStyle.outlineWidthY;
    *borderStyle = m_defStyle.borderStyle;

    return S_OK;
}
// See Directshow help topic for IAMStreamConfig for details on this method HRESULT UVCamStream::GetMediaType(int iPosition, CMediaType *pmt) { CheckPointer(pmt,E_POINTER); CAutoLock cAutoLock(m_pFilter->pStateLock()); if(iPosition < 0) return E_INVALIDARG; if(iPosition > 8) return VFW_S_NO_MORE_ITEMS; if(iPosition == 0) { *pmt = m_mt; return S_OK; } DECLARE_PTR(VIDEOINFOHEADER, pvi, pmt->AllocFormatBuffer(sizeof(VIDEOINFOHEADER))); ZeroMemory(pvi, sizeof(VIDEOINFOHEADER)); pvi->bmiHeader.biCompression = BI_RGB; pvi->bmiHeader.biBitCount = 24; pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); pvi->bmiHeader.biWidth = 80 * iPosition; pvi->bmiHeader.biHeight = 60 * iPosition; pvi->bmiHeader.biPlanes = 1; pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); pvi->bmiHeader.biClrImportant = 0; pvi->AvgTimePerFrame = 1000000; SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered. SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle pmt->SetType(&MEDIATYPE_Video); pmt->SetFormatType(&FORMAT_VideoInfo); pmt->SetTemporalCompression(FALSE); // Work out the GUID for the subtype from the header info. const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader); pmt->SetSubtype(&SubTypeGUID); pmt->SetSampleSize(pvi->bmiHeader.biSizeImage); return NOERROR; } // GetMediaType
// Describes the output format for the currently selected bitmap.
HRESULT CPushPinBitmapSet::GetMediaType(CMediaType *pMediaType)
{
    CAutoLock cAutoLock(m_pFilter->pStateLock());
    CheckPointer(pMediaType, E_POINTER);

    // Without loaded bitmap files there is no format to describe.
    if (!m_bFilesLoaded)
        return E_FAIL;

    // Room for the VIDEOINFOHEADER preamble plus the bitmap's info/color table.
    const DWORD cbFormat = SIZE_PREHEADER + m_cbBitmapInfo[m_iCurrentBitmap];
    VIDEOINFOHEADER *pFormat = (VIDEOINFOHEADER*)pMediaType->AllocFormatBuffer(cbFormat);
    if (pFormat == 0)
        return(E_OUTOFMEMORY);

    ZeroMemory(pFormat, pMediaType->cbFormat);
    pFormat->AvgTimePerFrame = m_rtFrameLength;

    // Take the header straight from the current bitmap, then recompute the
    // image size used later by FillBuffer.
    memcpy(&(pFormat->bmiHeader), m_pBmi[m_iCurrentBitmap], m_cbBitmapInfo[m_iCurrentBitmap]);
    pFormat->bmiHeader.biSizeImage = GetBitmapSize(&pFormat->bmiHeader);

    // Whole image rendered, no specific destination rectangle.
    SetRectEmpty(&(pFormat->rcSource));
    SetRectEmpty(&(pFormat->rcTarget));

    pMediaType->SetType(&MEDIATYPE_Video);
    pMediaType->SetFormatType(&FORMAT_VideoInfo);
    pMediaType->SetTemporalCompression(FALSE);

    // The subtype GUID is derived from the bitmap header.
    const GUID subtype = GetBitmapSubtype(&pFormat->bmiHeader);
    pMediaType->SetSubtype(&subtype);
    pMediaType->SetSampleSize(pFormat->bmiHeader.biSizeImage);

    return S_OK;
}
// Offers a single 32bpp RGB media type sized to the receiver's image,
// with the width rounded down to a multiple of 4.
HRESULT CTMReceiverOutputPin::GetMediaType(int iPosition, CMediaType *pmt)
{
    CheckPointer(pmt,E_POINTER);
    CAutoLock cAutoLock(m_pFilter->pStateLock());

    if(iPosition < 0) {
        return E_INVALIDARG;
    }
    // Only a single media type is offered.
    if(iPosition > 0) {
        return VFW_S_NO_MORE_ITEMS;
    }

    VIDEOINFO* pvih = (VIDEOINFO*)pmt->AllocFormatBuffer(sizeof(VIDEOINFO));
    // Fix: AllocFormatBuffer() returns NULL on allocation failure; the original
    // dereferenced the pointer without checking it.
    CheckPointer(pvih, E_OUTOFMEMORY);
    // Fix: zero the whole block first — VIDEOINFO contains fields (bit rates,
    // palette area) that the code below never sets and which were previously
    // left uninitialized.
    ZeroMemory(pvih, sizeof(VIDEOINFO));

    CTMReceiverSrc* pSrc = (CTMReceiverSrc*)m_pFilter; // hoist the repeated casts
    const long width  = pSrc->GetImageWidth() / 4 * 4; // round down to multiple of 4
    const long height = pSrc->GetImageHeight();

    LPBITMAPINFOHEADER lpBitmapInfoHeader = &(pvih->bmiHeader);
    lpBitmapInfoHeader->biSize          = sizeof(BITMAPINFOHEADER);
    lpBitmapInfoHeader->biBitCount      = 32;
    lpBitmapInfoHeader->biWidth         = width;
    lpBitmapInfoHeader->biHeight        = height;
    lpBitmapInfoHeader->biPlanes        = 1;
    lpBitmapInfoHeader->biCompression   = BI_RGB;
    lpBitmapInfoHeader->biSizeImage     = width * height * 4; // 32bpp => 4 bytes/pixel
    lpBitmapInfoHeader->biXPelsPerMeter = 0;
    lpBitmapInfoHeader->biYPelsPerMeter = 0;
    lpBitmapInfoHeader->biClrUsed       = 0;
    lpBitmapInfoHeader->biClrImportant  = 0;

    pvih->AvgTimePerFrame = m_rtAvgTimePerFrame;

    SetRectEmpty(&(pvih->rcSource)); // we want the whole image area rendered.
    SetRectEmpty(&(pvih->rcTarget)); // no particular destination rectangle

    pmt->SetType(&MEDIATYPE_Video);
    pmt->SetFormatType(&FORMAT_VideoInfo);
    pmt->SetTemporalCompression(FALSE);

    // Work out the GUID for the subtype from the header info.
    const GUID SubTypeGUID = GetBitmapSubtype(&pvih->bmiHeader);
    pmt->SetSubtype(&SubTypeGUID);
    pmt->SetSampleSize(pvih->bmiHeader.biSizeImage);

    return S_OK;
}
// Completes a flush: discards buffered input, optionally re-arms the receive
// block, and forwards EndFlush to the base input pin.
STDMETHODIMP TffdshowDecAudioInputPin::EndFlush(void)
{
    DPRINTF(_l("TffdshowDecAudioInputPin::EndFlush"));
    CAutoLock cAutoLock(&m_csReceive);
    // Drop any partially processed data left over from before the flush.
    buf.clear();
    newSrcBuffer.clear();
    /*if (!IsConnected() || filter->m_pOutput == NULL || !filter->m_pOutput->IsConnected()) return VFW_E_NOT_CONNECTED; if (isActive()) return filter->m_pOutput->DeliverEndFlush(); else */
    // Re-engage the blocking gate if this pin is configured to block Receive().
    if (m_useBlock) {
        block(true);
    }
    return TinputPin::EndFlush();
}
//--------------------------------------------------------------------------- //! @brief バックバッファにメモリを割り当てる。 //! @param size : 割り当てるサイズ //---------------------------------------------------------------------------- void TBufferRenderer::AllocBackBuffer( size_t size ) { CAutoLock cAutoLock(&m_BufferLock); // クリティカルセクション BYTE *buff = NULL; FreeBackBuffer(); if( m_FrontBuffer == 1 ) { buff = m_Buffer[0] = reinterpret_cast<BYTE*>(CoTaskMemAlloc(size)); m_IsBufferOwner[0] = true; } else { buff = m_Buffer[1] = reinterpret_cast<BYTE*>(CoTaskMemAlloc(size)); m_IsBufferOwner[1] = true; } if( buff == NULL ) throw L"Cannot allocate memory in filter."; }
// Negotiates the allocator for this pin: a single buffer exactly one sample
// in size, as recorded in the agreed media type.
HRESULT StaticSourceVideoPin::DecideBufferSize(IMemAllocator * pAlloc, ALLOCATOR_PROPERTIES * pProperties)
{
    CheckPointer(pAlloc, E_POINTER);
    CheckPointer(pProperties, E_POINTER);

    HRESULT hr = S_OK;
    ALLOCATOR_PROPERTIES aProp;
    CAutoLock cAutoLock(this->m_pLock);

    // One buffer, sized to the sample size from the connected media type.
    pProperties->cBuffers = 1;
    pProperties->cbBuffer = this->m_mt.lSampleSize;

    // NOTE(review): CHECK_SUCCEED/CHECK_HR are project macros — presumably
    // they jump to the done: label on failure; confirm in the project header.
    CHECK_SUCCEED(hr = pAlloc->SetProperties(pProperties, &aProp));

    // The allocator may grant different figures even when SetProperties succeeds.
    if(aProp.cbBuffer != pProperties->cbBuffer)
        CHECK_HR(hr = E_FAIL);

done:
    return hr;
}
// Blocking read: waits until the internal buffer holds at least dwBytesToRead
// bytes past m_pos, then copies them to pbBuffer. bAlign is ignored.
HRESULT CLibraryStream::Read(PBYTE pbBuffer, DWORD dwBytesToRead, BOOL bAlign, LPDWORD pdwBytesRead)
{
    *pdwBytesRead = dwBytesToRead;

    // Busy-wait until enough data has been buffered.
    // NOTE(review): m_len/m_pos are read here without holding m_csLock —
    // presumably the writer only ever grows m_len; confirm against the producer.
    while (dwBytesToRead > m_len - m_pos) {
        Sleep(1);
    }

    {
        CAutoLock cAutoLock(&m_csLock);
        // Fix: bulk copy instead of the original byte-by-byte loop.
        CopyMemory(pbBuffer, &m_buffer[m_pos], *pdwBytesRead);
    }

    return S_OK;
}
// // SetMediaType // // Called when a media type is agreed between filters // HRESULT CPushPinAudio::SetMediaType(const CMediaType *pmt) { CAutoLock cAutoLock(m_pFilter->pStateLock()); // Pass the call up to my base class HRESULT hr = CSourceStream::SetMediaType(pmt); if(SUCCEEDED(hr)) { WAVEFORMATEX *pwfx = (WAVEFORMATEX *)m_mt.Format(); if (pwfx == NULL) return E_UNEXPECTED; // Save the current media type m_MediaType = *pmt; hr = S_OK; } return hr; } // SetMediaType
// // GetRecordStreamType // STDMETHODIMP CTSParserFilter::GetStreamType(WORD wProgramNumber, WORD wIndex, BYTE *pVal) { if(pVal == NULL) return E_INVALIDARG; CAutoLock cAutoLock(&m_ParserLock); HRESULT hr; REMUXER* pRemuxer; if ( m_pInputPin == NULL ) return E_FAIL; hr = m_pInputPin->GetParser( &pRemuxer ); if ( hr != NOERROR ) return hr; //ZQ TODO if ( StreamType( pRemuxer, wProgramNumber, wIndex, pVal ) ) return NO_ERROR; return E_INVALIDARG; }
// Records the agreed subtype and sample word size, then forwards to the base.
HRESULT audioSource::SetMediaType(const CMediaType *pMediaType)
{
    _RPT0(_CRT_WARN,"SetMediaType\n");
    CAutoLock cAutoLock(m_pFilter->pStateLock());

    if (!pMediaType)
        return E_INVALIDARG;

    subtype=*pMediaType->Subtype();
    _RPT1(_CRT_WARN,"SetMediaType %s\n",(subtype == MEDIASUBTYPE_PCM)?"PCM":"FLOAT");

    WAVEFORMATEX *fmt = (WAVEFORMATEX*) pMediaType->Format();
    // Fix: guard against a media type with no format block before dereferencing
    // (the sibling CPushPinAudio::SetMediaType performs the same check).
    if (fmt == NULL)
        return E_UNEXPECTED;
    wordSize = fmt->nBlockAlign;

    HRESULT hr = CSourceStream::SetMediaType(pMediaType);
    _RPT2(_CRT_WARN,"SetMediaType hr %d %d\n",hr,FAILED(hr));
    return hr;
}
// Describes the fixed-size RGB32 frame used by the subtitle preview window.
HRESULT CSubtitleSourcePreview::GetMediaType(CMediaType* pmt)
{
    CAutoLock cAutoLock(pStateLock());

    pmt->InitMediaType();
    pmt->SetType(&MEDIATYPE_Video);
    pmt->SetSubtype(&MEDIASUBTYPE_RGB32);
    pmt->SetFormatType(&FORMAT_VideoInfo);

    VIDEOINFOHEADER* pvih = (VIDEOINFOHEADER*)pmt->AllocFormatBuffer(sizeof(VIDEOINFOHEADER));
    // Fix: AllocFormatBuffer() returns NULL on allocation failure; the original
    // passed the unchecked pointer straight to memset.
    if (!pvih) {
        return E_OUTOFMEMORY;
    }
    memset(pvih, 0, pmt->FormatLength());

    pvih->bmiHeader.biSize = sizeof(pvih->bmiHeader);
    pvih->bmiHeader.biWidth = _WIDTH;
    pvih->bmiHeader.biHeight = _HEIGHT;
    pvih->bmiHeader.biBitCount = 32;
    pvih->bmiHeader.biCompression = BI_RGB;
    pvih->bmiHeader.biPlanes = 1;
    // width * |height| * bytes-per-pixel (>>3 converts bits to bytes)
    pvih->bmiHeader.biSizeImage = pvih->bmiHeader.biWidth*abs(pvih->bmiHeader.biHeight)*pvih->bmiHeader.biBitCount>>3;

    return NOERROR;
}
STDMETHODIMP CDirectVobSub::put_LoadSettings(int level, bool fExternalLoad, bool fWebLoad, bool fEmbeddedLoad)
{
    AFX_MANAGE_STATE(AfxGetStaticModuleState());
    CAutoLock cAutoLock(&m_propsLock);

    // Skip the registry writes when nothing actually changed.
    int curLevel;
    bool curFlags[3];
    get_LoadSettings(&curLevel, &curFlags[0], &curFlags[1], &curFlags[2]);

    const bool unchanged = curLevel == level
                           && curFlags[0] == fExternalLoad
                           && curFlags[1] == fWebLoad
                           && curFlags[2] == fEmbeddedLoad;
    if (unchanged) {
        return S_FALSE;
    }

    // Persist the new settings (level is a 2-bit value).
    theApp.WriteProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_LOADLEVEL), level & 3);
    theApp.WriteProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_EXTERNALLOAD), fExternalLoad);
    theApp.WriteProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_WEBLOAD), fWebLoad);
    theApp.WriteProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_EMBEDDEDLOAD), fEmbeddedLoad);

    return S_OK;
}
STDMETHODIMP CDirectVobSub::put_ExtendPicture(int horizontal, int vertical, int resx2, int resx2minw, int resx2minh)
{
    AFX_MANAGE_STATE(AfxGetStaticModuleState());
    CAutoLock cAutoLock(&m_propsLock);

    // Skip the registry writes when nothing actually changed.
    int cur[5];
    get_ExtendPicture(&cur[0], &cur[1], &cur[2], &cur[3], &cur[4]);

    const bool unchanged = cur[0] == horizontal
                           && cur[1] == vertical
                           && cur[2] == resx2
                           && cur[3] == resx2minw
                           && cur[4] == resx2minh;
    if (unchanged) {
        return S_FALSE;
    }

    // Persist the new settings (horizontal is 1 bit, resx2 is 2 bits).
    theApp.WriteProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_MOD32FIX), horizontal & 1);
    theApp.WriteProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_EXTPIC), vertical);
    theApp.WriteProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_RESX2), resx2 & 3);
    theApp.WriteProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_RESX2MINW), resx2minw);
    theApp.WriteProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_RESX2MINH), resx2minh);

    return S_OK;
}
//**************************************************************************** // Initialise a CRateSourceStream object so that we have a pin. //**************************************************************************** // CRateSource::CRateSource(LPUNKNOWN lpunk, HRESULT *phr) : CSource(NAME("RateSource"), lpunk, CLSID_RateSource) { ASSERT(phr); CAutoLock cAutoLock(&m_cStateLock); m_paStreams = (CSourceStream **) new CRateSourceStream*[1]; if (m_paStreams == NULL) { *phr = E_OUTOFMEMORY; return; } m_paStreams[0] = new CRateSourceStream(phr, this, L"RateSource!"); if (m_paStreams[0] == NULL) { *phr = E_OUTOFMEMORY; return; } }
// This method is called after the pins are connected to allocate buffers to stream data HRESULT CVCamStream::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProperties) { // lock the buffer so that nothing changes during this state. The lock is removed when the current scope ends. CAutoLock cAutoLock(m_pFilter->pStateLock()); HRESULT hr = NOERROR; // make only one buffer, of "image size" VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)m_mt.Format(); pProperties->cBuffers = 1; pProperties->cbBuffer = pvi->bmiHeader.biSizeImage; ALLOCATOR_PROPERTIES Actual; hr = pAlloc->SetProperties(pProperties, &Actual); // error handing: make sure the properties are okay if (FAILED(hr)) return hr; if (Actual.cbBuffer < pProperties->cbBuffer) return E_FAIL; return NOERROR; } // DecideBufferSize
// Builds the SSA subtitle media type: the subtitle file is loaded, re-saved as
// UTF-8 SSA to a temp file, and the raw SSA bytes (minus the BOM) are embedded
// after the SUBTITLEINFO header in the format block.
HRESULT CSubtitleSourceSSA::GetMediaType(CMediaType* pmt)
{
    CAutoLock cAutoLock(pStateLock());

    pmt->InitMediaType();
    pmt->SetType(&MEDIATYPE_Subtitle);
    pmt->SetSubtype(&MEDIASUBTYPE_SSA);
    pmt->SetFormatType(&FORMAT_SubtitleInfo);

    CSimpleTextSubtitle sts;
    sts.Open(CString(m_fn), DEFAULT_CHARSET);
    sts.RemoveAll();

    CFile f;
    TCHAR path[_MAX_PATH], fn[_MAX_PATH];
    if (!GetTempPath(MAX_PATH, path) || !GetTempFileName(path, _T("mpc_sts"), 0, fn)) {
        return E_FAIL;
    }
    _tremove(fn);
    _tcscat_s(fn, _T(".ssa"));

    if (!sts.SaveAs(fn, EXTSSA, -1, CTextFile::UTF8) || !f.Open(fn, CFile::modeRead)) {
        return E_FAIL;
    }

    // Skip the 3-byte UTF-8 BOM written by SaveAs.
    int len = (int)f.GetLength()-3;
    // Fix: a file shorter than the BOM would make len negative and corrupt the
    // AllocFormatBuffer size below.
    if (len < 0) {
        len = 0;
    }
    f.Seek(3, CFile::begin);

    SUBTITLEINFO* psi = (SUBTITLEINFO*)pmt->AllocFormatBuffer(sizeof(SUBTITLEINFO) + len);
    // Fix: AllocFormatBuffer() returns NULL on allocation failure; the original
    // passed the unchecked pointer straight to memset.
    if (!psi) {
        return E_OUTOFMEMORY;
    }
    memset(psi, 0, pmt->FormatLength());
    psi->dwOffset = sizeof(SUBTITLEINFO);
    strcpy_s(psi->IsoLang, "eng"); // NOTE(review): language is hard-coded here
    f.Read(pmt->pbFormat + psi->dwOffset, len);
    f.Close();

    _tremove(fn); // drop the temp file once its contents are copied

    return NOERROR;
}
// Describes the fixed-size ARGB32 frame used for rendered subtitles.
HRESULT CSubtitleSourceARGB::GetMediaType(CMediaType* pmt)
{
    CAutoLock cAutoLock(pStateLock());

    pmt->InitMediaType();
    pmt->SetType(&MEDIATYPE_Video);
    pmt->SetSubtype(&MEDIASUBTYPE_ARGB32);
    pmt->SetFormatType(&FORMAT_VideoInfo);

    VIDEOINFOHEADER* pvih = (VIDEOINFOHEADER*)pmt->AllocFormatBuffer(sizeof(VIDEOINFOHEADER));
    // Fix: AllocFormatBuffer() returns NULL on allocation failure; the original
    // passed the unchecked pointer straight to ZeroMemory.
    if (!pvih) {
        return E_OUTOFMEMORY;
    }
    ZeroMemory(pvih, pmt->FormatLength());

    pvih->bmiHeader.biSize = sizeof(pvih->bmiHeader);
    // TODO: read w,h,fps from a config file or registry
    pvih->bmiHeader.biWidth = _WIDTH;
    pvih->bmiHeader.biHeight = _HEIGHT;
    pvih->bmiHeader.biBitCount = 32;
    pvih->bmiHeader.biCompression = BI_RGB;
    pvih->bmiHeader.biPlanes = 1;
    // width * |height| * bytes-per-pixel (>>3 converts bits to bytes)
    pvih->bmiHeader.biSizeImage = pvih->bmiHeader.biWidth * abs(pvih->bmiHeader.biHeight) * pvih->bmiHeader.biBitCount >> 3;

    return NOERROR;
}
STDMETHODIMP CDirectVobSub::get_LoadSettings(int* level, bool* fExternalLoad, bool* fWebLoad, bool* fEmbeddedLoad)
{
    AFX_MANAGE_STATE(AfxGetStaticModuleState());
    CAutoLock cAutoLock(&m_propsLock);

    // Every out-parameter is optional; read each setting from the registry
    // only when the caller asked for it.
    if (level)
        *level = theApp.GetProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_LOADLEVEL), 0) & 3; // 2-bit level

    if (fExternalLoad)
        *fExternalLoad = !!theApp.GetProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_EXTERNALLOAD), 1);

    if (fWebLoad)
        *fWebLoad = !!theApp.GetProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_WEBLOAD), 0);

    if (fEmbeddedLoad)
        *fEmbeddedLoad = !!theApp.GetProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_EMBEDDEDLOAD), 1);

    return S_OK;
}
STDMETHODIMP CInternalPropertyPage::GetPageInfo(PROPPAGEINFO* pPageInfo)
{
    CAutoLock cAutoLock(this);
    CheckPointer(pPageInfo, E_POINTER);

    // Duplicate the page title into COM-allocated memory owned by the caller.
    LPOLESTR pszTitle;
    const HRESULT hr = AMGetWideString(CStringW(GetWindowTitle()), &pszTitle);
    if (FAILED(hr)) {
        return hr;
    }

    pPageInfo->cb = sizeof(PROPPAGEINFO);
    pPageInfo->size = GetWindowSize();
    pPageInfo->pszTitle = pszTitle;
    // No doc string, help file, or help context for internal pages.
    pPageInfo->pszDocString = nullptr;
    pPageInfo->pszHelpFile = nullptr;
    pPageInfo->dwHelpContext = 0;

    return S_OK;
}
// Hands a packet to the muxer queue, throttling the producer while the queue
// is full.
void CBaseMuxerInputPin::PushPacket(CAutoPtr<MuxerPacket> pPacket)
{
    // Wait (up to 1000 iterations of a 1 ms event wait, ~1 s) until the muxer
    // signals it can accept another packet, bailing out early if the filter
    // stops or a flush begins.
    for (int i = 0; m_pFilter->IsActive() && !m_bFlushing && !m_evAcceptPacket.Wait(1) && i < 1000; i++) {
        ;
    }

    // Drop the packet if the filter is no longer active or is flushing.
    if (!m_pFilter->IsActive() || m_bFlushing) {
        return;
    }

    CAutoLock cAutoLock(&m_csQueue);

    m_queue.AddTail(pPacket);

    // Once the queue reaches capacity, clear the event so producers block
    // above until the consumer drains the queue and sets it again.
    if (m_queue.GetCount() >= MAXQUEUESIZE) {
        m_evAcceptPacket.Reset();
    }
}
//---------------------------------------------------------------------------- //! @brief バックバッファを設定します。 //! @param buff : バックバッファ用バッファへのポインタ //! @param size : バッファのサイズを渡す変数へのポインタ。@n //! buffがNULLの時、ここに欲しいサイズが返る //! @return エラーコード //---------------------------------------------------------------------------- HRESULT TBufferRenderer::SetBackBuffer( BYTE *buff, long *size ) { if( m_State == State_Running ) return S_FALSE; CAutoLock cAutoLock(&m_BufferLock); // クリティカルセクション if( buff == NULL && size != NULL ) { *size = GetBufferSize(); return S_OK; } if( buff == NULL || size == NULL ) return E_POINTER; if( (*size) != GetBufferSize() ) return E_INVALIDARG; FreeBackBuffer(); SetBackBuffer(buff); return S_OK; }
// Queue a new packet at the end of the list void CPacketQueue::Queue(Packet *pPacket) { CAutoLock cAutoLock(this); if (pPacket) m_dataSize += pPacket->GetDataSize(); m_queue.push_back(pPacket); #ifdef DEBUG if (m_queue.size() > MAX_PACKETS_IN_QUEUE && !m_bWarnedFull) { DbgLog((LOG_TRACE, 20, L"CPacketQueue::Queue() - Queue is Full (%d elements)", m_queue.size())); m_bWarnedFull = true; } else if (m_queue.size() > 10*MAX_PACKETS_IN_QUEUE && !m_bWarnedExtreme) { DbgLog((LOG_TRACE, 20, L"CPacketQueue::Queue() - Queue is Extremely Full (%d elements)", m_queue.size())); m_bWarnedExtreme = true; } else if (m_queue.size() < MAX_PACKETS_IN_QUEUE/2) { m_bWarnedFull = m_bWarnedExtreme = false; } #endif }
// Starts a new segment: clears buffered data, resets timing state, notifies
// the parser and codec, and forwards the call to the base input pin.
STDMETHODIMP TffdshowDecAudioInputPin::NewSegment(REFERENCE_TIME tStart, REFERENCE_TIME tStop, double dRate)
{
    DPRINTF(_l("TffdshowDecAudioInputPin::NewSegment"));
    CAutoLock cAutoLock(&m_csReceive);

    // Discard anything buffered from the previous segment and reset timing.
    newSrcBuffer.clear();
    buf.clear();
    jitter = 0;
    insample_rtStop = REFTIME_INVALID;
    insample_rtStart = REFTIME_INVALID;

    // Tell the parser that the position has changed so the parsing should take
    // this into account (it won't receive the next expected buffer).
    if (audioParser)
        audioParser->NewSegment();

    const HRESULT hr = TinputPin::NewSegment(tStart, tStop, dRate);
    if (codec && hr == S_OK)
        codec->onSeek(tStart);
    return hr;
}
// Negotiates the allocator: a single byte-aligned buffer of the filter's
// fixed output size.
HRESULT StreamOutputPin::DecideBufferSize(IMemAllocator * pAlloc, ALLOCATOR_PROPERTIES * ppropInputRequest)
{
    CheckPointer(pAlloc, E_POINTER);
    CheckPointer(ppropInputRequest, E_POINTER);

    HRESULT hr = S_OK;
    ALLOCATOR_PROPERTIES aProp;
    CAutoLock cAutoLock(this->m_pLock);

    // One buffer, byte-aligned, sized to the filter's fixed output size.
    ppropInputRequest->cBuffers = 1;
    ppropInputRequest->cbAlign = 1;
    ppropInputRequest->cbBuffer = HMCFilter::OutBufferSize;

    // NOTE(review): CHECK_SUCCEED/CHECK_HR are project macros — presumably
    // they jump to the done: label on failure; confirm in the project header.
    CHECK_SUCCEED(hr = pAlloc->SetProperties(ppropInputRequest, &aProp));

    // The allocator may grant less than requested even when SetProperties succeeds.
    if(aProp.cbBuffer < ppropInputRequest->cbBuffer)
        CHECK_HR(hr = E_FAIL);

done:
    return hr;
}
// Installs a new subpicture provider and repaints with defaults restored.
STDMETHODIMP_(void) CSubPicAllocatorPresenterImpl::SetSubPicProvider(ISubPicProvider* pSubPicProvider)
{
    CAutoLock cAutoLock(&m_csSubPicProvider);
    m_pSubPicProvider = pSubPicProvider;

    // Reset the default state to be sure text subtitles will be displayed right.
    // Subtitles with specific requirements will adapt those values later.
    if (m_pAllocator) {
        m_pAllocator->SetMaxTextureSize(m_maxSubtitleTextureSize);
        m_pAllocator->SetCurSize(m_windowRect.Size());
        m_pAllocator->SetCurVidRect(m_videoRect);
        m_pAllocator->FreeStatic();
    }

    // Propagate the new provider to the queue so upcoming subpics use it.
    if (m_pSubPicQueue) {
        m_pSubPicQueue->SetSubPicProvider(pSubPicProvider);
    }

    // Repaint immediately so the provider change is visible right away.
    Paint(false);
}