/* Connection-complete hook for the AVI decoder: nothing needs negotiating
 * at this point, so it just traces and reports success. */
static HRESULT WINAPI AVIDec_CompleteConnect(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    AVIDecImpl *decoder = impl_from_TransformFilter(tf);

    TRACE("(%p)\n", decoder);

    return S_OK;
}
/* Validate a proposed input media type for the ACM wrapper and, if a
 * suitable ACM decompressor exists, derive the PCM output type from it.
 *
 * tf:  the transform filter being (re)configured.
 * dir: which pin the type is for; output-pin types are accepted as-is.
 * pmt: the proposed input media type (audio, WAVEFORMATEX format block).
 *
 * Returns S_OK when a decompressor was opened (stored in This->has) and the
 * output type was set up, VFW_E_TYPE_NOT_ACCEPTED otherwise.
 */
static HRESULT WINAPI ACMWrapper_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE * pmt)
{
    ACMWrapperImpl* This = impl_from_TransformFilter(tf);
    MMRESULT res;

    TRACE("(%p)->(%i %p)\n", This, dir, pmt);

    if (dir != PINDIR_INPUT)
        return S_OK;

    /* Check root (GUID w/o FOURCC): accept any audio subtype whose GUID
     * matches MEDIATYPE_Audio after the first 4 bytes (the FOURCC). */
    if ((IsEqualIID(&pmt->majortype, &MEDIATYPE_Audio)) &&
        (!memcmp(((const char *)&pmt->subtype)+4, ((const char *)&MEDIATYPE_Audio)+4, sizeof(GUID)-4)) &&
        (IsEqualIID(&pmt->formattype, &FORMAT_WaveFormatEx)))
    {
        HACMSTREAM drv;
        WAVEFORMATEX *wfx = (WAVEFORMATEX*)pmt->pbFormat;
        AM_MEDIA_TYPE* outpmt = &This->tf.pmt;

        /* Already-PCM (or extensible) input needs no ACM conversion; refuse
         * it so another filter handles the connection. */
        if (!wfx || wfx->wFormatTag == WAVE_FORMAT_PCM || wfx->wFormatTag == WAVE_FORMAT_EXTENSIBLE)
            return VFW_E_TYPE_NOT_ACCEPTED;

        FreeMediaType(outpmt);

        /* NOTE: pWfIn aliases the caller's format block rather than copying
         * it — assumes pmt->pbFormat outlives the connection. */
        This->pWfIn = (LPWAVEFORMATEX)pmt->pbFormat;

        /* HACK */
        /* TRACE("ALIGN = %d\n", pACMWrapper->pWfIn->nBlockAlign); */
        /* pACMWrapper->pWfIn->nBlockAlign = 1; */

        /* Set output audio data to PCM: clone the input type, then rewrite
         * the subtype FOURCC and the wave format to 16-bit PCM. */
        CopyMediaType(outpmt, pmt);
        outpmt->subtype.Data1 = WAVE_FORMAT_PCM;

        This->pWfOut = (WAVEFORMATEX*)outpmt->pbFormat;
        This->pWfOut->wFormatTag = WAVE_FORMAT_PCM;
        This->pWfOut->wBitsPerSample = 16;
        This->pWfOut->nBlockAlign = This->pWfOut->wBitsPerSample * This->pWfOut->nChannels / 8;
        This->pWfOut->cbSize = 0;
        This->pWfOut->nAvgBytesPerSec = This->pWfOut->nChannels * This->pWfOut->nSamplesPerSec
                                        * (This->pWfOut->wBitsPerSample/8);

        /* Probe for a codec that can do this conversion; keep the stream
         * handle for Receive() if one is found. */
        if (!(res = acmStreamOpen(&drv, NULL, This->pWfIn, This->pWfOut, NULL, 0, 0, 0)))
        {
            This->has = drv;
            TRACE("Connection accepted\n");
            return S_OK;
        }
        else
            FIXME("acmStreamOpen returned %d\n", res);
        FreeMediaType(outpmt);
        TRACE("Unable to find a suitable ACM decompressor\n");
    }

    TRACE("Connection refused\n");
    return VFW_E_TYPE_NOT_ACCEPTED;
}
/* Quality-control notification: remember how late rendering is so that
 * Receive() can decide to hurry up / drop frames.  A non-positive lateness
 * clears the marker (-1 means "not late"). */
static HRESULT WINAPI AVIDec_NotifyDrop(TransformFilter *pTransformFilter, IBaseFilter *sender, Quality qm)
{
    AVIDecImpl *decoder = impl_from_TransformFilter(pTransformFilter);

    EnterCriticalSection(&decoder->tf.filter.csFilter);
    decoder->late = (qm.Late > 0) ? qm.Late + qm.TimeStamp : -1;
    LeaveCriticalSection(&decoder->tf.filter.csFilter);

    return S_OK;
}
/* Tear down the QuickTime decompression session when streaming stops. */
static HRESULT WINAPI QTVDecoder_StopStreaming(TransformFilter* pTransformFilter)
{
    QTVDecoderImpl *decoder = impl_from_TransformFilter(pTransformFilter);

    TRACE("(%p)->()\n", decoder);

    if (decoder->decompressionSession)
        ICMDecompressionSessionRelease(decoder->decompressionSession);
    decoder->decompressionSession = NULL;

    return S_OK;
}
/* Feed one compressed input sample to the QuickTime decompression session.
 *
 * The session decodes asynchronously: the frame is queued with a
 * "non-scheduled display time", then explicitly pulled out via
 * SetNonScheduledDisplayTime + Flush.  The decode result is read back from
 * This->decodeHR — presumably set by the session's tracking callback during
 * the flush (callback not visible here; verify against StartStreaming).
 */
static HRESULT WINAPI QTVDecoder_Receive(TransformFilter *tf, IMediaSample *pSample)
{
    QTVDecoderImpl* This = impl_from_TransformFilter(tf);
    HRESULT hr;
    DWORD cbSrcStream;
    LPBYTE pbSrcStream;
    ICMFrameTimeRecord frameTime = {{0}};
    TimeValue time = 1;
    TimeScale timeScale = 1;
    OSStatus err = noErr;
    LONGLONG tStart, tStop;

    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        goto error;
    }

    cbSrcStream = IMediaSample_GetActualDataLength(pSample);

    /* Samples without timestamps are treated as starting at time 0. */
    if (IMediaSample_GetTime(pSample, &tStart, &tStop) != S_OK)
        tStart = tStop = 0;

    time = tStart;

    /* Describe the frame's timing to QuickTime; the non-scheduled-display
     * flag lets us force the decoded frame out explicitly below. */
    frameTime.recordSize = sizeof(ICMFrameTimeRecord);
    *(TimeValue64 *)&frameTime.value = tStart;
    frameTime.scale = 1;
    frameTime.rate = fixed1;
    frameTime.duration = tStop - tStart;
    frameTime.frameNumber = 0;
    frameTime.flags = icmFrameTimeIsNonScheduledDisplayTime;

    /* pSample is passed as the source-frame refcon so the callback can
     * associate the output with this input sample. */
    err = ICMDecompressionSessionDecodeFrame(This->decompressionSession,
            (UInt8 *)pbSrcStream, cbSrcStream, NULL, &frameTime, pSample);

    if (err != noErr)
    {
        ERR("Error with ICMDecompressionSessionDecodeFrame\n");
        hr = E_FAIL;
        goto error;
    }

    /* Request display of the queued frame and flush the session so the
     * decode completes synchronously from our point of view. */
    ICMDecompressionSessionSetNonScheduledDisplayTime(This->decompressionSession, time, timeScale, 0);
    ICMDecompressionSessionFlush(This->decompressionSession);
    hr = This->decodeHR;

error:
    return hr;
}
/* Release the cached image description and pixel-buffer attributes when a
 * pin disconnects. */
static HRESULT WINAPI QTVDecoder_BreakConnect(TransformFilter *tf, PIN_DIRECTION dir)
{
    QTVDecoderImpl *decoder = impl_from_TransformFilter(tf);

    TRACE("(%p)->()\n", decoder);

    if (decoder->hImageDescription)
        DisposeHandle((Handle)decoder->hImageDescription);
    decoder->hImageDescription = NULL;

    if (decoder->outputBufferAttributes)
        CFRelease(decoder->outputBufferAttributes);
    decoder->outputBufferAttributes = NULL;

    return S_OK;
}
/* Fill in allocator properties for the output pin: default alignment and
 * buffer count of 1, and a buffer large enough for one decompressed frame. */
static HRESULT WINAPI AVIDec_DecideBufferSize(TransformFilter *tf, IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest)
{
    AVIDecImpl *decoder = impl_from_TransformFilter(tf);
    ALLOCATOR_PROPERTIES returned;

    if (ppropInputRequest->cbAlign == 0)
        ppropInputRequest->cbAlign = 1;
    if (ppropInputRequest->cbBuffer < decoder->pBihOut->biSizeImage)
        ppropInputRequest->cbBuffer = decoder->pBihOut->biSizeImage;
    if (ppropInputRequest->cBuffers == 0)
        ppropInputRequest->cBuffers = 1;

    return IMemAllocator_SetProperties(pAlloc, ppropInputRequest, &returned);
}
/* Begin a VFW decompression run: reset the quality-control lateness marker
 * and tell the codec about the input/output bitmap formats. */
static HRESULT WINAPI AVIDec_StartStreaming(TransformFilter* pTransformFilter)
{
    AVIDecImpl *decoder = impl_from_TransformFilter(pTransformFilter);
    DWORD rc;

    TRACE("(%p)->()\n", decoder);

    decoder->late = -1;

    rc = ICDecompressBegin(decoder->hvid, decoder->pBihIn, decoder->pBihOut);
    if (rc != ICERR_OK)
    {
        ERR("Cannot start processing (%d)\n", rc);
        return E_FAIL;
    }

    return S_OK;
}
/* Fill in allocator properties for the output pin: default alignment and
 * buffer count of 1, and room for half a second of decoded PCM audio. */
static HRESULT WINAPI ACMWrapper_DecideBufferSize(TransformFilter *tf, IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest)
{
    ACMWrapperImpl *wrapper = impl_from_TransformFilter(tf);
    ALLOCATOR_PROPERTIES returned;
    DWORD half_second = wrapper->pWfOut->nAvgBytesPerSec / 2;

    if (ppropInputRequest->cbAlign == 0)
        ppropInputRequest->cbAlign = 1;
    if (ppropInputRequest->cbBuffer < half_second)
        ppropInputRequest->cbBuffer = half_second;
    if (ppropInputRequest->cBuffers == 0)
        ppropInputRequest->cBuffers = 1;

    return IMemAllocator_SetProperties(pAlloc, ppropInputRequest, &returned);
}
/* Close the ACM conversion stream and reset timestamp tracking when the
 * input pin disconnects; output-pin disconnects need no cleanup. */
static HRESULT WINAPI ACMWrapper_BreakConnect(TransformFilter *tf, PIN_DIRECTION dir)
{
    ACMWrapperImpl *wrapper = impl_from_TransformFilter(tf);

    TRACE("(%p)->(%i)\n", wrapper, dir);

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (wrapper->has)
        acmStreamClose(wrapper->has, 0);
    wrapper->has = 0;
    wrapper->lasttime_real = wrapper->lasttime_sent = -1;

    return S_OK;
}
/* End the VFW decompression run; a no-op when no codec was ever opened. */
static HRESULT WINAPI AVIDec_StopStreaming(TransformFilter* pTransformFilter)
{
    AVIDecImpl *decoder = impl_from_TransformFilter(pTransformFilter);
    DWORD rc;

    TRACE("(%p)->()\n", decoder);

    if (!decoder->hvid)
        return S_OK;

    rc = ICDecompressEnd(decoder->hvid);
    if (rc != ICERR_OK)
    {
        ERR("Cannot stop processing (%d)\n", rc);
        return E_FAIL;
    }

    return S_OK;
}
/* Fill in allocator properties for the output pin: defaults of 1 for
 * alignment and buffer count, and a buffer big enough for one decoded
 * frame plus alignment slack. */
static HRESULT WINAPI QTVDecoder_DecideBufferSize(TransformFilter *tf, IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest)
{
    QTVDecoderImpl *decoder = impl_from_TransformFilter(tf);
    ALLOCATOR_PROPERTIES returned;

    TRACE("()\n");

    if (ppropInputRequest->cbAlign == 0)
        ppropInputRequest->cbAlign = 1;
    if (ppropInputRequest->cbBuffer < decoder->outputSize)
        ppropInputRequest->cbBuffer = decoder->outputSize + ppropInputRequest->cbAlign;
    if (ppropInputRequest->cBuffers == 0)
        ppropInputRequest->cBuffers = 1;

    return IMemAllocator_SetProperties(pAlloc, ppropInputRequest, &returned);
}
/* Create the QuickTime decompression session used by Receive().  Decoded
 * frames are delivered through trackingCallback with this filter instance
 * as the refcon. */
static HRESULT WINAPI QTVDecoder_StartStreaming(TransformFilter* pTransformFilter)
{
    QTVDecoderImpl *decoder = impl_from_TransformFilter(pTransformFilter);
    ICMDecompressionSessionOptionsRef options = NULL;
    ICMDecompressionTrackingCallbackRecord tracker;
    OSErr status = noErr;

    TRACE("(%p)->()\n", decoder);

    tracker.decompressionTrackingCallback = trackingCallback;
    tracker.decompressionTrackingRefCon = (void*)decoder;

    status = ICMDecompressionSessionCreate(NULL, decoder->hImageDescription, options,
                                           decoder->outputBufferAttributes, &tracker,
                                           &decoder->decompressionSession);
    if (status != noErr)
    {
        ERR("Error with ICMDecompressionSessionCreate %i\n", status);
        return E_FAIL;
    }

    return S_OK;
}
/* Release the VFW decompressor and the cached bitmap headers when the
 * input pin disconnects; output-pin disconnects need no cleanup.
 *
 * tf:  the transform filter whose connection is being torn down.
 * dir: the disconnecting pin's direction.
 * Always returns S_OK.
 */
static HRESULT WINAPI AVIDec_BreakConnect(TransformFilter *tf, PIN_DIRECTION dir)
{
    AVIDecImpl *This = impl_from_TransformFilter(tf);

    TRACE("(%p)->()\n", This);

    if (dir == PINDIR_INPUT)
    {
        if (This->hvid)
            ICClose(This->hvid);
        /* CoTaskMemFree() is documented to accept NULL, so no guards are
         * needed around these frees. */
        CoTaskMemFree(This->pBihIn);
        CoTaskMemFree(This->pBihOut);

        This->hvid = NULL;
        This->pBihIn = NULL;
        This->pBihOut = NULL;
    }

    return S_OK;
}
/* On input-pin connection completion, (re)open the ACM conversion stream
 * for the negotiated input/output wave formats. */
static HRESULT WINAPI ACMWrapper_CompleteConnect(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    ACMWrapperImpl *wrapper = impl_from_TransformFilter(tf);
    HACMSTREAM stream;
    MMRESULT mr;

    TRACE("(%p)\n", wrapper);

    if (dir != PINDIR_INPUT)
        return S_OK;

    mr = acmStreamOpen(&stream, NULL, wrapper->pWfIn, wrapper->pWfOut, NULL, 0, 0, 0);
    if (mr)
    {
        FIXME("acmStreamOpen returned %d\n", mr);
        TRACE("Unable to find a suitable ACM decompressor\n");
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    wrapper->has = stream;
    TRACE("Connection accepted\n");
    return S_OK;
}
/* Decompress one input video sample with the VFW codec and deliver the
 * resulting frame downstream.
 *
 * Holds tf.csReceive throughout, except around the downstream Deliver()
 * call (released to avoid deadlocking with the renderer).  Samples flagged
 * for hurry-up by quality control are decoded but not delivered.
 */
static HRESULT WINAPI AVIDec_Receive(TransformFilter *tf, IMediaSample *pSample)
{
    AVIDecImpl* This = impl_from_TransformFilter(tf);
    AM_MEDIA_TYPE amt;
    HRESULT hr;
    DWORD res;
    IMediaSample* pOutSample = NULL;
    DWORD cbDstStream;
    LPBYTE pbDstStream;
    DWORD cbSrcStream;
    LPBYTE pbSrcStream;
    LONGLONG tStart, tStop;
    DWORD flags = 0;

    EnterCriticalSection(&This->tf.csReceive);

    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        goto error;
    }

    cbSrcStream = IMediaSample_GetActualDataLength(pSample);
    TRACE("Sample data ptr = %p, size = %d\n", pbSrcStream, cbSrcStream);

    /* NOTE(review): amt does not appear to be used or freed after this
     * call — looks like a FreeMediaType(&amt) is missing; verify. */
    hr = IPin_ConnectionMediaType(This->tf.ppPins[0], &amt);
    if (FAILED(hr))
    {
        ERR("Unable to retrieve media type\n");
        goto error;
    }

    /* Update input size to match sample size */
    This->pBihIn->biSizeImage = cbSrcStream;

    hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &pOutSample, NULL, NULL, 0);
    if (FAILED(hr)) {
        ERR("Unable to get delivery buffer (%x)\n", hr);
        goto error;
    }

    hr = IMediaSample_SetActualDataLength(pOutSample, 0);
    assert(hr == S_OK);

    hr = IMediaSample_GetPointer(pOutSample, &pbDstStream);
    if (FAILED(hr)) {
        ERR("Unable to get pointer to buffer (%x)\n", hr);
        goto error;
    }
    cbDstStream = IMediaSample_GetSize(pOutSample);
    if (cbDstStream < This->pBihOut->biSizeImage) {
        ERR("Sample size is too small %d < %d\n", cbDstStream, This->pBihOut->biSizeImage);
        hr = E_FAIL;
        goto error;
    }

    /* Map sample properties to VFW decompress flags. */
    if (IMediaSample_IsPreroll(pSample) == S_OK)
        flags |= ICDECOMPRESS_PREROLL;
    if (IMediaSample_IsSyncPoint(pSample) != S_OK)
        flags |= ICDECOMPRESS_NOTKEYFRAME;
    /* hr is reused below when timestamping the output sample. */
    hr = IMediaSample_GetTime(pSample, &tStart, &tStop);
    if (hr == S_OK && AVIDec_DropSample(This, tStart))
        flags |= ICDECOMPRESS_HURRYUP;

    res = ICDecompress(This->hvid, flags, This->pBihIn, pbSrcStream, This->pBihOut, pbDstStream);
    if (res != ICERR_OK)
        ERR("Error occurred during the decompression (%x)\n", res);

    /* Drop sample if its intended to be dropped */
    if (flags & ICDECOMPRESS_HURRYUP) {
        hr = S_OK;
        goto error;
    }

    IMediaSample_SetActualDataLength(pOutSample, This->pBihOut->biSizeImage);

    /* Propagate the input sample's properties onto the output sample. */
    IMediaSample_SetPreroll(pOutSample, (IMediaSample_IsPreroll(pSample) == S_OK));
    IMediaSample_SetDiscontinuity(pOutSample, (IMediaSample_IsDiscontinuity(pSample) == S_OK));
    IMediaSample_SetSyncPoint(pOutSample, (IMediaSample_IsSyncPoint(pSample) == S_OK));

    /* hr still holds the GetTime() result from above. */
    if (hr == S_OK)
        IMediaSample_SetTime(pOutSample, &tStart, &tStop);
    else if (hr == VFW_S_NO_STOP_TIME)
        IMediaSample_SetTime(pOutSample, &tStart, NULL);
    else
        IMediaSample_SetTime(pOutSample, NULL, NULL);

    if (IMediaSample_GetMediaTime(pSample, &tStart, &tStop) == S_OK)
        IMediaSample_SetMediaTime(pOutSample, &tStart, &tStop);
    else
        IMediaSample_SetMediaTime(pOutSample, NULL, NULL);

    /* Release the receive lock while delivering downstream. */
    LeaveCriticalSection(&This->tf.csReceive);
    hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], pOutSample);
    EnterCriticalSection(&This->tf.csReceive);
    if (hr != S_OK && hr != VFW_E_NOT_CONNECTED)
        ERR("Error sending sample (%x)\n", hr);

error:
    if (pOutSample)
        IMediaSample_Release(pOutSample);

    LeaveCriticalSection(&This->tf.csReceive);
    return hr;
}
/* Convert one compressed audio sample to PCM via the ACM stream and deliver
 * the output downstream, possibly as several output samples.
 *
 * One input sample may yield multiple output packets, so the conversion
 * runs in a loop until the codec has consumed the whole input buffer.
 * Timestamps are interpolated by the fraction of input consumed each pass;
 * lasttime_real/lasttime_sent smooth over codecs that absorb input without
 * producing output immediately.  Holds tf.csReceive except around the
 * downstream Deliver() call.  Note the "error" label is INSIDE the while
 * loop: failures fall through to the per-iteration cleanup (unprepare
 * header, advance source pointer, release output sample).
 */
static HRESULT WINAPI ACMWrapper_Receive(TransformFilter *tf, IMediaSample *pSample)
{
    ACMWrapperImpl* This = impl_from_TransformFilter(tf);
    AM_MEDIA_TYPE amt;
    IMediaSample* pOutSample = NULL;
    DWORD cbDstStream, cbSrcStream;
    LPBYTE pbDstStream;
    LPBYTE pbSrcStream = NULL;
    ACMSTREAMHEADER ash;
    BOOL unprepare_header = FALSE, preroll;
    MMRESULT res;
    HRESULT hr;
    LONGLONG tStart = -1, tStop = -1, tMed;
    LONGLONG mtStart = -1, mtStop = -1, mtMed;

    EnterCriticalSection(&This->tf.csReceive);
    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        LeaveCriticalSection(&This->tf.csReceive);
        return hr;
    }

    preroll = (IMediaSample_IsPreroll(pSample) == S_OK);

    IMediaSample_GetTime(pSample, &tStart, &tStop);
    if (IMediaSample_GetMediaTime(pSample, &mtStart, &mtStop) != S_OK)
        mtStart = mtStop = -1;
    cbSrcStream = IMediaSample_GetActualDataLength(pSample);

    /* Prevent discontinuities when codecs 'absorb' data but not give anything back in return */
    if (IMediaSample_IsDiscontinuity(pSample) == S_OK)
    {
        This->lasttime_real = tStart;
        This->lasttime_sent = tStart;
    }
    else if (This->lasttime_real == tStart)
        tStart = This->lasttime_sent;  /* continue from where we last output */
    else
        WARN("Discontinuity\n");

    tMed = tStart;
    mtMed = mtStart;

    TRACE("Sample data ptr = %p, size = %d\n", pbSrcStream, cbSrcStream);

    /* NOTE(review): amt does not appear to be used or freed after this
     * call — looks like a FreeMediaType(&amt) is missing; verify. */
    hr = IPin_ConnectionMediaType(This->tf.ppPins[0], &amt);
    if (FAILED(hr))
    {
        ERR("Unable to retrieve media type\n");
        LeaveCriticalSection(&This->tf.csReceive);
        return hr;
    }

    ash.pbSrc = pbSrcStream;
    ash.cbSrcLength = cbSrcStream;

    /* Convert until all input is consumed or an error/skip stops us. */
    while(hr == S_OK && ash.cbSrcLength)
    {
        hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &pOutSample, NULL, NULL, 0);
        if (FAILED(hr))
        {
            ERR("Unable to get delivery buffer (%x)\n", hr);
            LeaveCriticalSection(&This->tf.csReceive);
            return hr;
        }
        IMediaSample_SetPreroll(pOutSample, preroll);

        hr = IMediaSample_SetActualDataLength(pOutSample, 0);
        assert(hr == S_OK);

        hr = IMediaSample_GetPointer(pOutSample, &pbDstStream);
        if (FAILED(hr)) {
            ERR("Unable to get pointer to buffer (%x)\n", hr);
            goto error;
        }
        cbDstStream = IMediaSample_GetSize(pOutSample);

        ash.cbStruct = sizeof(ash);
        ash.fdwStatus = 0;
        ash.dwUser = 0;
        ash.pbDst = pbDstStream;
        ash.cbDstLength = cbDstStream;

        if ((res = acmStreamPrepareHeader(This->has, &ash, 0))) {
            ERR("Cannot prepare header %d\n", res);
            goto error;
        }
        unprepare_header = TRUE;

        /* Restart the codec state on a discontinuity; the discontinuity
         * flag is forwarded to (only) the first output packet. */
        if (IMediaSample_IsDiscontinuity(pSample) == S_OK)
        {
            res = acmStreamConvert(This->has, &ash, ACM_STREAMCONVERTF_START);
            IMediaSample_SetDiscontinuity(pOutSample, TRUE);
            /* One sample could be converted to multiple packets */
            IMediaSample_SetDiscontinuity(pSample, FALSE);
        }
        else
        {
            res = acmStreamConvert(This->has, &ash, 0);
            IMediaSample_SetDiscontinuity(pOutSample, FALSE);
        }

        if (res)
        {
            if(res != MMSYSERR_MOREDATA)
                ERR("Cannot convert data header %d\n", res);
            goto error;
        }

        TRACE("used in %u/%u, used out %u/%u\n", ash.cbSrcLengthUsed, ash.cbSrcLength, ash.cbDstLengthUsed, ash.cbDstLength);

        hr = IMediaSample_SetActualDataLength(pOutSample, ash.cbDstLengthUsed);
        assert(hr == S_OK);

        /* Bug in acm codecs? It apparently uses the input, but doesn't necessarily output immediately */
        if (!ash.cbSrcLengthUsed)
        {
            WARN("Sample was skipped? Outputted: %u\n", ash.cbDstLengthUsed);
            ash.cbSrcLength = 0;
            goto error;
        }

        TRACE("Sample start time: %u.%03u\n", (DWORD)(tStart/10000000), (DWORD)((tStart/10000)%1000));
        /* Timestamp the output: exact when all input was consumed,
         * otherwise interpolated by the fraction of input used. */
        if (ash.cbSrcLengthUsed == cbSrcStream)
        {
            IMediaSample_SetTime(pOutSample, &tStart, &tStop);
            tStart = tMed = tStop;
        }
        else if (tStop != tStart)
        {
            tMed = tStop - tStart;
            tMed = tStart + tMed * ash.cbSrcLengthUsed / cbSrcStream;
            IMediaSample_SetTime(pOutSample, &tStart, &tMed);
            tStart = tMed;
        }
        else
        {
            ERR("No valid timestamp found\n");
            IMediaSample_SetTime(pOutSample, NULL, NULL);
        }

        /* Same interpolation scheme for media times, when present. */
        if (mtStart < 0) {
            IMediaSample_SetMediaTime(pOutSample, NULL, NULL);
        }
        else if (ash.cbSrcLengthUsed == cbSrcStream) {
            IMediaSample_SetMediaTime(pOutSample, &mtStart, &mtStop);
            mtStart = mtMed = mtStop;
        }
        else if (mtStop >= mtStart) {
            mtMed = mtStop - mtStart;
            mtMed = mtStart + mtMed * ash.cbSrcLengthUsed / cbSrcStream;
            IMediaSample_SetMediaTime(pOutSample, &mtStart, &mtMed);
            mtStart = mtMed;
        }
        else {
            IMediaSample_SetMediaTime(pOutSample, NULL, NULL);
        }

        TRACE("Sample stop time: %u.%03u\n", (DWORD)(tStart/10000000), (DWORD)((tStart/10000)%1000));

        /* Release the receive lock while delivering downstream. */
        LeaveCriticalSection(&This->tf.csReceive);
        hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], pOutSample);
        EnterCriticalSection(&This->tf.csReceive);

        if (hr != S_OK && hr != VFW_E_NOT_CONNECTED) {
            if (FAILED(hr))
                ERR("Error sending sample (%x)\n", hr);
            goto error;
        }

error:
        /* Per-iteration cleanup: also reached via goto on failure. */
        if (unprepare_header && (res = acmStreamUnprepareHeader(This->has, &ash, 0)))
            ERR("Cannot unprepare header %d\n", res);
        unprepare_header = FALSE;
        ash.pbSrc += ash.cbSrcLengthUsed;
        ash.cbSrcLength -= ash.cbSrcLengthUsed;

        IMediaSample_Release(pOutSample);
        pOutSample = NULL;
    }

    /* Remember where this sample ended so the next one can be stitched on. */
    This->lasttime_real = tStop;
    This->lasttime_sent = tMed;

    LeaveCriticalSection(&This->tf.csReceive);
    return hr;
}
/* Flushing is over: clear the quality-control lateness marker so no stale
 * hurry-up state survives a seek. */
static HRESULT WINAPI AVIDec_EndFlush(TransformFilter *pTransformFilter)
{
    AVIDecImpl *decoder = impl_from_TransformFilter(pTransformFilter);

    decoder->late = -1;
    return S_OK;
}
/* Validate a proposed input media type for the QuickTime video decoder.
 *
 * Builds the QuickTime ImageDescription from the input's FOURCC/bitmap
 * header, checks that a QuickTime codec exists for it, prepares the
 * CoreVideo pixel-buffer attributes for 32ARGB output, and rewrites the
 * stored output media type to RGB24.  On any failure both cached objects
 * are torn down and the connection is refused.
 *
 * Returns S_OK on acceptance, VFW_E_TYPE_NOT_ACCEPTED otherwise.
 */
static HRESULT WINAPI QTVDecoder_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE * pmt)
{
    QTVDecoderImpl* This = impl_from_TransformFilter(tf);
    HRESULT hr = VFW_E_TYPE_NOT_ACCEPTED;
    OSErr err = noErr;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    CFNumberRef n = NULL;

    TRACE("(%p)->(%p)\n", This, pmt);

    if (dir != PINDIR_INPUT)
        return S_OK;

    FreeMediaType(outpmt);
    CopyMediaType(outpmt, pmt);

    if (This->hImageDescription)
        DisposeHandle((Handle)This->hImageDescription);

    This->hImageDescription = (ImageDescriptionHandle)
        NewHandleClear(sizeof(ImageDescription));

    if (This->hImageDescription != NULL)
    {
        (**This->hImageDescription).idSize = sizeof(ImageDescription);
        (**This->hImageDescription).spatialQuality = codecNormalQuality;
        (**This->hImageDescription).frameCount = 1;
        (**This->hImageDescription).clutID = -1;
    }
    else
    {
        ERR("Failed to create ImageDescription\n");
        goto failed;
    }

    /* Check root (GUID w/o FOURCC): accept any video subtype whose GUID
     * matches MEDIATYPE_Video after the first 4 bytes (the FOURCC). */
    if ((IsEqualIID(&pmt->majortype, &MEDIATYPE_Video)) &&
        (!memcmp(((const char *)&pmt->subtype)+4, ((const char *)&MEDIATYPE_Video)+4, sizeof(GUID)-4)))
    {
        VIDEOINFOHEADER *format1 = (VIDEOINFOHEADER *)outpmt->pbFormat;
        VIDEOINFOHEADER2 *format2 = (VIDEOINFOHEADER2 *)outpmt->pbFormat;
        BITMAPINFOHEADER *bmi;
        OSType fourCC;
        DecompressorComponent dc;
        OSType format;
        DWORD outputWidth, outputHeight, outputDepth;

        if (IsEqualIID(&pmt->formattype, &FORMAT_VideoInfo))
            bmi = &format1->bmiHeader;
        else if (IsEqualIID(&pmt->formattype, &FORMAT_VideoInfo2))
            bmi = &format2->bmiHeader;
        else
            goto failed;

        TRACE("Fourcc: %s\n", debugstr_an((const char *)&pmt->subtype.Data1, 4));
        /* Byte-swap the FOURCC: QuickTime OSTypes are big-endian. */
        fourCC = ((const char *)&pmt->subtype.Data1)[3]
               | (((const char *)&pmt->subtype.Data1)[2]<<8)
               | (((const char *)&pmt->subtype.Data1)[1]<<16)
               | (((const char *)&pmt->subtype.Data1)[0]<<24);

        err = FindCodec(fourCC,NULL,NULL,&dc);
        if (err != noErr || dc == 0x0)
        {
            TRACE("Codec not found\n");
            goto failed;
        }

        outputWidth = bmi->biWidth;
        outputHeight = bmi->biHeight;

        (**This->hImageDescription).cType = fourCC;
        (**This->hImageDescription).width = outputWidth;
        (**This->hImageDescription).height = outputHeight;
        (**This->hImageDescription).depth = bmi->biBitCount;
        /* 72 dpi, as a 16.16 fixed-point value. */
        (**This->hImageDescription).hRes = 72<<16;
        (**This->hImageDescription).vRes = 72<<16;

        if (This->outputBufferAttributes)
            CFRelease(This->outputBufferAttributes);

        This->outputBufferAttributes = CFDictionaryCreateMutable(NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
        if (!This->outputBufferAttributes)
        {
            ERR("Failed to create outputBufferAttributes\n");
            goto failed;
        }

        n = CFNumberCreate(NULL, kCFNumberIntType, &outputWidth);
        CFDictionaryAddValue(This->outputBufferAttributes, kCVPixelBufferWidthKey, n);
        CFRelease(n);

        n = CFNumberCreate(NULL, kCFNumberIntType, &outputHeight);
        CFDictionaryAddValue(This->outputBufferAttributes, kCVPixelBufferHeightKey, n);
        CFRelease(n);

        /* yes this looks wrong. but 32ARGB is 24 RGB with an alpha channel */
        format = k32ARGBPixelFormat;
        n = CFNumberCreate(NULL, kCFNumberIntType, &format);
        CFDictionaryAddValue(This->outputBufferAttributes, kCVPixelBufferPixelFormatTypeKey, n);
        CFRelease(n);

        CFDictionaryAddValue(This->outputBufferAttributes, kCVPixelBufferCGBitmapContextCompatibilityKey, kCFBooleanTrue);
        CFDictionaryAddValue(This->outputBufferAttributes, kCVPixelBufferCGImageCompatibilityKey, kCFBooleanTrue);

        /* Downstream sees RGB24: 3 bytes per pixel. */
        outputDepth = 3;
        This->outputSize = outputWidth * outputHeight * outputDepth;
        bmi->biCompression = BI_RGB;
        bmi->biBitCount = 24;
        outpmt->subtype = MEDIASUBTYPE_RGB24;

        return S_OK;
    }

failed:
    if (This->hImageDescription)
    {
        DisposeHandle((Handle)This->hImageDescription);
        This->hImageDescription = NULL;
    }
    if (This->outputBufferAttributes)
    {
        CFRelease(This->outputBufferAttributes);
        This->outputBufferAttributes = NULL;
    }

    TRACE("Connection refused\n");
    return hr;
}
/* Validate a proposed input media type for the AVI decoder and, if a VFW
 * codec can decompress it, build the uncompressed RGB output media type.
 *
 * Locates a decompressor via ICLocate for the input FOURCC, allocates
 * input/output BITMAPINFOHEADER copies (pBihIn/pBihOut), rewrites the
 * output header as an uncompressed bitmap whose depth/subtype follow the
 * input depth (non-standard depths are forced to RGB32), and confirms the
 * conversion with ICDecompressQuery.
 *
 * Returns S_OK on acceptance; VFW_E_TYPE_NOT_ACCEPTED or E_OUTOFMEMORY on
 * failure.  NOTE(review): on the failure paths hvid/pBihIn are left
 * allocated — presumably AVIDec_BreakConnect cleans them up; verify.
 */
static HRESULT WINAPI AVIDec_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE * pmt)
{
    AVIDecImpl* This = impl_from_TransformFilter(tf);
    HRESULT hr = VFW_E_TYPE_NOT_ACCEPTED;

    TRACE("(%p)->(%p)\n", This, pmt);

    if (dir != PINDIR_INPUT)
        return S_OK;

    /* Check root (GUID w/o FOURCC): accept any video subtype whose GUID
     * matches MEDIATYPE_Video after the first 4 bytes (the FOURCC). */
    if ((IsEqualIID(&pmt->majortype, &MEDIATYPE_Video)) &&
        (!memcmp(((const char *)&pmt->subtype)+4, ((const char *)&MEDIATYPE_Video)+4, sizeof(GUID)-4)))
    {
        VIDEOINFOHEADER *format1 = (VIDEOINFOHEADER *)pmt->pbFormat;
        VIDEOINFOHEADER2 *format2 = (VIDEOINFOHEADER2 *)pmt->pbFormat;
        BITMAPINFOHEADER *bmi;

        if (IsEqualIID(&pmt->formattype, &FORMAT_VideoInfo))
            bmi = &format1->bmiHeader;
        else if (IsEqualIID(&pmt->formattype, &FORMAT_VideoInfo2))
            bmi = &format2->bmiHeader;
        else
            goto failed;
        TRACE("Fourcc: %s\n", debugstr_an((const char *)&pmt->subtype.Data1, 4));

        This->hvid = ICLocate(pmt->majortype.Data1, pmt->subtype.Data1, bmi, NULL, ICMODE_DECOMPRESS);
        if (This->hvid)
        {
            AM_MEDIA_TYPE* outpmt = &This->tf.pmt;
            const CLSID* outsubtype;
            DWORD bih_size;
            DWORD output_depth = bmi->biBitCount;
            DWORD result;
            FreeMediaType(outpmt);

            /* Pick the output subtype matching the input bit depth. */
            switch(bmi->biBitCount)
            {
                case 32: outsubtype = &MEDIASUBTYPE_RGB32; break;
                case 24: outsubtype = &MEDIASUBTYPE_RGB24; break;
                case 16: outsubtype = &MEDIASUBTYPE_RGB565; break;
                case 8:  outsubtype = &MEDIASUBTYPE_RGB8; break;
                default:
                    WARN("Non standard input depth %d, forced output depth to 32\n", bmi->biBitCount);
                    outsubtype = &MEDIASUBTYPE_RGB32;
                    output_depth = 32;
                    break;
            }

            /* Copy bitmap header from media type to 1 for input and 1 for output
             * (header plus colour table, 4 bytes per palette entry). */
            bih_size = bmi->biSize + bmi->biClrUsed * 4;
            This->pBihIn = CoTaskMemAlloc(bih_size);
            if (!This->pBihIn)
            {
                hr = E_OUTOFMEMORY;
                goto failed;
            }
            This->pBihOut = CoTaskMemAlloc(bih_size);
            if (!This->pBihOut)
            {
                hr = E_OUTOFMEMORY;
                goto failed;
            }
            memcpy(This->pBihIn, bmi, bih_size);
            memcpy(This->pBihOut, bmi, bih_size);

            /* Update output format as non compressed bitmap */
            This->pBihOut->biCompression = 0;
            This->pBihOut->biBitCount = output_depth;
            This->pBihOut->biSizeImage = This->pBihOut->biWidth * This->pBihOut->biHeight * This->pBihOut->biBitCount / 8;
            TRACE("Size: %u\n", This->pBihIn->biSize);

            /* Ask the codec whether it can perform this exact conversion. */
            result = ICDecompressQuery(This->hvid, This->pBihIn, This->pBihOut);
            if (result != ICERR_OK)
            {
                ERR("Unable to found a suitable output format (%d)\n", result);
                goto failed;
            }

            /* Update output media type */
            CopyMediaType(outpmt, pmt);
            outpmt->subtype = *outsubtype;

            if (IsEqualIID(&pmt->formattype, &FORMAT_VideoInfo))
                memcpy(&(((VIDEOINFOHEADER *)outpmt->pbFormat)->bmiHeader), This->pBihOut, This->pBihOut->biSize);
            else if (IsEqualIID(&pmt->formattype, &FORMAT_VideoInfo2))
                memcpy(&(((VIDEOINFOHEADER2 *)outpmt->pbFormat)->bmiHeader), This->pBihOut, This->pBihOut->biSize);
            else
                assert(0);

            TRACE("Connection accepted\n");
            return S_OK;
        }
        TRACE("Unable to find a suitable VFW decompressor\n");
    }

failed:
    TRACE("Connection refused\n");
    return hr;
}