/* Helper that buffers data and/or calls installed sample callbacks */
static void SampleGrabber_callback(SG_Impl *This, IMediaSample *sample)
{
    double time = 0.0;
    REFERENCE_TIME tStart, tEnd;

    if (This->bufferLen >= 0)
    {
        BYTE *data = 0;
        LONG size = IMediaSample_GetActualDataLength(sample);

        if (size >= 0 && SUCCEEDED(IMediaSample_GetPointer(sample, &data)))
        {
            if (!data)
                size = 0;
            EnterCriticalSection(&This->filter.csFilter);
            if (This->bufferLen != size)
            {
                if (This->bufferData)
                    CoTaskMemFree(This->bufferData);
                This->bufferData = size ? CoTaskMemAlloc(size) : NULL;
                This->bufferLen = size;
            }
            if (size)
                CopyMemory(This->bufferData, data, size);
            LeaveCriticalSection(&This->filter.csFilter);
        }
    }
    if (!This->grabberIface)
        return;
    if (SUCCEEDED(IMediaSample_GetTime(sample, &tStart, &tEnd)))
        time = 1e-7 * tStart;
    switch (This->grabberMethod)
    {
    case 0:
        {
            ULONG ref = IMediaSample_AddRef(sample);
            ISampleGrabberCB_SampleCB(This->grabberIface, time, sample);
            ref = IMediaSample_Release(sample) + 1 - ref;
            if (ref)
            {
                ERR("(%p) Callback referenced sample %p by %u\n", This, sample, ref);
                /* ugly as hell but some apps are sooo buggy */
                while (ref--)
                    IMediaSample_Release(sample);
            }
        }
        break;
    case 1:
        {
            BYTE *data = 0;
            LONG size = IMediaSample_GetActualDataLength(sample);

            if (size && SUCCEEDED(IMediaSample_GetPointer(sample, &data)) && data)
                ISampleGrabberCB_BufferCB(This->grabberIface, time, data, size);
        }
        break;
    case -1:
        break;
    default:
        FIXME("unsupported method %d\n", This->grabberMethod);
        /* do not bother us again */
        This->grabberMethod = -1;
    }
}
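/*
 * Illustrative sketch, not part of the filter above: the application selects
 * between the two callback modes with ISampleGrabber::SetCallback, whose last
 * argument corresponds to grabberMethod in the switch above; 0 routes whole
 * samples to SampleCB(double, IMediaSample*), 1 routes raw bytes to
 * BufferCB(double, BYTE*, LONG).  The "cb" object stands in for a hypothetical
 * ISampleGrabberCB implementation supplied by the application.
 */
static HRESULT install_grabber_callback(ISampleGrabber *grabber, ISampleGrabberCB *cb, BOOL want_buffer)
{
    /* 0 -> SampleCB, 1 -> BufferCB */
    return ISampleGrabber_SetCallback(grabber, cb, want_buffer ? 1 : 0);
}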
static GstFlowReturn request_buffer(GstPad *pad, guint64 ofs, guint size, GstCaps *caps, GstBuffer **buf)
{
    GstTfImpl *This = gst_pad_get_element_private(pad);
    IMediaSample *sample;
    BYTE *ptr;
    HRESULT hr;

    TRACE("Requesting buffer\n");

    hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &sample, NULL, NULL, 0);
    if (FAILED(hr))
    {
        ERR("Could not get output buffer: %08x\n", hr);
        return GST_FLOW_WRONG_STATE;
    }
    IMediaSample_SetActualDataLength(sample, size);
    IMediaSample_GetPointer(sample, &ptr);
    *buf = gst_app_buffer_new(ptr, size, release_sample, sample);
    if (!*buf)
    {
        IMediaSample_Release(sample);
        ERR("Out of memory\n");
        return GST_FLOW_ERROR;
    }
    if (!caps)
        caps = gst_pad_get_caps_reffed(This->my_sink);
    gst_buffer_set_caps(*buf, caps);
    return GST_FLOW_OK;
}
GstFlowReturn got_data(GstPad *pad, GstObject *parent, GstBuffer *buf)
{
    GstTfImpl *This = gst_pad_get_element_private(pad);
    IMediaSample *sample = (IMediaSample *)gst_mini_object_get_qdata(GST_MINI_OBJECT(buf),
            g_quark_from_static_string(media_quark_string));
    REFERENCE_TIME tStart, tStop;
    HRESULT hr;

    TRACE("%p, %p\n", pad, buf);

    if (!sample)
    {
        GstMapInfo info;
        BYTE *ptr;

        gst_buffer_map(buf, &info, GST_MAP_READ);
        hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &sample, NULL, NULL, 0);
        if (FAILED(hr))
        {
            ERR("Could not get output buffer: %08x\n", hr);
            /* a chain function owns the incoming buffer, so drop it on failure */
            gst_buffer_unmap(buf, &info);
            gst_buffer_unref(buf);
            return GST_FLOW_FLUSHING;
        }
        IMediaSample_SetActualDataLength(sample, info.size);
        IMediaSample_GetPointer(sample, &ptr);
        memcpy(ptr, info.data, info.size);
        gst_buffer_unmap(buf, &info);
    }

    if (GST_BUFFER_PTS_IS_VALID(buf) && GST_BUFFER_DURATION_IS_VALID(buf))
    {
        /* GStreamer times are in ns, DirectShow reference times in 100 ns units */
        tStart = buf->pts / 100;
        tStop = tStart + buf->duration / 100;
        IMediaSample_SetTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetTime(sample, NULL, NULL);

    if (GST_BUFFER_OFFSET_IS_VALID(buf) && GST_BUFFER_OFFSET_END_IS_VALID(buf))
    {
        tStart = buf->offset / 100;
        tStop = buf->offset_end / 100;
        IMediaSample_SetMediaTime(sample, &tStart, &tStop);
    }
    else
        IMediaSample_SetMediaTime(sample, NULL, NULL);

    IMediaSample_SetDiscontinuity(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT));
    IMediaSample_SetPreroll(sample, GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_LIVE));
    IMediaSample_SetSyncPoint(sample, !GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT));
    IMediaSample_SetActualDataLength(sample, gst_buffer_get_size(buf));

    hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], sample);
    IMediaSample_Release(sample);
    gst_buffer_unref(buf);
    if (FAILED(hr))
        return GST_FLOW_FLUSHING;
    return GST_FLOW_OK;
}
static HRESULT WINAPI VideoRenderer_GetStaticImage(BaseControlVideo *iface, LONG *pBufferSize, LONG *pDIBImage)
{
    VideoRendererImpl *This = impl_from_BaseControlVideo(iface);
    BITMAPINFOHEADER *bmiHeader;
    LONG needed_size;
    AM_MEDIA_TYPE *amt = &This->renderer.pInputPin->pin.mtCurrent;
    char *ptr;

    FIXME("(%p/%p)->(%p, %p): partial stub\n", This, iface, pBufferSize, pDIBImage);

    EnterCriticalSection(&This->renderer.filter.csFilter);

    if (!This->renderer.pMediaSample)
    {
        LeaveCriticalSection(&This->renderer.filter.csFilter);
        return (This->renderer.filter.state == State_Paused ? E_UNEXPECTED : VFW_E_NOT_PAUSED);
    }

    if (IsEqualIID(&amt->formattype, &FORMAT_VideoInfo))
        bmiHeader = &((VIDEOINFOHEADER *)amt->pbFormat)->bmiHeader;
    else if (IsEqualIID(&amt->formattype, &FORMAT_VideoInfo2))
        bmiHeader = &((VIDEOINFOHEADER2 *)amt->pbFormat)->bmiHeader;
    else
    {
        FIXME("Unknown format type %s\n", debugstr_guid(&amt->formattype));
        LeaveCriticalSection(&This->renderer.filter.csFilter);
        return VFW_E_RUNTIME_ERROR;
    }

    needed_size = bmiHeader->biSize;
    needed_size += IMediaSample_GetActualDataLength(This->renderer.pMediaSample);

    if (!pDIBImage)
    {
        *pBufferSize = needed_size;
        LeaveCriticalSection(&This->renderer.filter.csFilter);
        return S_OK;
    }

    /* The caller's buffer must hold the header plus the image bits */
    if (*pBufferSize < needed_size)
    {
        ERR("Buffer too small %u/%u\n", needed_size, *pBufferSize);
        LeaveCriticalSection(&This->renderer.filter.csFilter);
        return E_FAIL;
    }
    *pBufferSize = needed_size;

    memcpy(pDIBImage, bmiHeader, bmiHeader->biSize);
    IMediaSample_GetPointer(This->renderer.pMediaSample, (BYTE **)&ptr);
    memcpy((char *)pDIBImage + bmiHeader->biSize, ptr,
           IMediaSample_GetActualDataLength(This->renderer.pMediaSample));

    LeaveCriticalSection(&This->renderer.filter.csFilter);
    return S_OK;
}
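/*
 * Illustrative caller-side sketch, not from the original source: GetStaticImage
 * backs IBasicVideo::GetCurrentImage, which follows the two-call pattern the
 * method above implements; pass a NULL image pointer to query the required
 * size, then call again with a buffer of that size.  The helper name is
 * hypothetical.
 */
static HRESULT grab_current_image(IBasicVideo *video, BYTE **image, LONG *size)
{
    HRESULT hr = IBasicVideo_GetCurrentImage(video, size, NULL);   /* size query */
    if (FAILED(hr))
        return hr;
    *image = CoTaskMemAlloc(*size);
    if (!*image)
        return E_OUTOFMEMORY;
    hr = IBasicVideo_GetCurrentImage(video, size, (LONG *)*image); /* fetch the DIB */
    if (FAILED(hr))
    {
        CoTaskMemFree(*image);
        *image = NULL;
    }
    return hr;
}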
static HRESULT WINAPI DSoundRender_DoRenderSample(BaseRenderer *iface, IMediaSample *pSample)
{
    DSoundRenderImpl *This = impl_from_BaseRenderer(iface);
    LPBYTE pbSrcStream = NULL;
    LONG cbSrcStream = 0;
    REFERENCE_TIME tStart, tStop;
    HRESULT hr;

    TRACE("%p %p\n", iface, pSample);

    /* Slightly incorrect, Pause completes when a frame is received so we should signal
     * pause completion here, but for sound playing a single frame doesn't make sense */

    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        return hr;
    }

    hr = IMediaSample_GetTime(pSample, &tStart, &tStop);
    if (FAILED(hr))
    {
        ERR("Cannot get sample time (%x)\n", hr);
        tStart = tStop = -1;
    }

    IMediaSample_IsDiscontinuity(pSample);

    if (IMediaSample_IsPreroll(pSample) == S_OK)
    {
        TRACE("Preroll!\n");
        return S_OK;
    }

    cbSrcStream = IMediaSample_GetActualDataLength(pSample);
    TRACE("Sample data ptr = %p, size = %d\n", pbSrcStream, cbSrcStream);

    hr = DSoundRender_SendSampleData(This, tStart, tStop, pbSrcStream, cbSrcStream);
    if (This->renderer.filter.state == State_Running && This->renderer.filter.pClock && tStart >= 0)
    {
        REFERENCE_TIME jitter, now = 0;
        Quality q;
        IReferenceClock_GetTime(This->renderer.filter.pClock, &now);
        jitter = now - This->renderer.filter.rtStreamStart - tStart;
        if (jitter <= -DSoundRenderer_Max_Fill)
            jitter += DSoundRenderer_Max_Fill;
        else if (jitter < 0)
            jitter = 0;
        q.Type = (jitter > 0 ? Famine : Flood);
        q.Proportion = 1000;
        q.Late = jitter;
        q.TimeStamp = tStart;
        IQualityControl_Notify((IQualityControl *)This->renderer.qcimpl, (IBaseFilter *)This, q);
    }
    return hr;
}
static HRESULT WINAPI Gstreamer_transform_ProcessData(TransformFilter *iface, IMediaSample *sample)
{
    GstTfImpl *This = (GstTfImpl*)iface;
    REFERENCE_TIME tStart, tStop;
    BYTE *data;
    GstBuffer *buf;
    HRESULT hr;
    DWORD bufsize;
    int ret;

    TRACE("%p, %p\n", This, sample);

    mark_wine_thread();

    EnterCriticalSection(&This->tf.csReceive);
    IMediaSample_GetPointer(sample, &data);

    IMediaSample_AddRef(sample);
    bufsize = IMediaSample_GetActualDataLength(sample);
    buf = gst_buffer_new_wrapped_full(0, data, bufsize, 0, bufsize, sample, release_sample_wrapper);
    if (!buf)
    {
        IMediaSample_Release(sample);
        LeaveCriticalSection(&This->tf.csReceive);
        return S_OK;
    }

    IMediaSample_AddRef(sample);
    gst_mini_object_set_qdata(GST_MINI_OBJECT(buf), g_quark_from_static_string(media_quark_string),
            sample, release_sample_wrapper);

    buf->duration = buf->pts = -1;
    hr = IMediaSample_GetTime(sample, &tStart, &tStop);
    if (SUCCEEDED(hr))
    {
        buf->pts = tStart * 100;
        if (hr == S_OK)
            buf->duration = (tStop - tStart) * 100;
    }
    if (IMediaSample_GetMediaTime(sample, &tStart, &tStop) == S_OK)
    {
        buf->offset = tStart * 100;
        buf->offset_end = tStop * 100;
    }
    if (IMediaSample_IsDiscontinuity(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DISCONT);
    if (IMediaSample_IsPreroll(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_LIVE);
    if (IMediaSample_IsSyncPoint(sample) != S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT);
    LeaveCriticalSection(&This->tf.csReceive);

    ret = gst_pad_push(This->my_src, buf);
    if (ret)
        WARN("Sending returned: %i\n", ret);
    if (ret == GST_FLOW_FLUSHING)
        return VFW_E_WRONG_STATE;
    return S_OK;
}
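/*
 * Unit-conversion note, added for clarity (not in the original source): a
 * DirectShow REFERENCE_TIME counts 100 ns ticks while GStreamer timestamps
 * count nanoseconds, which is why ProcessData() above multiplies by 100 on the
 * way into GStreamer and got_data() divides by 100 on the way back.  A minimal
 * sketch of the two directions:
 */
static guint64 dshow_to_gst_time(REFERENCE_TIME t)
{
    return (guint64)t * 100;            /* 100 ns ticks -> ns */
}

static REFERENCE_TIME gst_to_dshow_time(guint64 t)
{
    return (REFERENCE_TIME)(t / 100);   /* ns -> 100 ns ticks */
}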
static HRESULT WINAPI VideoRenderer_DoRenderSample(BaseRenderer *iface, IMediaSample *pSample)
{
    VideoRendererImpl *This = impl_from_BaseRenderer(iface);
    LPBYTE pbSrcStream = NULL;
    LONG cbSrcStream = 0;
    HRESULT hr;

    TRACE("(%p)->(%p)\n", This, pSample);

    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        return hr;
    }

    cbSrcStream = IMediaSample_GetActualDataLength(pSample);

    TRACE("val %p %d\n", pbSrcStream, cbSrcStream);

#if 0 /* For debugging purposes */
    {
        int i;
        for (i = 0; i < cbSrcStream; i++)
        {
            if ((i != 0) && !(i % 16))
                TRACE("\n");
            TRACE("%02x ", pbSrcStream[i]);
        }
        TRACE("\n");
    }
#endif

    SetEvent(This->hEvent);
    if (This->renderer.filter.state == State_Paused)
    {
        VideoRenderer_SendSampleData(This, pbSrcStream, cbSrcStream);
        SetEvent(This->hEvent);
        if (This->renderer.filter.state == State_Paused)
        {
            /* Flushing */
            return S_OK;
        }
        if (This->renderer.filter.state == State_Stopped)
            return VFW_E_WRONG_STATE;
    }
    else
        VideoRenderer_SendSampleData(This, pbSrcStream, cbSrcStream);
    return S_OK;
}
static HRESULT WINAPI QTVDecoder_Receive(TransformFilter *tf, IMediaSample *pSample)
{
    QTVDecoderImpl *This = impl_from_TransformFilter(tf);
    HRESULT hr;
    DWORD cbSrcStream;
    LPBYTE pbSrcStream;
    ICMFrameTimeRecord frameTime = {{0}};
    TimeValue time = 1;
    TimeScale timeScale = 1;
    OSStatus err = noErr;
    LONGLONG tStart, tStop;

    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        goto error;
    }

    cbSrcStream = IMediaSample_GetActualDataLength(pSample);

    if (IMediaSample_GetTime(pSample, &tStart, &tStop) != S_OK)
        tStart = tStop = 0;

    time = tStart;

    frameTime.recordSize = sizeof(ICMFrameTimeRecord);
    *(TimeValue64 *)&frameTime.value = tStart;
    frameTime.scale = 1;
    frameTime.rate = fixed1;
    frameTime.duration = tStop - tStart;
    frameTime.frameNumber = 0;
    frameTime.flags = icmFrameTimeIsNonScheduledDisplayTime;

    err = ICMDecompressionSessionDecodeFrame(This->decompressionSession,
            (UInt8 *)pbSrcStream, cbSrcStream, NULL, &frameTime, pSample);
    if (err != noErr)
    {
        ERR("Error with ICMDecompressionSessionDecodeFrame\n");
        hr = E_FAIL;
        goto error;
    }

    ICMDecompressionSessionSetNonScheduledDisplayTime(This->decompressionSession, time, timeScale, 0);
    ICMDecompressionSessionFlush(This->decompressionSession);
    hr = This->decodeHR;

error:
    return hr;
}
static HRESULT WINAPI Gstreamer_transform_ProcessData(TransformFilter *iface, IMediaSample *sample)
{
    GstTfImpl *This = (GstTfImpl*)iface;
    REFERENCE_TIME tStart, tStop;
    BYTE *data;
    GstBuffer *buf;
    HRESULT hr;
    int ret;

    TRACE("Reading %p\n", sample);

    EnterCriticalSection(&This->tf.filter.csFilter);
    IMediaSample_GetPointer(sample, &data);
    buf = gst_app_buffer_new(data, IMediaSample_GetActualDataLength(sample), release_sample, sample);
    if (!buf)
    {
        LeaveCriticalSection(&This->tf.filter.csFilter);
        return S_OK;
    }
    gst_buffer_set_caps(buf, gst_pad_get_caps_reffed(This->my_src));
    IMediaSample_AddRef(sample);
    buf->duration = buf->timestamp = -1;
    hr = IMediaSample_GetTime(sample, &tStart, &tStop);
    if (SUCCEEDED(hr))
    {
        buf->timestamp = tStart * 100;
        if (hr == S_OK)
            buf->duration = (tStop - tStart) * 100;
    }
    if (IMediaSample_GetMediaTime(sample, &tStart, &tStop) == S_OK)
    {
        buf->offset = tStart * 100;
        buf->offset_end = tStop * 100;
    }
    if (IMediaSample_IsDiscontinuity(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DISCONT);
    if (IMediaSample_IsPreroll(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_PREROLL);
    if (IMediaSample_IsSyncPoint(sample) != S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT);
    LeaveCriticalSection(&This->tf.filter.csFilter);

    ret = gst_pad_push(This->my_src, buf);
    if (ret)
        WARN("Sending returned: %i\n", ret);
    if (ret == GST_FLOW_ERROR)
        return E_FAIL;
    if (ret == GST_FLOW_WRONG_STATE)
        return VFW_E_WRONG_STATE;
    if (ret == GST_FLOW_RESEND)
        return S_FALSE;
    return S_OK;
}
static HRESULT WINAPI DSoundRender_Receive(BaseInputPin *pin, IMediaSample *pSample)
{
    DSoundRenderImpl *This = (DSoundRenderImpl*)pin->pin.pinInfo.pFilter;
    LPBYTE pbSrcStream = NULL;
    LONG cbSrcStream = 0;
    REFERENCE_TIME tStart, tStop;
    HRESULT hr;
    AM_MEDIA_TYPE *amt;

    TRACE("%p %p\n", pin, pSample);

    /* Slightly incorrect, Pause completes when a frame is received so we should signal
     * pause completion here, but for sound playing a single frame doesn't make sense */

    EnterCriticalSection(&This->filter.csFilter);

    if (This->pInputPin->end_of_stream || This->pInputPin->flushing)
    {
        LeaveCriticalSection(&This->filter.csFilter);
        return S_FALSE;
    }

    if (This->filter.state == State_Stopped)
    {
        LeaveCriticalSection(&This->filter.csFilter);
        return VFW_E_WRONG_STATE;
    }

    if (IMediaSample_GetMediaType(pSample, &amt) == S_OK)
    {
        AM_MEDIA_TYPE *orig = &This->pInputPin->pin.mtCurrent;
        WAVEFORMATEX *origfmt = (WAVEFORMATEX *)orig->pbFormat;
        WAVEFORMATEX *newfmt = (WAVEFORMATEX *)amt->pbFormat;

        if (origfmt->wFormatTag == newfmt->wFormatTag &&
            origfmt->nChannels == newfmt->nChannels &&
            origfmt->nBlockAlign == newfmt->nBlockAlign &&
            origfmt->wBitsPerSample == newfmt->wBitsPerSample &&
            origfmt->cbSize == newfmt->cbSize)
        {
            if (origfmt->nSamplesPerSec != newfmt->nSamplesPerSec)
            {
                hr = IDirectSoundBuffer_SetFrequency(This->dsbuffer, newfmt->nSamplesPerSec);
                if (FAILED(hr))
                {
                    LeaveCriticalSection(&This->filter.csFilter);
                    return VFW_E_TYPE_NOT_ACCEPTED;
                }
                FreeMediaType(orig);
                CopyMediaType(orig, amt);
                IMediaSample_SetMediaType(pSample, NULL);
            }
        }
        else
        {
            LeaveCriticalSection(&This->filter.csFilter);
            return VFW_E_TYPE_NOT_ACCEPTED;
        }
    }

    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        LeaveCriticalSection(&This->filter.csFilter);
        return hr;
    }

    hr = IMediaSample_GetTime(pSample, &tStart, &tStop);
    if (FAILED(hr))
        ERR("Cannot get sample time (%x)\n", hr);
    else
        MediaSeekingPassThru_RegisterMediaTime(This->seekthru_unk, tStart);

    if (This->rtLastStop != tStart && (IMediaSample_IsDiscontinuity(pSample) == S_FALSE))
        WARN("Unexpected discontinuity: Last: %u.%03u, tStart: %u.%03u\n",
             (DWORD)(This->rtLastStop / 10000000), (DWORD)((This->rtLastStop / 10000) % 1000),
             (DWORD)(tStart / 10000000), (DWORD)((tStart / 10000) % 1000));
    This->rtLastStop = tStop;

    if (IMediaSample_IsPreroll(pSample) == S_OK)
    {
        TRACE("Preroll!\n");
        LeaveCriticalSection(&This->filter.csFilter);
        return S_OK;
    }

    if (This->filter.state == State_Paused)
    {
        SetEvent(This->state_change);
        LeaveCriticalSection(&This->filter.csFilter);
        WaitForSingleObject(This->blocked, INFINITE);
        EnterCriticalSection(&This->filter.csFilter);
        if (This->filter.state == State_Stopped)
        {
            LeaveCriticalSection(&This->filter.csFilter);
            return VFW_E_WRONG_STATE;
        }
        if (This->filter.state == State_Paused)
        {
            /* Assuming we return because of flushing */
            TRACE("Flushing\n");
            LeaveCriticalSection(&This->filter.csFilter);
            return S_OK;
        }
        SetEvent(This->state_change);
    }

    cbSrcStream = IMediaSample_GetActualDataLength(pSample);
    TRACE("Sample data ptr = %p, size = %d\n", pbSrcStream, cbSrcStream);

#if 0 /* For debugging purposes */
    {
        int i;
        for (i = 0; i < cbSrcStream; i++)
        {
            if ((i != 0) && !(i % 16))
                TRACE("\n");
            TRACE("%02x ", pbSrcStream[i]);
        }
        TRACE("\n");
    }
#endif

    hr = DSoundRender_SendSampleData(This, pbSrcStream, cbSrcStream);
    SetEvent(This->state_change);
    LeaveCriticalSection(&This->filter.csFilter);
    return hr;
}
static HRESULT ACMWrapper_ProcessSampleData(TransformFilterImpl* pTransformFilter, LPBYTE data, DWORD size)
{
    ACMWrapperImpl* This = (ACMWrapperImpl*)pTransformFilter;
    AM_MEDIA_TYPE amt;
    HRESULT hr;
    IMediaSample* pSample = NULL;
    DWORD cbDstStream;
    LPBYTE pbDstStream;
    ACMSTREAMHEADER ash;
    DWORD offset = 0;
    BOOL stop = FALSE;
    BOOL unprepare_header = FALSE;
    MMRESULT res;

    TRACE("(%p)->(%p,%ld)\n", This, data, size);

    hr = IPin_ConnectionMediaType(This->tf.ppPins[0], &amt);
    if (FAILED(hr))
    {
        ERR("Unable to retrieve media type\n");
        goto error;
    }

    while (!stop)
    {
        DWORD rem_buf = This->max_size - This->current_size;
        DWORD rem_smp = size - offset;
        DWORD copy_size = min(rem_buf, rem_smp);

        memcpy(This->buffer + This->current_size, data + offset, copy_size);
        This->current_size += copy_size;
        offset += copy_size;

        if (offset == size)
            stop = TRUE;
        if (This->current_size < This->max_size)
            break;

        hr = OutputPin_GetDeliveryBuffer((OutputPin*)This->tf.ppPins[1], &pSample, NULL, NULL, 0);
        if (FAILED(hr))
        {
            ERR("Unable to get delivery buffer (%lx)\n", hr);
            goto error;
        }

        hr = IMediaSample_SetActualDataLength(pSample, 0);
        assert(hr == S_OK);

        hr = IMediaSample_GetPointer(pSample, &pbDstStream);
        if (FAILED(hr))
        {
            ERR("Unable to get pointer to buffer (%lx)\n", hr);
            goto error;
        }
        cbDstStream = IMediaSample_GetSize(pSample);

        ash.cbStruct = sizeof(ash);
        ash.fdwStatus = 0;
        ash.dwUser = 0;
        ash.pbSrc = This->buffer;
        ash.cbSrcLength = This->current_size;
        ash.pbDst = pbDstStream;
        ash.cbDstLength = cbDstStream;

        if ((res = acmStreamPrepareHeader(This->has, &ash, 0)))
        {
            ERR("Cannot prepare header %d\n", res);
            goto error;
        }
        unprepare_header = TRUE;

        if ((res = acmStreamConvert(This->has, &ash, This->reinit_codec ? ACM_STREAMCONVERTF_START : 0)))
        {
            ERR("Cannot convert data header %d\n", res);
            goto error;
        }
        This->reinit_codec = FALSE;

        TRACE("used in %lu, used out %lu\n", ash.cbSrcLengthUsed, ash.cbDstLengthUsed);

        hr = IMediaSample_SetActualDataLength(pSample, ash.cbDstLengthUsed);
        assert(hr == S_OK);

        if (ash.cbSrcLengthUsed < ash.cbSrcLength)
        {
            This->current_size = ash.cbSrcLength - ash.cbSrcLengthUsed;
            memmove(This->buffer, This->buffer + ash.cbSrcLengthUsed, This->current_size);
        }
        else
            This->current_size = 0;

        hr = OutputPin_SendSample((OutputPin*)This->tf.ppPins[1], pSample);
        if (hr != S_OK && hr != VFW_E_NOT_CONNECTED)
        {
            ERR("Error sending sample (%lx)\n", hr);
            goto error;
        }

error:
        if (unprepare_header && (res = acmStreamUnprepareHeader(This->has, &ash, 0)))
            ERR("Cannot unprepare header %d\n", res);

        if (pSample)
            IMediaSample_Release(pSample);
    }

    return hr;
}
static void trackingCallback(
                    void *decompressionTrackingRefCon,
                    OSStatus result,
                    ICMDecompressionTrackingFlags decompressionTrackingFlags,
                    CVPixelBufferRef pixelBuffer,
                    TimeValue64 displayTime,
                    TimeValue64 displayDuration,
                    ICMValidTimeFlags validTimeFlags,
                    void *reserved,
                    void *sourceFrameRefCon )
{
    QTVDecoderImpl *This = (QTVDecoderImpl*)decompressionTrackingRefCon;
    IMediaSample *pSample = (IMediaSample*)sourceFrameRefCon;
    HRESULT hr = S_OK;
    IMediaSample* pOutSample = NULL;
    LPBYTE pbDstStream;
    DWORD cbDstStream;

    if (result != noErr)
    {
        ERR("Error from Codec, no frame decompressed\n");
        return;
    }

    if (!pixelBuffer)
    {
        ERR("No pixel buffer, no frame decompressed\n");
        return;
    }

    EnterCriticalSection(&This->tf.csReceive);
    hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &pOutSample, NULL, NULL, 0);
    if (FAILED(hr))
    {
        ERR("Unable to get delivery buffer (%x)\n", hr);
        goto error;
    }

    hr = IMediaSample_SetActualDataLength(pOutSample, 0);
    assert(hr == S_OK);

    hr = IMediaSample_GetPointer(pOutSample, &pbDstStream);
    if (FAILED(hr))
    {
        ERR("Unable to get pointer to buffer (%x)\n", hr);
        goto error;
    }

    cbDstStream = IMediaSample_GetSize(pOutSample);
    if (cbDstStream < This->outputSize)
    {
        ERR("Sample size is too small %d < %d\n", cbDstStream, This->outputSize);
        hr = E_FAIL;
        goto error;
    }

    hr = AccessPixelBufferPixels(pixelBuffer, pbDstStream);
    if (FAILED(hr))
        goto error;

    IMediaSample_SetActualDataLength(pOutSample, This->outputSize);

    IMediaSample_SetPreroll(pOutSample, (IMediaSample_IsPreroll(pSample) == S_OK));
    IMediaSample_SetDiscontinuity(pOutSample, (IMediaSample_IsDiscontinuity(pSample) == S_OK));
    IMediaSample_SetSyncPoint(pOutSample, (IMediaSample_IsSyncPoint(pSample) == S_OK));

    if (!validTimeFlags)
        IMediaSample_SetTime(pOutSample, NULL, NULL);
    else
    {
        LONGLONG tStart, tStop;

        if (validTimeFlags & kICMValidTime_DisplayTimeStampIsValid)
            tStart = displayTime;
        else
            tStart = 0;
        if (validTimeFlags & kICMValidTime_DisplayDurationIsValid)
            tStop = tStart + displayDuration;
        else
            tStop = tStart;

        IMediaSample_SetTime(pOutSample, &tStart, &tStop);
    }

    LeaveCriticalSection(&This->tf.csReceive);
    hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], pOutSample);
    EnterCriticalSection(&This->tf.csReceive);
    if (hr != S_OK && hr != VFW_E_NOT_CONNECTED)
        ERR("Error sending sample (%x)\n", hr);

error:
    LeaveCriticalSection(&This->tf.csReceive);
    if (pOutSample)
        IMediaSample_Release(pOutSample);

    This->decodeHR = hr;
}
static HRESULT AVIDec_ProcessSampleData(InputPin *pin, IMediaSample *pSample)
{
    AVIDecImpl* This = (AVIDecImpl *)pin->pin.pinInfo.pFilter;
    AM_MEDIA_TYPE amt;
    HRESULT hr;
    DWORD res;
    IMediaSample* pOutSample = NULL;
    DWORD cbDstStream;
    LPBYTE pbDstStream;
    DWORD cbSrcStream;
    LPBYTE pbSrcStream;
    LONGLONG tStart, tStop;

    EnterCriticalSection(&This->tf.csFilter);
    if (This->tf.state == State_Stopped)
    {
        LeaveCriticalSection(&This->tf.csFilter);
        return VFW_E_WRONG_STATE;
    }

    if (pin->end_of_stream || pin->flushing)
    {
        LeaveCriticalSection(&This->tf.csFilter);
        return S_FALSE;
    }

    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        goto error;
    }

    cbSrcStream = IMediaSample_GetActualDataLength(pSample);

    TRACE("Sample data ptr = %p, size = %d\n", pbSrcStream, cbSrcStream);

    hr = IPin_ConnectionMediaType(This->tf.ppPins[0], &amt);
    if (FAILED(hr))
    {
        ERR("Unable to retrieve media type\n");
        goto error;
    }

    /* Update input size to match sample size */
    This->pBihIn->biSizeImage = cbSrcStream;

    hr = OutputPin_GetDeliveryBuffer((OutputPin*)This->tf.ppPins[1], &pOutSample, NULL, NULL, 0);
    if (FAILED(hr))
    {
        ERR("Unable to get delivery buffer (%x)\n", hr);
        goto error;
    }

    hr = IMediaSample_SetActualDataLength(pOutSample, 0);
    assert(hr == S_OK);

    hr = IMediaSample_GetPointer(pOutSample, &pbDstStream);
    if (FAILED(hr))
    {
        ERR("Unable to get pointer to buffer (%x)\n", hr);
        goto error;
    }
    cbDstStream = IMediaSample_GetSize(pOutSample);
    if (cbDstStream < This->pBihOut->biSizeImage)
    {
        ERR("Sample size is too small %d < %d\n", cbDstStream, This->pBihOut->biSizeImage);
        hr = E_FAIL;
        goto error;
    }

    res = ICDecompress(This->hvid, 0, This->pBihIn, pbSrcStream, This->pBihOut, pbDstStream);
    if (res != ICERR_OK)
        ERR("Error occurred during the decompression (%x)\n", res);

    IMediaSample_SetActualDataLength(pOutSample, This->pBihOut->biSizeImage);

    IMediaSample_SetPreroll(pOutSample, (IMediaSample_IsPreroll(pSample) == S_OK));
    IMediaSample_SetDiscontinuity(pOutSample, (IMediaSample_IsDiscontinuity(pSample) == S_OK));
    IMediaSample_SetSyncPoint(pOutSample, (IMediaSample_IsSyncPoint(pSample) == S_OK));

    if (IMediaSample_GetTime(pSample, &tStart, &tStop) == S_OK)
        IMediaSample_SetTime(pOutSample, &tStart, &tStop);
    else
        IMediaSample_SetTime(pOutSample, NULL, NULL);

    LeaveCriticalSection(&This->tf.csFilter);
    hr = OutputPin_SendSample((OutputPin*)This->tf.ppPins[1], pOutSample);
    if (hr != S_OK && hr != VFW_E_NOT_CONNECTED)
        ERR("Error sending sample (%x)\n", hr);
    IMediaSample_Release(pOutSample);
    return hr;

error:
    if (pOutSample)
        IMediaSample_Release(pOutSample);
    LeaveCriticalSection(&This->tf.csFilter);
    return hr;
}
static HRESULT WINAPI AVICompressorIn_Receive(BaseInputPin *base, IMediaSample *pSample)
{
    AVICompressor *This = impl_from_BasePin(&base->pin);
    VIDEOINFOHEADER *src_videoinfo;
    REFERENCE_TIME start, stop;
    IMediaSample *out_sample;
    AM_MEDIA_TYPE *mt;
    IMediaSample2 *sample2;
    DWORD comp_flags = 0;
    BOOL is_preroll;
    BOOL sync_point;
    BYTE *ptr, *buf;
    DWORD res;
    HRESULT hres;

    TRACE("(%p)->(%p)\n", base, pSample);

    if (!This->hic)
    {
        FIXME("Driver not loaded\n");
        return E_UNEXPECTED;
    }

    hres = IMediaSample_QueryInterface(pSample, &IID_IMediaSample2, (void**)&sample2);
    if (SUCCEEDED(hres))
    {
        FIXME("Use IMediaSample2\n");
        IMediaSample2_Release(sample2);
    }

    is_preroll = IMediaSample_IsPreroll(pSample) == S_OK;
    sync_point = IMediaSample_IsSyncPoint(pSample) == S_OK;

    hres = IMediaSample_GetTime(pSample, &start, &stop);
    if (FAILED(hres))
    {
        WARN("GetTime failed: %08x\n", hres);
        return hres;
    }

    hres = IMediaSample_GetMediaType(pSample, &mt);
    if (FAILED(hres))
        return hres;

    hres = IMediaSample_GetPointer(pSample, &ptr);
    if (FAILED(hres))
    {
        WARN("GetPointer failed: %08x\n", hres);
        return hres;
    }

    hres = BaseOutputPinImpl_GetDeliveryBuffer(This->out, &out_sample, &start, &stop, 0);
    if (FAILED(hres))
        return hres;

    hres = IMediaSample_GetPointer(out_sample, &buf);
    if (FAILED(hres))
        return hres;

    if ((This->driver_flags & VIDCF_TEMPORAL) && !(This->driver_flags & VIDCF_FASTTEMPORALC))
        FIXME("Unsupported temporal compression\n");

    src_videoinfo = (VIDEOINFOHEADER*)This->in->pin.mtCurrent.pbFormat;
    This->videoinfo->bmiHeader.biSizeImage = This->max_frame_size;
    res = ICCompress(This->hic, sync_point ? ICCOMPRESS_KEYFRAME : 0, &This->videoinfo->bmiHeader, buf,
            &src_videoinfo->bmiHeader, ptr, 0, &comp_flags, This->frame_cnt, 0, 0, NULL, NULL);
    if (res != ICERR_OK)
    {
        WARN("ICCompress failed: %d\n", res);
        IMediaSample_Release(out_sample);
        return E_FAIL;
    }

    IMediaSample_SetActualDataLength(out_sample, This->videoinfo->bmiHeader.biSizeImage);
    IMediaSample_SetPreroll(out_sample, is_preroll);
    IMediaSample_SetSyncPoint(out_sample, (comp_flags & AVIIF_KEYFRAME) != 0);
    IMediaSample_SetDiscontinuity(out_sample, (IMediaSample_IsDiscontinuity(pSample) == S_OK));

    if (IMediaSample_GetMediaTime(pSample, &start, &stop) == S_OK)
        IMediaSample_SetMediaTime(out_sample, &start, &stop);
    else
        IMediaSample_SetMediaTime(out_sample, NULL, NULL);

    hres = BaseOutputPinImpl_Deliver(This->out, out_sample);
    if (FAILED(hres))
        WARN("Deliver failed: %08x\n", hres);

    IMediaSample_Release(out_sample);
    This->frame_cnt++;
    return hres;
}
static DWORD WINAPI ReadThread(LPVOID lParam)
{
    Capture * capBox = lParam;
    HRESULT hr;
    IMediaSample *pSample = NULL;
    unsigned long framecount = 0;
    unsigned char *pTarget, *pInput, *pOutput;

    hr = V4l_Prepare(capBox);
    if (FAILED(hr))
        goto fail;

    pOutput = CoTaskMemAlloc(capBox->width * capBox->height * capBox->bitDepth / 8);
    capBox->curframe = 0;
    do {
        V4l_FreeFrame(capBox);
    } while (capBox->curframe != 0);

    while (1)
    {
        EnterCriticalSection(&capBox->CritSect);
        if (capBox->stopped)
            break;
        hr = OutputPin_GetDeliveryBuffer((OutputPin *)capBox->pOut, &pSample, NULL, NULL, 0);
        if (SUCCEEDED(hr))
        {
            int len;

            if (!capBox->swresize)
                len = capBox->height * capBox->width * capBox->bitDepth / 8;
            else
                len = capBox->outputheight * capBox->outputwidth * capBox->bitDepth / 8;
            IMediaSample_SetActualDataLength(pSample, len);

            len = IMediaSample_GetActualDataLength(pSample);
            TRACE("Data length: %d KB\n", len / 1024);

            IMediaSample_GetPointer(pSample, &pTarget);
            /* FIXME: Check return values.. */
            V4l_GetFrame(capBox, &pInput);
            capBox->renderer(capBox, pOutput, pInput);
            Resize(capBox, pTarget, pOutput);
            hr = OutputPin_SendSample((OutputPin *)capBox->pOut, pSample);
            TRACE("%p -> Frame %lu: %x\n", capBox, ++framecount, hr);
            IMediaSample_Release(pSample);
            V4l_FreeFrame(capBox);
        }
        LeaveCriticalSection(&capBox->CritSect);
        if (FAILED(hr) && hr != VFW_E_NOT_CONNECTED)
        {
            ERR("Received error: %x\n", hr);
            goto cfail;
        }
    }
    LeaveCriticalSection(&capBox->CritSect);
    CoTaskMemFree(pOutput);

    return 0;

cfail:
    CoTaskMemFree(pOutput);
    V4l_Unprepare(capBox);
    LeaveCriticalSection(&capBox->CritSect);

fail:
    capBox->thread = 0;
    capBox->stopped = 1;
    FIXME("Stop IFilterGraph\n");
    return 0;
}
static HRESULT WINAPI AVIDec_Receive(TransformFilter *tf, IMediaSample *pSample)
{
    AVIDecImpl* This = (AVIDecImpl *)tf;
    AM_MEDIA_TYPE amt;
    HRESULT hr;
    DWORD res;
    IMediaSample* pOutSample = NULL;
    DWORD cbDstStream;
    LPBYTE pbDstStream;
    DWORD cbSrcStream;
    LPBYTE pbSrcStream;
    LONGLONG tStart, tStop;
    DWORD flags = 0;

    EnterCriticalSection(&This->tf.csReceive);

    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        goto error;
    }

    cbSrcStream = IMediaSample_GetActualDataLength(pSample);

    TRACE("Sample data ptr = %p, size = %d\n", pbSrcStream, cbSrcStream);

    hr = IPin_ConnectionMediaType(This->tf.ppPins[0], &amt);
    if (FAILED(hr))
    {
        ERR("Unable to retrieve media type\n");
        goto error;
    }

    /* Update input size to match sample size */
    This->pBihIn->biSizeImage = cbSrcStream;

    hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &pOutSample, NULL, NULL, 0);
    if (FAILED(hr))
    {
        ERR("Unable to get delivery buffer (%x)\n", hr);
        goto error;
    }

    hr = IMediaSample_SetActualDataLength(pOutSample, 0);
    assert(hr == S_OK);

    hr = IMediaSample_GetPointer(pOutSample, &pbDstStream);
    if (FAILED(hr))
    {
        ERR("Unable to get pointer to buffer (%x)\n", hr);
        goto error;
    }
    cbDstStream = IMediaSample_GetSize(pOutSample);
    if (cbDstStream < This->pBihOut->biSizeImage)
    {
        ERR("Sample size is too small %d < %d\n", cbDstStream, This->pBihOut->biSizeImage);
        hr = E_FAIL;
        goto error;
    }

    if (IMediaSample_IsPreroll(pSample) == S_OK)
        flags |= ICDECOMPRESS_PREROLL;
    if (IMediaSample_IsSyncPoint(pSample) != S_OK)
        flags |= ICDECOMPRESS_NOTKEYFRAME;
    hr = IMediaSample_GetTime(pSample, &tStart, &tStop);
    if (hr == S_OK && AVIDec_DropSample(This, tStart))
        flags |= ICDECOMPRESS_HURRYUP;

    res = ICDecompress(This->hvid, flags, This->pBihIn, pbSrcStream, This->pBihOut, pbDstStream);
    if (res != ICERR_OK)
        ERR("Error occurred during the decompression (%x)\n", res);

    /* Drop the sample if it's intended to be dropped */
    if (flags & ICDECOMPRESS_HURRYUP)
    {
        hr = S_OK;
        goto error;
    }

    IMediaSample_SetActualDataLength(pOutSample, This->pBihOut->biSizeImage);

    IMediaSample_SetPreroll(pOutSample, (IMediaSample_IsPreroll(pSample) == S_OK));
    IMediaSample_SetDiscontinuity(pOutSample, (IMediaSample_IsDiscontinuity(pSample) == S_OK));
    IMediaSample_SetSyncPoint(pOutSample, (IMediaSample_IsSyncPoint(pSample) == S_OK));

    if (hr == S_OK)
        IMediaSample_SetTime(pOutSample, &tStart, &tStop);
    else if (hr == VFW_S_NO_STOP_TIME)
        IMediaSample_SetTime(pOutSample, &tStart, NULL);
    else
        IMediaSample_SetTime(pOutSample, NULL, NULL);

    if (IMediaSample_GetMediaTime(pSample, &tStart, &tStop) == S_OK)
        IMediaSample_SetMediaTime(pOutSample, &tStart, &tStop);
    else
        IMediaSample_SetMediaTime(pOutSample, NULL, NULL);

    LeaveCriticalSection(&This->tf.csReceive);
    hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], pOutSample);
    EnterCriticalSection(&This->tf.csReceive);

    if (hr != S_OK && hr != VFW_E_NOT_CONNECTED)
        ERR("Error sending sample (%x)\n", hr);

error:
    if (pOutSample)
        IMediaSample_Release(pOutSample);

    LeaveCriticalSection(&This->tf.csReceive);
    return hr;
}
static HRESULT WINAPI ACMWrapper_Receive(TransformFilter *tf, IMediaSample *pSample)
{
    ACMWrapperImpl* This = (ACMWrapperImpl*)tf;
    AM_MEDIA_TYPE amt;
    IMediaSample* pOutSample = NULL;
    DWORD cbDstStream, cbSrcStream;
    LPBYTE pbDstStream;
    LPBYTE pbSrcStream = NULL;
    ACMSTREAMHEADER ash;
    BOOL unprepare_header = FALSE, preroll;
    MMRESULT res;
    HRESULT hr;
    LONGLONG tStart = -1, tStop = -1, tMed;

    EnterCriticalSection(&This->tf.filter.csFilter);
    hr = IMediaSample_GetPointer(pSample, &pbSrcStream);
    if (FAILED(hr))
    {
        ERR("Cannot get pointer to sample data (%x)\n", hr);
        LeaveCriticalSection(&This->tf.filter.csFilter);
        return hr;
    }

    preroll = (IMediaSample_IsPreroll(pSample) == S_OK);

    IMediaSample_GetTime(pSample, &tStart, &tStop);
    cbSrcStream = IMediaSample_GetActualDataLength(pSample);

    /* Prevent discontinuities when codecs 'absorb' data but don't give anything back in return */
    if (IMediaSample_IsDiscontinuity(pSample) == S_OK)
    {
        This->lasttime_real = tStart;
        This->lasttime_sent = tStart;
    }
    else if (This->lasttime_real == tStart)
        tStart = This->lasttime_sent;
    else
        WARN("Discontinuity\n");

    tMed = tStart;

    TRACE("Sample data ptr = %p, size = %d\n", pbSrcStream, cbSrcStream);

    hr = IPin_ConnectionMediaType(This->tf.ppPins[0], &amt);
    if (FAILED(hr))
    {
        ERR("Unable to retrieve media type\n");
        LeaveCriticalSection(&This->tf.filter.csFilter);
        return hr;
    }

    ash.pbSrc = pbSrcStream;
    ash.cbSrcLength = cbSrcStream;

    while (hr == S_OK && ash.cbSrcLength)
    {
        hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &pOutSample, NULL, NULL, 0);
        if (FAILED(hr))
        {
            ERR("Unable to get delivery buffer (%x)\n", hr);
            LeaveCriticalSection(&This->tf.filter.csFilter);
            return hr;
        }
        IMediaSample_SetPreroll(pOutSample, preroll);

        hr = IMediaSample_SetActualDataLength(pOutSample, 0);
        assert(hr == S_OK);

        hr = IMediaSample_GetPointer(pOutSample, &pbDstStream);
        if (FAILED(hr))
        {
            ERR("Unable to get pointer to buffer (%x)\n", hr);
            goto error;
        }
        cbDstStream = IMediaSample_GetSize(pOutSample);

        ash.cbStruct = sizeof(ash);
        ash.fdwStatus = 0;
        ash.dwUser = 0;
        ash.pbDst = pbDstStream;
        ash.cbDstLength = cbDstStream;

        if ((res = acmStreamPrepareHeader(This->has, &ash, 0)))
        {
            ERR("Cannot prepare header %d\n", res);
            goto error;
        }
        unprepare_header = TRUE;

        if (IMediaSample_IsDiscontinuity(pSample) == S_OK)
        {
            res = acmStreamConvert(This->has, &ash, ACM_STREAMCONVERTF_START);
            IMediaSample_SetDiscontinuity(pOutSample, TRUE);
            /* One sample could be converted to multiple packets */
            IMediaSample_SetDiscontinuity(pSample, FALSE);
        }
        else
        {
            res = acmStreamConvert(This->has, &ash, 0);
            IMediaSample_SetDiscontinuity(pOutSample, FALSE);
        }

        if (res)
        {
            if (res != MMSYSERR_MOREDATA)
                ERR("Cannot convert data header %d\n", res);
            goto error;
        }

        TRACE("used in %u/%u, used out %u/%u\n", ash.cbSrcLengthUsed, ash.cbSrcLength,
              ash.cbDstLengthUsed, ash.cbDstLength);

        hr = IMediaSample_SetActualDataLength(pOutSample, ash.cbDstLengthUsed);
        assert(hr == S_OK);

        /* Bug in acm codecs? It apparently uses the input, but doesn't necessarily output immediately */
        if (!ash.cbSrcLengthUsed)
        {
            WARN("Sample was skipped? Outputted: %u\n", ash.cbDstLengthUsed);
            ash.cbSrcLength = 0;
            goto error;
        }

        TRACE("Sample start time: %u.%03u\n", (DWORD)(tStart/10000000), (DWORD)((tStart/10000)%1000));
        if (ash.cbSrcLengthUsed == cbSrcStream)
        {
            IMediaSample_SetTime(pOutSample, &tStart, &tStop);
            tStart = tMed = tStop;
        }
        else if (tStop != tStart)
        {
            tMed = tStop - tStart;
            tMed = tStart + tMed * ash.cbSrcLengthUsed / cbSrcStream;
            IMediaSample_SetTime(pOutSample, &tStart, &tMed);
            tStart = tMed;
        }
        else
        {
            ERR("No valid timestamp found\n");
            IMediaSample_SetTime(pOutSample, NULL, NULL);
        }
        TRACE("Sample stop time: %u.%03u\n", (DWORD)(tStart/10000000), (DWORD)((tStart/10000)%1000));

        LeaveCriticalSection(&This->tf.filter.csFilter);
        hr = BaseOutputPinImpl_Deliver((BaseOutputPin*)This->tf.ppPins[1], pOutSample);
        EnterCriticalSection(&This->tf.filter.csFilter);

        if (hr != S_OK && hr != VFW_E_NOT_CONNECTED)
        {
            if (FAILED(hr))
                ERR("Error sending sample (%x)\n", hr);
            goto error;
        }

error:
        if (unprepare_header && (res = acmStreamUnprepareHeader(This->has, &ash, 0)))
            ERR("Cannot unprepare header %d\n", res);
        unprepare_header = FALSE;
        ash.pbSrc += ash.cbSrcLengthUsed;
        ash.cbSrcLength -= ash.cbSrcLengthUsed;

        if (pOutSample)
            IMediaSample_Release(pOutSample);
        pOutSample = NULL;
    }

    This->lasttime_real = tStop;
    This->lasttime_sent = tMed;

    LeaveCriticalSection(&This->tf.filter.csFilter);
    return hr;
}
static DWORD WINAPI QTSplitter_thread(LPVOID data)
{
    QTSplitter *This = (QTSplitter *)data;
    HRESULT hr = S_OK;
    TimeValue next_time;
    CVPixelBufferRef pixelBuffer = NULL;
    OSStatus err;
    TimeRecord tr;

    WaitForSingleObject(This->runEvent, INFINITE);

    EnterCriticalSection(&This->csReceive);
    if (!This->pQTMovie)
    {
        LeaveCriticalSection(&This->csReceive);
        return 0;
    }

    This->state = State_Running;
    /* Prime the pump: Needed for MPEG streams */
    GetMovieNextInterestingTime(This->pQTMovie, nextTimeEdgeOK | nextTimeStep, 0, NULL,
            This->movie_time, 1, &next_time, NULL);

    GetMovieTime(This->pQTMovie, &tr);

    if (This->pAudio_Pin)
        QT_Create_Extract_Session(This);

    LeaveCriticalSection(&This->csReceive);

    do
    {
        LONGLONG tStart = 0, tStop = 0;
        LONGLONG mStart = 0, mStop = 0;
        float time;

        EnterCriticalSection(&This->csReceive);
        if (!This->pQTMovie)
        {
            LeaveCriticalSection(&This->csReceive);
            return 0;
        }

        GetMovieNextInterestingTime(This->pQTMovie, nextTimeStep, 0, NULL, This->movie_time, 1, &next_time, NULL);

        if (next_time == -1)
        {
            TRACE("No next time\n");
            LeaveCriticalSection(&This->csReceive);
            break;
        }

        tr.value = SInt64ToWide(next_time);
        SetMovieTime(This->pQTMovie, &tr);
        MoviesTask(This->pQTMovie, 0);
        QTVisualContextTask(This->vContext);

        TRACE("In loop at time %ld\n", This->movie_time);
        TRACE("In Next time %ld\n", next_time);

        mStart = This->movie_time;
        mStop = next_time;

        time = (float)(This->movie_time - This->movie_start) / This->movie_scale;
        tStart = time * 10000000;
        time = (float)(next_time - This->movie_start) / This->movie_scale;
        tStop = time * 10000000;

        /* Deliver Audio */
        if (This->pAudio_Pin && This->pAudio_Pin->pin.pin.pConnectedTo && This->aSession)
        {
            int data_size = 0;
            BYTE *ptr;
            IMediaSample *sample = NULL;
            AudioBufferList aData;
            UInt32 flags;
            UInt32 frames;
            WAVEFORMATEX *pvi;
            float duration;

            pvi = (WAVEFORMATEX*)This->pAudio_Pin->pmt->pbFormat;

            hr = BaseOutputPinImpl_GetDeliveryBuffer(&This->pAudio_Pin->pin, &sample, NULL, NULL, 0);
            if (FAILED(hr))
            {
                ERR("Audio: Unable to get delivery buffer (%x)\n", hr);
                goto audio_error;
            }

            hr = IMediaSample_GetPointer(sample, &ptr);
            if (FAILED(hr))
            {
                ERR("Audio: Unable to get pointer to buffer (%x)\n", hr);
                goto audio_error;
            }

            duration = (float)next_time / This->movie_scale;
            time = (float)This->movie_time / This->movie_scale;
            duration -= time;
            frames = pvi->nSamplesPerSec * duration;
            TRACE("Need audio for %f seconds (%li frames)\n", duration, frames);

            data_size = IMediaSample_GetSize(sample);
            if (data_size < frames * pvi->nBlockAlign)
                FIXME("Audio buffer is too small\n");

            aData.mNumberBuffers = 1;
            aData.mBuffers[0].mNumberChannels = pvi->nChannels;
            aData.mBuffers[0].mDataByteSize = data_size;
            aData.mBuffers[0].mData = ptr;

            err = MovieAudioExtractionFillBuffer(This->aSession, &frames, &aData, &flags);
            if (frames == 0)
            {
                TimeRecord etr;

                /* Ran out of frames, restart the extraction session */
                TRACE("Restarting extraction session\n");
                MovieAudioExtractionEnd(This->aSession);
                This->aSession = NULL;
                QT_Create_Extract_Session(This);

                etr = tr;
                etr.value = SInt64ToWide(This->movie_time);
                MovieAudioExtractionSetProperty(This->aSession,
                        kQTPropertyClass_MovieAudioExtraction_Movie,
                        kQTMovieAudioExtractionMoviePropertyID_CurrentTime,
                        sizeof(TimeRecord), &etr);

                frames = pvi->nSamplesPerSec * duration;
                aData.mNumberBuffers = 1;
                aData.mBuffers[0].mNumberChannels = pvi->nChannels;
                aData.mBuffers[0].mDataByteSize = data_size;
                aData.mBuffers[0].mData = ptr;

                MovieAudioExtractionFillBuffer(This->aSession, &frames, &aData, &flags);
            }

            TRACE("Got %i frames\n", (int)frames);

            IMediaSample_SetActualDataLength(sample, frames * pvi->nBlockAlign);

            IMediaSample_SetMediaTime(sample, &mStart, &mStop);
            IMediaSample_SetTime(sample, &tStart, &tStop);

            hr = OutputQueue_Receive(This->pAudio_Pin->queue, sample);
            TRACE("Audio Delivered (%x)\n", hr);

audio_error:
            if (sample)
                IMediaSample_Release(sample);
        }
        else
            TRACE("Audio Pin not connected or no Audio\n");

        /* Deliver Video */
        if (This->pVideo_Pin && QTVisualContextIsNewImageAvailable(This->vContext, 0))
        {
            err = QTVisualContextCopyImageForTime(This->vContext, NULL, NULL, &pixelBuffer);
            if (err == noErr)
            {
                int data_size = 0;
                BYTE *ptr;
                IMediaSample *sample = NULL;

                hr = BaseOutputPinImpl_GetDeliveryBuffer(&This->pVideo_Pin->pin, &sample, NULL, NULL, 0);
                if (FAILED(hr))
                {
                    ERR("Video: Unable to get delivery buffer (%x)\n", hr);
                    goto video_error;
                }

                data_size = IMediaSample_GetSize(sample);
                if (data_size < This->outputSize)
                {
                    ERR("Sample size is too small %d < %d\n", data_size, This->outputSize);
                    hr = E_FAIL;
                    goto video_error;
                }

                hr = IMediaSample_GetPointer(sample, &ptr);
                if (FAILED(hr))
                {
                    ERR("Video: Unable to get pointer to buffer (%x)\n", hr);
                    goto video_error;
                }

                hr = AccessPixelBufferPixels(pixelBuffer, ptr);
                if (FAILED(hr))
                {
                    ERR("Failed to access Pixels\n");
                    goto video_error;
                }

                IMediaSample_SetActualDataLength(sample, This->outputSize);
                IMediaSample_SetMediaTime(sample, &mStart, &mStop);
                IMediaSample_SetTime(sample, &tStart, &tStop);

                hr = OutputQueue_Receive(This->pVideo_Pin->queue, sample);
                TRACE("Video Delivered (%x)\n", hr);

video_error:
                if (sample)
                    IMediaSample_Release(sample);
                if (pixelBuffer)
                    CVPixelBufferRelease(pixelBuffer);
            }
        }
        else
            TRACE("No video to deliver\n");

        This->movie_time = next_time;
        LeaveCriticalSection(&This->csReceive);
    } while (hr == S_OK);

    This->state = State_Stopped;
    if (This->pAudio_Pin)
        OutputQueue_EOS(This->pAudio_Pin->queue);
    if (This->pVideo_Pin)
        OutputQueue_EOS(This->pVideo_Pin->queue);

    return hr;
}
static HRESULT FFMVWrapper_ProcessReceive( CTransformBaseImpl* pImpl, IMediaSample* pSampIn )
{
    CFFMVWrapperImpl* This = pImpl->m_pUserData;
    BYTE* pDataIn = NULL;
    LONG lDataInLen;
    IMediaSample* pSampOut = NULL;
    BYTE* pOutBuf;
    HRESULT hr;
    AVFrame tmp_pic;
    AVPicture dst_pic;
    int nOut, got_pic;
    LONG width, height;
    REFERENCE_TIME rtStart, rtStop, rtNow;
    BOOL skip;

    TRACE("(%p)\n",This);

    if ( This == NULL || !This->ctx.codec ||
         This->m_pbiIn == NULL || This->m_pbiOut == NULL )
        return E_UNEXPECTED;

    hr = IMediaSample_GetPointer( pSampIn, &pDataIn );
    if ( FAILED(hr) )
        return hr;
    lDataInLen = IMediaSample_GetActualDataLength( pSampIn );
    if ( lDataInLen < 0 )
        return E_FAIL;

    EnterCriticalSection( &This->m_cs );

    if ( !This->ctx.codec )
    {
        hr = E_UNEXPECTED;
        goto failed;
    }

    if ( IMediaSample_IsDiscontinuity( pSampIn ) == S_OK )
        avcodec_flush_buffers( &This->ctx );

    width = This->m_pbiIn->bmiHeader.biWidth;
    height = (This->m_pbiIn->bmiHeader.biHeight < 0) ?
        -This->m_pbiIn->bmiHeader.biHeight : This->m_pbiIn->bmiHeader.biHeight;

    while ( TRUE )
    {
        nOut = avcodec_decode_video( &This->ctx, &tmp_pic, &got_pic,
                                     (void*)pDataIn, lDataInLen );
        if ( nOut < 0 )
        {
            TRACE("decoding error\n");
            goto fail;
        }

        TRACE("used %d of %d bytes\n", nOut, lDataInLen);

        if ( nOut > lDataInLen )
        {
            WARN("arrgh - FFmpeg read too much\n");
            nOut = lDataInLen;
        }

        pDataIn += nOut;
        lDataInLen -= nOut;

        if ( !got_pic )
        {
            TRACE("no frame decoded\n");
            if ( lDataInLen )
                continue;
            LeaveCriticalSection( &This->m_cs );
            return S_OK;
        }

        TRACE("frame decoded\n");
        This->rtInternal ++;
        hr = IMediaSample_GetTime( pSampIn, &rtStart, &rtStop );
        if ( hr == S_OK )
        {
            /* if the parser gives us a timestamp, the data
             * we got from it should be a single frame */
            if ( lDataInLen )
            {
                ERR("excessive data in compressed frame\n");
                lDataInLen = 0;
            }
        }
        else
        {
            /* compute our own timestamp */
            rtStart = This->rtCur;
            This->rtCur = This->rtInternal * (REFERENCE_TIME)QUARTZ_TIMEUNITS *
                This->ctx.frame_rate_base / This->ctx.frame_rate;
            rtStop = This->rtCur;
        }

        TRACE("frame start=%lld, stop=%lld\n", rtStart, rtStop);

        skip = FALSE;
        hr = IReferenceClock_GetTime(pImpl->basefilter.pClock, &rtNow);
        if (SUCCEEDED(hr))
        {
            rtNow -= pImpl->basefilter.rtStart;
            TRACE("time=%lld\n", rtNow);
            if (rtStart < rtNow + SKIP_TIME)
            {
                skip = TRUE;
                if ( ++This->skipFrames >= MAX_SKIP )
                {
                    This->skipFrames = 0;
                    TRACE("frame late, but max skip exceeded\n");
                    skip = FALSE;
                }
            }
        }

        if (skip)
        {
            TRACE("skipping late frame\n");
            if ( lDataInLen == 0 )
            {
                LeaveCriticalSection( &This->m_cs );
                return S_OK;
            }
        }
        else
        {
            /* process frame */
            hr = IMemAllocator_GetBuffer( pImpl->m_pOutPinAllocator, &pSampOut, &rtStart, &rtStop, 0 );
            if ( FAILED(hr) )
                goto failed;
            hr = IMediaSample_GetPointer( pSampOut, &pOutBuf );
            if ( FAILED(hr) )
                goto failed;

            dst_pic.data[0] = ( This->m_pOutBuf != NULL ) ? This->m_pOutBuf : pOutBuf;
            dst_pic.linesize[0] = DIBWIDTHBYTES(This->m_pbiOut->bmiHeader);

            /* convert to RGB (or BGR) */
            switch (This->m_pbiOut->bmiHeader.biBitCount)
            {
            case 24:
                img_convert( &dst_pic, PIX_FMT_BGR24,
                             (AVPicture*)&tmp_pic, This->ctx.pix_fmt,
                             width, height );
                break;
            case 32:
                /* RGBA32 is misnamed (it's actually cpu-endian ARGB, which means BGRA
                 * on x86); it might get renamed in future ffmpeg snapshots */
                img_convert( &dst_pic, PIX_FMT_RGBA32,
                             (AVPicture*)&tmp_pic, This->ctx.pix_fmt,
                             width, height );
                break;
            default:
                TRACE("bad bpp\n");
                goto fail;
            }

            if ( This->m_pOutBuf != NULL )
                memcpy( pOutBuf, This->m_pOutBuf,
                        This->m_pbiOut->bmiHeader.biSizeImage );

            IMediaSample_SetActualDataLength( pSampOut, This->m_pbiOut->bmiHeader.biSizeImage );

            /* FIXME: discontinuity and sync point */

            LeaveCriticalSection( &This->m_cs );

            hr = CPinBaseImpl_SendSample( &pImpl->pOutPin->pin, pSampOut );
            /* release the sample even if delivery failed, so it is not leaked */
            IMediaSample_Release( pSampOut );
            pSampOut = NULL;
            if ( FAILED(hr) )
                return hr;

            if ( lDataInLen == 0 )
                return S_OK;

            EnterCriticalSection( &This->m_cs );

            if ( !This->ctx.codec )
            {
                hr = E_UNEXPECTED;
                goto failed;
            }
        }
    }

fail:
    hr = E_FAIL;
failed:
    LeaveCriticalSection( &This->m_cs );
    return hr;
}