// Sets the playback rate of the source. Thinning is not supported; the
// requested rate is snapped to the nearest supported rate, and the actual
// rate change is performed asynchronously via the operation queue.
IFACEMETHODIMP CGeometricMediaSource::SetRate(BOOL fThin, float flRate)
{
    // This source cannot drop samples to keep up, so reject thinned playback.
    if (fThin)
    {
        return MF_E_THINNING_UNSUPPORTED;
    }

    // IsRateSupported overwrites flRate with the nearest supported rate.
    if (!IsRateSupported(flRate, &flRate))
    {
        return MF_E_UNSUPPORTED_RATE;
    }

    AutoLock lock(_critSec);

    HRESULT hr = S_OK;

    // Already at the requested rate - nothing to queue.
    if (flRate == _flRate)
    {
        return S_OK;
    }

    ComPtr<CSourceOperation> spSetRateOp;
    // Fix: use nothrow new so the null check below is reachable (a throwing
    // operator new never returns nullptr). This also matches the allocation
    // style used by the other operations in this file.
    spSetRateOp.Attach(new (std::nothrow) CSetRateOperation(fThin, flRate));
    if (!spSetRateOp)
    {
        hr = E_OUTOFMEMORY;
    }

    if (SUCCEEDED(hr))
    {
        // Queue asynchronous set-rate operation.
        hr = QueueOperation(spSetRateOp.Get());
    }

    return hr;
}
// Creates and initializes a CMediaStream and hands ownership to the caller
// through ppStream. pAttributesBuffer may be null; the other pointers are
// required. Returns E_INVALIDARG for missing arguments, E_OUTOFMEMORY on
// allocation failure, or the HRESULT of any exception thrown by Initialize.
HRESULT CMediaStream::CreateInstance(StspStreamDescription *pStreamDescription, IBufferPacket *pAttributesBuffer, CMediaSource *pSource, CMediaStream **ppStream)
{
    // Validate the required pointers up front.
    if (pStreamDescription == nullptr || pSource == nullptr || ppStream == nullptr)
    {
        return E_INVALIDARG;
    }

    HRESULT hr = S_OK;

    try
    {
        ComPtr<CMediaStream> spNewStream;
        spNewStream.Attach(new (std::nothrow) CMediaStream(pSource));
        if (!spNewStream)
        {
            Throw(E_OUTOFMEMORY);
        }

        // Initialize may throw; on success, transfer ownership to the caller.
        spNewStream->Initialize(pStreamDescription, pAttributesBuffer);

        *ppStream = spNewStream.Detach();
    }
    catch (Exception ^exc)
    {
        hr = exc->HResult;
    }

    TRACEHR_RET(hr);
}
// Factory: allocates, initializes, and returns a new CGeometricMediaSource.
// Throws OutOfMemoryException when the allocation fails; Initialize may
// throw its own exceptions.
ComPtr<CGeometricMediaSource> CGeometricMediaSource::CreateInstance()
{
    // Allocate the raw object first, then let ComPtr adopt the reference.
    CGeometricMediaSource *pRaw = new (std::nothrow) CGeometricMediaSource();
    if (pRaw == nullptr)
    {
        throw ref new OutOfMemoryException();
    }

    ComPtr<CGeometricMediaSource> spResult;
    spResult.Attach(pRaw);

    spResult->Initialize();

    return spResult;
}
// Factory: allocates an HDMediaSource with the given queue size and wraps it
// in a ComPtr. Returns an empty ComPtr when the allocation fails.
ComPtr<HDMediaSource> HDMediaSource::CreateMediaSource(int QueueSize)
{
    ComPtr<HDMediaSource> spSource;

    HDMediaSource *pRaw = new (std::nothrow) HDMediaSource(QueueSize);
    if (pRaw == nullptr)
    {
        // Allocation failed - hand back an empty pointer.
        return spSource;
    }

    // NOTE(review): AddRef before Attach suggests the object is constructed
    // with an initial refcount of 0 - confirm against HDMediaSource's ctor,
    // since Attach itself does not add a reference.
    pRaw->AddRef();
    spSource.Attach(pRaw);

    return spSource;
}
// Creates a CMarker of the given type and copies the optional marker and
// context PROPVARIANTs into it. On success, *ppMarker receives an owning
// reference. Returns E_POINTER for a null out-pointer, E_OUTOFMEMORY on
// allocation failure, or the failure code from PropVariantCopy.
/* static */ HRESULT CMarker::Create(
    MFSTREAMSINK_MARKER_TYPE eMarkerType,
    const PROPVARIANT *pvarMarkerValue,   // Can be NULL.
    const PROPVARIANT *pvarContextValue,  // Can be NULL.
    IMarker **ppMarker
    )
{
    if (ppMarker == nullptr)
    {
        return E_POINTER;
    }

    ComPtr<CMarker> spNewMarker;
    spNewMarker.Attach(new (std::nothrow) CMarker(eMarkerType));
    if (spNewMarker == nullptr)
    {
        return E_OUTOFMEMORY;
    }

    HRESULT hr = S_OK;

    // Copy the optional marker payload; bail out on failure (the marker is
    // released automatically by the ComPtr).
    if (pvarMarkerValue != nullptr)
    {
        hr = PropVariantCopy(&spNewMarker->_varMarkerValue, pvarMarkerValue);
        if (FAILED(hr))
        {
            return hr;
        }
    }

    // Copy the optional context payload.
    if (pvarContextValue != nullptr)
    {
        hr = PropVariantCopy(&spNewMarker->_varContextValue, pvarContextValue);
        if (FAILED(hr))
        {
            return hr;
        }
    }

    // Transfer ownership to the caller and propagate the last HRESULT.
    *ppMarker = spNewMarker.Detach();
    return hr;
}
// Stops the source asynchronously by queuing a stop operation.
// Returns E_OUTOFMEMORY when the operation cannot be allocated, otherwise
// the result of queuing the operation.
IFACEMETHODIMP CGeometricMediaSource::Stop()
{
    // Build the stop operation up front and fail fast on allocation failure.
    ComPtr<CSourceOperation> spStopOperation;
    spStopOperation.Attach(new (std::nothrow) CSourceOperation(CSourceOperation::Operation_Stop));
    if (!spStopOperation)
    {
        return E_OUTOFMEMORY;
    }

    // Queue asynchronous stop.
    return QueueOperation(spStopOperation.Get());
}
// Creates a CBufferPacket and returns it through ppBufferPacket as an
// owning IBufferPacket reference. Returns E_INVALIDARG for a null
// out-pointer and E_OUTOFMEMORY when allocation fails.
HRESULT CBufferPacket::CreateInstance(IBufferPacket **ppBufferPacket)
{
    if (ppBufferPacket == nullptr)
    {
        return E_INVALIDARG;
    }

    ComPtr<IBufferPacket> spPacket;
    HRESULT hr = S_OK;

    // Fix: use nothrow new so the null check below is reachable (a throwing
    // operator new never returns nullptr), matching the allocation style
    // used elsewhere in this file.
    spPacket.Attach(new (std::nothrow) CBufferPacket());
    if (!spPacket)
    {
        hr = E_OUTOFMEMORY;
    }

    if (SUCCEEDED(hr))
    {
        // Transfer ownership to the caller.
        *ppBufferPacket = spPacket.Detach();
    }

    TRACEHR_RET(hr);
}
// Renders one video frame through the D3D11 video processor into `target`.
//
// `views` layout (established by the index loops below): index 2 is the
// current frame, indices 0-1 are future frames, indices 3-7 are past frames
// used for deinterlacing/rate conversion.
//
// Parameters:
//   src/dst      - source and destination rectangles.
//   target       - output resource the processed frame is written to.
//   views        - ring of render buffers around the current frame (see above).
//   flags        - RENDER_FLAG_* bits selecting field/ordering behavior.
//   frameIdx     - input frame/field counter passed to the processor.
//   rotation     - rotation in degrees (multiples of 90).
//   contrast/brightness - filter values on a 0-100 scale, 50 = default.
//
// Returns false when the processor is unavailable, the view set is
// incomplete, or a D3D call fails.
bool CProcessorHD::Render(CRect src, CRect dst, ID3D11Resource* target, CRenderBuffer** views, DWORD flags, UINT frameIdx, UINT rotation, float contrast, float brightness)
{
  HRESULT hr;
  CSingleLock lock(m_section);

  // restore processor if it was lost
  if (!m_pVideoProcessor && !OpenProcessor())
    return false;

  // The current frame (index 2) is mandatory.
  if (!views[2])
    return false;

  RECT sourceRECT = { src.x1, src.y1, src.x2, src.y2 };
  RECT dstRECT = { dst.x1, dst.y1, dst.x2, dst.y2 };

  D3D11_VIDEO_FRAME_FORMAT dxvaFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;

  // Count how many past (indices 3..7) and future (indices 1..0) reference
  // frames the caller actually provided.
  unsigned int providedPast = 0;
  for (int i = 3; i < 8; i++)
  {
    if (views[i])
      providedPast++;
  }
  unsigned int providedFuture = 0;
  for (int i = 1; i >= 0; i--)
  {
    if (views[i])
      providedFuture++;
  }
  // Clamp to what the processor's rate-conversion caps can consume.
  int futureFrames = std::min(providedFuture, m_rateCaps.FutureFrames);
  int pastFrames = std::min(providedPast, m_rateCaps.PastFrames);

  D3D11_VIDEO_PROCESSOR_STREAM stream_data = { 0 };
  stream_data.Enable = TRUE;
  stream_data.PastFrames = pastFrames;
  stream_data.FutureFrames = futureFrames;
  // Raw arrays handed to the stream descriptor; presumably freed by
  // ReleaseStream, which is called on every exit path below - TODO confirm.
  if (pastFrames)
    stream_data.ppPastSurfaces = new ID3D11VideoProcessorInputView*[pastFrames];
  if (futureFrames)
    stream_data.ppFutureSurfaces = new ID3D11VideoProcessorInputView*[futureFrames];

  // comViews keeps the input views alive until the Blt below completes.
  std::vector<ComPtr<ID3D11VideoProcessorInputView>> comViews;
  int start = 2 - futureFrames;
  int end = 2 + pastFrames;
  int count = 0;

  // Walk the window [current-futureFrames, current+pastFrames] and slot each
  // view into the right place in the stream descriptor.
  for (int i = start; i <= end; i++)
  {
    if (!views[i])
      continue;

    ComPtr<ID3D11VideoProcessorInputView> view;
    view.Attach(GetInputView(views[i]));

    if (i > 2)
    {
      // frames order should be { ?, T-3, T-2, T-1 }
      stream_data.ppPastSurfaces[2 + pastFrames - i] = view.Get();
    }
    else if (i == 2)
    {
      stream_data.pInputSurface = view.Get();
    }
    else if (i < 2)
    {
      // frames order should be { T+1, T+2, T+3, .. }
      stream_data.ppFutureSurfaces[1 - i] = view.Get();
    }
    if (view)
    {
      count++;
      comViews.push_back(view);
    }
  }

  // GetInputView may have failed for some buffer; bail out rather than hand
  // the processor an incomplete surface set.
  if (count != pastFrames + futureFrames + 1)
  {
    CLog::LogF(LOGERROR, "incomplete views set.");
    ReleaseStream(stream_data);
    return false;
  }

  // Map field/ordering flags onto the DXVA frame format. FIELD0 of a
  // top-field-first frame and FIELD1 of a bottom-field-first frame both mean
  // the stream is top-field-first, and vice versa.
  if (flags & RENDER_FLAG_FIELD0 && flags & RENDER_FLAG_TOP)
    dxvaFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
  else if (flags & RENDER_FLAG_FIELD1 && flags & RENDER_FLAG_BOT)
    dxvaFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
  if (flags & RENDER_FLAG_FIELD0 && flags & RENDER_FLAG_BOT)
    dxvaFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
  if (flags & RENDER_FLAG_FIELD1 && flags & RENDER_FLAG_TOP)
    dxvaFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;

  bool frameProgressive = dxvaFrameFormat == D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;

  // Progressive or Interlaced video at normal rate.
  stream_data.InputFrameOrField = frameIdx;
  // Second field of an interlaced frame selects output index 1.
  stream_data.OutputIndex = flags & RENDER_FLAG_FIELD1 && !frameProgressive ? 1 : 0;

  // input format
  m_pVideoContext->VideoProcessorSetStreamFrameFormat(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, dxvaFrameFormat);
  // Source rect
  m_pVideoContext->VideoProcessorSetStreamSourceRect(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, TRUE, &sourceRECT);
  // Stream dest rect
  m_pVideoContext->VideoProcessorSetStreamDestRect(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, TRUE, &dstRECT);
  // Output rect
  m_pVideoContext->VideoProcessorSetOutputTargetRect(m_pVideoProcessor.Get(), TRUE, &dstRECT);

  // Prefer the ID3D11VideoContext1 color-space API when available; fall back
  // to the legacy D3D11_VIDEO_PROCESSOR_COLOR_SPACE structure otherwise.
  ComPtr<ID3D11VideoContext1> videoCtx1;
  if (SUCCEEDED(m_pVideoContext.As(&videoCtx1)))
  {
    videoCtx1->VideoProcessorSetStreamColorSpace1(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, GetDXGIColorSpace(views[2]));
    // TODO select color space depend on real output format
    DXGI_COLOR_SPACE_TYPE colorSpace = DX::Windowing()->UseLimitedColor() ? DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709 : DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709;
    videoCtx1->VideoProcessorSetOutputColorSpace1(m_pVideoProcessor.Get(), colorSpace);
    // makes target available for processing in shaders
    videoCtx1->VideoProcessorSetOutputShaderUsage(m_pVideoProcessor.Get(), 1);
  }
  else
  {
    // input colorspace
    bool isBT601 = views[2]->color_space == AVCOL_SPC_BT470BG || views[2]->color_space == AVCOL_SPC_SMPTE170M;
    D3D11_VIDEO_PROCESSOR_COLOR_SPACE colorSpace
    {
      0,                            // 0 - Playback, 1 - Processing
      views[2]->full_range ? 0 : 1, // 0 - Full (0-255), 1 - Limited (16-235) (RGB)
      isBT601 ? 1 : 0,              // 0 - BT.601, 1 - BT.709
      0,                            // 0 - Conventional YCbCr, 1 - xvYCC
      views[2]->full_range ? 2 : 1, // 0 - driver defaults, 2 - Full range [0-255], 1 - Studio range [16-235] (YUV)
    };
    m_pVideoContext->VideoProcessorSetStreamColorSpace(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, &colorSpace);
    // Output color space
    // don't apply any color range conversion, this will be fixed at later stage.
    colorSpace.Usage = 0;         // 0 - playback, 1 - video processing
    colorSpace.RGB_Range = DX::Windowing()->UseLimitedColor() ? 1 : 0; // 0 - 0-255, 1 - 16-235
    colorSpace.YCbCr_Matrix = 1;  // 0 - BT.601, 1 = BT.709
    colorSpace.YCbCr_xvYCC = 1;   // 0 - Conventional YCbCr, 1 - xvYCC
    colorSpace.Nominal_Range = 0; // 2 - 0-255, 1 = 16-235, 0 - undefined
    m_pVideoContext->VideoProcessorSetOutputColorSpace(m_pVideoProcessor.Get(), &colorSpace);
  }

  // brightness
  ApplyFilter(D3D11_VIDEO_PROCESSOR_FILTER_BRIGHTNESS, brightness, 0, 100, 50);
  // contrast
  ApplyFilter(D3D11_VIDEO_PROCESSOR_FILTER_CONTRAST, contrast, 0, 100, 50);
  // unused filters - pinned to their defaults (50 on a 0-100 scale).
  ApplyFilter(D3D11_VIDEO_PROCESSOR_FILTER_HUE, 50, 0, 100, 50);
  ApplyFilter(D3D11_VIDEO_PROCESSOR_FILTER_SATURATION, 50, 0, 100, 50);
  // Rotation - degrees mapped to the D3D11 enum (90-degree steps).
  m_pVideoContext->VideoProcessorSetStreamRotation(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, rotation != 0, static_cast<D3D11_VIDEO_PROCESSOR_ROTATION>(rotation / 90));

  // create output view for surface.
  D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC OutputViewDesc = { D3D11_VPOV_DIMENSION_TEXTURE2D, { 0 }};
  ComPtr<ID3D11VideoProcessorOutputView> pOutputView;
  hr = m_pVideoDevice->CreateVideoProcessorOutputView(target, m_pEnumerator.Get(), &OutputViewDesc, pOutputView.GetAddressOf());
  if (S_OK != hr)
    // Non-S_OK success codes are logged as warnings, failures as errors.
    CLog::LogF(FAILED(hr) ? LOGERROR : LOGWARNING, "video device returns result '%x' while creating processor output view.", hr);

  if (SUCCEEDED(hr))
  {
    // Execute the actual processing pass.
    hr = m_pVideoContext->VideoProcessorBlt(m_pVideoProcessor.Get(), pOutputView.Get(), frameIdx, 1, &stream_data);
    if (S_OK != hr)
    {
      CLog::LogF(FAILED(hr) ? LOGERROR : LOGWARNING, "video device returns result '%x' while VideoProcessorBlt execution.", hr);
    }
  }

  // Always release the stream descriptor (and its surface arrays).
  ReleaseStream(stream_data);

  // Non-S_OK success codes still count as success.
  return !FAILED(hr);
}