Пример #1
0
// get_buffer2 callback for libavcodec: supplies a DXVA2 D3D surface for the
// decoder to write the frame into.  Returns 0 on success, -1 on failure;
// unsupported configurations set m_bFailHWDecode so the caller can fall
// back to software decoding.
int CDecDXVA2::get_dxva2_buffer(struct AVCodecContext *c, AVFrame *pic, int flags)
{
  CDecDXVA2 *pDec = (CDecDXVA2 *)c->opaque;
  IMediaSample *pSample = NULL;

  HRESULT hr = S_OK;

  // Reject frames that are not DXVA2 hardware frames, and H.264 profiles
  // the hardware decoder does not support.
  if (pic->format != AV_PIX_FMT_DXVA2_VLD || (c->codec_id == AV_CODEC_ID_H264 && !H264_CHECK_PROFILE(c->profile))) {
    DbgLog((LOG_ERROR, 10, L"DXVA2 buffer request, but not dxva2 pixfmt or unsupported profile"));
    pDec->m_bFailHWDecode = TRUE;
    return -1;
  }

  // (Re-)create decoding resources when there is no decoder yet or the coded
  // dimensions changed; surfaces are allocated at 16-pixel-aligned sizes.
  if (!pDec->m_pDecoder || FFALIGN(c->coded_width, 16) != pDec->m_dwSurfaceWidth || FFALIGN(c->coded_height, 16) != pDec->m_dwSurfaceHeight) {
    DbgLog((LOG_TRACE, 10, L"No DXVA2 Decoder or image dimensions changed -> Re-Allocating resources"));
    if (!pDec->m_pDecoder && pDec->m_bNative && !pDec->m_pDXVA2Allocator) {
      // Native mode requires the allocator to exist before the first decode.
      ASSERT(0);
      hr = E_FAIL;
    } else if (pDec->m_bNative) {
      // Drop libavcodec's references to the old surfaces before they are destroyed.
      avcodec_flush_buffers(c);

      pDec->m_dwSurfaceWidth = FFALIGN(c->coded_width, 16);
      pDec->m_dwSurfaceHeight = FFALIGN(c->coded_height, 16);

      // Re-Commit the allocator (creates surfaces and new decoder)
      hr = pDec->m_pDXVA2Allocator->Decommit();
      if (pDec->m_pDXVA2Allocator->DecommitInProgress()) {
        // Downstream still holds samples; flushing should force their return.
        DbgLog((LOG_TRACE, 10, L"WARNING! DXVA2 Allocator is still busy, trying to flush downstream"));
        pDec->m_pCallback->ReleaseAllDXVAResources();
        pDec->m_pCallback->GetOutputPin()->GetConnected()->BeginFlush();
        pDec->m_pCallback->GetOutputPin()->GetConnected()->EndFlush();
        if (pDec->m_pDXVA2Allocator->DecommitInProgress()) {
          DbgLog((LOG_TRACE, 10, L"WARNING! Flush had no effect, decommit of the allocator still not complete"));
        } else {
          DbgLog((LOG_TRACE, 10, L"Flush was successfull, decommit completed!"));
        }
      }
      hr = pDec->m_pDXVA2Allocator->Commit();
    } else if (!pDec->m_bNative) {
      // Copy-back mode: we own the decoder, just re-create it ourselves.
      hr = pDec->CreateDXVA2Decoder();
    }
    if (FAILED(hr)) {
      pDec->m_bFailHWDecode = TRUE;
      return -1;
    }
  }

  // NOTE(review): a lost device is only logged here, not handled — presumably
  // recovery happens elsewhere; confirm before relying on it.
  if (FAILED(pDec->m_pD3DDevMngr->TestDevice(pDec->m_hDevice))) {
    DbgLog((LOG_ERROR, 10, L"Device Lost"));
  }

  int i;
  if (pDec->m_bNative) {
    // Native mode: surfaces are owned by media samples from our allocator.
    if (!pDec->m_pDXVA2Allocator)
      return -1;

    hr = pDec->m_pDXVA2Allocator->GetBuffer(&pSample, NULL, NULL, 0);
    if (FAILED(hr)) {
      DbgLog((LOG_ERROR, 10, L"DXVA2Allocator returned error, hr: 0x%x", hr));
      return -1;
    }

    ILAVDXVA2Sample *pLavDXVA2 = NULL;
    hr = pSample->QueryInterface(&pLavDXVA2);
    if (FAILED(hr)) {
      DbgLog((LOG_ERROR, 10, L"Sample is no LAV DXVA2 sample?????"));
      SafeRelease(&pSample);
      return -1;
    }
    i = pLavDXVA2->GetDXSurfaceId();
    SafeRelease(&pLavDXVA2);
  } else {
    // Copy-back mode: pick the least-recently-used free surface, or the
    // oldest surface overall if none is free.
    int old, old_unused;
    for (i = 0, old = 0, old_unused = -1; i < pDec->m_NumSurfaces; i++) {
      d3d_surface_t *surface = &pDec->m_pSurfaces[i];
      if (!surface->used && (old_unused == -1 || surface->age < pDec->m_pSurfaces[old_unused].age))
        old_unused = i;
      if (surface->age < pDec->m_pSurfaces[old].age)
        old = i;
    }
    if (old_unused == -1) {
      DbgLog((LOG_TRACE, 10, L"No free surface, using oldest"));
      i = old;
    } else {
      i = old_unused;
    }
  }

  LPDIRECT3DSURFACE9 pSurface = pDec->m_pSurfaces[i].d3d;
  if (!pSurface) {
    DbgLog((LOG_ERROR, 10, L"There is a sample, but no D3D Surace? WTF?"));
    SafeRelease(&pSample);
    return -1;
  }

  pDec->m_pSurfaces[i].age  = pDec->m_CurrentSurfaceAge++;
  pDec->m_pSurfaces[i].used = true;

  memset(pic->data, 0, sizeof(pic->data));
  memset(pic->linesize, 0, sizeof(pic->linesize));
  memset(pic->buf, 0, sizeof(pic->buf));

  // data[3] carries the DXVA2 surface for libavcodec; data[4] keeps the
  // owning media sample associated with the frame (native mode only).
  pic->data[0] = pic->data[3] = (uint8_t *)pSurface;
  pic->data[4] = (uint8_t *)pSample;

  // free_dxva2_buffer is invoked by libavcodec when the frame is unreferenced;
  // it is responsible for tearing down the wrapper and its contents.
  SurfaceWrapper *surfaceWrapper = new SurfaceWrapper();
  surfaceWrapper->pDec = pDec;
  surfaceWrapper->surface = pSurface;
  surfaceWrapper->sample = pSample;
  pic->buf[0] = av_buffer_create(NULL, 0, free_dxva2_buffer, surfaceWrapper, 0);

  return 0;
}
Пример #2
0
//
// Copy
//
// Return a pointer to an identical copy of pSample, allocated from the
// output pin's allocator; returns NULL if no buffer could be obtained or
// the properties could not be copied.
IMediaSample * CTransInPlaceFilter::Copy(IMediaSample *pSource)
{
    IMediaSample * pDest;

    HRESULT hr;
    REFERENCE_TIME tStart, tStop;
    // S_OK from GetTime means both start and stop times are valid.
    const BOOL bTime = S_OK == pSource->GetTime( &tStart, &tStop);

    // this may block for an indeterminate amount of time
    hr = OutputPin()->PeekAllocator()->GetBuffer(
              &pDest
              , bTime ? &tStart : NULL
              , bTime ? &tStop : NULL
              , m_bSampleSkipped ? AM_GBF_PREVFRAMESKIPPED : 0
              );

    if (FAILED(hr)) {
        return NULL;
    }

    ASSERT(pDest);
    IMediaSample2 *pSample2;
    if (SUCCEEDED(pDest->QueryInterface(IID_IMediaSample2, (void **)&pSample2))) {
        // Fast path: copy all of the input sample's properties in one call,
        // up to (but excluding) the buffer pointer/length fields.
        HRESULT hr = pSample2->SetProperties(
            FIELD_OFFSET(AM_SAMPLE2_PROPERTIES, pbBuffer),
            (PBYTE)m_pInput->SampleProps());
        pSample2->Release();
        if (FAILED(hr)) {
            pDest->Release();
            return NULL;
        }
    } else {
        // Fallback: no IMediaSample2 support, copy properties one at a time.
        if (bTime) {
            pDest->SetTime(&tStart, &tStop);
        }

        if (S_OK == pSource->IsSyncPoint()) {
            pDest->SetSyncPoint(TRUE);
        }
        if (S_OK == pSource->IsDiscontinuity() || m_bSampleSkipped) {
            pDest->SetDiscontinuity(TRUE);
        }
        if (S_OK == pSource->IsPreroll()) {
            pDest->SetPreroll(TRUE);
        }

        // Copy the media type
        AM_MEDIA_TYPE *pMediaType;
        if (S_OK == pSource->GetMediaType(&pMediaType)) {
            pDest->SetMediaType(pMediaType);
            DeleteMediaType( pMediaType );
        }

    }

    m_bSampleSkipped = FALSE;

    // Copy the sample media times
    REFERENCE_TIME TimeStart, TimeEnd;
    if (pSource->GetMediaTime(&TimeStart,&TimeEnd) == NOERROR) {
        pDest->SetMediaTime(&TimeStart,&TimeEnd);
    }

    // Copy the actual data length and the actual data.
    {
        const long lDataLength = pSource->GetActualDataLength();
        pDest->SetActualDataLength(lDataLength);

        // Copy the sample data
        {
            BYTE *pSourceBuffer, *pDestBuffer;
            long lSourceSize  = pSource->GetSize();
            long lDestSize = pDest->GetSize();

            ASSERT(lDestSize >= lSourceSize && lDestSize >= lDataLength);

            // NOTE(review): GetPointer results are unchecked here; only the
            // debug-build ASSERT below guards against NULL buffers.
            pSource->GetPointer(&pSourceBuffer);
            pDest->GetPointer(&pDestBuffer);
            ASSERT(lDestSize == 0 || pSourceBuffer != NULL && pDestBuffer != NULL);

            CopyMemory( (PVOID) pDestBuffer, (PVOID) pSourceBuffer, lDataLength );
        }
    }

    return pDest;

} // Copy
Пример #3
0
// Set up our output sample: obtain a buffer from the output allocator and
// copy the input sample's properties (flags, times, media times) onto it.
// *ppOutSample always receives the pointer returned by GetBuffer, even when
// GetBuffer fails.
HRESULT
CTransformFilter::InitializeOutputSample(IMediaSample *pSample, IMediaSample **ppOutSample) {
    IMediaSample *pOutSample;

    // default - times are the same

    AM_SAMPLE2_PROPERTIES * const pProps = m_pInput->SampleProps();
    DWORD dwFlags = m_bSampleSkipped ? AM_GBF_PREVFRAMESKIPPED : 0;

    // This will prevent the image renderer from switching us to DirectDraw
    // when we can't do it without skipping frames because we're not on a
    // keyframe.  If it really has to switch us, it still will, but then we
    // will have to wait for the next keyframe
    if(!(pProps->dwSampleFlags & AM_SAMPLE_SPLICEPOINT)) {
        dwFlags |= AM_GBF_NOTASYNCPOINT;
    }

    ASSERT(m_pOutput->m_pAllocator != NULL);
    // Pass the input times only when the corresponding validity flag is set.
    HRESULT hr = m_pOutput->m_pAllocator->GetBuffer(&pOutSample
        , pProps->dwSampleFlags & AM_SAMPLE_TIMEVALID ?
        &pProps->tStart : NULL
        , pProps->dwSampleFlags & AM_SAMPLE_STOPVALID ?
        &pProps->tStop : NULL
        , dwFlags);
    *ppOutSample = pOutSample;
    if(FAILED(hr)) {
        return hr;
    }

    ASSERT(pOutSample);
    IMediaSample2 *pOutSample2;
    if(SUCCEEDED(pOutSample->QueryInterface(IID_IMediaSample2,
        (void **)&pOutSample2))) {
        /*  Modify it */
        AM_SAMPLE2_PROPERTIES OutProps;
        EXECUTE_ASSERT(SUCCEEDED(pOutSample2->GetProperties(FIELD_OFFSET(AM_SAMPLE2_PROPERTIES, tStart), (PBYTE)&OutProps)));
        OutProps.dwTypeSpecificFlags = pProps->dwTypeSpecificFlags;
        // Keep the output sample's own TYPECHANGED flag; take every other
        // flag from the input sample.
        OutProps.dwSampleFlags =
            (OutProps.dwSampleFlags & AM_SAMPLE_TYPECHANGED) |
            (pProps->dwSampleFlags & ~AM_SAMPLE_TYPECHANGED);

        OutProps.tStart = pProps->tStart;
        OutProps.tStop  = pProps->tStop;
        // Only the fields up to (excluding) dwStreamId are written back.
        OutProps.cbData = FIELD_OFFSET(AM_SAMPLE2_PROPERTIES, dwStreamId);

        hr = pOutSample2->SetProperties(FIELD_OFFSET(AM_SAMPLE2_PROPERTIES, dwStreamId),
            (PBYTE)&OutProps);
        if(pProps->dwSampleFlags & AM_SAMPLE_DATADISCONTINUITY) {
            m_bSampleSkipped = FALSE;
        }
        pOutSample2->Release();
    }
    else {
        // Fallback for samples without IMediaSample2: copy each property
        // individually.
        if(pProps->dwSampleFlags & AM_SAMPLE_TIMEVALID) {
            pOutSample->SetTime(&pProps->tStart,
                &pProps->tStop);
        }
        if(pProps->dwSampleFlags & AM_SAMPLE_SPLICEPOINT) {
            pOutSample->SetSyncPoint(TRUE);
        }
        if(pProps->dwSampleFlags & AM_SAMPLE_DATADISCONTINUITY) {
            pOutSample->SetDiscontinuity(TRUE);
            m_bSampleSkipped = FALSE;
        }
        // Copy the media times

        LONGLONG MediaStart, MediaEnd;
        if(pSample->GetMediaTime(&MediaStart,&MediaEnd) == NOERROR) {
            pOutSample->SetMediaTime(&MediaStart,&MediaEnd);
        }
    }
    return S_OK;
}
Пример #4
0
// Read one WavPack frame from the parser, wrap it in a media sample with
// timestamps and flags, and deliver it downstream.  Delivers end-of-stream
// and returns when the parser yields no more data.
HRESULT CWavPackDSSplitterInputPin::DeliverOneFrame(WavPack_parser* wpp)
{
    IMediaSample *pSample;
    BYTE *Buffer = NULL;
    HRESULT hr;
    unsigned long FrameLenBytes = 0, FrameLenSamples = 0, FrameIndex = 0;

    // Get a new media sample
    hr = m_pParentFilter->m_pOutputPin->GetDeliveryBuffer(&pSample, NULL, NULL, 0); 
    if (FAILED(hr))
    {
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop GetDeliveryBuffer failed 0x%08X",hr);
        return hr;
    }
    
    hr = pSample->GetPointer(&Buffer);
    if (FAILED(hr))
    {
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop GetPointer failed 0x%08X",hr);
        pSample->Release();
        return hr;
    }
    
    // Parse the next frame directly into the sample's buffer.
    FrameLenBytes = wavpack_parser_read_frame(wpp, Buffer,
        &FrameIndex, &FrameLenSamples);
    if(!FrameLenBytes)
    {
        // Something bad happened, let's end here
        pSample->Release();
        m_pParentFilter->m_pOutputPin->DeliverEndOfStream();
        // TODO : check if we need to stop the thread
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop wavpack_parser_read_frame error");
        return hr;
    }
    pSample->SetActualDataLength(FrameLenBytes);
    
    if(wpp->is_correction == TRUE)
    {    
        // Correction-file data: tag the sample with a non-default stream id
        // so downstream can distinguish it from the main stream.
        IMediaSample2 *pSample2;
        if (SUCCEEDED(pSample->QueryInterface(IID_IMediaSample2, (void **)&pSample2)))
        {
            AM_SAMPLE2_PROPERTIES ams2p;
            ZeroMemory(&ams2p, sizeof(AM_SAMPLE2_PROPERTIES));
            hr = pSample2->GetProperties(sizeof(AM_SAMPLE2_PROPERTIES), (PBYTE)&ams2p);
            if(SUCCEEDED(hr))
            {            
                ams2p.dwStreamId = AM_STREAM_BLOCK_ADDITIONNAL;
                pSample2->SetProperties(sizeof(AM_SAMPLE2_PROPERTIES), (PBYTE)&ams2p);
            }
            pSample2->Release();
            pSample2 = NULL;
        }
    }
    
    // Convert the sample-count positions to 100 ns reference time,
    // relative to the filter's start position.
    REFERENCE_TIME rtStart, rtStop;
    rtStart = FrameIndex;
    rtStop = rtStart + FrameLenSamples;
    rtStart = (rtStart * 10000000) / wpp->sample_rate;
    rtStop = (rtStop * 10000000) / wpp->sample_rate;
    
    rtStart -= m_pParentFilter->m_rtStart;
    rtStop  -= m_pParentFilter->m_rtStart;
    
    pSample->SetTime(&rtStart, &rtStop);
    pSample->SetPreroll(FALSE);
    pSample->SetDiscontinuity(m_bDiscontinuity);
    if(m_bDiscontinuity)
    {
        m_bDiscontinuity = FALSE;
    }
    // Every WavPack frame is independently decodable.
    pSample->SetSyncPoint(TRUE);
    
    // Deliver the sample
    hr = m_pParentFilter->m_pOutputPin->Deliver(pSample);
    pSample->Release();
    pSample = NULL;
    if (FAILED(hr))
    {
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop Deliver failed 0x%08X",hr);
        return hr;
    }

    return S_OK;
}
// Decode the current MPEG-2 picture via DXVA2 and, when a picture becomes
// displayable, return it in *ppSample.  Returns S_FALSE when the picture
// cannot be decoded yet (no slices, missing reference, waiting for a
// keyframe), a failure HRESULT on device or decoder errors.
HRESULT CMpeg2DecoderDXVA2::DecodeFrame(IMediaSample **ppSample)
{
	if (ppSample) {
		*ppSample = nullptr;
	}

	if (!m_pDec || !m_pVideoDecoder) {
		return E_UNEXPECTED;
	}

	// Skipped pictures produce no new decode work; just hand out the last
	// displayable sample.
	if (m_pDec->picture->flags & PIC_FLAG_SKIP) {
		return GetDisplaySample(ppSample);
	}

	m_DecodeSampleIndex = GetFBufIndex(m_pDec->fbuf[0]);

	if (!m_SliceCount || m_DecodeSampleIndex < 0) {
		return S_FALSE;
	}

	// After a flush/seek, wait for an I picture before decoding anything.
	if (m_fWaitForDecodeKeyFrame) {
		if ((m_pDec->picture->flags & PIC_MASK_CODING_TYPE) != PIC_FLAG_CODING_TYPE_I) {
			return S_FALSE;
		}
		m_fWaitForDecodeKeyFrame = false;
	}

	HRESULT hr;

	hr = m_pDeviceManager->TestDevice(m_pFilter->m_hDXVADevice);
	if (FAILED(hr)) {
		if (hr == DXVA2_E_NEW_VIDEO_DEVICE) {
			DBG_TRACE(TEXT("Device lost"));
			m_fDeviceLost = true;
		}
		return hr;
	}

	// Resolve reference surfaces by picture type: I needs none, P needs the
	// previous reference, B needs both backward and forward references.
	switch (m_pDec->picture->flags & PIC_MASK_CODING_TYPE) {
	case PIC_FLAG_CODING_TYPE_I:
		m_PrevRefSurfaceIndex = -1;
		m_ForwardRefSurfaceIndex = -1;
		//DBG_TRACE(TEXT("I [%d]"), m_CurSurfaceIndex);
		break;
	case PIC_FLAG_CODING_TYPE_P:
		m_PrevRefSurfaceIndex = GetFBufSampleID(m_pDec->fbuf[1]);
		m_ForwardRefSurfaceIndex = -1;
		//DBG_TRACE(TEXT("P [%d]->%d"), m_CurSurfaceIndex, m_PrevRefSurfaceIndex);
		break;
	case PIC_FLAG_CODING_TYPE_B:
		m_PrevRefSurfaceIndex = GetFBufSampleID(m_pDec->fbuf[1]);
		m_ForwardRefSurfaceIndex = GetFBufSampleID(m_pDec->fbuf[2]);
		//DBG_TRACE(TEXT("B %d->[%d]->%d"), m_PrevRefSurfaceIndex, m_CurSurfaceIndex, m_ForwardRefSurfaceIndex);
		// A B picture cannot be decoded without its forward reference.
		if (m_ForwardRefSurfaceIndex < 0)
			return S_FALSE;
		break;
	}

	CDXVA2MediaSample *pSample = m_Samples[m_DecodeSampleIndex].pSample;

	if (!pSample) {
		// Acquire a delivery sample whose surface is NOT one of the current
		// reference surfaces; samples matching a reference are parked in
		// m_RefSamples and the loop keeps fetching.
		IMediaSample *pMediaSample;
		IDXVA2MediaSample *pDXVA2Sample;

		for (;;) {
			hr = m_pFilter->GetDeliveryBuffer(&pMediaSample);
			if (FAILED(hr)) {
				return hr;
			}
			hr = pMediaSample->QueryInterface(IID_PPV_ARGS(&pDXVA2Sample));
			pMediaSample->Release();
			if (FAILED(hr)) {
				return hr;
			}
			pSample = static_cast<CDXVA2MediaSample*>(pDXVA2Sample);
			if (pSample->GetSurfaceID() == m_RefSamples[0].SurfaceID) {
				m_RefSamples[0].pSample = pSample;
			} else if (pSample->GetSurfaceID() == m_RefSamples[1].SurfaceID) {
				m_RefSamples[1].pSample = pSample;
			} else {
				break;
			}
		}
		m_Samples[m_DecodeSampleIndex].pSample = pSample;
		m_Samples[m_DecodeSampleIndex].SurfaceID = pSample->GetSurfaceID();
	}

	m_CurSurfaceIndex = pSample->GetSurfaceID();

#ifdef _DEBUG
	// Sanity: the decode target must never alias a reference surface.
	if ((m_pDec->picture->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_P) {
		_ASSERT(m_PrevRefSurfaceIndex>=0 && m_CurSurfaceIndex != m_PrevRefSurfaceIndex);
	} else if ((m_pDec->picture->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_B) {
		_ASSERT(m_PrevRefSurfaceIndex>=0
			&& m_CurSurfaceIndex != m_PrevRefSurfaceIndex
			&& m_ForwardRefSurfaceIndex>=0
			&& m_CurSurfaceIndex != m_ForwardRefSurfaceIndex);
	}
#endif

	// Fetch the D3D surface backing the sample via IMFGetService.
	IDirect3DSurface9 *pSurface;
	IMFGetService *pMFGetService;
	hr = pSample->QueryInterface(IID_PPV_ARGS(&pMFGetService));
	if (SUCCEEDED(hr)) {
		hr = pMFGetService->GetService(MR_BUFFER_SERVICE, IID_PPV_ARGS(&pSurface));
		pMFGetService->Release();
	}
	if (FAILED(hr)) {
		return hr;
	}

	// The hardware may still be using the surface; retry BeginFrame for up
	// to ~100 ms (50 tries x 2 ms) before giving up.
	int Retry = 0;
	for (;;) {
		hr = m_pVideoDecoder->BeginFrame(pSurface, nullptr);
		if (hr != E_PENDING || Retry >= 50)
			break;
		::Sleep(2);
		Retry++;
	}
	if (SUCCEEDED(hr)) {
		hr = CommitBuffers();
		if (SUCCEEDED(hr)) {
			// Submit the four standard MPEG-2 DXVA2 compressed buffers:
			// picture parameters, quant matrices, bitstream, slice control.
			DXVA2_DecodeExecuteParams ExecParams;
			DXVA2_DecodeBufferDesc BufferDesc[4];
			const UINT NumMBsInBuffer =
				(m_PictureParams.wPicWidthInMBminus1 + 1) * (m_PictureParams.wPicHeightInMBminus1 + 1);

			::ZeroMemory(BufferDesc, sizeof(BufferDesc));
			BufferDesc[0].CompressedBufferType = DXVA2_PictureParametersBufferType;
			BufferDesc[0].DataSize = sizeof(DXVA_PictureParameters);
			BufferDesc[1].CompressedBufferType = DXVA2_InverseQuantizationMatrixBufferType;
			BufferDesc[1].DataSize = sizeof(DXVA_QmatrixData);
			BufferDesc[2].CompressedBufferType = DXVA2_BitStreamDateBufferType;
			BufferDesc[2].DataSize = (UINT)m_SliceDataSize;
			BufferDesc[2].NumMBsInBuffer = NumMBsInBuffer;
			BufferDesc[3].CompressedBufferType = DXVA2_SliceControlBufferType;
			BufferDesc[3].DataSize = m_SliceCount * sizeof(DXVA_SliceInfo);
			BufferDesc[3].NumMBsInBuffer = NumMBsInBuffer;

			ExecParams.NumCompBuffers = 4;
			ExecParams.pCompressedBuffers = BufferDesc;
			ExecParams.pExtensionData = nullptr;

			hr = m_pVideoDecoder->Execute(&ExecParams);
			if (SUCCEEDED(hr)) {
				hr = GetDisplaySample(ppSample);
			}
		}

		m_pVideoDecoder->EndFrame(nullptr);
	}

	// Reference pictures (I/P) rotate into the reference slots used by
	// subsequent P/B pictures.
	if ((m_pDec->picture->flags & PIC_MASK_CODING_TYPE) != PIC_FLAG_CODING_TYPE_B
			&& ppSample) {
		SafeRelease(m_RefSamples[1].pSample);
		m_RefSamples[1] = m_RefSamples[0];
		m_RefSamples[0].pSample = nullptr;
		m_RefSamples[0].SurfaceID = m_CurSurfaceIndex;
	}

	pSurface->Release();

	return hr;
}
// Worker thread: receives broadcast media packets over UDP, reassembles them
// into IMediaSamples keyed by the sender's packet id, and hands completed
// samples to the push thread through SampleList.  Returns 1 on fatal errors,
// otherwise loops forever.
DWORD WINAPI UdpReceiveThread(LPVOID param)
{
	HRESULT hr;
	ReceiveParam *receiveParam = (ReceiveParam*)param;
	HANDLE PushSemaphore = receiveParam->PushSemaphore;
	HANDLE PushDataMutex = receiveParam->PushDataMutex;
	std::map<REFERENCE_TIME,IMediaSample*>& SampleList = *receiveParam->SampleList;
	NetReceiveFilter* filter = receiveParam->filter;
	delete receiveParam;

	NetReceiveOutputPin* outputPin = reinterpret_cast<NetReceiveOutputPin*>(filter->GetPin(0));
	assert(outputPin != NULL);

	// Spin until the output pin is connected and reports a media type.
	AM_MEDIA_TYPE mediaType;
	while (true)
	{
		outputPin->ConnectionMediaType(&mediaType);
		if (mediaType.majortype == GUID_NULL)
		{
			Sleep(300);
		}
		else
			break;
	}

	SOCKET udpSocket;
	udpSocket = ::socket(AF_INET, SOCK_DGRAM, 0);
	if (udpSocket == INVALID_SOCKET)
	{
		ErrorPrint("Create udp socket error");
		return 1;
	}

	// Video and audio streams are broadcast on separate well-known ports.
	sockaddr_in bindAddress;
	bindAddress.sin_family = AF_INET;
	bindAddress.sin_addr.s_addr = htonl(INADDR_ANY);
	if(mediaType.majortype == MEDIATYPE_Video)
	{
		bindAddress.sin_port = htons(VideoBroadcastPort);
	}
	else
	{
		bindAddress.sin_port = htons(AudioBroadcastPort);
	}

	int option = 1;
	int ret = setsockopt(udpSocket, SOL_SOCKET, SO_REUSEADDR, (char*)&option, sizeof(option));
	if (ret == SOCKET_ERROR)
	{
		ErrorPrint("Set socket reuse address error");
		return 1;
	}

	// Enlarge the OS receive buffer to reduce drops under bursty traffic;
	// failure here is non-fatal.
	int recvSystemBufferSize = 1024 * 1024 * 10;

	ret = setsockopt(udpSocket, SOL_SOCKET, SO_RCVBUF, (char*)&recvSystemBufferSize, sizeof(recvSystemBufferSize));
	if (ret == SOCKET_ERROR)
	{
		ErrorPrint("Set socket receive system buffer size error");
	}

	ret = ::bind(udpSocket, (sockaddr*)&bindAddress, sizeof(bindAddress));
	if(ret == SOCKET_ERROR)
	{
		ErrorPrint("Bind udp receive socket error");
		return 1;
	}

	sockaddr_in fromAddress;
	fromAddress.sin_family = AF_INET;
	int addressLen = sizeof(fromAddress);

	// Samples currently being reassembled, keyed by packet id.
	std::map<long long, IMediaSample*> idToSampleMap;

	// One receive buffer: header followed by up to packetMaxSize payload bytes.
	const int packetMaxSize = 10 * 1024;
	MediaPacketHeader* mediaPacketHeader = (MediaPacketHeader*)new char[sizeof(MediaPacketHeader) + packetMaxSize];
	boost::scoped_array<char> bufferContainer((char*)mediaPacketHeader);
	char* dataStart = (char*)mediaPacketHeader;
	char* dataBuffer = (char*)mediaPacketHeader + sizeof(MediaPacketHeader);
	while (true)
	{
		int recvedSize = recvfrom(udpSocket, dataStart, sizeof(MediaPacketHeader) + packetMaxSize, 0, (sockaddr*)&fromAddress, &addressLen);
		if (recvedSize == SOCKET_ERROR)
		{
			ErrorPrint("Receive from udp error");
			return 1;
		}

		if (g_IsBroadcasting) // a packet we broadcast ourselves — discard it
		{
			continue;
		}

		if (mediaPacketHeader->type == 0) // sample header packet
		{
#ifdef UDP_PRINT
			std::cout<<"Receive media packet header:"<<mediaPacketHeader->id<<std::endl;
#endif
			std::map<long long, IMediaSample*>::iterator it = idToSampleMap.begin();
			while (it != idToSampleMap.end()) // drop samples that lost packets
			{
				std::map<long long, IMediaSample*>::iterator tmp = it++;
				if (tmp->first < mediaPacketHeader->id) // this sample definitely lost packets: its id is older than the new header and it never completed — discard it
				{
					std::cout<<"Lose packet:"<<mediaPacketHeader->id<<std::endl;
					tmp->second->Release(); // the sample must be released
					idToSampleMap.erase(tmp);
				}
				else // all stale samples have been handled
					break;
			}

// 			if (mediaType.majortype == MEDIATYPE_Video)
// 			{
// 				std::cout<<"Video header:"<<mediaPacketHeader->id<<std::endl;
// 			}

//			std::cout<<"Before get free sample"<<std::endl;
			IMediaSample *sample = filter->GetFreeSample(); // allocate a fresh sample for this header
//			std::cout<<"After get free sample"<<std::endl;
			if (sample == NULL)
			{
				ErrorPrint("Get free sample error");
				return 1;
			}

			// The header's payload is the serialized sample properties.
			AM_SAMPLE2_PROPERTIES* sample2Properties = (AM_SAMPLE2_PROPERTIES*)dataBuffer;

			// NOTE(review): magic "- 9" — presumably trims trailing fields
			// from cbData to match the sender's layout; confirm against the
			// broadcasting side.
			sample2Properties->cbData = sizeof(AM_SAMPLE2_PROPERTIES) - 9;

			IMediaSample2 *mediaSample2;
			hr = sample->QueryInterface(IID_IMediaSample2, (void**)&mediaSample2);
			if (FAILED(hr))
			{
				ErrorPrint("Get media sample2 interface error",hr);
				sample->Release();
				return 1;
			}
			ComReleaser mediaSample2Releaser(mediaSample2);

			hr = mediaSample2->SetProperties(sample2Properties->cbData, (BYTE*)sample2Properties); // apply the sample properties
			if (FAILED(hr))
			{
				ErrorPrint("Set sample properties error");
			}
			sample->SetTime(&(sample2Properties->tStart), &(sample2Properties->tStop));
			sample->SetActualDataLength(sample2Properties->lActual);

			idToSampleMap.insert(std::make_pair(mediaPacketHeader->id, sample)); // insert into the map and wait for all payload data to arrive
		}
		else if (mediaPacketHeader->type == 1) // sample payload packet
		{
#ifdef UDP_PRINT
			std::cout<<"Receive sample data:"<<mediaPacketHeader->id<<std::endl;
#endif
			std::map<long long, IMediaSample*>::iterator it = idToSampleMap.find(mediaPacketHeader->id);
			if (it != idToSampleMap.end()) // unknown id means the header was lost or already expired — drop this packet
			{
				IMediaSample* sample = it->second;
				PBYTE dataPointer = NULL;
				hr = sample->GetPointer(&dataPointer);
				if (FAILED(hr))
				{
					ErrorPrint("Get data pointer error",hr);
					idToSampleMap.erase(it);
					sample->Release();
					continue;
				}
				memcpy(dataPointer + mediaPacketHeader->offset, dataBuffer, mediaPacketHeader->size);
				if ( (mediaPacketHeader->offset + mediaPacketHeader->size) == sample->GetActualDataLength()) // sample fully received (mid-sample loss is possible but not handled here)
				{
					idToSampleMap.erase(it);
					REFERENCE_TIME startTime,endTime;
					sample->GetTime(&startTime,&endTime);
					// Notify the push thread that data is ready for delivery.
					WaitForSingleObject(PushDataMutex, INFINITE);
// 					if (mediaType.majortype == MEDIATYPE_Video)
// 					{
// 						std::cout<<"Finished Video sample:"<<mediaPacketHeader->id<<";Current Thread:"<<GetCurrentThreadId()<<";Map size:"<<idToSampleMap.size()<<std::endl;
// 						std::cout<<"Sample start time:"<<startTime <<";Sample end time:"<<endTime<<std::endl;
// 					}
					SampleList.insert(std::make_pair(startTime,sample));
					if (SampleList.size() >= 24 * 10)
					{
						ReleaseSemaphore(PushSemaphore, 1, NULL);
					}
					ReleaseMutex(PushDataMutex);
				}
			}
			else
				std::cout<<"Lose packet header:"<<mediaPacketHeader->id<<std::endl;
		}

// 		if(idToSampleMap.size() == 0 ||  idToSampleMap.begin()->first < )
// 
// 		mediaPacketHeader
// 

	}
}
// Worker thread: receives length-prefixed media samples over a TCP socket
// (sample properties, then payload), fills IMediaSamples and queues them for
// the push thread; also forwards each sample via newTransSample.
// Returns 1 on fatal errors, reconnects on recoverable socket failures.
DWORD WINAPI ReceiveThread(PVOID param)
{
	HRESULT hr;
	ReceiveParam *receiveParam = (ReceiveParam*)param;
	HANDLE PushSemaphore = receiveParam->PushSemaphore;
	HANDLE PushDataMutex = receiveParam->PushDataMutex;
	std::map<REFERENCE_TIME,IMediaSample*>& SampleList = *receiveParam->SampleList;
	NetReceiveFilter* filter = receiveParam->filter;
	SOCKET socket ;
	delete receiveParam;

	LONG packSize;
	//	CMediaSample *tmpSample = (CMediaSample*) malloc(sizeof(CMediaSample));
	REFERENCE_TIME startTime = 0,endTime = 0; // zero times = play immediately
	REFERENCE_TIME mediaStartTime = 0,mediaEndTime = 0;
	AM_SAMPLE2_PROPERTIES sample2Properties;

	NetReceiveOutputPin* outputPin = reinterpret_cast<NetReceiveOutputPin*>(filter->GetPin(0));
	assert(outputPin != NULL);

	filter->waitForNewSocket();

	while(true)
	{

		IMediaSample *sample = filter->GetFreeSample();
		if (sample == NULL)
		{
			ErrorPrint("Get free sample error");
			return 1;
		}

		PBYTE dataPointer = NULL;
		hr = sample->GetPointer(&dataPointer);
		if (FAILED(hr))
		{
			ErrorPrint("Get data pointer error",hr);
			sample->Release();
			return 1;
		}

		CAutoLock lock(filter->getSocketLock());
		socket = filter->getSocket();

		// Each sample on the wire is preceded by its AM_SAMPLE2_PROPERTIES.
		if (!receiveData(socket, (char*)&sample2Properties, sizeof(sample2Properties)))
		{
			ErrorPrint("Get pack Properties error");
			sample->Release();
			filter->waitForNewSocket();
			continue;
		}
		packSize = sample2Properties.lActual;

		if (packSize > 100 * 1024)
		{
			std::cout<<"Exceed 100K:"<<packSize/1024<<std::endl;
		}

		AM_MEDIA_TYPE mediaType;
		filter->GetPin(0)->ConnectionMediaType(&mediaType);

		if (filter->getPlayMode() == 1) // timestamped playback mode
		{
// 			static const unsigned long  offset = 10000000; // increase the delay to buffer a little more
// 			sample2Properties.tStart +=offset;
// 			sample2Properties.tStop += offset;

			// NOTE(review): magic "- 9" — presumably trims trailing fields
			// from cbData; must match the sender's layout, confirm.
			sample2Properties.cbData = sizeof(sample2Properties) - 9;
			sample2Properties.pbBuffer= dataPointer;

			IMediaSample2 *mediaSample2;
			hr = sample->QueryInterface(IID_IMediaSample2, (void**)&mediaSample2);
			if (FAILED(hr))
			{
				ErrorPrint("Get media sample2 interface error",hr);
				sample->Release();
				return 1;
			}
			ComReleaser mediaSample2Releaser(mediaSample2);

			hr = mediaSample2->SetProperties(sample2Properties.cbData, (BYTE*)&sample2Properties);
			if (FAILED(hr))
			{
				ErrorPrint("Set sample properties error");
			}
			sample->SetTime(&sample2Properties.tStart, &sample2Properties.tStop);

			sample->GetTime(&startTime,&endTime);
		}
		else
		{
			// Play-ASAP mode: zero timestamps make the renderer present
			// the sample immediately.
			startTime = 0;
			endTime = 0;
		}

		ASSERT(packSize <= sample->GetSize());
		sample->SetActualDataLength(packSize);
		sample->SetTime(&startTime, &endTime);

		if(!receiveData(socket, (char*)dataPointer, packSize))
		{
			ErrorPrint("Receive pack errors");
			sample->Release();
			filter->waitForNewSocket();
			continue;
		}

		// Notify the push thread that data is available.
		WaitForSingleObject(PushDataMutex, INFINITE);
		SampleList.insert(std::make_pair(startTime, sample));
		if(filter->getPlayMode() == 0) // play-ASAP: wake the push thread as soon as one sample exists
		{
			if (SampleList.size() == 1)
			{
				ReleaseSemaphore(PushSemaphore, 1, NULL);
			}
		}
		else if (filter->getPlayMode() == 1) // timestamped mode: buffer as many samples as reasonable, but not too many
		{
			if (SampleList.size() >= 24 * 10)
			{
				ReleaseSemaphore(PushSemaphore, 1, NULL);
			}
		}
		ReleaseMutex(PushDataMutex);

		outputPin->newTransSample(sample2Properties, dataPointer); // notify to forward the sample onward
	}

	return 0;
}
Пример #8
0
// Deliver a demuxed packet downstream on this output pin.
// Takes ownership of pPacket: in packet-allocator mode ownership is
// transferred into the media sample via SetPacket(); otherwise the payload
// is copied into the sample and the packet is freed here.
HRESULT CLAVOutputPin::DeliverPacket(Packet *pPacket)
{
  HRESULT hr = S_OK;
  IMediaSample *pSample = NULL;

  long nBytes = (long)pPacket->GetDataSize();

  // Empty packets carry no payload; just free them below.
  if(nBytes == 0) {
    goto done;
  }

  CHECK_HR(hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0));

  if (m_bPacketAllocator) {
    // Zero-copy path: hand the packet itself over to the sample.
    ILAVMediaSample *pLAVSample = NULL;
    CHECK_HR(hr = pSample->QueryInterface(&pLAVSample));
    CHECK_HR(hr = pLAVSample->SetPacket(pPacket));
    SafeRelease(&pLAVSample);
  } else {
    // Resize buffer if it is too small
    // This can cause a playback hick-up, we should avoid this if possible by setting a big enough buffer size
    if(nBytes > pSample->GetSize()) {
      SafeRelease(&pSample);
      ALLOCATOR_PROPERTIES props, actual;
      CHECK_HR(hr = m_pAllocator->GetProperties(&props));
      // Give us 2 times the requested size, so we don't resize every time
      props.cbBuffer = nBytes*2;
      if(props.cBuffers > 1) {
        // Multiple buffers may be outstanding downstream; flush to get
        // them back before re-configuring the allocator.
        CHECK_HR(hr = __super::DeliverBeginFlush());
        CHECK_HR(hr = __super::DeliverEndFlush());
      }
      CHECK_HR(hr = m_pAllocator->Decommit());
      CHECK_HR(hr = m_pAllocator->SetProperties(&props, &actual));
      CHECK_HR(hr = m_pAllocator->Commit());
      CHECK_HR(hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0));
    }

    // Fill the sample
    BYTE* pData = NULL;
    if(FAILED(hr = pSample->GetPointer(&pData)) || !pData) goto done;

    memcpy(pData, pPacket->GetData(), nBytes);
  }

  if(pPacket->pmt) {
    // Dynamic format change: attach the new media type to this sample and
    // remember it as the pin's current type.
    DbgLog((LOG_TRACE, 10, L"::DeliverPacket() - sending new media type to decoder"));
    pSample->SetMediaType(pPacket->pmt);
    pPacket->bDiscontinuity = true;

    CAutoLock cAutoLock(m_pLock);
    CMediaType pmt = *(pPacket->pmt);
    m_mts.clear();
    m_mts.push_back(pmt);
    pPacket->pmt = NULL;

    SetMediaType(&pmt);
  }

  bool fTimeValid = pPacket->rtStart != Packet::INVALID_TIME;

  CHECK_HR(hr = pSample->SetActualDataLength(nBytes));
  CHECK_HR(hr = pSample->SetTime(fTimeValid ? &pPacket->rtStart : NULL, fTimeValid ? &pPacket->rtStop : NULL));
  CHECK_HR(hr = pSample->SetMediaTime(NULL, NULL));
  CHECK_HR(hr = pSample->SetDiscontinuity(pPacket->bDiscontinuity));
  CHECK_HR(hr = pSample->SetSyncPoint(pPacket->bSyncPoint));
  // Samples timestamped before the start position are preroll.
  CHECK_HR(hr = pSample->SetPreroll(fTimeValid && pPacket->rtStart < 0));
  // Deliver
  CHECK_HR(hr = Deliver(pSample));

done:
  // Free the packet unless its ownership was transferred to the sample via
  // SetPacket().  If no sample was ever obtained (GetDeliveryBuffer failed),
  // ownership was NOT transferred, so the packet must be freed even in
  // packet-allocator mode — otherwise it leaks.  (Matches the newer revision
  // of this function elsewhere in this file.)
  if (!m_bPacketAllocator || !pSample)
    SAFE_DELETE(pPacket);
  SafeRelease(&pSample);
  return hr;
}
Пример #9
0
// Deliver a demuxed packet downstream on this output pin, updating the
// IBitRateInfo statistics.  Takes ownership of pPacket: in packet-allocator
// mode ownership moves into the media sample via SetPacket(); otherwise the
// payload is copied and the packet freed here.
HRESULT CLAVOutputPin::DeliverPacket(Packet *pPacket)
{
  HRESULT hr = S_OK;
  IMediaSample *pSample = nullptr;

  long nBytes = (long)pPacket->GetDataSize();

  // Empty packets carry no payload; just free them below.
  if(nBytes == 0) {
    goto done;
  }

  CHECK_HR(hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0));

  if (m_bPacketAllocator) {
    // Zero-copy path: hand the packet itself over to the sample.
    ILAVMediaSample *pLAVSample = nullptr;
    CHECK_HR(hr = pSample->QueryInterface(&pLAVSample));
    CHECK_HR(hr = pLAVSample->SetPacket(pPacket));
    SafeRelease(&pLAVSample);
  } else {
    // Resize buffer if it is too small
    // This can cause a playback hick-up, we should avoid this if possible by setting a big enough buffer size
    if(nBytes > pSample->GetSize()) {
      SafeRelease(&pSample);
      ALLOCATOR_PROPERTIES props, actual;
      CHECK_HR(hr = m_pAllocator->GetProperties(&props));
      // Give us 2 times the requested size, so we don't resize every time
      props.cbBuffer = nBytes*2;
      if(props.cBuffers > 1) {
        // Multiple buffers may be outstanding downstream; flush to get
        // them back before re-configuring the allocator.
        CHECK_HR(hr = __super::DeliverBeginFlush());
        CHECK_HR(hr = __super::DeliverEndFlush());
      }
      CHECK_HR(hr = m_pAllocator->Decommit());
      CHECK_HR(hr = m_pAllocator->SetProperties(&props, &actual));
      CHECK_HR(hr = m_pAllocator->Commit());
      CHECK_HR(hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0));
    }

    // Fill the sample
    BYTE* pData = nullptr;
    if(FAILED(hr = pSample->GetPointer(&pData)) || !pData) goto done;

    memcpy(pData, pPacket->GetData(), nBytes);
  }

  if(pPacket->pmt) {
    // Dynamic format change: attach the new media type to this sample and
    // remember it as the pin's current type.
    DbgLog((LOG_TRACE, 10, L"::DeliverPacket() - sending new media type to decoder"));
    pSample->SetMediaType(pPacket->pmt);
    pPacket->bDiscontinuity = true;

    CAutoLock cAutoLock(m_pLock);
    CMediaType pmt = *(pPacket->pmt);
    m_mts.clear();
    m_mts.push_back(pmt);
    pPacket->pmt = nullptr;

    SetMediaType(&pmt);
  }

  bool fTimeValid = pPacket->rtStart != Packet::INVALID_TIME;

  // IBitRateInfo
  m_BitRate.nBytesSinceLastDeliverTime += nBytes;

  if (fTimeValid) {
    if (m_BitRate.rtLastDeliverTime == Packet::INVALID_TIME) {
      m_BitRate.rtLastDeliverTime = pPacket->rtStart;
      m_BitRate.nBytesSinceLastDeliverTime = 0;
    }

    // Recompute current/average bitrate once per second of stream time
    // (10000000 reference-time units = 1 s).
    if (m_BitRate.rtLastDeliverTime + 10000000 < pPacket->rtStart) {
      REFERENCE_TIME rtDiff = pPacket->rtStart - m_BitRate.rtLastDeliverTime;

      double dSecs, dBits;

      dSecs = rtDiff / 10000000.0;
      dBits = 8.0 * m_BitRate.nBytesSinceLastDeliverTime;
      m_BitRate.nCurrentBitRate = (DWORD)(dBits / dSecs);

      m_BitRate.rtTotalTimeDelivered += rtDiff;
      m_BitRate.nTotalBytesDelivered += m_BitRate.nBytesSinceLastDeliverTime;

      dSecs = m_BitRate.rtTotalTimeDelivered / 10000000.0;
      dBits = 8.0 * m_BitRate.nTotalBytesDelivered;
      m_BitRate.nAverageBitRate = (DWORD)(dBits / dSecs);

      m_BitRate.rtLastDeliverTime = pPacket->rtStart;
      m_BitRate.nBytesSinceLastDeliverTime = 0;
    }
  }

  CHECK_HR(hr = pSample->SetActualDataLength(nBytes));
  CHECK_HR(hr = pSample->SetTime(fTimeValid ? &pPacket->rtStart : nullptr, fTimeValid ? &pPacket->rtStop : nullptr));
  CHECK_HR(hr = pSample->SetMediaTime(nullptr, nullptr));
  CHECK_HR(hr = pSample->SetDiscontinuity(pPacket->bDiscontinuity));
  CHECK_HR(hr = pSample->SetSyncPoint(pPacket->bSyncPoint));
  // Samples timestamped before the start position are preroll.
  CHECK_HR(hr = pSample->SetPreroll(fTimeValid && pPacket->rtStart < 0));
  // Deliver
  CHECK_HR(hr = Deliver(pSample));

done:
  // Free the packet unless ownership was transferred to the sample via
  // SetPacket(); if no sample was ever obtained, ownership was not
  // transferred, so free the packet even in packet-allocator mode.
  if (!m_bPacketAllocator || !pSample)
    SAFE_DELETE(pPacket);
  SafeRelease(&pSample);
  return hr;
}
Пример #10
0
// get_buffer2 callback for libavcodec: supplies a DXVA2 D3D surface for the
// decoder to write the frame into.  Returns 0 on success, -1 on failure;
// unsupported configurations set m_bFailHWDecode so the caller can fall
// back to software decoding.
int CDecDXVA2::get_dxva2_buffer(struct AVCodecContext *c, AVFrame *pic, int flags)
{
  CDecDXVA2 *pDec = (CDecDXVA2 *)c->opaque;
  IMediaSample *pSample = nullptr;

  HRESULT hr = S_OK;

  // Reject frames that are not DXVA2 hardware frames, and H.264/HEVC
  // profiles the hardware decoder does not support.
  if (pic->format != AV_PIX_FMT_DXVA2_VLD || (c->codec_id == AV_CODEC_ID_H264 && !H264_CHECK_PROFILE(c->profile)) || (c->codec_id == AV_CODEC_ID_HEVC && !HEVC_CHECK_PROFILE(c->profile))) {
    DbgLog((LOG_ERROR, 10, L"DXVA2 buffer request, but not dxva2 pixfmt or unsupported profile"));
    pDec->m_bFailHWDecode = TRUE;
    return -1;
  }

  // Re-create the decoder/surfaces if the stream parameters changed.
  hr = pDec->ReInitDXVA2Decoder(c);
  if (FAILED(hr)) {
    pDec->m_bFailHWDecode = TRUE;
    return -1;
  }

  // NOTE(review): a lost device is only logged here, not handled — presumably
  // recovery happens elsewhere; confirm before relying on it.
  if (FAILED(pDec->m_pD3DDevMngr->TestDevice(pDec->m_hDevice))) {
    DbgLog((LOG_ERROR, 10, L"Device Lost"));
  }

  int i;
  if (pDec->m_bNative) {
    // Native mode: surfaces are owned by media samples from our allocator.
    if (!pDec->m_pDXVA2Allocator)
      return -1;

    hr = pDec->m_pDXVA2Allocator->GetBuffer(&pSample, nullptr, nullptr, 0);
    if (FAILED(hr)) {
      DbgLog((LOG_ERROR, 10, L"DXVA2Allocator returned error, hr: 0x%x", hr));
      return -1;
    }

    ILAVDXVA2Sample *pLavDXVA2 = nullptr;
    hr = pSample->QueryInterface(&pLavDXVA2);
    if (FAILED(hr)) {
      DbgLog((LOG_ERROR, 10, L"Sample is no LAV DXVA2 sample?????"));
      SafeRelease(&pSample);
      return -1;
    }
    i = pLavDXVA2->GetDXSurfaceId();
    SafeRelease(&pLavDXVA2);
  } else {
    // Copy-back mode: pick the least-recently-used free surface, or the
    // oldest surface overall if none is free.
    int old, old_unused;
    for (i = 0, old = 0, old_unused = -1; i < pDec->m_NumSurfaces; i++) {
      d3d_surface_t *surface = &pDec->m_pSurfaces[i];
      if (!surface->used && (old_unused == -1 || surface->age < pDec->m_pSurfaces[old_unused].age))
        old_unused = i;
      if (surface->age < pDec->m_pSurfaces[old].age)
        old = i;
    }
    if (old_unused == -1) {
      DbgLog((LOG_TRACE, 10, L"No free surface, using oldest"));
      i = old;
    } else {
      i = old_unused;
    }
  }

  LPDIRECT3DSURFACE9 pSurface = pDec->m_pSurfaces[i].d3d;
  if (!pSurface) {
    DbgLog((LOG_ERROR, 10, L"There is a sample, but no D3D Surace? WTF?"));
    SafeRelease(&pSample);
    return -1;
  }

  pDec->m_pSurfaces[i].age  = pDec->m_CurrentSurfaceAge++;
  pDec->m_pSurfaces[i].used = true;

  memset(pic->data, 0, sizeof(pic->data));
  memset(pic->linesize, 0, sizeof(pic->linesize));
  memset(pic->buf, 0, sizeof(pic->buf));

  // data[3] carries the DXVA2 surface for libavcodec; data[4] keeps the
  // owning media sample associated with the frame (native mode only).
  pic->data[0] = pic->data[3] = (uint8_t *)pSurface;
  pic->data[4] = (uint8_t *)pSample;

  // The wrapper takes its own references on the surface and the decoder so
  // they outlive any decoder re-creation; free_dxva2_buffer releases them
  // when libavcodec unreferences the frame.
  SurfaceWrapper *surfaceWrapper = new SurfaceWrapper();
  surfaceWrapper->pDec = pDec;
  surfaceWrapper->sample = pSample;
  surfaceWrapper->surface = pSurface;
  surfaceWrapper->surface->AddRef();
  surfaceWrapper->pDXDecoder = pDec->m_pDecoder;
  surfaceWrapper->pDXDecoder->AddRef();

  pic->buf[0] = av_buffer_create(nullptr, 0, free_dxva2_buffer, surfaceWrapper, 0);

  return 0;
}