Beispiel #1
0
bool DirectShowTimedSample::isReady(IReferenceClock *clock) const
{
    // A sample is ready when the reference clock has reached its start time.
    // If either the sample's timestamps or the clock's current time cannot
    // be obtained, treat the sample as ready for immediate presentation.
    REFERENCE_TIME start = 0;
    REFERENCE_TIME end = 0;
    if (m_sample->GetTime(&start, &end) != S_OK)
        return true;

    REFERENCE_TIME now = 0;
    if (clock->GetTime(&now) != S_OK)
        return true;

    return now >= start;
}
Beispiel #2
0
bool DirectShowTimedSample::schedule(
        IReferenceClock *clock, REFERENCE_TIME startTime, HANDLE handle)
{
    // Ask the reference clock to signal `handle` when the sample's start
    // time (offset from the stream base time `startTime`) arrives; the
    // advise cookie is kept in m_cookie so the request can be cancelled.
    REFERENCE_TIME sampleStart = 0;
    REFERENCE_TIME sampleEnd = 0;
    if (m_sample->GetTime(&sampleStart, &sampleEnd) != S_OK)
        return false;

    const HRESULT hr = clock->AdviseTime(
            startTime, sampleStart, reinterpret_cast<HEVENT>(handle), &m_cookie);
    return hr == S_OK;
}
Beispiel #3
0
// Flush this chunk's buffered samples into the container atom and record
// the resulting byte ranges in the track's index.
//
// Two paths:
//  - old index format: data is re-blocked into fixed-size sub-chunks
//    aligned to the handler's BlockAlign, each indexed via OldIndex();
//  - normal path: each sample is written in order; a sample may span
//    several buffers, and only the last buffer carries a timestamp.
// Always returns S_OK (handler write results are not propagated).
HRESULT 
MediaChunk::Write(Atom* patm)
{
    // record chunk start position
    LONGLONG posChunk = patm->Position() + patm->Length();

	if (m_bOldIndexFormat)
	{
		long cBytes = 0;

		// ensure that we don't break in the middle of a sample (Maxim Kartavenkov)
		const int MAX_PCM_SIZE = 22050;
		int max_bytes = MAX_PCM_SIZE - (MAX_PCM_SIZE % m_pTrack->Handler()->BlockAlign());

		list<IMediaSample*>::iterator it = m_Samples.begin();
		long cAvail = 0;
		BYTE* pBuffer = NULL;

		for (;;)
		{
			if (!cAvail)
			{
				// current buffer exhausted: advance to the next sample
				if (it == m_Samples.end())
				{
					break;
				}
				IMediaSample* pSample = *it++;
				pSample->GetPointer(&pBuffer);
				cAvail = pSample->GetActualDataLength();
				REFERENCE_TIME tStart, tStop;
				if (SUCCEEDED(pSample->GetTime(&tStart, &tStop)))
				{
					m_pTrack->SetOldIndexStart(tStart);
				}
			}
			// write no more than the remainder of the current sub-chunk
			long cThis = max_bytes - cBytes;
			if (cThis > cAvail)
			{
				cThis = cAvail;
			}
			
			int cActual = 0;
			m_pTrack->Handler()->WriteData(patm, pBuffer, cThis, &cActual);
			cBytes += cActual;
			cAvail -= cActual;
			pBuffer += cActual;

			if (cBytes >= max_bytes)
			{
				// sub-chunk complete: index it and start the next one
				m_pTrack->OldIndex(posChunk, cBytes);
				posChunk = patm->Position() + patm->Length();				
				cBytes = 0;
			}
		}
		// index any trailing partial sub-chunk
		if (cBytes)
		{
			m_pTrack->OldIndex(posChunk, cBytes);
		}
		return S_OK;
	}

    // Remember that large H264 samples may be broken 
    // across several buffers, with Sync flag at start and
    // time on last buffer.
    bool bSync = false;
    long cBytes = 0;
	long nSamples = 0;

    // loop once through the samples writing the data
    list<IMediaSample*>::iterator it;
    for (it = m_Samples.begin(); it != m_Samples.end(); it++)
    {
        IMediaSample* pSample = *it;

        // record positive sync flag, but for
        // multiple-buffer samples, only one sync flag will be present
        // so don't overwrite with later negatives.
        if (pSample->IsSyncPoint() == S_OK)
        {
            bSync = true;
        }

		// write payload, including any transformation (eg BSF to length-prepended)
        BYTE* pBuffer;
        pSample->GetPointer(&pBuffer);
		int cActual = 0;
		m_pTrack->Handler()->WriteData(patm, pBuffer, pSample->GetActualDataLength(), &cActual);
		cBytes += cActual;
        REFERENCE_TIME tStart, tEnd;
        HRESULT hr = pSample->GetTime(&tStart, &tEnd);
        if (SUCCEEDED(hr))
        {
			// this is the last buffer in the sample
			m_pTrack->IndexSample(bSync, tStart, tEnd, cBytes);
            // reset for new sample
            bSync = false;
			cBytes = 0;
			nSamples++;
        }
    }

    // add chunk position to index
	m_pTrack->IndexChunk(posChunk, nSamples);

    return S_OK;
}
// UDP receive worker thread.
// Binds to the broadcast port matching the connected media type, then
// reassembles datagrams into IMediaSamples: a type-0 packet carries the
// sample header (serialized AM_SAMPLE2_PROPERTIES), type-1 packets carry
// payload fragments keyed by the same id. Completed samples are queued
// in SampleList for the push thread.
// Returns 1 on fatal errors; otherwise loops forever.
// NOTE(review): error paths return without closesocket(udpSocket) — the
// socket leaks if the thread exits early; confirm this is acceptable for
// a process-lifetime thread.
DWORD WINAPI UdpReceiveThread(LPVOID param)
{
	HRESULT hr;
	ReceiveParam *receiveParam = (ReceiveParam*)param;
	HANDLE PushSemaphore = receiveParam->PushSemaphore;
	HANDLE PushDataMutex = receiveParam->PushDataMutex;
	std::map<REFERENCE_TIME,IMediaSample*>& SampleList = *receiveParam->SampleList;
	NetReceiveFilter* filter = receiveParam->filter;
	delete receiveParam;	// ownership of the param struct passed to this thread

	NetReceiveOutputPin* outputPin = reinterpret_cast<NetReceiveOutputPin*>(filter->GetPin(0));
	assert(outputPin != NULL);

	// Wait until the output pin is connected, so the media type (which
	// selects the UDP port below) is known.
	AM_MEDIA_TYPE mediaType;
	while (true)
	{
		outputPin->ConnectionMediaType(&mediaType);
		if (mediaType.majortype == GUID_NULL)
		{
			Sleep(300);
		}
		else
			break;
	}

	SOCKET udpSocket;
	udpSocket = ::socket(AF_INET, SOCK_DGRAM, 0);
	if (udpSocket == INVALID_SOCKET)
	{
		ErrorPrint("Create udp socket error");
		return 1;
	}

	sockaddr_in bindAddress;
	bindAddress.sin_family = AF_INET;
	bindAddress.sin_addr.s_addr = htonl(INADDR_ANY);
	if(mediaType.majortype == MEDIATYPE_Video)
	{
		bindAddress.sin_port = htons(VideoBroadcastPort);
	}
	else
	{
		bindAddress.sin_port = htons(AudioBroadcastPort);
	}

	// Allow several receivers on one machine to share the broadcast port.
	int option = 1;
	int ret = setsockopt(udpSocket, SOL_SOCKET, SO_REUSEADDR, (char*)&option, sizeof(option));
	if (ret == SOCKET_ERROR)
	{
		ErrorPrint("Set socket reuse address error");
		return 1;
	}

	// Large OS receive buffer (10 MB) to reduce drops under bursts;
	// failure here is non-fatal.
	int recvSystemBufferSize = 1024 * 1024 * 10;

	ret = setsockopt(udpSocket, SOL_SOCKET, SO_RCVBUF, (char*)&recvSystemBufferSize, sizeof(recvSystemBufferSize));
	if (ret == SOCKET_ERROR)
	{
		ErrorPrint("Set socket receive system buffer size error");
	}

	ret = ::bind(udpSocket, (sockaddr*)&bindAddress, sizeof(bindAddress));
	if(ret == SOCKET_ERROR)
	{
		ErrorPrint("Bind udp receive socket error");
		return 1;
	}

	sockaddr_in fromAddress;
	fromAddress.sin_family = AF_INET;
	int addressLen = sizeof(fromAddress);

	// Samples currently being reassembled, keyed by packet-header id.
	std::map<long long, IMediaSample*> idToSampleMap;

	// One receive buffer: a MediaPacketHeader followed by the payload.
	const int packetMaxSize = 10 * 1024;
	MediaPacketHeader* mediaPacketHeader = (MediaPacketHeader*)new char[sizeof(MediaPacketHeader) + packetMaxSize];
	boost::scoped_array<char> bufferContainer((char*)mediaPacketHeader);
	char* dataStart = (char*)mediaPacketHeader;
	char* dataBuffer = (char*)mediaPacketHeader + sizeof(MediaPacketHeader);
	while (true)
	{
		int recvedSize = recvfrom(udpSocket, dataStart, sizeof(MediaPacketHeader) + packetMaxSize, 0, (sockaddr*)&fromAddress, &addressLen);
		if (recvedSize == SOCKET_ERROR)
		{
			ErrorPrint("Receive from udp error");
			return 1;
		}

		if (g_IsBroadcasting) // this is our own broadcast packet; discard it
		{
			continue;
		}

		if (mediaPacketHeader->type == 0) // sample header packet
		{
#ifdef UDP_PRINT
			std::cout<<"Receive media packet header:"<<mediaPacketHeader->id<<std::endl;
#endif
			std::map<long long, IMediaSample*>::iterator it = idToSampleMap.begin();
			while (it != idToSampleMap.end()) // discard samples that lost packets
			{
				std::map<long long, IMediaSample*>::iterator tmp = it++;
				if (tmp->first < mediaPacketHeader->id) // this sample definitely lost data: its id is older than the new header and it never completed, so drop it
				{
					std::cout<<"Lose packet:"<<mediaPacketHeader->id<<std::endl;
					tmp->second->Release(); // the sample must be released back to the pool
					idToSampleMap.erase(tmp);
				}
				else // all stale samples have been handled
					break;
			}

// 			if (mediaType.majortype == MEDIATYPE_Video)
// 			{
// 				std::cout<<"Video header:"<<mediaPacketHeader->id<<std::endl;
// 			}

//			std::cout<<"Before get free sample"<<std::endl;
			IMediaSample *sample = filter->GetFreeSample(); // allocate a fresh sample for this header
//			std::cout<<"After get free sample"<<std::endl;
			if (sample == NULL)
			{
				ErrorPrint("Get free sample error");
				return 1;
			}

			AM_SAMPLE2_PROPERTIES* sample2Properties = (AM_SAMPLE2_PROPERTIES*)dataBuffer;

			// NOTE(review): the "- 9" presumably trims trailing fields
			// (e.g. the buffer pointer) that are meaningless across the
			// wire — TODO confirm against the sender's serialization.
			sample2Properties->cbData = sizeof(AM_SAMPLE2_PROPERTIES) - 9;

			IMediaSample2 *mediaSample2;
			hr = sample->QueryInterface(IID_IMediaSample2, (void**)&mediaSample2);
			if (FAILED(hr))
			{
				ErrorPrint("Get media sample2 interface error",hr);
				sample->Release();
				return 1;
			}
			ComReleaser mediaSample2Releaser(mediaSample2);

			hr = mediaSample2->SetProperties(sample2Properties->cbData, (BYTE*)sample2Properties);// apply the received sample properties
			if (FAILED(hr))
			{
				ErrorPrint("Set sample properties error");
			}
			sample->SetTime(&(sample2Properties->tStart), &(sample2Properties->tStop));
			sample->SetActualDataLength(sample2Properties->lActual);

			idToSampleMap.insert(std::make_pair(mediaPacketHeader->id, sample)); // park in the map until all data packets arrive
		}
		else if (mediaPacketHeader->type == 1) // sample data packet
		{
#ifdef UDP_PRINT
			std::cout<<"Receive sample data:"<<mediaPacketHeader->id<<std::endl;
#endif
			std::map<long long, IMediaSample*>::iterator it = idToSampleMap.find(mediaPacketHeader->id);
			if (it != idToSampleMap.end()) // if the id is unknown, the header was lost or expired; drop this packet
			{
				IMediaSample* sample = it->second;
				PBYTE dataPointer = NULL;
				hr = sample->GetPointer(&dataPointer);
				if (FAILED(hr))
				{
					ErrorPrint("Get data pointer error",hr);
					idToSampleMap.erase(it);
					sample->Release();
					continue;
				}
				memcpy(dataPointer + mediaPacketHeader->offset, dataBuffer, mediaPacketHeader->size);
				if ( (mediaPacketHeader->offset + mediaPacketHeader->size) == sample->GetActualDataLength()) // sample complete (loss of a middle packet is not detected here)
				{
					idToSampleMap.erase(it);
					REFERENCE_TIME startTime,endTime;
					sample->GetTime(&startTime,&endTime);
					// notify the PUSH thread that data is available
					WaitForSingleObject(PushDataMutex, INFINITE);
// 					if (mediaType.majortype == MEDIATYPE_Video)
// 					{
// 						std::cout<<"Finished Video sample:"<<mediaPacketHeader->id<<";Current Thread:"<<GetCurrentThreadId()<<";Map size:"<<idToSampleMap.size()<<std::endl;
// 						std::cout<<"Sample start time:"<<startTime <<";Sample end time:"<<endTime<<std::endl;
// 					}
					SampleList.insert(std::make_pair(startTime,sample));
					if (SampleList.size() >= 24 * 10)
					{
						ReleaseSemaphore(PushSemaphore, 1, NULL);
					}
					ReleaseMutex(PushDataMutex);
				}
			}
			else
				std::cout<<"Lose packet header:"<<mediaPacketHeader->id<<std::endl;
		}

// 		if(idToSampleMap.size() == 0 ||  idToSampleMap.begin()->first < )
// 
// 		mediaPacketHeader
// 

	}
}
// TCP receive worker thread.
// Repeatedly reads an AM_SAMPLE2_PROPERTIES struct followed by its
// payload from the filter's socket, fills a free sample, queues it in
// SampleList for the push thread, and forwards it for retransmission.
// Returns 1 on fatal errors; on recoverable receive failures it waits
// for a new socket (waitForNewSocket) and continues.
DWORD WINAPI ReceiveThread(PVOID param)
{
	HRESULT hr;
	ReceiveParam *receiveParam = (ReceiveParam*)param;
	HANDLE PushSemaphore = receiveParam->PushSemaphore;
	HANDLE PushDataMutex = receiveParam->PushDataMutex;
	std::map<REFERENCE_TIME,IMediaSample*>& SampleList = *receiveParam->SampleList;
	NetReceiveFilter* filter = receiveParam->filter;
	SOCKET socket ;
	delete receiveParam;

	LONG packSize;
	//	CMediaSample *tmpSample = (CMediaSample*) malloc(sizeof(CMediaSample));
	REFERENCE_TIME startTime = 0,endTime = 0; // zero timestamps mean "present immediately"
	REFERENCE_TIME mediaStartTime = 0,mediaEndTime = 0;
	AM_SAMPLE2_PROPERTIES sample2Properties;

	NetReceiveOutputPin* outputPin = reinterpret_cast<NetReceiveOutputPin*>(filter->GetPin(0));
	assert(outputPin != NULL);

	filter->waitForNewSocket();

	while(true)
	{

		IMediaSample *sample = filter->GetFreeSample();
		if (sample == NULL)
		{
			ErrorPrint("Get free sample error");
			return 1;
		}

		PBYTE dataPointer = NULL;
		hr = sample->GetPointer(&dataPointer);
		if (FAILED(hr))
		{
			ErrorPrint("Get data pointer error",hr);
			sample->Release();
			return 1;
		}

		CAutoLock lock(filter->getSocketLock());
		socket = filter->getSocket();

		// First read the fixed-size properties header for this sample.
		if (!receiveData(socket, (char*)&sample2Properties, sizeof(sample2Properties)))
		{
			ErrorPrint("Get pack Properties error");
			sample->Release();
			filter->waitForNewSocket();
			continue;
		}
		packSize = sample2Properties.lActual;

		if (packSize > 100 * 1024)
		{
			std::cout<<"Exceed 100K:"<<packSize/1024<<std::endl;
		}

		AM_MEDIA_TYPE mediaType;
		filter->GetPin(0)->ConnectionMediaType(&mediaType);

		if (filter->getPlayMode() == 1) // timestamped playback mode
		{
// 			static const unsigned long  offset = 10000000; // add latency so more data gets buffered
// 			sample2Properties.tStart +=offset;
// 			sample2Properties.tStop += offset;

			// NOTE(review): the "- 9" presumably trims trailing fields
			// not meaningful on the wire — TODO confirm with the sender.
			sample2Properties.cbData = sizeof(sample2Properties) - 9;
			sample2Properties.pbBuffer= dataPointer;

			IMediaSample2 *mediaSample2;
			hr = sample->QueryInterface(IID_IMediaSample2, (void**)&mediaSample2);
			if (FAILED(hr))
			{
				ErrorPrint("Get media sample2 interface error",hr);
				sample->Release();
				return 1;
			}
			ComReleaser mediaSample2Releaser(mediaSample2);

			hr = mediaSample2->SetProperties(sample2Properties.cbData, (BYTE*)&sample2Properties);
			if (FAILED(hr))
			{
				ErrorPrint("Set sample properties error");
			}
			sample->SetTime(&sample2Properties.tStart, &sample2Properties.tStop);

			sample->GetTime(&startTime,&endTime);
		}
		else
		{
			// play-ASAP mode: zero timestamps
			startTime = 0;
			endTime = 0;
		}

		ASSERT(packSize <= sample->GetSize());
		sample->SetActualDataLength(packSize);
		sample->SetTime(&startTime, &endTime);

		// Then read the payload itself into the sample buffer.
		if(!receiveData(socket, (char*)dataPointer, packSize))
		{
			ErrorPrint("Receive pack errors");
			sample->Release();
			filter->waitForNewSocket();
			continue;
		}

		// notify the PUSH thread that data is available
		WaitForSingleObject(PushDataMutex, INFINITE);
		SampleList.insert(std::make_pair(startTime, sample));
		if(filter->getPlayMode() == 0) // play-ASAP: wake the push thread as soon as one sample exists
		{
			if (SampleList.size() == 1)
			{
				ReleaseSemaphore(PushSemaphore, 1, NULL);
			}
		}
		else if (filter->getPlayMode() == 1)// timestamped: buffer a good number of samples before waking, but not too many
		{
			if (SampleList.size() >= 24 * 10)
			{
				ReleaseSemaphore(PushSemaphore, 1, NULL);
			}
		}
		ReleaseMutex(PushDataMutex);

		outputPin->newTransSample(sample2Properties, dataPointer); // hand the sample off for onward relay
	}

	return 0;
}
// Push worker thread.
// Pulls buffered samples from SampleList (a map ordered by start time)
// and delivers each downstream via filter->PushData(). In play mode 0 it
// pushes as fast as samples arrive; in mode 1 it paces delivery against
// the filter's stream time and the global reference-time filter.
// NOTE(review): one error path does "return false" (== 0) from a DWORD
// thread proc while other exits return 0 for success — confirm intended
// thread exit codes.
DWORD WINAPI PushDataThread(PVOID param)
{
	PushParam* pushParam = (PushParam*)param;
	HANDLE PushSemaphore = pushParam->PushSemaphore;
	HANDLE PushDataMutex = pushParam->PushDataMutex;
	NetReceiveFilter* filter = pushParam->filter;
	std::map<REFERENCE_TIME, IMediaSample*>& SampleList = *pushParam->SampleList;

	delete pushParam;

	REFERENCE_TIME startTime,endTime;
	CRefTime streamTime(LONG(0)),lastStreamTime(LONG(0));

	bool first = true;
	AM_MEDIA_TYPE mediaType;
	IMediaSample* sample ;

	while (SampleList.size() == 0) // wait until enough data has arrived
	{
		WaitForSingleObject(PushSemaphore,INFINITE);
	}

	CBasePin* pin = filter->GetPin(0);
	pin->ConnectionMediaType(&mediaType);
	IFilterGraph* filterGraph = filter->GetFilterGraph();
	ComReleaser filterGraphReleaser(filterGraph);

	HRESULT hr;
	IMediaControl* mediaControl;
	hr = filterGraph->QueryInterface(IID_IMediaControl, (void**)&mediaControl);
	if(FAILED(hr))
	{
		ErrorPrint("Get media control error", hr);
		return false;
	}
	ComReleaser mediaControlReleaser(mediaControl);

	while (true)
	{
		WaitForSingleObject(PushDataMutex, INFINITE);
		if (filter->getPlayMode() == 0) // play-ASAP mode: ignore timestamps, push one sample at a time
		{
			if (SampleList.size() == 0)
			{
				// drop the mutex while blocking on the semaphore, then retake it
				ReleaseMutex(PushDataMutex);
				while (SampleList.size() == 0)
				{
					WaitForSingleObject(PushSemaphore,INFINITE);
				}
				WaitForSingleObject(PushDataMutex, INFINITE);
			}
			sample = SampleList.begin()->second;
		}
		else if (filter->getPlayMode() == 1) // timestamped mode
		{
			NetReceiveFilter::State state = filter->getState();
			if (SampleList.size() == 0)
			{
				g_ReferenceTimeFilter->pauseTime(); // pause the clock
				ReleaseMutex(PushDataMutex);
				while (SampleList.size() == 0) // wait until enough data has arrived
				{
					WaitForSingleObject(PushSemaphore,INFINITE);
				}
				WaitForSingleObject(PushDataMutex, INFINITE);
				g_ReferenceTimeFilter->startTime(); // restart the clock

			}

			if (state == NetReceiveFilter::Stopped)
			{
				ReleaseMutex(PushDataMutex);
				Sleep(50);
				continue;
			}

			if(g_ReferenceTimeFilter->isStop())
			{
				ReleaseMutex(PushDataMutex);
				Sleep(50);
				continue;
			}

			// earliest-timestamp sample; overwrite its times with the
			// reference-time filter's current time
			sample = SampleList.begin()->second;
			sample->GetTime(&startTime,&endTime);
			filter->StreamTime(streamTime); // obtain the current stream time
			g_ReferenceTimeFilter->GetTime(&startTime);
			g_ReferenceTimeFilter->GetTime(&endTime);

			if (mediaType.majortype == MEDIATYPE_Video)
			{
				int a = 0;	// debug breakpoint placeholder
			}
			else
			{
				int b = 0;	// debug breakpoint placeholder
			}

			if(state != NetReceiveFilter::Paused) // no timestamp correction while paused
			{
				// more than 1s (10000000 units of 100ns) ahead of stream
				// time: too early, retry later
				if(startTime - 10000000 > streamTime )
				{
					ReleaseMutex(PushDataMutex);
					Sleep(50);
					continue;
				}
				sample->SetTime(&startTime, &endTime);
			}
			

			if (mediaType.majortype == MEDIATYPE_Video)
			{
				int a = 0;	// debug breakpoint placeholder
			}
			else
			{
				int b = 0;	// debug breakpoint placeholder
			}

		}
// 		if (mediaType.majortype == MEDIATYPE_Video)
// 		{
// 			std::cout<<"Push video data."<<std::endl;
// 		}
		SampleList.erase(SampleList.begin());
		ReleaseMutex(PushDataMutex);
		if(!filter->PushData(sample))
		{
			ErrorPrint("Push data error");
			sample->Release();
			continue;
		}
		sample->Release();
	}
	return 0;
}
Beispiel #7
0
// Per-frame preprocessing for the capture source.
// Applies any pending volume change, then takes the most recently
// captured sample (swapped out of curSample under hSampleMutex) and
// uploads it to the GPU texture, converting from the device's colour
// format (RGB, I420/YV12 — optionally on worker threads — YVYU/YUY2,
// UYVY/HDYC) as needed.
void DeviceSource::Preprocess()
{
    if(!bCapturing)
        return;

    //----------------------------------------

    // Apply a pending volume request either to the audio output device
    // or, failing that, to the audio filter's IBasicAudio interface.
    if(bRequestVolume)
    {
        if(audioOut)
            audioOut->SetVolume(fNewVol);
        else if(audioFilter)
        {
            IBasicAudio *basicAudio;
            if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
            {
                // map [0..1] volume to the IBasicAudio dB-style scale;
                // clamp near-silence to full mute (-10000)
                long lVol = long((double(fNewVol)*NEAR_SILENTf)-NEAR_SILENTf);
                if(lVol <= -NEAR_SILENT)
                    lVol = -10000;
                basicAudio->put_Volume(lVol);
                basicAudio->Release();
            }
        }
        bRequestVolume = false;
    }

    //----------------------------------------

    // Take ownership of the latest captured sample under the mutex so
    // the capture callback can keep writing new ones.
    IMediaSample *lastSample = NULL;

    OSEnterMutex(hSampleMutex);
    if(curSample)
    {
        lastSample = curSample;
        curSample = NULL;
    }
    OSLeaveMutex(hSampleMutex);

    // leave two cores for the rest of the app, but use at least one thread
    int numThreads = MAX(OSGetTotalCores()-2, 1);

    if(lastSample)
    {
        REFERENCE_TIME refTimeStart, refTimeFinish;
        lastSample->GetTime(&refTimeStart, &refTimeFinish);

        // NOTE(review): logs every sample — consider removing/conditioning
        // this for release builds.
        static REFERENCE_TIME lastRefTime = 0;
        Log(TEXT("refTimeStart: %llu, refTimeFinish: %llu, offset = %llu"), refTimeStart, refTimeFinish, refTimeStart-lastRefTime);
        lastRefTime = refTimeStart;

        BYTE *lpImage = NULL;
        if(colorType == DeviceOutputType_RGB)
        {
            // RGB frames can be uploaded directly
            if(texture)
            {
                if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
                    texture->SetImage(lpImage, GS_IMAGEFORMAT_BGRX, renderCX*4);

                bReadyToDraw = true;
            }
        }
        else if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            if(bUseThreadedConversion)
            {
                // Pipelined conversion: first wait for the worker threads
                // to finish converting the PREVIOUS frame, upload that
                // result, then dispatch this frame to the workers.
                if(!bFirstFrame)
                {
                    List<HANDLE> events;
                    for(int i=0; i<numThreads; i++)
                        events << convertData[i].hSignalComplete;

                    WaitForMultipleObjects(numThreads, events.Array(), TRUE, INFINITE);
                    texture->SetImage(lpImageBuffer, GS_IMAGEFORMAT_RGBX, texturePitch);

                    bReadyToDraw = true;
                }
                else
                    bFirstFrame = false;

                if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
                {
                    // one AddRef per worker thread; each worker releases
                    // its reference when done, keeping the sample alive
                    for(int i=0; i<numThreads; i++)
                        lastSample->AddRef();

                    for(int i=0; i<numThreads; i++)
                    {
                        convertData[i].input    = lpImage;
                        convertData[i].pitch    = texturePitch;
                        convertData[i].output   = lpImageBuffer;
                        convertData[i].sample   = lastSample;
                        SetEvent(convertData[i].hSignalConvert);
                    }
                }
            }
            else
            {
                // synchronous planar->packed conversion straight into the
                // mapped texture
                if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
                {
                    LPBYTE lpData;
                    UINT pitch;

                    if(texture->Map(lpData, pitch))
                    {
                        PackPlanar(lpData, lpImage, renderCX, renderCY, pitch, 0, renderCY);
                        texture->Unmap();
                    }
                }

                bReadyToDraw = true;
            }
        }
        else if(colorType == DeviceOutputType_YVYU || colorType == DeviceOutputType_YUY2)
        {
            if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
            {
                LPBYTE lpData;
                UINT pitch;

                if(texture->Map(lpData, pitch))
                {
                    // true: Y leads each pixel pair for YVYU/YUY2
                    Convert422To444(lpData, lpImage, pitch, true);
                    texture->Unmap();
                }
            }

            bReadyToDraw = true;
        }
        else if(colorType == DeviceOutputType_UYVY || colorType == DeviceOutputType_HDYC)
        {
            if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
            {
                LPBYTE lpData;
                UINT pitch;

                if(texture->Map(lpData, pitch))
                {
                    // false: U/V lead each pixel pair for UYVY/HDYC
                    Convert422To444(lpData, lpImage, pitch, false);
                    texture->Unmap();
                }
            }

            bReadyToDraw = true;
        }

        // drop this thread's reference (workers may still hold theirs)
        lastSample->Release();
    }
}