Example 1
//IUnknown
STDMETHODIMP XnVideoStream::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{   
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(ppv);

	HRESULT hr = S_OK;

	// Standard OLE stuff
	if(riid == IID_IAMStreamConfig) 
	{
		xnDumpFileWriteString(m_Dump, "\tPin query interface to IAMStreamConfig\n");
		hr = GetInterface(static_cast<IAMStreamConfig*>(this), ppv);
	}
	else if(riid == IID_IKsPropertySet)
	{
		xnDumpFileWriteString(m_Dump, "\tPin query interface to IKsPropertySet\n");
		hr = GetInterface(static_cast<IKsPropertySet*>(this), ppv);
	}
	else if(riid == IID_ISpecifyPropertyPages)
	{
		xnDumpFileWriteString(m_Dump, "\tPin query interface to ISpecifyPropertyPages\n");
		hr = GetInterface(static_cast<ISpecifyPropertyPages*>(this), ppv);
	}
	else 
	{
		OLECHAR strGuid[40];
		StringFromGUID2(riid, strGuid, 40);
		xnDumpFileWriteString(m_Dump, "\tPin query interface to %S\n", strGuid);
		hr = CSourceStream::NonDelegatingQueryInterface(riid, ppv);
	}

	XN_METHOD_RETURN(hr);
}
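
A note on the helper used above: GetInterface is not a member of XnVideoStream but the DirectShow base-classes helper (declared in combase.h). A rough sketch of its behavior, based on the published base-class sources:

// Sketch of the DirectShow base-classes helper: it hands out the requested
// interface pointer and takes the mandatory COM reference on behalf of the caller.
HRESULT GetInterface(LPUNKNOWN pUnk, void **ppv)
{
	CheckPointer(ppv, E_POINTER);
	*ppv = pUnk;
	pUnk->AddRef();
	return NOERROR;
}
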
Example 2
void CALLBACK audioCallback(HWAVEOUT hwo, UINT uMsg, DWORD_PTR dwInstance, DWORD_PTR dwParam1, DWORD_PTR dwParam2)
{
	if (uMsg == WOM_DONE)
	{
		WAVEHDR* pHeader = (WAVEHDR*)dwParam1;
		XnUInt32 nIndex = (XnUInt32)pHeader->dwUser;

		xnDumpFileWriteString(g_AudioData.SyncDump, "Done playing index %d.", nIndex);

		if (g_AudioData.nFirstToCheck == -1 || g_AudioData.nFirstToCheck == nIndex)
		{
			g_AudioData.nFirstToCheck = -1;

			// get the timestamp of the packet just done playing
			XnUInt64 nPlayedTimestamp = g_AudioData.pAudioTimestamps[nIndex];

			// check how much time is still queued
			XnUInt32 nLastQueuedIndex = (g_AudioData.nAudioNextBuffer + NUMBER_OF_AUDIO_BUFFERS - 1) % NUMBER_OF_AUDIO_BUFFERS;
			XnUInt64 nLastQueuedTimestamp = g_AudioData.pAudioTimestamps[nLastQueuedIndex];

			xnDumpFileWriteString(g_AudioData.SyncDump, " %f ms in queue.", (nLastQueuedTimestamp - nPlayedTimestamp) / 1e3);

			if (nLastQueuedTimestamp - nPlayedTimestamp > AUDIO_LATENCY_THRESHOLD)
			{
				g_AudioData.bFlush = true;
				xnDumpFileWriteString(g_AudioData.SyncDump, " Will flush queue.\n");
			}
			else
				xnDumpFileWriteString(g_AudioData.SyncDump, "\n");
		}
		else
			xnDumpFileWriteString(g_AudioData.SyncDump, "\n");
	}
}
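
Not shown here is how audioCallback gets registered, or how dwUser comes to hold the ring index. A minimal sketch under assumed conditions (16-bit stereo PCM at 48 kHz; openAudioOutput is an illustrative helper, not part of the original):

#include <windows.h>
#include <mmsystem.h>
#pragma comment(lib, "winmm.lib")

// Open the default output device with audioCallback attached
// (error handling omitted for brevity).
HWAVEOUT openAudioOutput()
{
	WAVEFORMATEX wfx = {0};
	wfx.wFormatTag = WAVE_FORMAT_PCM;
	wfx.nChannels = 2;
	wfx.nSamplesPerSec = 48000;
	wfx.wBitsPerSample = 16;
	wfx.nBlockAlign = (WORD)(wfx.nChannels * wfx.wBitsPerSample / 8);
	wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;

	HWAVEOUT hwo = NULL;
	waveOutOpen(&hwo, WAVE_MAPPER, &wfx, (DWORD_PTR)audioCallback, 0, CALLBACK_FUNCTION);
	return hwo;
}

// Presumably each header's dwUser is set to its ring index before queueing,
// which is the value the callback reads back as nIndex:
//   pHeader->dwUser = nIndex;
//   waveOutPrepareHeader(hwo, pHeader, sizeof(WAVEHDR));
//   waveOutWrite(hwo, pHeader, sizeof(WAVEHDR));
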
Example 3
void XnDataProcessor::ProcessData(const XnSensorProtocolResponseHeader* pHeader, const XnUChar* pData, XnUInt32 nDataOffset, XnUInt32 nDataSize)
{
	XN_PROFILING_START_SECTION("XnDataProcessor::ProcessData")

	// count these bytes
	m_nBytesReceived += nDataSize;

	// check if we start a new packet
	if (nDataOffset == 0)
	{
		// make sure no packet was lost
		if (pHeader->nPacketID != m_nLastPacketID+1 && pHeader->nPacketID != 0)
		{
			xnLogWarning(XN_MASK_SENSOR_PROTOCOL, "%s: Expected %x, got %x", m_csName, m_nLastPacketID+1, pHeader->nPacketID);
			OnPacketLost();
		}

		m_nLastPacketID = pHeader->nPacketID;

		// log packet arrival
		XnUInt64 nNow;
		xnOSGetHighResTimeStamp(&nNow);
		xnDumpFileWriteString(m_pDevicePrivateData->MiniPacketsDump, "%llu,0x%hx,0x%hx,0x%hx,%u\n", nNow, pHeader->nType, pHeader->nPacketID, pHeader->nBufSize, pHeader->nTimeStamp);
	}

	ProcessPacketChunk(pHeader, pData, nDataOffset, nDataSize);

	XN_PROFILING_END_SECTION
}
Example 4
void XnAudioProcessor::ProcessWholePacket(const XnSensorProtocolResponseHeader* pHeader, const XnUChar* pData)
{
	xnOSEnterCriticalSection(&m_pDevicePrivateData->hAudioBufferCriticalSection);

	// take write packet
	XnUChar* pWritePacket = m_pDevicePrivateData->pAudioBuffer + (m_pDevicePrivateData->nAudioWriteIndex * m_pDevicePrivateData->nAudioPacketSize);

	if (m_bDeleteChannel)
	{
		XnUInt16* pSamples = (XnUInt16*)pData;
		XnUInt16* pSamplesEnd = (XnUInt16*)(pData + pHeader->nBufSize);
		XnUInt16* pOutput = (XnUInt16*)pWritePacket;

		while (pSamples < pSamplesEnd)
		{
			*pOutput = *pSamples;

			pOutput++;
			// skip a sample
			pSamples += 2;
		}
	}
	else
	{
		// copy data
		xnOSMemCopy(pWritePacket, pData, pHeader->nBufSize);
	}

	// mark timestamp
	m_pDevicePrivateData->pAudioPacketsTimestamps[m_pDevicePrivateData->nAudioWriteIndex] = GetTimeStamp(pHeader->nTimeStamp);

	if (m_nLastPacketID % 10 == 0)
	{
		XnUInt64 nSysTime;
		xnOSGetTimeStamp(&nSysTime);

		xnDumpFileWriteString(m_pDevicePrivateData->BandwidthDump, "%llu,%s,%d,%d\n",
			nSysTime, "Audio", -1, m_nBytesReceived);

		m_nBytesReceived = 0;
	}

	// move write index forward
	m_pDevicePrivateData->nAudioWriteIndex = (m_pDevicePrivateData->nAudioWriteIndex + 1) % m_pDevicePrivateData->nAudioBufferNumOfPackets;

	// if write index got to read index (end of buffer), move read index forward (and lose a packet)
	if (m_pDevicePrivateData->nAudioWriteIndex == m_pDevicePrivateData->nAudioReadIndex)
	{
		m_pDevicePrivateData->nAudioReadIndex = (m_pDevicePrivateData->nAudioReadIndex + 1) % m_pDevicePrivateData->nAudioBufferNumOfPackets;
	}

	xnOSLeaveCriticalSection(&m_pDevicePrivateData->hAudioBufferCriticalSection);

	xnDumpFileWriteBuffer(m_AudioInDump, pData, pHeader->nBufSize);

	if (m_pDevicePrivateData->pAudioCallback != NULL)
	{
		m_pDevicePrivateData->pAudioCallback(m_pDevicePrivateData->pAudioCallbackCookie);
	}
}
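
The m_bDeleteChannel branch above reduces interleaved 16-bit two-channel audio to a single channel by copying every other sample. The same pattern as a self-contained function (names are illustrative):

#include <cstdint>
#include <cstddef>

// Copy only the first channel out of interleaved 16-bit two-channel audio.
// nStereoBytes must be a multiple of 4 (one 16-bit sample per channel).
static void keepFirstChannel(const uint8_t* pStereo, size_t nStereoBytes, uint16_t* pMono)
{
	const uint16_t* pSamples = reinterpret_cast<const uint16_t*>(pStereo);
	const uint16_t* pEnd = reinterpret_cast<const uint16_t*>(pStereo + nStereoBytes);

	while (pSamples < pEnd)
	{
		*pMono++ = *pSamples; // keep channel 0
		pSamples += 2;        // skip channel 1
	}
}
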
Example 5
void XnDeviceBase::OnNewStreamData(XnDeviceStream* pStream, XnUInt64 nTimestamp, XnUInt32 nFrameID)
{
	XnUInt64 nNow;
	xnOSGetHighResTimeStamp(&nNow);
	xnDumpFileWriteString(m_StreamsDataDump, "%llu,%s,%llu,%u\n", nNow, pStream->GetName(), nTimestamp, nFrameID);

	if (strcmp(m_PrimaryStream.GetValue(), XN_PRIMARY_STREAM_ANY) == 0 ||
		strcmp(m_PrimaryStream.GetValue(), XN_PRIMARY_STREAM_NONE) == 0)
	{
		// any stream makes us advance
		m_nLastTimestamp = XN_MAX(m_nLastTimestamp, nTimestamp);
		m_nLastFrameID = XN_MAX(m_nLastFrameID, nFrameID);
	}
	else if (strcmp(m_PrimaryStream.GetValue(), pStream->GetName()) == 0) // this stream is the primary
	{
		m_nLastTimestamp = nTimestamp;
		m_nLastFrameID = nFrameID;
	}

	XnStatus nRetVal = xnOSSetEvent(m_hNewDataEvent);
	if (nRetVal != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_DDK, "Failed setting the new data event: %s", xnGetStatusString(nRetVal));
	}

	RaiseNewStreamDataEvent(pStream->GetName());
}
Example 6
void XnDeviceBase::OnNewStreamData(XnDeviceStream* pStream, OniFrame* pFrame)
{
    XnUInt64 nNow;
    xnOSGetHighResTimeStamp(&nNow);
    xnDumpFileWriteString(m_StreamsDataDump, "%llu,%s,%llu,%u\n", nNow, pStream->GetName(), pFrame->timestamp, pFrame->frameIndex);

    RaiseNewStreamDataEvent(pStream->GetName(), pFrame);
}
Example 7
XnStatus XnDeviceBase::Read(XnStreamDataSet* pStreamOutputSet)
{
	XnStatus nRetVal = XN_STATUS_OK;

	XN_VALIDATE_INPUT_PTR(pStreamOutputSet);

	if (m_ReadWriteMode.GetValue() == XN_DEVICE_MODE_WRITE)
	{
		return XN_STATUS_IO_DEVICE_WRONG_MODE;
	}

	XnUInt64 nNow;
	xnOSGetHighResTimeStamp(&nNow);
	xnDumpFileWriteString(m_StreamsDataDump, "%llu,Read Called\n", nNow);

	// First thing to do is wait for primary stream to advance. We do this BEFORE checking streams
	// because device streams might change during this wait.
	nRetVal = WaitForPrimaryStream(m_hNewDataEvent, pStreamOutputSet);
	XN_IS_STATUS_OK(nRetVal);

	xnOSGetHighResTimeStamp(&nNow);
	xnDumpFileWriteString(m_StreamsDataDump, "%llu,Read Condition Met\n", nNow);

	// take the list of stream output objects
	XnStreamData* apStreamOutputs[XN_DEVICE_BASE_MAX_STREAMS_COUNT];
	XnUInt32 nOutputsCount = XN_DEVICE_BASE_MAX_STREAMS_COUNT;

	nRetVal = XnStreamDataSetCopyToArray(pStreamOutputSet, apStreamOutputs, &nOutputsCount);
	XN_IS_STATUS_OK(nRetVal);

	// now read
	for (XnUInt32 nIndex = 0; nIndex < nOutputsCount; ++nIndex)
	{
		// find its corresponding stream
		XnDeviceStream* pStream;
		nRetVal = FindStream(apStreamOutputs[nIndex]->StreamName, &pStream);
		XN_IS_STATUS_OK(nRetVal);

		nRetVal = ReadFromStreamImpl(pStream, apStreamOutputs[nIndex]);
		XN_IS_STATUS_OK(nRetVal);
	}

	return (XN_STATUS_OK);
}
Example 8
HRESULT STDMETHODCALLTYPE XnVideoStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pmt);
	XN_METHOD_CHECK_POINTER(pSCC);

	xnDumpFileWriteString(m_Dump, "\tCalling %s for %d\n", __FUNCTION__, iIndex);

	CMediaType mediaType;
	VIDEO_STREAM_CONFIG_CAPS* pvscc = (VIDEO_STREAM_CONFIG_CAPS*)pSCC;
	HRESULT hr = GetStreamCapability(iIndex, mediaType, *pvscc);
	if (FAILED(hr)) XN_METHOD_RETURN(hr);

	xnDumpFileWriteString(m_Dump, "\tReturning %dx%d@%d using %s\n", m_aSupportedModes[iIndex].OutputMode.nXRes, m_aSupportedModes[iIndex].OutputMode.nYRes, m_aSupportedModes[iIndex].OutputMode.nFPS, xnPixelFormatToString(m_aSupportedModes[iIndex].Format));

	*pmt = CreateMediaType(&mediaType);
	XN_METHOD_RETURN(S_OK);
}
Example 9
XnStatus XnSensor::InitReading()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// open data endpoints
	nRetVal = m_SensorIO.OpenDataEndPoints((XnSensorUsbInterface)m_Interface.GetValue(), *m_Firmware.GetInfo());
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = m_Interface.UnsafeUpdateValue(m_SensorIO.GetCurrentInterface());
	XN_IS_STATUS_OK(nRetVal);

	// take frequency information
	XnFrequencyInformation FrequencyInformation;

	nRetVal = XnHostProtocolAlgorithmParams(&m_DevicePrivateData, XN_HOST_PROTOCOL_ALGORITHM_FREQUENCY, &FrequencyInformation, sizeof(XnFrequencyInformation), (XnResolutions)0, 0);
	if (nRetVal != XN_STATUS_OK)
		return nRetVal;

	m_DevicePrivateData.fDeviceFrequency = XN_PREPARE_VAR_FLOAT_IN_BUFFER(FrequencyInformation.fDeviceFrequency);

	// Init Dumps
	m_DevicePrivateData.BandwidthDump = xnDumpFileOpen(XN_DUMP_BANDWIDTH, "Bandwidth.csv");
	xnDumpFileWriteString(m_DevicePrivateData.BandwidthDump, "Timestamp,Frame Type,Frame ID,Size\n");
	m_DevicePrivateData.TimestampsDump = xnDumpFileOpen(XN_DUMP_TIMESTAMPS, "Timestamps.csv");
	xnDumpFileWriteString(m_DevicePrivateData.TimestampsDump, "Host Time (us),Stream,Device TS,Time (ms),Comments\n");
	m_DevicePrivateData.MiniPacketsDump = xnDumpFileOpen(XN_DUMP_MINI_PACKETS, "MiniPackets.csv");
	xnDumpFileWriteString(m_DevicePrivateData.MiniPacketsDump, "HostTS,Type,ID,Size,Timestamp\n");

	m_DevicePrivateData.nGlobalReferenceTS = 0;
	nRetVal = xnOSCreateCriticalSection(&m_DevicePrivateData.hEndPointsCS);
	XN_IS_STATUS_OK(nRetVal);

	// NOTE: when we go up, some streams might be open, and so we'll receive lots of garbage.
	// wait till streams are turned off, and then start reading.
//	pDevicePrivateData->bIgnoreDataPackets = TRUE;

	// open input threads
	nRetVal = XnDeviceSensorOpenInputThreads(GetDevicePrivateData(), (XnBool)m_ReadFromEP1.GetValue(), (XnBool)m_ReadFromEP2.GetValue(), (XnBool)m_ReadFromEP3.GetValue());
	XN_IS_STATUS_OK(nRetVal);


	return XN_STATUS_OK;
}
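
The dump files opened here are the ones written elsewhere in these examples: Example 3 writes the MiniPackets records and Examples 4 and 10 write the Bandwidth records, each matching the CSV header row laid down at open time. A MiniPackets.csv excerpt would be shaped like this (values are illustrative, not taken from a real run):

HostTS,Type,ID,Size,Timestamp
123456789,0x51,0x1a2b,0x5a0,98765
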
Example 10
void XnFrameStreamProcessor::OnEndOfFrame(const XnSensorProtocolResponseHeader* pHeader)
{
	// write dump
	XnBuffer* pCurWriteBuffer = m_pTripleBuffer->GetWriteBuffer();
	xnDumpFileWriteBuffer(m_InternalDump, pCurWriteBuffer->GetData(), pCurWriteBuffer->GetSize());
	xnDumpFileClose(m_InternalDump);
	xnDumpFileClose(m_InDump);

	if (!m_bFrameCorrupted)
	{
		// mark the buffer as stable
		XnUInt64 nTimestamp;
		if (m_pDevicePrivateData->pSensor->ShouldUseHostTimestamps())
		{
			// use the host timestamp of the first packet
			nTimestamp = m_nFirstPacketTimestamp;
		}
		else
		{
			// use timestamp in last packet
			nTimestamp = CreateTimestampFromDevice(pHeader->nTimeStamp);
		}

		OniFrame* pFrame = m_pTripleBuffer->GetWriteFrame();
		pFrame->timestamp = nTimestamp;
		
		XnUInt32 nFrameID;
		m_pTripleBuffer->MarkWriteBufferAsStable(&nFrameID);

		// let inheriting classes do their stuff
		OnFrameReady(nFrameID, nTimestamp);
	}
	else
	{
		// restart
		m_pTripleBuffer->GetWriteBuffer()->Reset();
	}

	// log bandwidth
	XnUInt64 nSysTime;
	xnOSGetTimeStamp(&nSysTime);
	xnDumpFileWriteString(m_pDevicePrivateData->BandwidthDump, "%llu,%s,%d,%d\n", 
		nSysTime, m_csName, GetCurrentFrameID(), m_nBytesReceived);

	// re-init dumps
	m_InDump = xnDumpFileOpen(m_csInDumpMask, "%s_%d.raw", m_csInDumpMask, GetCurrentFrameID());
	m_InternalDump = xnDumpFileOpen(m_csInternalDumpMask, "%s_%d.raw", m_csInternalDumpMask, GetCurrentFrameID());
	m_nBytesReceived = 0;
}
Example 11
XnBool XnSensor::HasSynchedFrameArrived(const XnChar* strDepthStream, const XnChar* strImageStream)
{
	// find both streams
	XnDeviceStream* pDepth;
	XnDeviceStream* pImage;

	if (XN_STATUS_OK != FindStream(strDepthStream, &pDepth))
		return FALSE;

	if (XN_STATUS_OK != FindStream(strImageStream, &pImage))
		return FALSE;

	XnUInt32 nThreshold = XN_SENSOR_FRAME_SYNC_MAX_DIFF;
	if (IsHighResTimestamps())
		nThreshold *= 1000;

	// wait for both to advance, and time difference to be less than threshold
	XnInt32 nTimestampDiff = XnInt32(pDepth->GetLastTimestamp() - pImage->GetLastTimestamp());

	XnBool bConditionMet = (
		pDepth->IsNewDataAvailable() &&
		pImage->IsNewDataAvailable() &&
		(XnUInt32)abs(nTimestampDiff) <= nThreshold
		);

	if (xnLogIsDumpMaskEnabled(XN_DUMP_FRAME_SYNC))
	{
		XnUInt64 nNow;
		xnOSGetHighResTimeStamp(&nNow);
		xnDumpFileWriteString(m_FrameSyncDump, "%llu,%u,%llu,%u,%llu,%s\n",
			nNow,
			pDepth->IsNewDataAvailable(),
			pDepth->GetLastTimestamp(),
			pImage->IsNewDataAvailable(),
			pImage->GetLastTimestamp(),
			bConditionMet ? "OK" : "Waiting");
	}

	return bConditionMet;
}
Example 12
XN_C_API void xnOSLogMemFree(const void* pMemBlock)
{
	if (pMemBlock == NULL)
		return;

	XnMemBlockDataNode* pPrev = NULL;

	XnAutoCSLocker lock(g_hCS);
	XnMemBlockDataNode* pNode = g_allocatedMemory.pFirst;
	while (pNode != NULL)
	{
		if (pNode->Data.pMemBlock == pMemBlock)
		{
			// found. Remove it from the list
			if (pPrev == NULL) // no previous
				g_allocatedMemory.pFirst = pNode->pNext;
			else
				pPrev->pNext = pNode->pNext;

			// if it was last, update last
			if (g_allocatedMemory.pLast == pNode)
				g_allocatedMemory.pLast = pPrev;

			xnDumpFileWriteString(g_dump, "Free,0x%x\n", pMemBlock);

			// deallocate memory
			xnOSFree(pNode);

			return;
		}

		// move to next
		pPrev = pNode;
		pNode = pNode->pNext;
	}

	// if we got here then we're trying to free a non-allocated memory
	XN_ASSERT(FALSE);
}
Example 13
XnStatus XnSensor::InitImpl(const XnDeviceConfig *pDeviceConfig)
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	xnLogVerbose(XN_MASK_DEVICE_SENSOR, "Initializing device sensor...");


	// Frame Sync
	XnCallbackHandle hCallbackDummy;
	nRetVal = m_FrameSync.OnChangeEvent().Register(FrameSyncPropertyChangedCallback, this, hCallbackDummy);
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = GetFirmware()->GetParams()->m_Stream0Mode.OnChangeEvent().Register(FrameSyncPropertyChangedCallback, this, hCallbackDummy);
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = GetFirmware()->GetParams()->m_Stream1Mode.OnChangeEvent().Register(FrameSyncPropertyChangedCallback, this, hCallbackDummy);
	XN_IS_STATUS_OK(nRetVal);

	// other stuff
	m_FrameSyncDump = xnDumpFileOpen(XN_DUMP_FRAME_SYNC, "FrameSync.csv");
	xnDumpFileWriteString(m_FrameSyncDump, "HostTime(us),DepthNewData,DepthTimestamp(ms),ImageNewData,ImageTimestamp(ms),Diff(ms),Action\n");

	nRetVal = XnDeviceBase::InitImpl(pDeviceConfig);
	XN_IS_STATUS_OK(nRetVal);

	// now that everything is configured, open the sensor
	nRetVal = InitSensor(pDeviceConfig);
	if (nRetVal != XN_STATUS_OK)
	{
		Destroy();
		return (nRetVal);
	}

	xnLogInfo(XN_MASK_DEVICE_SENSOR, "Device sensor initialized");

	return (XN_STATUS_OK);
}
Example 14
XN_C_API void* xnOSLogMemAlloc(void* pMemBlock, XnAllocationType nAllocType, XnUInt32 nBytes, const XnChar* csFunction, const XnChar* csFile, XnUInt32 nLine, const XnChar* csAdditional)
{
	static XnBool bFirstTime = TRUE;
	static XnBool bReentrent = FALSE;

	if (bFirstTime)
	{
		bFirstTime = FALSE;
		printf("************************************************************\n");
		printf("**  WARNING: Memory Profiling is on!                      **\n");
		printf("************************************************************\n");

		bReentrent = TRUE;
		xnOSCreateCriticalSection(&g_hCS);

#ifdef XN_MEMORY_PROFILING_DUMP
		xnDumpSetMaskState("MemProf", TRUE);
#endif
		g_dump = xnDumpFileOpen("MemProf", "MemProfiling.log");
		xnDumpFileWriteString(g_dump, "Entry,Address,AllocType,Bytes,Function,File,Line,AdditionalInfo\n");
		bReentrent = FALSE;
	}

	// ignore stuff that is being allocated during "first time"
	if (bReentrent)
	{
		return pMemBlock;
	}

	XnMemBlockDataNode* pNode;
	pNode = (XnMemBlockDataNode*)xnOSMalloc(sizeof(XnMemBlockDataNode));
	pNode->Data.pMemBlock = pMemBlock;
	pNode->Data.nAllocType = nAllocType;
	pNode->Data.nBytes = nBytes;
	pNode->Data.csFunction = csFunction;
	pNode->Data.csFile = csFile;
	pNode->Data.nLine = nLine;
	pNode->Data.csAdditional = csAdditional;
	pNode->Data.nFrames = XN_MEM_PROF_MAX_FRAMES;
	xnDumpFileWriteString(g_dump, "Alloc,0x%x,%s,%u,%s,%s,%u,%s\n", pMemBlock, XnGetAllocTypeString(nAllocType), nBytes, csFunction, csFile, nLine, csAdditional);

	// try to get call stack (skip 2 frames - this one and the alloc func)
	XnChar* pstrFrames[XN_MEM_PROF_MAX_FRAMES];
	for (XnUInt32 i = 0; i < XN_MEM_PROF_MAX_FRAMES; ++i)
	{
		pstrFrames[i] = pNode->Data.aFrames[i];
	}
	if (XN_STATUS_OK != xnOSGetCurrentCallStack(2, pstrFrames, XN_MEM_PROF_MAX_FRAME_LEN, &pNode->Data.nFrames))
	{
		pNode->Data.nFrames = 0;
	}

	pNode->pNext = NULL;

	XnAutoCSLocker lock(g_hCS);
	if (g_allocatedMemory.pLast == NULL)
	{
		g_allocatedMemory.pFirst = g_allocatedMemory.pLast = pNode;
	}
	else
	{
		g_allocatedMemory.pLast->pNext = pNode;
		g_allocatedMemory.pLast = pNode;
	}

	return pMemBlock;
}
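
How these two hooks are driven is not shown in the examples; presumably OpenNI's allocation macros wrap them. A hypothetical pairing, assuming XN_ALLOCATION_MALLOC is one of the XnAllocationType values:

// Hypothetical usage sketch: record an allocation in the tracking list,
// then remove it again when the block is released.
void* pBlock = xnOSLogMemAlloc(xnOSMalloc(100), XN_ALLOCATION_MALLOC, 100,
	__FUNCTION__, __FILE__, __LINE__, NULL);

// ... use pBlock ...

xnOSLogMemFree(pBlock);
xnOSFree(pBlock);
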
Example 15
HRESULT XnVideoStream::FillBuffer(IMediaSample *pms)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pms);

	if (!m_imageGen.IsGenerating())
	{
		XN_METHOD_RETURN(E_UNEXPECTED);
	}

	VIDEOINFOHEADER* videoInfo = (VIDEOINFOHEADER*)m_mt.Format();
	bool bUpsideDown = videoInfo->bmiHeader.biHeight > 0;

	if (m_bFlipVertically)
	{
		bUpsideDown = !bUpsideDown;
	}

	BYTE *pData;
	long lDataLen;

	pms->GetPointer(&pData);
	lDataLen = pms->GetSize();

	{
		CAutoLock cAutoLock(m_pFilter->pStateLock());

		XnStatus nRetVal = XN_STATUS_OK;

		// ignore timeouts
		for(;;)
		{
			nRetVal = m_imageGen.WaitAndUpdateData();
			if (nRetVal != XN_STATUS_WAIT_DATA_TIMEOUT)
			{
				break;
			}
			else
			{
				xnDumpFileWriteString(m_Dump, "\tTimeout during FillBuffer\n");
			}
		}

		if (nRetVal != XN_STATUS_OK) XN_METHOD_RETURN(E_UNEXPECTED);
	}

	xn::ImageMetaData imageMD;
	m_imageGen.GetMetaData(imageMD);

	if (imageMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24)
	{
		const XnRGB24Pixel* pImage = imageMD.RGB24Data();
		if (bUpsideDown)
		{
			// convert RGB to BGR while walking the source in reverse (flips the image both vertically and horizontally)
			pImage += imageMD.XRes() * imageMD.YRes() - 1;

			for (XnUInt32 y = 0; y < imageMD.YRes(); ++y)
			{
				for (XnUInt32 x = 0; x < imageMD.XRes(); ++x, pImage -= 1, pData += 3)
				{
					// translate RGB to BGR
					pData[0] = pImage->nBlue;
					pData[1] = pImage->nGreen;
					pData[2] = pImage->nRed;
				}
			}
		}
		else
		{
			for (XnUInt32 y = 0; y < imageMD.YRes(); ++y)
			{
				for (XnUInt32 x = 0; x < imageMD.XRes(); ++x, pImage += 1, pData += 3)
				{
					// translate RGB to BGR
					pData[0] = pImage->nBlue;
					pData[1] = pImage->nGreen;
					pData[2] = pImage->nRed;
				}
			}
		}
	}
	else if (imageMD.PixelFormat() == XN_PIXEL_FORMAT_MJPEG)
	{
		memcpy(pData, imageMD.Data(), imageMD.DataSize());
		pms->SetActualDataLength(imageMD.DataSize());
	}
	else
	{
		xnLogError(XN_MASK_FILTER, "Unsupported pixel format!");
		XN_METHOD_RETURN(E_UNEXPECTED);
	}

	// The current time is the sample's start
//		CRefTime rtStart = m_rtSampleTime;

	// Increment to find the finish time
//		m_rtSampleTime += (LONG)m_iRepeatTime;

//		pms->SetTime((REFERENCE_TIME *) &rtStart,(REFERENCE_TIME *) &m_rtSampleTime);

	pms->SetSyncPoint(TRUE);

	xnFPSMarkFrame(&m_FPS);

	XN_METHOD_RETURN(NOERROR);
}
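
For comparison with the RGB24 branch above, a conventional bottom-up copy reverses row order only while swapping RGB to BGR. A standalone sketch, assuming tightly packed 24-bit pixels:

// Copy a packed 24-bit RGB image into a BGR buffer, optionally writing
// rows bottom-up (as a positive-height DIB expects).
static void copyRGB24ToBGR(const unsigned char* pSrc, unsigned char* pDst,
	unsigned int nXRes, unsigned int nYRes, bool bBottomUp)
{
	const unsigned int nRowBytes = nXRes * 3;

	for (unsigned int y = 0; y < nYRes; ++y)
	{
		const unsigned char* pRow = pSrc + y * nRowBytes;
		unsigned char* pOut = pDst + (bBottomUp ? (nYRes - 1 - y) : y) * nRowBytes;

		for (unsigned int x = 0; x < nXRes; ++x, pRow += 3, pOut += 3)
		{
			pOut[0] = pRow[2]; // B
			pOut[1] = pRow[1]; // G
			pOut[2] = pRow[0]; // R
		}
	}
}
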
Example 16
STDMETHODIMP XnVideoSource::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
    XN_METHOD_START;

    XN_METHOD_CHECK_POINTER(ppv);

    HRESULT hr = S_OK;

    if (riid == IID_ISpecifyPropertyPages)
    {
        xnDumpFileWriteString(m_Dump, "Filter query interface to ISpecifyPropertyPages\n");
        hr = GetInterface(static_cast<ISpecifyPropertyPages*>(this), ppv);
    }
    else if (riid == IID_IAMVideoControl)
    {
        xnDumpFileWriteString(m_Dump, "Filter query interface to IAMVideoControl\n");
        hr = GetInterface(static_cast<IAMVideoControl*>(this), ppv);
    }
    else if (riid == IID_IAMVideoProcAmp)
    {
        xnDumpFileWriteString(m_Dump, "Filter query interface to IAMVideoProcAmp\n");
        if (m_pVideoProcAmp == NULL)
        {
            m_pVideoProcAmp = new VideoProcAmp(this);
            if (m_pVideoProcAmp == NULL)
            {
                XN_METHOD_RETURN(E_OUTOFMEMORY);
            }
        }

        hr = GetInterface(static_cast<IAMVideoProcAmp*>(m_pVideoProcAmp), ppv);
    }
    else if (riid == IID_IAMCameraControl)
    {
        xnDumpFileWriteString(m_Dump, "Filter query interface to IAMCameraControl\n");
        if (m_pCameraControl == NULL)
        {
            m_pCameraControl = new CameraControl(this);
            if (m_pCameraControl == NULL)
            {
                XN_METHOD_RETURN(E_OUTOFMEMORY);
            }
        }

        hr = GetInterface(static_cast<IAMCameraControl*>(m_pCameraControl), ppv);
    }
    else if (riid == IID_IAdditionalOpenNIControls)
    {
        xnDumpFileWriteString(m_Dump, "Filter query interface to IAdditionalControls\n");
        hr = GetInterface(static_cast<IAdditionalControls*>(this), ppv);
    }
    else
    {
        OLECHAR strGuid[40];
        StringFromGUID2(riid, strGuid, 40);
        xnDumpFileWriteString(m_Dump, "Filter query interface to %S\n", strGuid);

        hr = CSource::NonDelegatingQueryInterface(riid, ppv);
    }

    XN_METHOD_RETURN(hr);
}
Example 17
XnUInt64 XnDataProcessor::GetTimeStamp(XnUInt32 nDeviceTimeStamp)
{
	const XnUInt64 nWrapPoint = ((XnUInt64)XN_MAX_UINT32) + 1;
	XnUInt64 nResultInTicks;

	XnUInt64 nNow;
	xnOSGetHighResTimeStamp(&nNow);

	const XnUInt32 nDumpCommentMaxLength = 200;
	XnChar csDumpComment[nDumpCommentMaxLength] = "";

	XnBool bCheckSanity = TRUE;

	// we register the first TS calculated as time-zero. Every stream's TS data will be 
	// synchronized with it
	if (m_pDevicePrivateData->nGlobalReferenceTS == 0)
	{
		xnOSEnterCriticalSection(&m_pDevicePrivateData->hEndPointsCS);
		if (m_pDevicePrivateData->nGlobalReferenceTS == 0)
		{
			m_pDevicePrivateData->nGlobalReferenceTS = nDeviceTimeStamp;
			m_pDevicePrivateData->nGlobalReferenceOSTime = nNow;
		}
		xnOSLeaveCriticalSection(&m_pDevicePrivateData->hEndPointsCS);
	}

	if (m_TimeStampData.bFirst)
	{
		/* 
		This is a bit tricky, as we need to synchronize the first timestamp of different streams. 
		We somehow need to translate 32-bit tick counts to 64-bit timestamps. The device timestamps
		wrap-around every ~71.5 seconds (for PS1080 @ 60 MHz).
		Let's assume the first packet of the first stream got timestamp X. Now we get the first packet of another
		stream with a timestamp Y.
		We need to figure out what is the relation between X and Y.
		We do that by analyzing the following scenarios:
		1. Y is after X, in the same period (no wraparound yet).
		2. Y is after X, in a different period (one or more wraparounds occurred).
		3. Y is before X, in the same period (might happen due to race condition).
		4. Y is before X, in a different period (this can happen if X is really small, and Y is almost at wraparound).

		The following code tries to handle all those cases. It uses an OS timer to estimate how
		many wraparounds occurred.
		*/

		// estimate the number of wraparounds that occurred, using OS time
		XnUInt64 nOSTime = nNow - m_pDevicePrivateData->nGlobalReferenceOSTime;

		// calculate wraparound length
		XnDouble fWrapAroundInMicroseconds = nWrapPoint / (XnDouble)m_pDevicePrivateData->fDeviceFrequency;

		// perform a rough estimation
		XnInt32 nWraps = (XnInt32)(nOSTime / fWrapAroundInMicroseconds);

		// now fix the estimation by clipping TS to the correct wraparounds
		XnInt64 nEstimatedTicks = 
			nWraps * nWrapPoint + // wraps time
			nDeviceTimeStamp - m_pDevicePrivateData->nGlobalReferenceTS;

		XnInt64 nEstimatedTime = (XnInt64)(nEstimatedTicks / (XnDouble)m_pDevicePrivateData->fDeviceFrequency);

		if (nEstimatedTime < nOSTime - 0.5 * fWrapAroundInMicroseconds)
			nWraps++;
		else if (nEstimatedTime > nOSTime + 0.5 * fWrapAroundInMicroseconds)
			nWraps--;

		// handle the two special cases - 3 & 4 in which we get a timestamp which is
		// *before* global TS (meaning before time 0)
		if (nWraps < 0 || // case 4
			(nWraps == 0 && nDeviceTimeStamp < m_pDevicePrivateData->nGlobalReferenceTS)) // case 3
		{
			nDeviceTimeStamp = m_pDevicePrivateData->nGlobalReferenceTS;
			nWraps = 0;
		}

		m_TimeStampData.nReferenceTS = m_pDevicePrivateData->nGlobalReferenceTS;
		m_TimeStampData.nTotalTicksAtReferenceTS = nWrapPoint * nWraps;
		m_TimeStampData.nLastDeviceTS = 0;
		m_TimeStampData.bFirst = FALSE;
		nResultInTicks = 0;
		bCheckSanity = FALSE; // no need.
		sprintf(csDumpComment, "Init. Total Ticks in Ref TS: %llu", m_TimeStampData.nTotalTicksAtReferenceTS);
	}

	if (nDeviceTimeStamp > m_TimeStampData.nLastDeviceTS) // this is the normal case
	{
		nResultInTicks = m_TimeStampData.nTotalTicksAtReferenceTS + nDeviceTimeStamp - m_TimeStampData.nReferenceTS;
	}
	else // wrap around occurred
	{
		// add the passed time to the reference time
		m_TimeStampData.nTotalTicksAtReferenceTS += (nWrapPoint + nDeviceTimeStamp - m_TimeStampData.nReferenceTS);
		// mark reference timestamp
		m_TimeStampData.nReferenceTS = nDeviceTimeStamp;

		sprintf(csDumpComment, "Wrap around. Refernce TS: %u / TotalTicksAtReference: %llu", m_TimeStampData.nReferenceTS, m_TimeStampData.nTotalTicksAtReferenceTS);

		nResultInTicks = m_TimeStampData.nTotalTicksAtReferenceTS;
	}

	m_TimeStampData.nLastDeviceTS = nDeviceTimeStamp;

	// calculate result in microseconds
	// NOTE: the Intel compiler optimizes this too aggressively and we lose up to 5 milliseconds. We perform
	// the entire calculation in XnDouble as a workaround
	XnDouble dResultTimeMicroSeconds = (XnDouble)nResultInTicks / (XnDouble)m_pDevicePrivateData->fDeviceFrequency;
	XnUInt64 nResultTimeMilliSeconds = (XnUInt64)(dResultTimeMicroSeconds / 1000.0);

	XnBool bIsSane = TRUE;

	// perform sanity check
	if (bCheckSanity && (nResultTimeMilliSeconds > (m_TimeStampData.nLastResultTime + XN_SENSOR_TIMESTAMP_SANITY_DIFF*1000)))
	{
		bIsSane = FALSE;
		xnOSStrAppend(csDumpComment, ",Didn't pass sanity. Will try to re-sync.", nDumpCommentMaxLength);
	}

	// calc result
	XnUInt64 nResult = (m_pDevicePrivateData->pSensor->IsHighResTimestamps() ? (XnUInt64)dResultTimeMicroSeconds : nResultTimeMilliSeconds);

	// dump it
	xnDumpFileWriteString(m_pDevicePrivateData->TimestampsDump, "%llu,%s,%u,%llu,%s\n", nNow, m_TimeStampData.csStreamName, nDeviceTimeStamp, nResult, csDumpComment);

	if (bIsSane)
	{
		m_TimeStampData.nLastResultTime = nResultTimeMilliSeconds;
		return (nResult);
	}
	else
	{
		// sanity failed. We lost sync. restart
		m_TimeStampData.bFirst = TRUE;
		return GetTimeStamp(nDeviceTimeStamp);
	}
}
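
To put numbers on the wraparound logic: fDeviceFrequency is in ticks per microsecond, so at the PS1080's 60 MHz a 32-bit tick counter wraps every 2^32 / 60 ≈ 71,582,788 us, matching the ~71.5 seconds mentioned in the comment. A minimal worked sketch of the rough wrap estimate:

#include <cstdint>
#include <cstdio>

int main()
{
	const uint64_t nWrapPoint = (uint64_t)UINT32_MAX + 1; // 2^32 ticks
	const double fDeviceFrequency = 60.0;                 // ticks per microsecond (60 MHz)

	// one wrap period: 4294967296 / 60 ~= 71582788 us (~71.58 seconds)
	const double fWrapAroundInMicroseconds = nWrapPoint / fDeviceFrequency;

	// rough estimate after 300 seconds of host time: 300e6 / 71582788 -> 4 wraps
	const double nOSTime = 300e6; // microseconds since the global reference
	const int nWraps = (int)(nOSTime / fWrapAroundInMicroseconds);

	printf("wrap period: %.0f us, wraps after 300 s: %d\n",
		fWrapAroundInMicroseconds, nWraps);
	return 0;
}
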
Example 18
// --------------------------------
// Code
// --------------------------------
void audioPlay()
{
	if (g_AudioData.hWaveOut == NULL) // not initialized
		return;

	const AudioMetaData* pAudioMD = getAudioMetaData();
	if (pAudioMD == NULL || pAudioMD->DataSize() == 0 || !pAudioMD->IsDataNew())
		return;

	if (g_AudioData.bFlush)
	{
		printf("Audio is falling behind. Flushing all queue.\n");
		xnDumpFileWriteString(g_AudioData.SyncDump, "Flushing queue...\n");

		// mark not to check all dropped headers
		g_AudioData.nFirstToCheck = g_AudioData.nAudioNextBuffer;
		// flush all queued headers
		waveOutReset(g_AudioData.hWaveOut);

		g_AudioData.bFlush = false;
		return;
	}

	int nBufferSize = pAudioMD->DataSize();

	WAVEHDR* pHeader = &g_AudioData.pAudioBuffers[g_AudioData.nAudioNextBuffer];
	if ((pHeader->dwFlags & WHDR_DONE) == 0)
	{
		printf("No audio buffer is available!. Audio buffer will be lost!\n");
		return;
	}

	// first unprepare this header
	MMRESULT mmRes = waveOutUnprepareHeader(g_AudioData.hWaveOut, pHeader, sizeof(WAVEHDR));
	if (mmRes != MMSYSERR_NOERROR)
	{
		CHAR msg[250];
		waveOutGetErrorText(mmRes, msg, 250);
		printf("Failed unpreparing header: %s\n", msg);
	}

	int nMaxPlayedAudio = (int)(pAudioMD->SampleRate() / 1000.0 * pAudioMD->NumberOfChannels() * 2 * AUDIO_LATENCY_THRESHOLD);
	if (nBufferSize > nMaxPlayedAudio)
	{
		printf("Dropping %d bytes of audio to keep synch.\n", nBufferSize - nMaxPlayedAudio);
		nBufferSize = nMaxPlayedAudio;
	}

	const XnUInt8* pData = pAudioMD->Data();

	if (nBufferSize > g_AudioData.nBufferSize)
	{
		printf("Dropping %d bytes of audio to match buffer size.\n", nBufferSize - g_AudioData.nBufferSize);
		pData += (nBufferSize - g_AudioData.nBufferSize);
		nBufferSize = g_AudioData.nBufferSize;
	}

	pHeader->dwFlags = 0;
	xnOSMemCopy(pHeader->lpData, pData, nBufferSize);
	pHeader->dwBufferLength = nBufferSize;

	// prepare header
	mmRes = waveOutPrepareHeader(g_AudioData.hWaveOut, pHeader, sizeof(WAVEHDR));
	if (mmRes != MMSYSERR_NOERROR)
	{
		CHAR msg[250];
		waveOutGetErrorText(mmRes, msg, 250);
		printf("Unable to prepare header: %s\n", msg);
		return;
	}

	// queue header
	mmRes = waveOutWrite(g_AudioData.hWaveOut, pHeader, sizeof(WAVEHDR));
	if (mmRes != MMSYSERR_NOERROR)
	{
		CHAR msg[250];
		waveOutGetErrorText(mmRes, msg, 250);
		printf("Unable to queue header: %s\n", msg);
		return;
	}

	// place end-time as a timestamp
	g_AudioData.pAudioTimestamps[g_AudioData.nAudioNextBuffer] = (XnUInt64)(pAudioMD->Timestamp() + nBufferSize / (pAudioMD->BitsPerSample() / 8.0) / pAudioMD->NumberOfChannels() / (pAudioMD->SampleRate() / 1e6));

	xnDumpFileWriteString(g_AudioData.SyncDump, "Queued index %d with timestamp %llu (%u bytes, %f ms, end timestamp: %llu)\n", g_AudioData.nAudioNextBuffer, pAudioMD->Timestamp(), nBufferSize, nBufferSize / 2.0 / pAudioMD->NumberOfChannels() / (pAudioMD->SampleRate() / 1e3), g_AudioData.pAudioTimestamps[g_AudioData.nAudioNextBuffer]);

	g_AudioData.nAudioNextBuffer = (g_AudioData.nAudioNextBuffer + 1) % NUMBER_OF_AUDIO_BUFFERS;
}
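
The end timestamp stored above is the packet timestamp plus the buffer's playback duration in microseconds: bytes divided by bytes-per-sample and by the channel count gives frames, and dividing frames by the sample rate in frames-per-microsecond gives the duration. A worked sketch with assumed values (16-bit stereo at 48 kHz):

#include <cstdint>
#include <cstdio>

int main()
{
	// assumed format: 16-bit stereo PCM at 48 kHz (illustrative values only)
	const double fBitsPerSample = 16.0, fChannels = 2.0, fSampleRate = 48000.0;
	const uint64_t nTimestamp = 1000000; // packet start time, microseconds
	const int nBufferSize = 19200;       // bytes queued

	// 19200 / 2 / 2 = 4800 frames; 4800 / 0.048 frames-per-us = 100000 us
	double fDurationUs = nBufferSize / (fBitsPerSample / 8.0) / fChannels / (fSampleRate / 1e6);
	uint64_t nEndTimestamp = nTimestamp + (uint64_t)fDurationUs;

	printf("duration: %.0f us, end timestamp: %llu\n",
		fDurationUs, (unsigned long long)nEndTimestamp);
	return 0;
}
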