Exemplo n.º 1
0
XnStatus XnPixelStream::FixCropping()
{
    // Work on a local copy of the current cropping configuration.
    XnCropping fixedCropping = *GetCropping();

    // The window is invalid if an offset or the far edge exceeds the stream resolution.
    XnBool bOutOfBounds =
        fixedCropping.nXOffset > GetXRes() ||
        fixedCropping.nYOffset > GetYRes() ||
        XnUInt32(fixedCropping.nXOffset + fixedCropping.nXSize) > GetXRes() ||
        XnUInt32(fixedCropping.nYOffset + fixedCropping.nYSize) > GetYRes();

    if (bOutOfBounds)
    {
        // Cropping no longer fits the current resolution - turn it off.
        fixedCropping.bEnabled = FALSE;
        XnStatus nRetVal = SetCropping(&fixedCropping);
        XN_IS_STATUS_OK(nRetVal);
    }

    return (XN_STATUS_OK);
}
Exemplo n.º 2
0
XnStatus XnPixelStream::ValidateCropping(const OniCropping* pCropping)
{
	// Validates a cropping request against the current stream resolution.
	// Returns XN_STATUS_DEVICE_BAD_PARAM if the window falls outside the
	// frame, has a negative origin, or has a zero area; XN_STATUS_OK otherwise.
	if (pCropping->enabled)
	{
		// Reject negative origins explicitly: originX/originY are signed ints,
		// and a negative value could otherwise slip past the unsigned edge
		// checks below (e.g. originX = -5, width = 10 gives an in-range sum).
		if (pCropping->originX < 0 ||
			pCropping->originY < 0 ||
			pCropping->originX > (int)GetXRes() ||
			XnUInt32(pCropping->originX + pCropping->width) > GetXRes() ||
			pCropping->originY > (int)GetYRes() ||
			XnUInt32(pCropping->originY + pCropping->height) > GetYRes())
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DDK, "Cropping values do not match stream resolution!");
		}

		// An enabled cropping window must have a non-zero area.
		if (pCropping->width == 0 || pCropping->height == 0)
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DDK, "Cannot set a cropping window of zero size!");
		}
	}
	
	return (XN_STATUS_OK);
}
Exemplo n.º 3
0
XnStatus XnPixelStream::ValidateCropping(const XnCropping* pCropping)
{
    // Validates a cropping request against the current stream resolution.
    // Returns XN_STATUS_DEVICE_BAD_PARAM if the window falls outside the
    // frame or has a zero area; XN_STATUS_OK otherwise.
    // (Removed an unused nRetVal local.)
    if (pCropping->bEnabled)
    {
        // The window must lie entirely inside the frame; the casts keep the
        // offset+size comparisons unsigned.
        if (pCropping->nXOffset > GetXRes() ||
                XnUInt32(pCropping->nXOffset + pCropping->nXSize) > GetXRes() ||
                pCropping->nYOffset > GetYRes() ||
                XnUInt32(pCropping->nYOffset + pCropping->nYSize) > GetYRes())
        {
            XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DDK, "Cropping values do not match stream resolution!");
        }

        // An enabled cropping window must have a non-zero area.
        if (pCropping->nXSize == 0 || pCropping->nYSize == 0)
        {
            XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DDK, "Cannot set a cropping window of zero size!");
        }
    }

    return (XN_STATUS_OK);
}
Exemplo n.º 4
0
void XnDeviceFileReader::FrameDelay(XnUInt64 nTimestamp)
{
	// Frame delay disabled - nothing to do.
	if (m_FrameDelay.GetValue() != TRUE)
		return;

	// Low-res timestamps are scaled up by 1000 to match the high-res unit.
	if (!IsHighResTimestamps())
		nTimestamp *= 1000;

	// First frame: just record the reference pair and return.
	if (m_nReferenceTime == 0)
	{
		xnOSQueryTimer(m_FrameDelayTimer, &m_nReferenceTime);
		m_nReferenceTimestamp = nTimestamp;
		return;
	}

	XnUInt64 nCurrentTime;
	xnOSQueryTimer(m_FrameDelayTimer, &nCurrentTime);

	// Time elapsed inside the recorded stream, clamped to zero because
	// frames are not guaranteed to be timestamp-ordered.
	XnUInt64 nStreamElapsed = (nTimestamp < m_nReferenceTimestamp) ? 0 : (nTimestamp - m_nReferenceTimestamp);

	// Wall-clock time elapsed for the user.
	XnUInt64 nClockElapsed = nCurrentTime - m_nReferenceTime;

	// Advance the reference pair so frame delay keeps working across Pause / Resume.
	m_nReferenceTime = nCurrentTime;
	m_nReferenceTimestamp = nTimestamp;

	// If the stream is ahead of the wall clock, sleep off the difference.
	if (nClockElapsed < nStreamElapsed)
	{
		xnOSSleep(XnUInt32((nStreamElapsed - nClockElapsed) / 1000));

		// Use the post-sleep time as the new reference.
		xnOSQueryTimer(m_FrameDelayTimer, &m_nReferenceTime);
	}
}
Exemplo n.º 5
0
void PlayerDevice::SleepToTimestamp(XnUInt64 nTimeStamp)
{
	XnUInt64 nCurrentTime;
	xnOSGetHighResTimeStamp(&nCurrentTime);

	// Establish the time reference on the first call (under the critical section).
	XnBool bReferenceExisted = TRUE;
	m_cs.Lock();
	if (!m_bHasTimeReference /*&& (nTimeStamp <= m_nStartTimestamp)*/)
	{
		m_nStartTimestamp = nTimeStamp;
		m_nStartTime = nCurrentTime;

		m_bHasTimeReference = TRUE;
		bReferenceExisted = FALSE;
	}
	m_cs.Unlock();

	// No wait on the very first frame, or when playback speed is non-positive.
	if (!bReferenceExisted || !(m_dPlaybackSpeed > 0.0f))
		return;

	// Stream time elapsed since the reference frame.
	XnInt64 nTimestampDiff = nTimeStamp - m_nStartTimestamp;

	// In some recordings, frames are not ordered by timestamp. Make sure this
	// does not break the mechanism - only wait for forward progress.
	if (nTimestampDiff <= 0)
		return;

	XnInt64 nTimeDiff = nCurrentTime - m_nStartTime;

	// Wait if the wall clock has not yet caught up to the (speed-adjusted) stream time.
	XnInt64 nRequestedTimeDiff = (XnInt64)(nTimestampDiff / m_dPlaybackSpeed);
	if (nTimeDiff < nRequestedTimeDiff)
	{
		XnUInt32 nSleep = XnUInt32((nRequestedTimeDiff - nTimeDiff)/1000);
		nSleep = XN_MIN(nSleep, XN_PLAYBACK_SPEED_SANITY_SLEEP);
		xnOSSleep(nSleep);
	}

	// Update reference to current frame (this will handle cases in which
	// application stopped reading frames and continued after a while).
	m_nStartTimestamp = nTimeStamp;
	xnOSGetHighResTimeStamp(&m_nStartTime);
}
Exemplo n.º 6
0
XnStatus XnPixelStream::CropImpl(OniFrame* pFrame, const OniCropping* pCropping)
{
	// Compacts the cropped window to the start of the frame buffer, in place,
	// and updates pFrame->dataSize accordingly.
	// Assumes pCropping was already validated against the stream resolution.
	XnUChar* pPixelData = (XnUChar*)pFrame->data;
	XnUInt32 nCurDataSize = 0;

	// Loop invariants hoisted out of the per-line copy.
	const XnUInt32 nBytesPerPixel = GetBytesPerPixel();
	const XnUInt32 nLineStride = GetXRes() * nBytesPerPixel;
	const XnUInt32 nCroppedLineSize = pCropping->width * nBytesPerPixel;
	const XnUInt32 nXOffsetBytes = pCropping->originX * nBytesPerPixel;

	for (XnUInt32 y = pCropping->originY; y < XnUInt32(pCropping->originY + pCropping->height); ++y)
	{
		XnUChar* pOrigLine = &pPixelData[y * nLineStride];

		// Move the cropped part of this line to its packed position.
		xnOSMemCopy(pPixelData + nCurDataSize, pOrigLine + nXOffsetBytes, nCroppedLineSize);
		nCurDataSize += nCroppedLineSize;
	}

	// Report the packed data size.
	pFrame->dataSize = nCurDataSize;

	return XN_STATUS_OK;
}
int Util::CheckMissedFrame(const std::string& msg, XnUInt64& nTimestamp, XnUInt32* pFrames, XnUInt64* pLastTime, XnUInt32* pMissedFrames)
{
	// Counts a frame and detects dropped frames by comparing this frame's
	// timestamp to the previous one. Accumulates into *pFrames / *pMissedFrames
	// and stores nTimestamp into *pLastTime. Returns the number of frames
	// judged missed for this step (0 if none).
	// msg is now taken by const reference to avoid a string copy per frame.
	int ret = 0;

	++(*pFrames);
//		printf("Timestamp(User):%d\n", nTimestamp);
//	if ((*pLastTime != 0) && ((nTimestamp - *pLastTime) > 35000))
	if ((*pLastTime != 0) && (GetTimeDiff(nTimestamp, *pLastTime) > 35))
	{
		// 32000 is treated as the nominal inter-frame gap; a gap over the
		// 35-unit threshold means at least one frame was dropped in between.
		// NOTE(review): presumably timestamps are microseconds and GetTimeDiff
		// returns milliseconds - confirm against its definition.
		int missed = (int)(nTimestamp - *pLastTime) / 32000 - 1;
		printf("Missed %s: %llu -> %llu = %d > 35000 - %d frames\n",
			msg.c_str(), *pLastTime, nTimestamp, XnUInt32(nTimestamp - *pLastTime), missed);
		*pMissedFrames += missed;

		ret = missed;
	}
	*pLastTime = nTimestamp;

	return ret;
}
Exemplo n.º 8
0
XN_C_API XnStatus xnOSGetDirName(const XnChar* cpFilePath, XnChar* cpDirName, const XnUInt32 nBufferSize)
{
	// Copies the directory part of cpFilePath (everything up to the final
	// path component) into cpDirName. Returns XN_STATUS_ERROR if the path
	// cannot be resolved, XN_STATUS_OUTPUT_BUFFER_OVERFLOW if the result
	// does not fit in nBufferSize bytes.
	char strFullPath[XN_FILE_MAX_PATH];
	char* pFileName;
	// GetFullPathName returns 0 on failure and sets pFileName to point at the
	// final path component inside strFullPath.
	DWORD res = GetFullPathName(cpFilePath, XN_FILE_MAX_PATH, strFullPath, &pFileName);
	if (res == 0)
	{
		return XN_STATUS_ERROR;
	}

	// The directory part occupies (pFileName - strFullPath) characters plus a
	// NUL terminator, so it only fits when its length is strictly less than
	// nBufferSize. (Was '>', which let strcpy write one byte past the buffer
	// when the directory length equaled nBufferSize.)
	if (XnUInt32(pFileName - strFullPath) >= nBufferSize)
	{
		return XN_STATUS_OUTPUT_BUFFER_OVERFLOW;
	}

	// Truncate at the file-name component and copy the directory out.
	pFileName[0] = '\0';
	strcpy(cpDirName, strFullPath);

	return XN_STATUS_OK;
}
Exemplo n.º 9
0
XnStatus XnPixelStream::CropImpl(XnStreamData* pStreamOutput, const XnCropping* pCropping)
{
    // Compacts the cropped window to the start of the stream buffer, in place,
    // and updates pStreamOutput->nDataSize accordingly.
    // Assumes pCropping was already validated against the stream resolution.
    // (Removed an unused nRetVal local.)
    XnUChar* pPixelData = (XnUChar*)pStreamOutput->pData;
    XnUInt32 nCurDataSize = 0;

    // Loop invariants hoisted out of the per-line copy.
    const XnUInt32 nBytesPerPixel = GetBytesPerPixel();
    const XnUInt32 nLineStride = GetXRes() * nBytesPerPixel;
    const XnUInt32 nCroppedLineSize = pCropping->nXSize * nBytesPerPixel;
    const XnUInt32 nXOffsetBytes = pCropping->nXOffset * nBytesPerPixel;

    for (XnUInt32 y = pCropping->nYOffset; y < XnUInt32(pCropping->nYOffset + pCropping->nYSize); ++y)
    {
        XnUChar* pOrigLine = &pPixelData[y * nLineStride];

        // Move the cropped part of this line to its packed position.
        xnOSMemCopy(pPixelData + nCurDataSize, pOrigLine + nXOffsetBytes, nCroppedLineSize);
        nCurDataSize += nCroppedLineSize;
    }

    // Report the packed data size.
    pStreamOutput->nDataSize = nCurDataSize;

    return XN_STATUS_OK;
}
Exemplo n.º 10
0
// Delivers one recorded frame to the played node named strNodeName: first
// sleeps as needed to honor the configured playback speed, then pushes the
// timestamp, frame id and data into the node under a locked-changes section.
// Returns XN_STATUS_OK on success, or the first failing status otherwise.
XnStatus PlayerImpl::SetNodeNewData(const XnChar* strNodeName, XnUInt64 nTimeStamp, XnUInt32 nFrame, const void* pData, XnUInt32 nSize)
{
	XnStatus nRetVal = XN_STATUS_OK;

	XnUInt64 nNow;
	xnOSGetHighResTimeStamp(&nNow);

	// First frame: anchor the wall-clock / stream-timestamp reference pair.
	if (!m_bHasTimeReference)
	{
		m_nStartTimestamp = nTimeStamp;
		m_nStartTime = nNow;

		m_bHasTimeReference = TRUE;
	}

	// In fastest mode no pacing is done; otherwise throttle to playback speed.
	if (m_dPlaybackSpeed != XN_PLAYBACK_SPEED_FASTEST)
	{
		// check this data timestamp compared to when we started
		XnInt64 nTimestampDiff = nTimeStamp - m_nStartTimestamp;
		XnInt64 nTimeDiff = nNow - m_nStartTime;

		// check if we need to wait some time
		XnInt64 nRequestedTimeDiff = (XnInt64)(nTimestampDiff / m_dPlaybackSpeed);
		if (nTimeDiff < nRequestedTimeDiff)
		{
			xnOSSleep(XnUInt32((nRequestedTimeDiff - nTimeDiff)/1000));
		}

		// update reference to current frame (this will handle cases in which application
		// stopped reading frames and continued after a while)
		m_nStartTimestamp = nTimeStamp;
		xnOSGetHighResTimeStamp(&m_nStartTime);
	}

	// Look up the node this data belongs to.
	PlayedNodeInfo playedNode;
	nRetVal = m_playedNodes.Get(strNodeName, playedNode);
	XN_IS_STATUS_OK(nRetVal);

	// All property updates below must happen inside a locked-changes section;
	// every failure path explicitly ends the section before returning.
	nRetVal = xnLockedNodeStartChanges(playedNode.hNode, playedNode.hLock);
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = xnSetIntProperty(playedNode.hNode, XN_PROP_TIMESTAMP, nTimeStamp);
	if (nRetVal != XN_STATUS_OK)
	{
		xnLockedNodeEndChanges(playedNode.hNode, playedNode.hLock);
		return (nRetVal);
	}
	nRetVal = xnSetIntProperty(playedNode.hNode, XN_PROP_FRAME_ID, nFrame);
	if (nRetVal != XN_STATUS_OK)
	{
		xnLockedNodeEndChanges(playedNode.hNode, playedNode.hLock);
		return (nRetVal);
	}
	// The actual frame payload.
	nRetVal = xnSetGeneralProperty(playedNode.hNode, XN_PROP_NEWDATA, nSize, pData);
	if (nRetVal != XN_STATUS_OK)
	{
		xnLockedNodeEndChanges(playedNode.hNode, playedNode.hLock);
		return (nRetVal);
	}

	nRetVal = xnLockedNodeEndChanges(playedNode.hNode, playedNode.hLock);
	XN_IS_STATUS_OK(nRetVal);

	return XN_STATUS_OK;
}
Exemplo n.º 11
0
// The recorder
int main(int argc, char** argv)
{
	// OpenNi objects
	xn::Context context;
	xn::DepthGenerator depthGenerator;
	xn::ImageGenerator imageGenerator;

	// To count missed frames
	XnUInt64 nLastDepthTime = 0;
	XnUInt64 nLastImageTime = 0;
	XnUInt32 nMissedDepthFrames = 0;
	XnUInt32 nMissedImageFrames = 0;
	XnUInt32 nDepthFrames = 0;
	XnUInt32 nImageFrames = 0;

	RecConfiguration config;

	XnStatus nRetVal = XN_STATUS_OK;

	// Parse the command line arguments
	if (!ParseArgs(argc, argv, config))
	{
		printf("Parse error\n");
		return 1;
	}

	if (config.bVerbose)
	{
		// Turn on log
		xnLogInitSystem();
		xnLogSetConsoleOutput(TRUE);
		xnLogSetMaskMinSeverity(XN_LOG_MASK_ALL, XN_LOG_VERBOSE);
	}

	// Initialize OpenNI
	nRetVal = context.Init();
	CHECK_RC(nRetVal, "Init");

	nRetVal = ConfigureGenerators(config, context, depthGenerator, imageGenerator);
	CHECK_RC(nRetVal, "Config generators");

	nRetVal = context.StartGeneratingAll();
	CHECK_RC(nRetVal, "Generate all");

	// Create and initialize the cyclic buffer
	CyclicBuffer cyclicBuffer(context, depthGenerator, imageGenerator, config);
	cyclicBuffer.Initialize(config.strDirName, config.nDumpTime);

	// Mainloop
	for (;;)
	{
		if (xnOSWasKeyboardHit())
		{
			char c = xnOSReadCharFromInput();
			XnBool bStop = FALSE;
			switch (c)
			{
			case 27:
				bStop = TRUE;
				break;
			case 'd':
				cyclicBuffer.Dump();
				break;
			}
			if (bStop)
			{
				break;
			}
		}
		// Get next data
		context.WaitAndUpdateAll();
		// Save data
		cyclicBuffer.Update(depthGenerator, imageGenerator);

		// Check for missed frames
		if (config.bRecordDepth)
		{
			++nDepthFrames;
			XnUInt64 nTimestamp = depthGenerator.GetTimestamp();
			if (nLastDepthTime != 0 && nTimestamp - nLastDepthTime > 35000)
			{
				int missed = (int)(nTimestamp-nLastDepthTime)/32000 - 1;
				printf("Missed depth: %llu -> %llu = %d > 35000 - %d frames\n",
					nLastDepthTime, nTimestamp, XnUInt32(nTimestamp-nLastDepthTime), missed);
				nMissedDepthFrames += missed;
			}
			nLastDepthTime = nTimestamp;
		}
		if (config.bRecordImage)
		{
			++nImageFrames;
			XnUInt64 nTimestamp = imageGenerator.GetTimestamp();
			if (nLastImageTime != 0 && nTimestamp - nLastImageTime > 35000)
			{
				int missed = (int)(nTimestamp-nLastImageTime)/32000 - 1;
				printf("Missed image: %llu -> %llu = %d > 35000 - %d frames\n",
					nLastImageTime, nTimestamp, XnUInt32(nTimestamp-nLastImageTime), missed);
				nMissedImageFrames += missed;
			}
			nLastImageTime = nTimestamp;

		}

	}

	if (config.bRecordDepth)
	{
		printf("Missed %d of %d depth frames (%5.2f%%)\n", nMissedDepthFrames, (nMissedDepthFrames+nDepthFrames), (nMissedDepthFrames*100.0)/(nMissedDepthFrames+nDepthFrames));
	}
	if (config.bRecordImage)
	{
		printf("Missed %d of %d image frames (%5.2f%%)\n", nMissedImageFrames, (nMissedImageFrames+nImageFrames), (nMissedImageFrames*100.0)/(nMissedImageFrames+nImageFrames));
	}

	imageGenerator.Release();
	depthGenerator.Release();
	context.Release();

	return 0;
}
Exemplo n.º 12
0
// Per-frame update: in cyclic mode, copies the latest depth/image/IR frame
// metadata into the cyclic buffer and advances the write pointer; in all
// modes, tracks missed frames per enabled stream.
void ofxOpenNIRecorder::update() {
	
	if (config.record_type == ONI_STREAMING) {
		
		//if (depth_generator.IsDataNew()) {
		//	recorder.Record(); // is this really doing anything??? Don't think so ;-)
		//}
		
	} else if (config.record_type == ONI_CYCLIC && is_recording) {
		
		// store depth frame metadata to cyclic buffer array
		if(config.record_depth) {
			xn::DepthMetaData dmd;
			depth_generator.GetMetaData(dmd);
			frames[m_nNextWrite].depth_frame.CopyFrom(dmd);
		}
		
		// store image frame metadata to cyclic buffer array
		if(config.record_image) {
			xn::ImageMetaData imd;
			image_generator.GetMetaData(imd);
			frames[m_nNextWrite].image_frame.CopyFrom(imd);
		}
		
		// store ir frame metadata to cyclic buffer array
		if(config.record_ir) {
			xn::IRMetaData ird;
			ir_generator.GetMetaData(ird);
			frames[m_nNextWrite].ir_frame.CopyFrom(ird);
		}
		
		// See if buffer is already full
		if (m_nBufferCount < m_nBufferSize) m_nBufferCount++;
		
		// Make sure cylic buffer pointers are good
		m_nNextWrite++;
		
		// Reset cyclic buffer pointer if we're at the end
		if (m_nNextWrite == m_nBufferSize) m_nNextWrite = 0;
	}
	
	
	// Check for missed frames
	// NOTE(review): 35000/32000 appear to be microsecond thresholds around a
	// nominal ~30fps frame period - confirm the timestamp unit.
	if (config.record_depth) {
		
		++nDepthFrames;
		
		XnUInt64 nTimestamp = depth_generator.GetTimestamp();
		
		if (nLastDepthTime != 0 && nTimestamp - nLastDepthTime > 35000) {
			// A gap of more than one nominal frame period means drops.
			int missed = (int)(nTimestamp-nLastDepthTime)/32000 - 1;
			printf("Missed depth: %llu -> %llu = %d > 35000 - %d frames\n",
				   nLastDepthTime, nTimestamp, XnUInt32(nTimestamp-nLastDepthTime), missed);
			nMissedDepthFrames += missed;
		}
		
		nLastDepthTime = nTimestamp;
		
	}
	
	if (config.record_image) {
		++nImageFrames;
		XnUInt64 nTimestamp = image_generator.GetTimestamp();
		if (nLastImageTime != 0 && nTimestamp - nLastImageTime > 35000) {
			int missed = (int)(nTimestamp-nLastImageTime)/32000 - 1;
			printf("Missed image: %llu -> %llu = %d > 35000 - %d frames\n",
				   nLastImageTime, nTimestamp, XnUInt32(nTimestamp-nLastImageTime), missed);
			nMissedImageFrames += missed;
		}
		nLastImageTime = nTimestamp;
	}
	
	// NOTE(review): the IR branch below reuses nImageFrames / nLastImageTime /
	// nMissedImageFrames, so IR and image statistics are mixed together and
	// interleaved IR/image timestamps will corrupt both gap measurements.
	// Looks like a copy-paste from the image branch; dedicated IR counters
	// would be needed to fix it - confirm intent before changing members.
	if (config.record_ir) {
		++nImageFrames;
		XnUInt64 nTimestamp = ir_generator.GetTimestamp();
		if (nLastImageTime != 0 && nTimestamp - nLastImageTime > 35000) {
			int missed = (int)(nTimestamp-nLastImageTime)/32000 - 1;
			printf("Missed image (IR): %llu -> %llu = %d > 35000 - %d frames\n",
				   nLastImageTime, nTimestamp, XnUInt32(nTimestamp-nLastImageTime), missed);
			nMissedImageFrames += missed;
		}
		nLastImageTime = nTimestamp;
	}
	
}