Code Example #1
HRESULT BinaryDumpReader::processDepth()
{
	if(m_CurrFrame >= m_NumFrames)
	{
		GlobalAppState::get().s_playData = false;
		std::cout << "binary dump sequence complete - press space to run again" << std::endl;
		m_CurrFrame = 0;
	}

	if(GlobalAppState::get().s_playData) {

		float* depth = getDepthFloat();
		memcpy(depth, m_data.m_DepthImages[m_CurrFrame], sizeof(float)*getDepthWidth()*getDepthHeight());

		incrementRingbufIdx();

		if (m_bHasColorData) {
			memcpy(m_colorRGBX, m_data.m_ColorImages[m_CurrFrame], sizeof(vec4uc)*getColorWidth()*getColorHeight());
		}

		m_CurrFrame++;
		return S_OK;
	} else {
		return S_FALSE;
	}
}
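All of the reader variants in this listing follow the same pattern: write one frame into the slot returned by getDepthFloat(), then advance with incrementRingbufIdx(). A minimal sketch of that ring-buffer idea follows; the slot count, member names, and interface are assumptions for illustration, not the actual RGBDSensor internals:

#include <vector>

class DepthRingBuffer {
public:
	DepthRingBuffer(unsigned int slots, unsigned int pixels)
		: m_buffers(slots, std::vector<float>(pixels)), m_idx(0) {}

	// Current write slot, analogous to getDepthFloat() above.
	float* current() { return m_buffers[m_idx].data(); }

	// Advance to the next slot, analogous to incrementRingbufIdx().
	void advance() { m_idx = (m_idx + 1) % (unsigned int)m_buffers.size(); }

private:
	std::vector<std::vector<float>> m_buffers;
	unsigned int m_idx;
};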
Code Example #2
File: RGBDSensor.cpp  Project: ZaneYang/VoxelHashing
void RGBDSensor::savePointCloud( const std::string& filename, const mat4f& transform /*= mat4f::identity()*/ ) const
{
	//DepthImage d(getDepthHeight(), getDepthWidth(), getDepthFloat());
	//ColorImageRGB c(d);
	//FreeImageWrapper::saveImage("test.png", c, true);

	PointCloudf pc;
	for (unsigned int i = 0; i < getDepthWidth()*getDepthHeight(); i++) {
		unsigned int x = i % getDepthWidth();
		unsigned int y = i / getDepthWidth();

		float d = getDepthFloat()[i];
		if (d != 0.0f && d != -std::numeric_limits<float>::infinity()) {
			vec3f p = getDepthIntrinsicsInv()*vec3f((float)x*d, (float)y*d, d);

			//TODO check why our R and B is flipped
			vec4f c = vec4f(getColorRGBX()[i].z, getColorRGBX()[i].y, getColorRGBX()[i].x, getColorRGBX()[i].w);
			c /= 255.0f;

			pc.m_points.push_back(p);
			pc.m_colors.push_back(c);
		}
	}

	PointCloudIOf::saveToFile(filename, pc);
}
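The loop above back-projects each valid pixel with the standard pinhole model: a pixel (x, y) at depth d maps to camera space as K^-1 * (x*d, y*d, d), where K is the depth intrinsics matrix. Below is a self-contained sketch of just that step; the Vec3/Mat3 types stand in for the ml:: math types and are not the project's actual classes:

#include <array>

struct Vec3 { float x, y, z; };

struct Mat3 {
	std::array<float, 9> m; // row-major 3x3

	Vec3 operator*(const Vec3& v) const {
		return {
			m[0]*v.x + m[1]*v.y + m[2]*v.z,
			m[3]*v.x + m[4]*v.y + m[5]*v.z,
			m[6]*v.x + m[7]*v.y + m[8]*v.z
		};
	}
};

// Back-project pixel (x, y) with metric depth d into camera space.
// Feeding (x*d, y*d, d) through K^-1 undoes the projection
// u = fx*X/Z + cx, v = fy*Y/Z + cy at Z = d.
Vec3 backproject(const Mat3& intrinsicsInv, unsigned int x, unsigned int y, float d)
{
	return intrinsicsInv * Vec3{ (float)x * d, (float)y * d, d };
}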
Code Example #3
bool SensorDataReader::processDepth()
{
	if (m_currFrame >= m_numFrames)
	{
		GlobalAppState::get().s_playData = false;
		//std::cout << "binary dump sequence complete - press space to run again" << std::endl;
		stopReceivingFrames();
		std::cout << "binary dump sequence complete - stopped receiving frames" << std::endl;
		m_currFrame = 0;
	}

	if (GlobalAppState::get().s_playData) {

		float* depth = getDepthFloat();

		//TODO check why the frame cache is not used?
		ml::SensorData::RGBDFrameCacheRead::FrameState frameState = m_sensorDataCache->getNext();
		//ml::SensorData::RGBDFrameCacheRead::FrameState frameState;
		//frameState.m_colorFrame = m_sensorData->decompressColorAlloc(m_currFrame);
		//frameState.m_depthFrame = m_sensorData->decompressDepthAlloc(m_currFrame);


		for (unsigned int i = 0; i < getDepthWidth()*getDepthHeight(); i++) {
			if (frameState.m_depthFrame[i] == 0) depth[i] = -std::numeric_limits<float>::infinity();
			else depth[i] = (float)frameState.m_depthFrame[i] / m_sensorData->m_depthShift;
		}

		incrementRingbufIdx();

		if (m_bHasColorData) {
			for (unsigned int i = 0; i < getColorWidth()*getColorHeight(); i++) {
				m_colorRGBX[i] = vec4uc(frameState.m_colorFrame[i]);
			}
		}
		frameState.free();

		m_currFrame++;
		return true;
	}
	else {
		return false;
	}
}
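The inner loop decodes the raw 16-bit depth values from the .sens stream: dividing by the file's depth shift (commonly 1000, i.e. millimeters, though the authoritative value is m_sensorData->m_depthShift from the file header) yields meters, and a raw 0 becomes -infinity, the invalid-pixel sentinel used throughout the pipeline. A minimal standalone sketch of that conversion:

#include <limits>

// Convert raw integer depth to metric depth, mirroring the loop above.
void decodeDepth(const unsigned short* raw, float* out,
                 unsigned int count, float depthShift)
{
	for (unsigned int i = 0; i < count; i++) {
		if (raw[i] == 0)
			out[i] = -std::numeric_limits<float>::infinity(); // invalid pixel
		else
			out[i] = (float)raw[i] / depthShift; // raw units -> meters
	}
}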
Code Example #4
HRESULT PrimeSenseSensor::processDepth()
{

	HRESULT hr = S_OK;

	m_bDepthImageIsUpdated = false;
	m_bDepthImageCameraIsUpdated = false;
	m_bNormalImageCameraIsUpdated = false;

	hr = readDepthAndColor(getDepthFloat(), m_colorRGBX);

	m_bDepthImageIsUpdated = true;
	m_bDepthImageCameraIsUpdated = true;
	m_bNormalImageCameraIsUpdated = true;

	m_bDepthReceived = true;
	m_bColorReceived = true;

	return hr;
}
Code Example #5
HRESULT KinectSensor::processDepth()
{
	HRESULT hr = S_OK;

	//wait until data is available
	if (!(WAIT_OBJECT_0 == WaitForSingleObject(m_hNextDepthFrameEvent, 0)))	return S_FALSE;

	// This allows us to get depth of up to 8m
	BOOL bNearMode = FALSE;
	if(m_kinect4Windows)
	{
		bNearMode = TRUE;
	}

	INuiFrameTexture * pTexture = NULL;
	NUI_IMAGE_FRAME imageFrame;

	hr = m_pNuiSensor->NuiImageStreamGetNextFrame(m_pDepthStreamHandle, 0, &imageFrame);
	if ( FAILED(hr) ) { return hr; }

	hr = m_pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture(m_pDepthStreamHandle, &imageFrame, &bNearMode, &pTexture);
	if ( FAILED(hr) ) { m_pNuiSensor->NuiImageStreamReleaseFrame(m_pDepthStreamHandle, &imageFrame); return hr; }

	NUI_LOCKED_RECT LockedRect;
	hr = pTexture->LockRect(0, &LockedRect, NULL, 0);
	if ( FAILED(hr) ) { m_pNuiSensor->NuiImageStreamReleaseFrame(m_pDepthStreamHandle, &imageFrame); return hr; }

	NUI_DEPTH_IMAGE_PIXEL * pBuffer =  (NUI_DEPTH_IMAGE_PIXEL *) LockedRect.pBits;

	////#pragma omp parallel for
	//	for (int j = 0; j < (int)getDepthWidth()*(int)getDepthHeight(); j++)	{
	//		m_depthD16[j] = pBuffer[j].depth;
	//	}

	// Buffer of packed depth values (depth << 3) for the color-mapping call below
	USHORT* test = new USHORT[getDepthWidth()*getDepthHeight()];

	float* depth = getDepthFloat();
	for (unsigned int j = 0; j < getDepthHeight(); j++) {
		for (unsigned int i = 0; i < getDepthWidth(); i++) {

			unsigned int desIdx = j*getDepthWidth() + i;
			unsigned int srcIdx = j*getDepthWidth() + (getDepthWidth() - i - 1);	//x-flip of the kinect

			const USHORT& d = pBuffer[srcIdx].depth;
			if (d == 0)
				depth[desIdx] = -std::numeric_limits<float>::infinity();
			else
				depth[desIdx] = (float)d * 0.001f;

			test[srcIdx] = d * 8;	// pack depth for the mapping API (low 3 bits = player index)
		}
	}

	hr = pTexture->UnlockRect(0);
	if ( FAILED(hr) ) { delete [] test; m_pNuiSensor->NuiImageStreamReleaseFrame(m_pDepthStreamHandle, &imageFrame); return hr; }

	hr = m_pNuiSensor->NuiImageStreamReleaseFrame(m_pDepthStreamHandle, &imageFrame);

	// Get offset x, y coordinates for color in depth space
	// This will allow us to later compensate for the differences in location, angle, etc between the depth and color cameras
	m_pNuiSensor->NuiImageGetColorPixelCoordinateFrameFromDepthPixelFrameAtResolution(
		cColorResolution,
		cDepthResolution,
		getDepthWidth()*getDepthHeight(),
		test,
		getDepthWidth()*getDepthHeight()*2,
		m_colorCoordinates
		);


	delete [] test;

	return hr;
}
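The reason for d * 8 above: the legacy NUI coordinate-mapping API expects "packed" depth values, in which the low 3 bits carry the player index and the upper 13 bits carry the depth in millimeters, so multiplying by 8 (d << 3) packs a depth with player index 0. A hedged sketch of the packing and unpacking, for illustration only:

typedef unsigned short USHORT;

// Pack a millimeter depth and a 3-bit player index into one 16-bit value.
inline USHORT packDepth(USHORT depthMM, USHORT playerIndex)
{
	return (USHORT)((depthMM << 3) | (playerIndex & 0x7));
}

// Recover the millimeter depth from a packed value.
inline USHORT unpackDepthMM(USHORT packed)
{
	return (USHORT)(packed >> 3);
}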