Code example #1
File: D3D11RenderTarget.cpp  Project: billbliss3/Opt
	template <class T>	void D3D11RenderTarget::captureColorBuffer(BaseImage<T>& result, unsigned int which)
	{
		DXGI_FORMAT format = m_textureFormats[which];
		if (format == DXGI_FORMAT_R8G8B8A8_UNORM) {
			if (!std::is_same<vec4uc, T>::value)	throw MLIB_EXCEPTION("incompatible image format");
		}
		else if (format == DXGI_FORMAT_R32G32B32A32_FLOAT) {
			if (!std::is_same<vec4f, T>::value)		throw MLIB_EXCEPTION("incompatible image format");
		}
		else {
			throw MLIB_EXCEPTION("unknown image format");
		}

		auto &context = m_graphics->getContext();
		context.CopyResource(m_captureTextures[which], m_targets[which]);

		result.allocate(m_width, m_height);

		D3D11_MAPPED_SUBRESOURCE resource;
		UINT subresource = D3D11CalcSubresource(0, 0, 0);
	HRESULT hr = context.Map(m_captureTextures[which], subresource, D3D11_MAP_READ, 0, &resource);
	if (FAILED(hr)) throw MLIB_EXCEPTION("failed to map capture texture");
	const BYTE *data = (BYTE *)resource.pData;

	// copy row by row: RowPitch may include padding beyond m_width * sizeof(T)
	for (unsigned int y = 0; y < m_height; y++)	{
		memcpy(&result(0U, y), data + resource.RowPitch * y, m_width * sizeof(T));
		}

		context.Unmap(m_captureTextures[which], subresource);
	}
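
A minimal usage sketch for the read-back above, assuming the mLib ml namespace, a D3D11RenderTarget named renderTarget that has already been rendered to, and that slot 0 was created with DXGI_FORMAT_R8G8B8A8_UNORM; device creation and draw calls are omitted.

// Hedged sketch: copy color target 0 into a CPU-side image (renderTarget is an assumption).
ml::BaseImage<ml::vec4uc> colorImage;
renderTarget.captureColorBuffer(colorImage, 0);	// throws MLIB_EXCEPTION on a format mismatch
const ml::vec4uc topLeft = colorImage(0U, 0U);	// per-pixel access, as in the copy loop above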
Code example #2
void DX11CustomRenderTarget::Clear( ID3D11DeviceContext* pd3dDeviceContext, float clearDepth/* = 1.f*/ )
{
	for (UINT i = 0; i < m_uNumTargets; i++)
	{
		if (m_TextureFormats[i] == DXGI_FORMAT_R32G32B32A32_FLOAT)
		{
			float clearColor[] = {-std::numeric_limits<float>::infinity(), -std::numeric_limits<float>::infinity(), -std::numeric_limits<float>::infinity(), 1.0f};
			pd3dDeviceContext->ClearRenderTargetView(m_TargetsRTV[i], clearColor);
		}
		else if (m_TextureFormats[i] == DXGI_FORMAT_R32_FLOAT)
		{
			// only the R channel is used by an R32_FLOAT target, but ClearRenderTargetView always reads four floats
			float clearColor[] = {-std::numeric_limits<float>::infinity(), 0.0f, 0.0f, 0.0f};
			pd3dDeviceContext->ClearRenderTargetView(m_TargetsRTV[i], clearColor);
		}
		else if (m_TextureFormats[i] == DXGI_FORMAT_R8G8B8A8_UNORM_SRGB) {
			float clearColor[] = {0.0f, 0.0f, 0.0f, 0.0f};	// ClearRenderTargetView expects four floats
			pd3dDeviceContext->ClearRenderTargetView(m_TargetsRTV[i], clearColor);
		}
		else
		{
			throw MLIB_EXCEPTION("unknown texture format");
		}
	}
		
	pd3dDeviceContext->ClearDepthStencilView( m_DepthStencilDSV, D3D11_CLEAR_DEPTH, clearDepth, 0);
}
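
Clearing the float targets to -infinity is a deliberate sentinel: later stages in these examples compare against the same value to detect data that was never written (example #5 checks trajectory poses, example #10 checks depthToSkeleton results). A minimal sketch of that convention; the function name is an assumption.

#include <limits>

// Hedged sketch: a texel read back from a R32G32B32A32_FLOAT target that still holds
// the clear value was never touched by the shader.
bool isValidSample(const ml::vec4f& texel) {
	return texel.x != -std::numeric_limits<float>::infinity();
}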
Code example #3
ml::mat4f SensorDataReader::getRigidTransform(int offset) const
{
	unsigned int idx = m_currFrame - 1 + offset;
	if (idx >= m_sensorData->m_frames.size()) throw MLIB_EXCEPTION("invalid trajectory index " + std::to_string(idx));
	const mat4f& transform = m_sensorData->m_frames[idx].getCameraToWorld();
	return transform;
	//return m_data.m_trajectory[idx];
}
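
The index math (m_currFrame - 1 + offset) suggests that offset 0 refers to the frame most recently read. A one-line usage sketch, with reader assumed to be a SensorDataReader:

const ml::mat4f lastPose = reader.getRigidTransform(0);	// camera-to-world pose of the frame just read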
Code example #4
File: D3D11TriMesh.cpp  Project: billbliss3/Opt
void ml::D3D11TriMesh::updateColors(const std::vector<vec4f> &newValues) {
	auto &vertices = m_triMesh.getVertices();
	if (newValues.size() != vertices.size()) {
		throw MLIB_EXCEPTION("vertex buffer size doesn't match");
	}
	for (size_t i = 0; i < newValues.size(); i++) {
		vertices[i].color = newValues[i];
	}
	createGPU();
}
Code example #5
void SensorDataReader::getTrajectory(std::vector<mat4f>& trajectory) const
{
	trajectory.clear();
	if (!m_sensorData) return;
	trajectory.resize(m_sensorData->m_frames.size());
	for (unsigned int f = 0; f < m_sensorData->m_frames.size(); f++) {
		trajectory[f] = m_sensorData->m_frames[f].getCameraToWorld();
		if (trajectory[f][0] == -std::numeric_limits<float>::infinity())
			throw MLIB_EXCEPTION("ERROR invalid transform in reference trajectory");
	}
}
Code example #6
void CUDARayCastSDF::rayIntervalSplatting(const HashData& hashData, const HashParams& hashParams, const DepthCameraData& cameraData, const mat4f& lastRigidTransform)
{
	if (hashParams.m_numOccupiedBlocks == 0)	return;

	if (m_params.m_maxNumVertices <= 6*hashParams.m_numOccupiedBlocks) { // 6 verts (2 triangles) per block
		MLIB_EXCEPTION("not enough space for vertex buffer for ray interval splatting");
	}

	m_params.m_numOccupiedSDFBlocks = hashParams.m_numOccupiedBlocks;
	m_params.m_viewMatrix = MatrixConversion::toCUDA(lastRigidTransform.getInverse());
	m_params.m_viewMatrixInverse = MatrixConversion::toCUDA(lastRigidTransform);

	//m_data.updateParams(m_params); // !!! debugging

	m_rayIntervalSplatting.rayIntervalSplatting(DXUTGetD3D11DeviceContext(), hashData, cameraData, m_data, m_params, m_params.m_numOccupiedSDFBlocks*6);
}
Code example #7
void SensorDataReader::createFirstConnected()
{
	releaseData();

	std::string filename = GlobalAppState::get().s_binaryDumpSensorFile;

	std::cout << "Start loading binary dump... ";
	m_sensorData = new SensorData;
	m_sensorData->loadFromFile(filename);
	std::cout << "DONE!" << std::endl;
	std::cout << *m_sensorData << std::endl;

	//std::cout << "depth intrinsics:" << std::endl;
	//std::cout << m_sensorData->m_calibrationDepth.m_intrinsic << std::endl;
	//std::cout << "color intrinsics:" << std::endl;
	//std::cout << m_sensorData->m_calibrationColor.m_intrinsic << std::endl;

	RGBDSensor::init(m_sensorData->m_depthWidth, m_sensorData->m_depthHeight, std::max(m_sensorData->m_colorWidth, 1u), std::max(m_sensorData->m_colorHeight, 1u), 1);
	initializeDepthIntrinsics(m_sensorData->m_calibrationDepth.m_intrinsic(0, 0), m_sensorData->m_calibrationDepth.m_intrinsic(1, 1), m_sensorData->m_calibrationDepth.m_intrinsic(0, 2), m_sensorData->m_calibrationDepth.m_intrinsic(1, 2));
	initializeColorIntrinsics(m_sensorData->m_calibrationColor.m_intrinsic(0, 0), m_sensorData->m_calibrationColor.m_intrinsic(1, 1), m_sensorData->m_calibrationColor.m_intrinsic(0, 2), m_sensorData->m_calibrationColor.m_intrinsic(1, 2));

	initializeDepthExtrinsics(m_sensorData->m_calibrationDepth.m_extrinsic);
	initializeColorExtrinsics(m_sensorData->m_calibrationColor.m_extrinsic);


	m_numFrames = (unsigned int)m_sensorData->m_frames.size();
	if (m_numFrames > GlobalBundlingState::get().s_maxNumImages * GlobalBundlingState::get().s_submapSize) {
		throw MLIB_EXCEPTION("sens file #frames = " + std::to_string(m_numFrames) + ", please change param file to accommodate");
		//std::cout << "WARNING: sens file #frames = " << m_numFrames << ", please change param file to accommodate" << std::endl;
		//std::cout << "(press key to continue)" << std::endl;
		//getchar();
	}

	if (m_numFrames > 0 && m_sensorData->m_frames[0].getColorCompressed()) {
		m_bHasColorData = true;
	}
	else {
		m_bHasColorData = false;
	}

	const unsigned int cacheSize = 10;
	m_sensorDataCache = new ml::SensorData::RGBDFrameCacheRead(m_sensorData, cacheSize);
}
Code example #8
File: D3D11TriMesh.cpp  Project: billbliss3/Opt
void ml::D3D11TriMesh::initVB(GraphicsDevice &g)
{
	if (m_triMesh.getVertices().size() == 0) return;
	auto &device = g.castD3D11().getDevice();

	size_t byteSize = sizeof(TriMeshf::Vertex) * m_triMesh.getVertices().size();
	if (byteSize > std::numeric_limits<UINT>::max()) {
		throw MLIB_EXCEPTION("buffer size too big " + std::to_string(byteSize) + ", while max is " + std::to_string(std::numeric_limits<UINT>::max()));
	}

	D3D11_BUFFER_DESC bufferDesc;
	ZeroMemory( &bufferDesc, sizeof(bufferDesc) );
	bufferDesc.Usage = D3D11_USAGE_DEFAULT;
	bufferDesc.ByteWidth = (UINT)byteSize;
	bufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
	bufferDesc.CPUAccessFlags = 0;

	D3D11_SUBRESOURCE_DATA data;
	ZeroMemory( &data, sizeof(data) );
    data.pSysMem = &m_triMesh.getVertices()[0];

	D3D_VALIDATE(device.CreateBuffer( &bufferDesc, &data, &m_vertexBuffer ));
}
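
Because the vertex buffer is created with D3D11_USAGE_DEFAULT and no CPU access flags, updates in this code path recreate the buffer (see updateColors calling createGPU in example #4). A hedged alternative sketch using UpdateSubresource, which D3D11 permits for DEFAULT-usage buffers; the helper and its parameters are assumptions, not part of the project.

// Hedged sketch: overwrite an existing DEFAULT-usage vertex buffer in place,
// assuming it was created at least as large as the incoming data (d3d11.h and
// the project's TriMeshf type are assumed to be available).
void updateVB(ID3D11DeviceContext& context, ID3D11Buffer* vertexBuffer,
              const std::vector<ml::TriMeshf::Vertex>& vertices) {
	context.UpdateSubresource(vertexBuffer, 0, nullptr, vertices.data(),
	                          0, 0);	// row/depth pitch are ignored for buffers
}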
Code example #9
HRESULT BinaryDumpReader::createFirstConnected()
{
	releaseData();

	if (GlobalAppState::get().s_binaryDumpSensorFile.size() == 0) throw MLIB_EXCEPTION("need to specify s_binaryDumpSensorFile[0]");
	std::string filename = GlobalAppState::get().s_binaryDumpSensorFile[0];

	std::cout << "Start loading binary dump" << std::endl;
	//BinaryDataStreamZLibFile inputStream(filename, false);
	BinaryDataStreamFile inputStream(filename, false);
	inputStream >> m_data;
	std::cout << "Loading finished" << std::endl;
	std::cout << m_data << std::endl;

	std::cout << "intrinsics:" << std::endl;
	std::cout << m_data.m_CalibrationDepth.m_Intrinsic << std::endl;

	RGBDSensor::init(m_data.m_DepthImageWidth, m_data.m_DepthImageHeight, std::max(m_data.m_ColorImageWidth,1u), std::max(m_data.m_ColorImageHeight,1u), 1);
	initializeDepthIntrinsics(m_data.m_CalibrationDepth.m_Intrinsic(0,0), m_data.m_CalibrationDepth.m_Intrinsic(1,1), m_data.m_CalibrationDepth.m_Intrinsic(0,2), m_data.m_CalibrationDepth.m_Intrinsic(1,2));
	initializeColorIntrinsics(m_data.m_CalibrationColor.m_Intrinsic(0,0), m_data.m_CalibrationColor.m_Intrinsic(1,1), m_data.m_CalibrationColor.m_Intrinsic(0,2), m_data.m_CalibrationColor.m_Intrinsic(1,2));

	initializeDepthExtrinsics(m_data.m_CalibrationDepth.m_Extrinsic);
	initializeColorExtrinsics(m_data.m_CalibrationColor.m_Extrinsic);


	m_NumFrames = m_data.m_DepthNumFrames;
	assert(m_data.m_ColorNumFrames == m_data.m_DepthNumFrames || m_data.m_ColorNumFrames == 0);		
		
	if (m_data.m_ColorImages.size() > 0) {
		m_bHasColorData = true;
	} else {
		m_bHasColorData = false;
	}

	return S_OK;
}
Code example #10
File: RGBDSensor.cpp  Project: ZaneYang/VoxelHashing
void RGBDSensor::computePointCurrentPointCloud(PointCloudf& pc, const mat4f& transform /*= mat4f::identity()*/) const
{
	if (!(getColorWidth() == getDepthWidth() && getColorHeight() == getDepthHeight()))	throw MLIB_EXCEPTION("invalid dimensions");

	for (unsigned int i = 0; i < getDepthWidth()*getDepthHeight(); i++) {
		unsigned int x = i % getDepthWidth();
		unsigned int y = i / getDepthWidth();
		vec3f p = depthToSkeleton(x,y);
		if (p.x != -std::numeric_limits<float>::infinity() && p.x != 0.0f)	{

			vec3f n = getNormal(x,y);
			if (n.x != -FLT_MAX) {
				pc.m_points.push_back(p);
				pc.m_normals.push_back(n);
				vec4uc c = m_colorRGBX[i];
				pc.m_colors.push_back(vec4f(c.z/255.0f, c.y/255.0f, c.x/255.0f, 1.0f));	// the stored channel order is reversed relative to RGB, hence the swap
			}
		}
	}
	for (auto& p : pc.m_points) {
		p = transform * p;
	}
	mat4f invTranspose = transform.getInverse().getTranspose();
	for (auto& n : pc.m_normals) {
		n = invTranspose * n;
		n.normalize();
	}
}
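
A short usage sketch combining examples #5 and #10: place the current frame's point cloud in world space using the recorded trajectory. reader, sensor, and the frame index f are assumptions, and f must match the frame the sensor currently holds.

// Hedged sketch: fetch the reference trajectory (example #5) and use the matching pose
// to transform the point cloud built in example #10 into world space.
std::vector<ml::mat4f> trajectory;
reader.getTrajectory(trajectory);
ml::PointCloudf pc;
sensor.computePointCurrentPointCloud(pc, trajectory[f]);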