void RGBDSensor::savePointCloud( const std::string& filename, const mat4f& transform /*= mat4f::identity()*/ ) const { //DepthImage d(getDepthHeight(), getDepthWidth(), getDepthFloat()); //ColorImageRGB c(d); //FreeImageWrapper::saveImage("test.png", c, true); PointCloudf pc; for (unsigned int i = 0; i < getDepthWidth()*getDepthHeight(); i++) { unsigned int x = i % getDepthWidth(); unsigned int y = i / getDepthWidth(); float d = getDepthFloat()[i]; if (d != 0.0f && d != -std::numeric_limits<float>::infinity()) { vec3f p = getDepthIntrinsicsInv()*vec3f((float)x*d, (float)y*d, d); //TODO check why our R and B is flipped vec4f c = vec4f(getColorRGBX()[i].z, getColorRGBX()[i].y, getColorRGBX()[i].x, getColorRGBX()[i].w); c /= 255.0f; pc.m_points.push_back(p); pc.m_colors.push_back(c); } } PointCloudIOf::saveToFile(filename, pc); }
void RGBDSensor::computePointCurrentPointCloud(PointCloudf& pc, const mat4f& transform /*= mat4f::identity()*/) const {
	// Builds a point cloud (points, normals, colors) from the current frame and
	// maps it into the target frame via 'transform'.
	if (!(getColorWidth() == getDepthWidth() && getColorHeight() == getDepthHeight()))
		throw MLIB_EXCEPTION("invalid dimensions");

	for (unsigned int i = 0; i < getDepthWidth()*getDepthHeight(); i++) {
		unsigned int x = i % getDepthWidth();
		unsigned int y = i / getDepthWidth();
		vec3f p = depthToSkeleton(x,y);
		// reject invalid back-projections (-inf sentinel) and zero-depth pixels
		if (p.x != -std::numeric_limits<float>::infinity() && p.x != 0.0f) {
			vec3f n = getNormal(x,y);
			// BUG FIX: getNormal marks invalid normals with -inf (see getNormal),
			// but this check compared against -FLT_MAX and therefore never
			// filtered anything; compare against the actual sentinel.
			if (n.x != -std::numeric_limits<float>::infinity()) {
				pc.m_points.push_back(p);
				pc.m_normals.push_back(n);
				vec4uc c = m_colorRGBX[i];
				pc.m_colors.push_back(vec4f(c.z/255.0f, c.y/255.0f, c.x/255.0f, 1.0f)); //there's a swap... dunno why really
			}
		}
	}
	// move the points into the target frame
	for (auto& p : pc.m_points) {
		p = transform * p;
	}
	// normals transform with the inverse transpose, then get re-normalized
	mat4f invTranspose = transform.getInverse().getTranspose();
	for (auto& n : pc.m_normals) {
		n = invTranspose * n;
		n.normalize();
	}
}
HRESULT BinaryDumpReader::createFirstConnected() {
	// Loads an entire binary sensor dump into memory: reads the CalibratedSensorData,
	// initializes sensor dimensions/intrinsics, and converts all depth frames to
	// USHORT millimeters and all color frames to BYTE RGBX.
	std::string filename = GlobalAppState::getInstance().s_BinaryDumpReaderSourceFile;

	std::cout << "Start loading binary dump" << std::endl;
	//BinaryDataStreamZLibFile inputStream(filename, false);
	BinaryDataStreamFile inputStream(filename, false);
	CalibratedSensorData sensorData;
	inputStream >> sensorData;
	std::cout << "Loading finished" << std::endl;
	std::cout << sensorData << std::endl;

	// color dimensions clamped to >= 1 so a depth-only dump still initializes
	DepthSensor::init(sensorData.m_DepthImageWidth, sensorData.m_DepthImageHeight, std::max(sensorData.m_ColorImageWidth,1u), std::max(sensorData.m_ColorImageHeight,1u));
	// FIX: removed unused local 'mat4f intrinsics(...)' that was never read
	initializeIntrinsics(sensorData.m_CalibrationDepth.m_Intrinsic(0,0), sensorData.m_CalibrationDepth.m_Intrinsic(1,1), sensorData.m_CalibrationDepth.m_Intrinsic(0,2), sensorData.m_CalibrationDepth.m_Intrinsic(1,2));

	m_NumFrames = sensorData.m_DepthNumFrames;
	assert(sensorData.m_ColorNumFrames == sensorData.m_DepthNumFrames || sensorData.m_ColorNumFrames == 0);
	releaseData();

	// depth: meters (float) -> millimeters (USHORT), rounded to nearest
	m_DepthD16Array = new USHORT*[m_NumFrames];
	for (unsigned int i = 0; i < m_NumFrames; i++) {
		m_DepthD16Array[i] = new USHORT[getDepthWidth()*getDepthHeight()];
		for (unsigned int k = 0; k < getDepthWidth()*getDepthHeight(); k++) {
			m_DepthD16Array[i][k] = (USHORT)(sensorData.m_DepthImages[i][k]*1000.0f + 0.5f);
		}
	}
	std::cout << "loading depth done" << std::endl;

	if (sensorData.m_ColorImages.size() > 0) {
		m_bHasColorData = true;
		m_ColorRGBXArray = new BYTE*[m_NumFrames];
		for (unsigned int i = 0; i < m_NumFrames; i++) {
			m_ColorRGBXArray[i] = new BYTE[getColorWidth()*getColorHeight()*getColorBytesPerPixel()];
			for (unsigned int k = 0; k < getColorWidth()*getColorHeight(); k++) {
				// copy the first three channels as stored and force alpha opaque
				const BYTE* c = (BYTE*)&(sensorData.m_ColorImages[i][k]);
				m_ColorRGBXArray[i][k*getColorBytesPerPixel()+0] = c[0];
				m_ColorRGBXArray[i][k*getColorBytesPerPixel()+1] = c[1];
				m_ColorRGBXArray[i][k*getColorBytesPerPixel()+2] = c[2];
				m_ColorRGBXArray[i][k*getColorBytesPerPixel()+3] = 255;	//I don't know really why this has to be swapped...
			}
			//std::string outFile = "colorout//color" + std::to_string(i) + ".png";
			//ColorImageR8G8B8A8 image(getColorHeight(), getColorWidth(), (vec4uc*)m_ColorRGBXArray[i]);
			//FreeImageWrapper::saveImage(outFile, image);
		}
	} else {
		m_bHasColorData = false;
	}
	sensorData.deleteData();	// frees the dump's own buffers; our copies above remain valid
	std::cout << "loading color done" << std::endl;
	return S_OK;
}
// Advances the color frame counter; the actual color loading below is disabled
// by the hard-coded 'readColor = false' flag, so currently only bookkeeping runs.
HRESULT ImageReaderSensor::processColor() {
	HRESULT hr = S_OK;
	if (m_CurrentFrameNumberColor >= m_NumFrames) {
		return S_FALSE;	// sequence exhausted
	}
	bool readColor = false;	// dead-code switch: color loading is turned off
	if (readColor) {
		// file name pattern: <base>color/NNNNNN.png with a 1-based frame number
		// NOTE(review): uses m_CurrentFrameNumberDepth, not ...Color — confirm intended
		char frameNumber_c[10];
		sprintf_s(frameNumber_c,"%06d", m_CurrentFrameNumberDepth+1);
		std::string frameNumber(frameNumber_c);
		std::string currFileName = m_BaseFilename;
		currFileName.append("color/").append(frameNumber).append(".png");

		ColorImageRGB image;
		FreeImageWrapper::loadImage(currFileName, image);
		image.flipY();

		// NOTE(review): iterates over the DEPTH dimensions — assumes color and
		// depth images have identical size; confirm for this sensor setup
		for (UINT i = 0; i < getDepthWidth() * getDepthHeight(); i++) {
			vec3f c = image.getDataPointer()[i];
			c = 255.0f*c;	// [0,1] float -> [0,255]
			m_colorRGBX[4*i+0] = (BYTE)c.x;
			m_colorRGBX[4*i+1] = (BYTE)c.y;
			m_colorRGBX[4*i+2] = (BYTE)c.z;
			m_colorRGBX[4*i+3] = 255;	// opaque alpha
		}
	}
	m_CurrentFrameNumberColor++;
	return hr;
}
HRESULT BinaryDumpReader::processDepth() {
	// Streams the next cached depth (and optional color) frame into the live buffers.
	if (m_CurrFrame >= m_NumFrames) {
		// end of sequence: pause playback and rewind so space restarts it
		GlobalAppState::get().s_playData = false;
		std::cout << "binary dump sequence complete - press space to run again" << std::endl;
		m_CurrFrame = 0;
	}
	if (!GlobalAppState::get().s_playData) {
		return S_FALSE;	// playback is paused
	}
	// copy depth into the current ring-buffer slot, then advance the ring buffer
	float* depth = getDepthFloat();
	memcpy(depth, m_data.m_DepthImages[m_CurrFrame], sizeof(float)*getDepthWidth()*getDepthHeight());
	incrementRingbufIdx();
	if (m_bHasColorData) {
		memcpy(m_colorRGBX, m_data.m_ColorImages[m_CurrFrame], sizeof(vec4uc)*getColorWidth()*getColorHeight());
	}
	m_CurrFrame++;
	return S_OK;
}
void RGBDSensor::recordFrame() {
	// Hands the current depth/color buffers over to the recording lists and
	// replaces them with freshly allocated buffers for the next frame.
	m_recordedDepthData.push_back(m_depthFloat[m_currentRingBufIdx]);
	m_depthFloat[m_currentRingBufIdx] = new float[getDepthWidth()*getDepthHeight()];	// ownership moved to the list
	m_recordedColorData.push_back(m_colorRGBX);
	m_colorRGBX = new vec4uc[getColorWidth()*getColorHeight()];	// ownership moved to the list
}
// Fetches the latest Kinect color frame and remaps it through the per-pixel
// depth->color coordinate table (m_colorCoordinates) into m_colorRGBX; pixels
// without a valid mapping are written as 0. Alpha is forced to 0xFF for valid
// pixels so it can double as a validity flag.
HRESULT KinectSensor::processColor() {
	// non-blocking check: bail out if no new color frame has been signaled yet
	if (! (WAIT_OBJECT_0 == WaitForSingleObject(m_hNextColorFrameEvent, 0)) ) return S_FALSE;

	NUI_IMAGE_FRAME imageFrame;
	HRESULT hr = S_OK;
	hr = m_pNuiSensor->NuiImageStreamGetNextFrame(m_pColorStreamHandle, 0, &imageFrame);
	if ( FAILED(hr) ) { return hr; }

	NUI_LOCKED_RECT LockedRect;
	hr = imageFrame.pFrameTexture->LockRect(0, &LockedRect, NULL, 0);
	if ( FAILED(hr) ) { return hr; }
	// NOTE(review): the early return above leaves the frame un-released — confirm intended

	// loop over each row and column of the color
#pragma omp parallel for
	for (int yi = 0; yi < (int)getColorHeight(); ++yi) {
		LONG y = yi;
		// destination row in the RGBX output; one 32-bit pixel per LONG
		LONG* pDest = ((LONG*)m_colorRGBX) + (int)getColorWidth() * y;
		for (LONG x = 0; x < (int)getColorWidth(); ++x) {
			// calculate index into depth array
			//int depthIndex = x/m_colorToDepthDivisor + y/m_colorToDepthDivisor * getDepthWidth();
			//TODO x flip
			int depthIndex = (getDepthWidth() - 1 - x/m_colorToDepthDivisor) + y/m_colorToDepthDivisor * getDepthWidth();

			// retrieve the depth to color mapping for the current depth pixel
			LONG colorInDepthX = m_colorCoordinates[depthIndex * 2];
			LONG colorInDepthY = m_colorCoordinates[depthIndex * 2 + 1];

			// make sure the depth pixel maps to a valid point in color space
			if ( colorInDepthX >= 0 && colorInDepthX < (int)getColorWidth() && colorInDepthY >= 0 && colorInDepthY < (int)getColorHeight() ) {
				// calculate index into color array
				LONG colorIndex = colorInDepthY * (int)getColorWidth() + colorInDepthX;
				//TODO x flip
				//LONG colorIndex = colorInDepthY * (int)getColorWidth() + (getColorWidth() - 1 - colorInDepthX);

				// set source for copy to the color pixel
				LONG* pSrc = ((LONG *)LockedRect.pBits) + colorIndex;
				LONG tmp = *pSrc;
				tmp|=0xFF000000; // Flag for is valid
				*pDest = tmp;
			} else {
				*pDest = 0x00000000;	// no valid mapping for this pixel
			}
			pDest++;
		}
	}

	hr = imageFrame.pFrameTexture->UnlockRect(0);
	if ( FAILED(hr) ) { return hr; };
	hr = m_pNuiSensor->NuiImageStreamReleaseFrame(m_pColorStreamHandle, &imageFrame);
	return hr;
}
HRESULT BinaryDumpReader::processDepth() {
	// Copies pre-loaded frame m_CurrFrame into the live depth/color buffers.
	if (m_CurrFrame >= m_NumFrames) {
		return S_FALSE;	// no frames left
	}
	std::cout << "curr Frame " << m_CurrFrame << std::endl;
	memcpy(m_depthD16, m_DepthD16Array[m_CurrFrame], sizeof(USHORT)*getDepthWidth()*getDepthHeight());
	if (m_bHasColorData) {
		memcpy(m_colorRGBX, m_ColorRGBXArray[m_CurrFrame], getColorBytesPerPixel()*getColorWidth()*getColorHeight());
	}
	m_CurrFrame++;
	return S_OK;
}
bool StructureSensor::processDepth() { std::pair<float*,UCHAR*> frames = m_server.process(m_oldDepth, m_oldColor); if (frames.first == NULL || frames.second == NULL) return false; // depth memcpy(m_depthFloat[m_currentRingBufIdx], frames.first, sizeof(float)*getDepthWidth()*getDepthHeight()); // color memcpy(m_colorRGBX, (vec4uc*)frames.second, sizeof(vec4uc)*getColorWidth()*getColorHeight()); m_oldDepth = frames.first; m_oldColor = frames.second; return true; }
// Decodes the next frame from the sensor-data stream into the depth ring buffer
// (raw shifted depth -> meters, 0 -> -inf sentinel) and, if present, the color
// buffer. When all frames have played, stops playback and the receiver thread.
bool SensorDataReader::processDepth() {
	if (m_currFrame >= m_numFrames) {
		GlobalAppState::get().s_playData = false;
		//std::cout << "binary dump sequence complete - press space to run again" << std::endl;
		stopReceivingFrames();
		std::cout << "binary dump sequence complete - stopped receiving frames" << std::endl;
		m_currFrame = 0;	// rewind so playback can be restarted
	}

	if (GlobalAppState::get().s_playData) {
		float* depth = getDepthFloat();
		//TODO check why the frame cache is not used?
		// pull the next pre-decompressed depth+color pair from the cache
		ml::SensorData::RGBDFrameCacheRead::FrameState frameState = m_sensorDataCache->getNext();
		//ml::SensorData::RGBDFrameCacheRead::FrameState frameState;
		//frameState.m_colorFrame = m_sensorData->decompressColorAlloc(m_currFrame);
		//frameState.m_depthFrame = m_sensorData->decompressDepthAlloc(m_currFrame);

		for (unsigned int i = 0; i < getDepthWidth()*getDepthHeight(); i++) {
			// a raw value of 0 marks an invalid depth sample
			if (frameState.m_depthFrame[i] == 0) depth[i] = -std::numeric_limits<float>::infinity();
			else depth[i] = (float)frameState.m_depthFrame[i] / m_sensorData->m_depthShift;	// raw -> meters
		}
		incrementRingbufIdx();

		if (m_bHasColorData) {
			for (unsigned int i = 0; i < getColorWidth()*getColorHeight(); i++) {
				m_colorRGBX[i] = vec4uc(frameState.m_colorFrame[i]);
			}
		}
		frameState.free();	// releases the decompressed buffers owned by this cache entry
		m_currFrame++;
		return true;
	} else {
		return false;
	}
}
HRESULT ImageReaderSensor::processDepth() {
	// Loads depth frame m_CurrentFrameNumberDepth from disk ("depth/NNNNNN.png",
	// 1-based file index) and converts meters (float) to millimeters (USHORT).
	HRESULT hr = S_OK;
	if (m_CurrentFrameNumberDepth >= m_NumFrames) {
		return S_FALSE;	// sequence exhausted
	}
	std::cout << "Processing Depth Frame " << m_CurrentFrameNumberDepth << std::endl;

	char frameNumber_c[10];
	sprintf_s(frameNumber_c,"%06d", m_CurrentFrameNumberDepth+1);
	std::string frameNumber(frameNumber_c);
	std::string currFileName = m_BaseFilename;
	currFileName.append("depth/").append(frameNumber).append(".png");

	DepthImage image;
	FreeImageWrapper::loadImage(currFileName, image);
	image.flipY();
	for (UINT i = 0; i < getDepthWidth() * getDepthHeight(); i++) {
		// FIX: round to nearest millimeter instead of truncating, consistent
		// with BinaryDumpReader's conversion (*1000.0f + 0.5f)
		m_depthD16[i] = (USHORT)(image.getDataPointer()[i] * 1000.0f + 0.5f);
	}
	m_CurrentFrameNumberDepth++;
	return hr;
}
ml::vec3f RGBDSensor::getNormal(unsigned int x, unsigned int y) const {
	// Estimates the surface normal at (x,y) via central differences of the
	// back-projected 4-neighborhood; returns vec3f(-inf) if unavailable.
	const float invalid = -std::numeric_limits<float>::infinity();
	vec3f normal(invalid);
	// border pixels have no complete neighborhood
	if (x == 0 || y == 0 || x >= getDepthWidth() - 1 || y >= getDepthHeight() - 1) {
		return normal;
	}
	const vec3f center = depthToSkeleton(x, y);
	const vec3f right  = depthToSkeleton(x+1, y+0);
	const vec3f below  = depthToSkeleton(x+0, y+1);
	const vec3f left   = depthToSkeleton(x-1, y+0);
	const vec3f above  = depthToSkeleton(x+0, y-1);
	const bool allValid =
		center.x != invalid && right.x != invalid && below.x != invalid &&
		left.x != invalid && above.x != invalid;
	if (allValid) {
		// cross product of the two central-difference tangents
		const vec3f crossProd = (right - left) ^ (below - above);
		const float len = crossProd.length();
		if (len > 0.0f) {
			normal = crossProd / len;	// unit-length result
		}
	}
	return normal;
}
HRESULT KinectSensor::createFirstConnected() {
	// Finds the first connected Kinect, initializes color+depth streams, fetches
	// the depth->color pixel mapping table, and enables near mode when the
	// hardware (Kinect for Windows) supports it.
	INuiSensor* pNuiSensor = NULL;
	HRESULT hr = S_OK;

	int iSensorCount = 0;
	hr = NuiGetSensorCount(&iSensorCount);
	if (FAILED(hr) ) { return hr; }

	// Look at each Kinect sensor
	for (int i = 0; i < iSensorCount; ++i) {
		// Create the sensor so we can check status, if we can't create it, move on to the next
		hr = NuiCreateSensorByIndex(i, &pNuiSensor);
		if (FAILED(hr)) { continue; }
		// Get the status of the sensor, and if connected, then we can initialize it
		hr = pNuiSensor->NuiStatus();
		if (S_OK == hr) {
			m_pNuiSensor = pNuiSensor;
			break;
		}
		// This sensor wasn't OK, so release it since we're not using it
		pNuiSensor->Release();
	}

	if (NULL == m_pNuiSensor) { return E_FAIL; }

	// Initialize the Kinect and specify that we'll be using depth
	//hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX);
	hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH);
	if (FAILED(hr) ) { return hr; }

	// Create an event that will be signaled when depth data is available
	m_hNextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

	// Open a depth image stream to receive depth frames
	hr = m_pNuiSensor->NuiImageStreamOpen(
		NUI_IMAGE_TYPE_DEPTH, //NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
		cDepthResolution,
		(8000 << NUI_IMAGE_PLAYER_INDEX_SHIFT),	// NOTE(review): passed as the frame-flags argument — confirm intent
		2,
		m_hNextDepthFrameEvent,
		&m_pDepthStreamHandle);
	if (FAILED(hr) ) { return hr; }

	// Create an event that will be signaled when color data is available
	m_hNextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

	// Open a color image stream to receive color frames
	hr = m_pNuiSensor->NuiImageStreamOpen(
		NUI_IMAGE_TYPE_COLOR,
		cColorResolution,
		0,
		2,
		m_hNextColorFrameEvent,
		&m_pColorStreamHandle );
	if (FAILED(hr) ) { return hr; }

	// Probe a Kinect-for-Windows-only API to detect the hardware variant.
	INuiColorCameraSettings* colorCameraSettings;
	HRESULT hrFlag = m_pNuiSensor->NuiGetColorCameraSettings(&colorCameraSettings);
	// BUG FIX: previously tested 'hr' (the color-stream-open result) instead of
	// 'hrFlag', so m_kinect4Windows was set regardless of this probe's outcome.
	if (hrFlag != E_NUI_HARDWARE_FEATURE_UNAVAILABLE) {
		m_kinect4Windows = true;
		// NOTE(review): colorCameraSettings is never released — confirm its lifetime
	}

	//TODO MATTHIAS: does this function have to be called every frame?
	USHORT* test = new USHORT[getDepthWidth()*getDepthHeight()];
	// Get offset x, y coordinates for color in depth space
	// This will allow us to later compensate for the differences in location, angle, etc between the depth and color cameras
	m_pNuiSensor->NuiImageGetColorPixelCoordinateFrameFromDepthPixelFrameAtResolution(
		cColorResolution,
		cDepthResolution,
		getDepthWidth()*getDepthHeight(),
		test,
		getDepthWidth()*getDepthHeight()*2,
		m_colorCoordinates
		);
	SAFE_DELETE_ARRAY(test);

	// Start with near mode on (if possible)
	m_bNearMode = false;
	if (m_kinect4Windows) {
		toggleNearMode();
	}

	//toggleAutoWhiteBalance();
	return hr;
}
// Serializes all recorded depth/color frames, both cameras' calibration, and
// the recorded trajectory into a CalibratedSensorData binary dump. If the
// requested file exists, a numeric suffix is appended/incremented until a free
// name is found. Ownership of the recorded buffers transfers to 'cs', whose
// destructor frees them; the recording lists are cleared afterwards.
void RGBDSensor::saveRecordedFramesToFile( const std::string& filename ) {
	if (m_recordedDepthData.size() == 0 || m_recordedColorData.size() == 0) return;	// nothing recorded

	CalibratedSensorData cs;
	cs.m_DepthImageWidth = getDepthWidth();
	cs.m_DepthImageHeight = getDepthHeight();
	cs.m_ColorImageWidth = getColorWidth();
	cs.m_ColorImageHeight = getColorHeight();
	cs.m_DepthNumFrames = (unsigned int)m_recordedDepthData.size();
	cs.m_ColorNumFrames = (unsigned int)m_recordedColorData.size();

	// depth camera calibration (inverses precomputed for consumers)
	cs.m_CalibrationDepth.m_Intrinsic = getDepthIntrinsics();
	cs.m_CalibrationDepth.m_Extrinsic = getDepthExtrinsics();
	cs.m_CalibrationDepth.m_IntrinsicInverse = cs.m_CalibrationDepth.m_Intrinsic.getInverse();
	cs.m_CalibrationDepth.m_ExtrinsicInverse = cs.m_CalibrationDepth.m_Extrinsic.getInverse();

	// color camera calibration
	cs.m_CalibrationColor.m_Intrinsic = getColorIntrinsics();
	cs.m_CalibrationColor.m_Extrinsic = getColorExtrinsics();
	cs.m_CalibrationColor.m_IntrinsicInverse = cs.m_CalibrationColor.m_Intrinsic.getInverse();
	cs.m_CalibrationColor.m_ExtrinsicInverse = cs.m_CalibrationColor.m_Extrinsic.getInverse();

	// shallow-copy the frame pointers; 'cs' becomes the owner of the buffers
	cs.m_DepthImages.resize(cs.m_DepthNumFrames);
	cs.m_ColorImages.resize(cs.m_ColorNumFrames);
	unsigned int dFrame = 0;
	for (auto& a : m_recordedDepthData) {
		cs.m_DepthImages[dFrame] = a;
		dFrame++;
	}
	unsigned int cFrame = 0;
	for (auto& a : m_recordedColorData) {
		cs.m_ColorImages[cFrame] = a;
		cFrame++;
	}

	cs.m_trajectory = m_recordedTrajectory;

	std::cout << cs << std::endl;
	std::cout << "dumping recorded frames... ";

	// pick a file name that does not exist yet by incrementing a numeric suffix
	std::string actualFilename = filename;
	while (util::fileExists(actualFilename)) {
		std::string path = util::directoryFromPath(actualFilename);
		std::string curr = util::fileNameFromPath(actualFilename);
		std::string ext = util::getFileExtension(curr);
		curr = util::removeExtensions(curr);
		std::string base = util::getBaseBeforeNumericSuffix(curr);
		unsigned int num = util::getNumericSuffix(curr);
		if (num == (unsigned int)-1) {
			num = 0;	// no numeric suffix yet: next candidate gets "1"
		}
		actualFilename = path + base + std::to_string(num+1) + "." + ext;
	}
	BinaryDataStreamFile outStream(actualFilename, true);
	//BinaryDataStreamZLibFile outStream(filename, true);
	outStream << cs;
	std::cout << "done" << std::endl;

	// drop our references; 'cs' frees the buffers in its destructor
	m_recordedDepthData.clear();
	m_recordedColorData.clear();
	//destructor of cs frees all allocated data

	//m_recordedTrajectory.clear();
}
HRESULT KinectSensor::processDepth() {
	// Copies the next Kinect depth frame into the float depth buffer (x-flipped,
	// mm -> meters, 0 -> -inf sentinel) and refreshes the depth->color mapping.
	HRESULT hr = S_OK;

	//wait until data is available
	if (!(WAIT_OBJECT_0 == WaitForSingleObject(m_hNextDepthFrameEvent, 0))) return S_FALSE;

	// This code allows to get depth up to 8m
	BOOL bNearMode = false;
	if (m_kinect4Windows) {
		bNearMode = true;
	}

	INuiFrameTexture * pTexture = NULL;
	NUI_IMAGE_FRAME imageFrame;
	hr = m_pNuiSensor->NuiImageStreamGetNextFrame(m_pDepthStreamHandle, 0, &imageFrame);
	if ( FAILED(hr) ) { return hr; }	// FIX: result was previously ignored/overwritten
	hr = m_pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture(m_pDepthStreamHandle, &imageFrame, &bNearMode, &pTexture);
	if ( FAILED(hr) ) { return hr; }	// FIX: avoid LockRect on a NULL texture

	NUI_LOCKED_RECT LockedRect;
	hr = pTexture->LockRect(0, &LockedRect, NULL, 0);
	if ( FAILED(hr) ) { return hr; }

	NUI_DEPTH_IMAGE_PIXEL * pBuffer = (NUI_DEPTH_IMAGE_PIXEL *) LockedRect.pBits;

////#pragma omp parallel for
//	for (int j = 0; j < (int)getDepthWidth()*(int)getDepthHeight(); j++)	{
//		m_depthD16[j] = pBuffer[j].depth;
//	}

	// scratch buffer with scaled raw depth for the color-mapping call below
	USHORT* test = new USHORT[getDepthWidth()*getDepthHeight()];
	float* depth = getDepthFloat();
	for (unsigned int j = 0; j < getDepthHeight(); j++) {
		for (unsigned int i = 0; i < getDepthWidth(); i++) {
			unsigned int desIdx = j*getDepthWidth() + i;
			unsigned int srcIdx = j*getDepthWidth() + (getDepthWidth() - i - 1);	//x-flip of the kinect
			const USHORT& d = pBuffer[srcIdx].depth;
			if (d == 0) depth[desIdx] = -std::numeric_limits<float>::infinity();	// 0 marks invalid depth
			else depth[desIdx] = (float)d * 0.001f;	// millimeters -> meters
			test[srcIdx] = d *8;
		}
	}

	hr = pTexture->UnlockRect(0);
	if ( FAILED(hr) ) { SAFE_DELETE_ARRAY(test); return hr; }	// FIX: no longer leaks 'test' on this path
	hr = m_pNuiSensor->NuiImageStreamReleaseFrame(m_pDepthStreamHandle, &imageFrame);

	// Get offset x, y coordinates for color in depth space
	// This will allow us to later compensate for the differences in location, angle, etc between the depth and color cameras
	m_pNuiSensor->NuiImageGetColorPixelCoordinateFrameFromDepthPixelFrameAtResolution(
		cColorResolution,
		cDepthResolution,
		getDepthWidth()*getDepthHeight(),
		test,
		getDepthWidth()*getDepthHeight()*2,
		m_colorCoordinates
		);

	SAFE_DELETE_ARRAY(test);
	return hr;
}
float4* CUDARGBDSensor::getAndComputeDepthHSV() const {
	// Converts the current depth map into an HSV visualization buffer and returns it.
	const float depthMin = GlobalAppState::get().s_sensorDepthMin;
	const float depthMax = GlobalAppState::get().s_sensorDepthMax;
	depthToHSV(d_depthHSV, m_depthCameraData.d_depthData,
		getDepthWidth(), getDepthHeight(), depthMin, depthMax);
	return d_depthHSV;
}
// Per-frame CUDA preprocessing pipeline: (optionally bilateral-)filters color
// and depth, optionally re-renders depth into the color camera via the custom
// render target, optionally erodes depth, derives the intensity map, computes
// camera-space points and normals, and uploads depth/color into the CUDA
// arrays used downstream. Returns S_FALSE when the adapter has no new frame.
HRESULT CUDARGBDSensor::process(ID3D11DeviceContext* context) {
	HRESULT hr = S_OK;

	if (m_RGBDAdapter->process(context) == S_FALSE) return S_FALSE;

	////////////////////////////////////////////////////////////////////////////////////
	// Process Color
	////////////////////////////////////////////////////////////////////////////////////

	//Start Timing
	if (GlobalAppState::get().s_timingsDetailledEnabled) { cutilSafeCall(cudaDeviceSynchronize()); m_timer.start(); }

	// bilateral filter on color (when enabled) or a straight device copy
	if (m_bFilterIntensityValues) gaussFilterFloat4Map(m_depthCameraData.d_colorData, m_RGBDAdapter->getColorMapResampledFloat4(), m_fBilateralFilterSigmaDIntensity, m_fBilateralFilterSigmaRIntensity, m_RGBDAdapter->getWidth(), m_RGBDAdapter->getHeight());
	else copyFloat4Map(m_depthCameraData.d_colorData, m_RGBDAdapter->getColorMapResampledFloat4(), m_RGBDAdapter->getWidth(), m_RGBDAdapter->getHeight());

	// Stop Timing
	if (GlobalAppState::get().s_timingsDetailledEnabled) { cutilSafeCall(cudaDeviceSynchronize()); m_timer.stop(); TimingLog::totalTimeFilterColor += m_timer.getElapsedTimeMS(); TimingLog::countTimeFilterColor++; }

	////////////////////////////////////////////////////////////////////////////////////
	// Process Depth
	////////////////////////////////////////////////////////////////////////////////////

	//Start Timing
	if (GlobalAppState::get().s_timingsDetailledEnabled) { cutilSafeCall(cudaDeviceSynchronize()); m_timer.start(); }

	// bilateral filter on depth (when enabled) or a straight device copy
	if (m_bFilterDepthValues) gaussFilterFloatMap(d_depthMapFilteredFloat, m_RGBDAdapter->getDepthMapResampledFloat(), m_fBilateralFilterSigmaD, m_fBilateralFilterSigmaR, m_RGBDAdapter->getWidth(), m_RGBDAdapter->getHeight());
	else copyFloatMap(d_depthMapFilteredFloat, m_RGBDAdapter->getDepthMapResampledFloat(), m_RGBDAdapter->getWidth(), m_RGBDAdapter->getHeight());

	//TODO this call seems not needed as the depth map is overwritten later anyway...
	setInvalidFloatMap(m_depthCameraData.d_depthData, m_RGBDAdapter->getWidth(), m_RGBDAdapter->getHeight());

	// Stop Timing
	if (GlobalAppState::get().s_timingsDetailledEnabled) { cutilSafeCall(cudaDeviceSynchronize()); m_timer.stop(); TimingLog::totalTimeFilterDepth += m_timer.getElapsedTimeMS(); TimingLog::countTimeFilterDepth++; }

	////////////////////////////////////////////////////////////////////////////////////
	// Render to Color Space
	////////////////////////////////////////////////////////////////////////////////////

	//Start Timing
	if (GlobalAppState::get().s_timingsDetailledEnabled) { cutilSafeCall(cudaDeviceSynchronize()); m_timer.start(); }

	if (GlobalAppState::get().s_bUseCameraCalibration) {
		// re-render the filtered depth map from the depth camera into the color camera
		mat4f depthExt = m_RGBDAdapter->getDepthExtrinsics();

		g_CustomRenderTarget.Clear(context);
		g_CustomRenderTarget.Bind(context);
		g_RGBDRenderer.RenderDepthMap(context, d_depthMapFilteredFloat, m_depthCameraData.d_colorData, m_RGBDAdapter->getWidth(), m_RGBDAdapter->getHeight(), m_RGBDAdapter->getDepthIntrinsicsInv(), depthExt, m_RGBDAdapter->getColorIntrinsics(), g_CustomRenderTarget.getWidth(), g_CustomRenderTarget.getHeight(), GlobalAppState::get().s_remappingDepthDiscontinuityThresOffset, GlobalAppState::get().s_remappingDepthDiscontinuityThresLin);
		g_CustomRenderTarget.Unbind(context);
		g_CustomRenderTarget.copyToCuda(m_depthCameraData.d_depthData, 0);

		//Util::writeToImage(m_depthCameraData.d_depthData, getDepthWidth(), getDepthHeight(), "depth.png");
		//Util::writeToImage(m_depthCameraData.d_colorData, getDepthWidth(), getDepthHeight(), "color.png");
	} else {
		copyFloatMap(m_depthCameraData.d_depthData, d_depthMapFilteredFloat, m_RGBDAdapter->getWidth(), m_RGBDAdapter->getHeight());
	}

	// optional depth erosion (currently disabled): ping-pongs between the helper
	// buffer and the live depth map
	bool bErode = false;
	if (bErode) {
		unsigned int numIter = 20;
		numIter = 2 * ((numIter + 1) / 2);	// force an even count so the result lands back in d_depthData
		for (unsigned int i = 0; i < numIter; i++) {
			if (i % 2 == 0) {
				erodeDepthMap(d_depthErodeHelper, m_depthCameraData.d_depthData, 5, getDepthWidth(), getDepthHeight(), 0.05f, 0.3f);
			} else {
				erodeDepthMap(m_depthCameraData.d_depthData, d_depthErodeHelper, 5, getDepthWidth(), getDepthHeight(), 0.05f, 0.3f);
			}
		}
	}

	//TODO check whether the intensity is actually used
	convertColorToIntensityFloat(d_intensityMapFilteredFloat, m_depthCameraData.d_colorData, m_RGBDAdapter->getWidth(), m_RGBDAdapter->getHeight());

	// back-project depth to camera-space points, then estimate normals
	float4x4 M((m_RGBDAdapter->getColorIntrinsicsInv()).ptr());
	m_depthCameraData.updateParams(getDepthCameraParams());
	convertDepthFloatToCameraSpaceFloat4(d_cameraSpaceFloat4, m_depthCameraData.d_depthData, M, m_RGBDAdapter->getWidth(), m_RGBDAdapter->getHeight(), m_depthCameraData); // !!! todo
	computeNormals(d_normalMapFloat4, d_cameraSpaceFloat4, m_RGBDAdapter->getWidth(), m_RGBDAdapter->getHeight());

	// NOTE(review): Mintrinsics appears unused below — confirm it can be removed
	float4x4 Mintrinsics((m_RGBDAdapter->getColorIntrinsics()).ptr());

	// upload depth and color into the CUDA arrays consumed by later kernels
	cudaMemcpyToArray(m_depthCameraData.d_depthArray, 0, 0, m_depthCameraData.d_depthData, sizeof(float)*m_depthCameraParams.m_imageHeight*m_depthCameraParams.m_imageWidth, cudaMemcpyDeviceToDevice);
	cudaMemcpyToArray(m_depthCameraData.d_colorArray, 0, 0, m_depthCameraData.d_colorData, sizeof(float4)*m_depthCameraParams.m_imageHeight*m_depthCameraParams.m_imageWidth, cudaMemcpyDeviceToDevice);

	// Stop Timing
	if (GlobalAppState::get().s_timingsDetailledEnabled) { cutilSafeCall(cudaDeviceSynchronize()); m_timer.stop(); TimingLog::totalTimeRemapDepth += m_timer.getElapsedTimeMS(); TimingLog::countTimeRemapDepth++; }

	return hr;
}
ml::vec3f RGBDSensor::depthToSkeleton(unsigned int ux, unsigned int uy) const {
	// Convenience overload: back-projects (ux,uy) using the depth value stored
	// in the current ring-buffer slot.
	const float depthValue = m_depthFloat[m_currentRingBufIdx][uy*getDepthWidth() + ux];
	return depthToSkeleton(ux, uy, depthValue);
}