TempOctree::TempOctree(const char* lidarFileName)
	:tempFileName(0),
	 file(getLidarPartFileName(lidarFileName,"Points").c_str(),IO::File::ReadOnly)
	{
	file.setEndianness(Misc::LittleEndian);
	
	/* Open the LiDAR file's index file: */
	LidarFile indexFile(getLidarPartFileName(lidarFileName,"Index").c_str(),IO::File::ReadOnly);
	indexFile.setEndianness(Misc::LittleEndian);
	
	/* Read the octree file header: */
	LidarOctreeFileHeader ofh(indexFile);
	
	/* Initialize the tree structure: */
	maxNumPointsPerNode=ofh.maxNumPointsPerNode;
	pointBbox=Box(ofh.domain.getMin(),ofh.domain.getMax());
	
	/* Read the root node's structure: */
	LidarOctreeFileNode rootfn;
	rootfn.read(indexFile);
	root.domain=ofh.domain;
	root.numPoints=rootfn.numPoints;
	root.pointsOffset=sizeof(LidarDataFileHeader)+rootfn.dataOffset*sizeof(LidarPoint);
	root.children=0;
	
	/* Read the entire octree structure: */
	readLidarSubtree(root,rootfn.childrenOffset,indexFile);
	}
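/*
Usage sketch (not part of the original source; a minimal illustration assuming
the constructor above and a hypothetical file path "/data/scan.lidar"):

	// Build a temporary octree directly from an existing LiDAR file; the
	// "Points" and "Index" part files are located via getLidarPartFileName:
	TempOctree tempOctree("/data/scan.lidar");
*/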
void abb_file_suite::AbbMotionFtpDownloader::handleJointTrajectory(const trajectory_msgs::JointTrajectory& traj)
{
  ROS_INFO_STREAM("Trajectory received");

  // Generate temporary file with appropriate RAPID code
  std::ofstream ofh("/tmp/mGodel_blend.mod");
  if (!ofh)
  {
    ROS_WARN_STREAM("Could not create file");
    return;
  }

  // Convert each trajectory point into the intermediate representation used by the RAPID emitter
  std::vector<rapid_emitter::TrajectoryPt> pts;
  pts.reserve(traj.points.size());

  for (std::size_t i = 0; i < traj.points.size(); ++i)
  {
    // Joint positions are converted to degrees for the ABB controller
    std::vector<double> tmp = toDegrees(traj.points[i].positions);

    // Compensate for joint 2/3 coupling if the robot requires it
    if (j23_coupled_)
      linkageAdjust(tmp);

    // Timing: duration between this point and the previous one
    double duration = 0.0;
    if (i > 0)
    {
      duration = (traj.points[i].time_from_start - traj.points[i - 1].time_from_start).toSec();
    }

    rapid_emitter::TrajectoryPt pt(tmp, duration);
    pts.push_back(pt);
  }

  rapid_emitter::ProcessParams params;
  params.wolf = false;

  rapid_emitter::emitJointTrajectoryFile(ofh, pts, params);
  ofh.close();

  // Send to controller
  if (!uploadFile(ip_ + "/PARTMODULES", "/tmp/mGodel_blend.mod"))
  {
    ROS_WARN("Could not upload joint trajectory to remote ftp server");
  }
}
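/*
Wiring sketch (not part of the original source; a minimal illustration assuming
a constructed AbbMotionFtpDownloader instance named "downloader", a node handle
"nh", and the hypothetical topic name "joint_path_command"):

	ros::Subscriber sub = nh.subscribe("joint_path_command", 1,
	                                   &abb_file_suite::AbbMotionFtpDownloader::handleJointTrajectory,
	                                   &downloader);
	ros::spin(); // handleJointTrajectory runs for every received JointTrajectory message
*/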
LidarProcessOctree::LidarProcessOctree(const char* lidarFileName,size_t sCacheSize)
	:indexFile(getLidarPartFileName(lidarFileName,"Index").c_str()),
	 pointsFile(getLidarPartFileName(lidarFileName,"Points").c_str()),
	 offset(OffsetVector::zero),
	 numSubdivideCalls(0),numLoadedNodes(0),
	 lruHead(0),lruTail(0)
	{
	indexFile.setEndianness(Misc::LittleEndian);
	pointsFile.setEndianness(Misc::LittleEndian);
	
	/* Read the octree file header: */
	LidarOctreeFileHeader ofh(indexFile);
	
	/* Initialize the root node's domain: */
	root.domain=ofh.domain;
	
	/* Initialize the tree structure: */
	maxNumPointsPerNode=ofh.maxNumPointsPerNode;
	
	/* Calculate the memory cache size in nodes: */
	size_t memNodeSize=sizeof(Node)+size_t(maxNumPointsPerNode)*sizeof(LidarPoint);
	cacheSize=(unsigned int)(sCacheSize/memNodeSize);
	if(cacheSize==0U)
		Misc::throwStdErr("LidarProcessOctree::LidarProcessOctree: Specified memory cache size too small");
	std::cout<<"Cache size: "<<cacheSize<<" memory nodes"<<std::endl;
	
	/* Read the root node's structure: */
	LidarOctreeFileNode rootfn;
	rootfn.read(indexFile);
	root.childrenOffset=rootfn.childrenOffset;
	root.numPoints=rootfn.numPoints;
	root.dataOffset=rootfn.dataOffset;
	root.detailSize=rootfn.detailSize;
	
	/* Get the total number of nodes by dividing the index file's size by the size of one octree node: */
	numNodes=size_t((indexFile.getSize()-LidarOctreeFileHeader::getFileSize())/LidarFile::Offset(LidarOctreeFileNode::getFileSize()));
	
	/* Read the point file's header: */
	LidarDataFileHeader dfh(pointsFile);
	pointsRecordSize=LidarFile::Offset(dfh.recordSize);
	
	if(root.numPoints>0)
		{
		/* Load the root node's points: */
		root.points=new LidarPoint[maxNumPointsPerNode]; // Always allocate maximum to prevent memory fragmentation
		pointsFile.setReadPosAbs(LidarDataFileHeader::getFileSize()+pointsRecordSize*root.dataOffset);
		pointsFile.read(root.points,root.numPoints);
		}
	++numLoadedNodes;
	
	/* Try loading an offset file: */
	try
		{
		IO::FilePtr offsetFile(IO::openFile(getLidarPartFileName(lidarFileName,"Offset").c_str()));
		offsetFile->setEndianness(Misc::LittleEndian);
		
		/* Read the original offset vector: */
		offsetFile->read<OffsetVector::Scalar>(offset.getComponents(),3);
		
		/* Invert the offset transformation: */
		offset=-offset;
		}
	catch(IO::File::OpenError err)
		{
		/* Ignore the error */
		}
	
	/* Initialize the node cache: */
	numCachedNodes=1U;
	}
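/*
Usage sketch (not part of the original source; a minimal illustration assuming
a hypothetical file path "/data/scan.lidar" and a 512 MiB in-memory node cache):

	// The second argument is the cache size in bytes; the constructor converts
	// it to a node count using sizeof(Node) plus the per-node point storage:
	LidarProcessOctree lpo("/data/scan.lidar", size_t(512)*1024*1024);
*/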