Imath::Box3d HoudiniScene::readBound( double time ) const
{
	OP_Node *node = retrieveNode( true );

	Imath::Box3d bounds;
	UT_BoundingBox box;
	OP_Context context( time );
	/// \todo: this doesn't account for SOPs containing multiple shapes
	/// if we fix it, we need to fix the condition below as well
	if ( node->getBoundingBox( box, context ) )
	{
		bounds = IECore::convert<Imath::Box3d>( box );
	}

	// paths embedded within a sop already have bounds accounted for
	if ( m_contentIndex )
	{
		return bounds;
	}

	NameList children;
	childNames( children );
	for ( NameList::iterator it=children.begin(); it != children.end(); ++it )
	{
		ConstSceneInterfacePtr childScene = child( *it );
		Imath::Box3d childBound = childScene->readBound( time );
		if ( !childBound.isEmpty() )
		{
			bounds.extendBy( Imath::transform( childBound, childScene->readTransformAsMatrix( time ) ) );
		}
	}

	return bounds;
}
static Imath::Box3d getBoundingBox(const V3d& offset,
                                   const std::vector<float>& vertices)
{
    Imath::Box3d bbox;
    for (size_t i = 0; i < vertices.size(); i += 3)
        bbox.extendBy(V3d(vertices[i], vertices[i+1], vertices[i+2]));
    if (!bbox.isEmpty())
    {
        bbox.min += offset;
        bbox.max += offset;
    }
    return bbox;
}
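// Illustrative usage sketch for getBoundingBox() above (not part of the
// original source; assumes the same translation unit, so Imath and the V3d
// typedef are already available). Two vertices are packed as a flat x,y,z
// float array and the resulting box is shifted into world space by `offset`.
static void getBoundingBoxExample()
{
    std::vector<float> verts = { 0.0f, 0.0f, 0.0f,
                                 1.0f, 2.0f, 3.0f };
    Imath::Box3d b = getBoundingBox(V3d(100.0, 0.0, 0.0), verts);
    // b.min == (100, 0, 0), b.max == (101, 2, 3)
    (void)b;
}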
/// Load point cloud in text format, assuming fields XYZ
bool PointArray::loadText(QString fileName, size_t maxPointCount,
                          std::vector<GeomField>& fields, V3d& offset,
                          size_t& npoints, uint64_t& totPoints,
                          Imath::Box3d& bbox, V3d& centroid)
{
    V3d Psum(0);
    // Use C file IO here, since it's about 40% faster than C++ streams for
    // large text files (tested on linux x86_64, gcc 4.6.3).
    FILE* inFile = fopen(fileName.toUtf8(), "r");
    if (!inFile)
        return false;
    fseek(inFile, 0, SEEK_END);
    const size_t numBytes = ftell(inFile);
    fseek(inFile, 0, SEEK_SET);
    std::vector<Imath::V3d> points;
    Imath::V3d p;
    size_t readCount = 0;
    // Read three doubles; "%*[^\n]" discards up to just before end of line
    while (fscanf(inFile, " %lf %lf %lf%*[^\n]", &p.x, &p.y, &p.z) == 3)
    {
        points.push_back(p);
        ++readCount;
        if (readCount % 10000 == 0)
            emit loadProgress(int(100*ftell(inFile)/numBytes));
    }
    fclose(inFile);
    totPoints = points.size();
    npoints = points.size();
    // Zero points + nonzero bytes => bad text file
    if (totPoints == 0 && numBytes != 0)
        return false;
    if (totPoints > 0)
        offset = points[0];
    fields.push_back(GeomField(TypeSpec::vec3float32(), "position", npoints));
    V3f* position = (V3f*)fields[0].as<float>();
    for (size_t i = 0; i < npoints; ++i)
    {
        position[i] = points[i] - offset;
        bbox.extendBy(points[i]);
        Psum += points[i];
    }
    if (npoints > 0)
        centroid = (1.0/npoints)*Psum;
    return true;
}
Imath::Box3d LiveScene::readBound( double time ) const
{
	tbb::mutex::scoped_lock l( s_mutex );

	if( fabs( MAnimControl::currentTime().as( MTime::kSeconds ) - time ) > 1.e-4 )
	{
		throw Exception( "IECoreMaya::LiveScene::readBound: time must be the same as on the maya timeline!" );
	}

	if( m_isRoot )
	{
		MDagPathArray paths;
		getChildDags( m_dagPath, paths );

		Imath::Box3d bound;

		for( unsigned i=0; i < paths.length(); ++i )
		{
			MFnDagNode dagFn( paths[i] );
			Imath::Box3d b = IECore::convert<Imath::Box3d, MBoundingBox>( dagFn.boundingBox() );

			if( b.hasVolume() )
			{
				bound.extendBy( b );
			}
		}

		return bound;
	}
	else if( m_dagPath.length() == 0 )
	{
		throw Exception( "IECoreMaya::LiveScene::readBound: Dag path no longer exists!" );
	}
	else
	{
		MFnDagNode dagFn( m_dagPath );
		Imath::Box3d ret = IECore::convert<Imath::Box3d, MBoundingBox>( dagFn.boundingBox() );
		Imath::M44d invTransform = IECore::convert<Imath::M44d, MMatrix>( dagFn.transformationMatrix() ).inverse();
		return Imath::transform( ret, invTransform );
	}
}
/// Load point cloud in PLY format using the rply library
bool PointArray::loadPly(QString fileName, size_t maxPointCount,
                         std::vector<GeomField>& fields, V3d& offset,
                         size_t& npoints, uint64_t& totPoints,
                         Imath::Box3d& bbox, V3d& centroid)
{
    std::unique_ptr<t_ply_, int(*)(p_ply)> ply(
        ply_open(fileName.toUtf8().constData(), NULL, 0, NULL), ply_close);
    if (!ply || !ply_read_header(ply.get()))
        return false;
    // Parse out header data
    p_ply_element vertexElement = findVertexElement(ply.get(), npoints);
    if (vertexElement)
    {
        if (!loadPlyVertexProperties(fileName, ply.get(), vertexElement, fields, offset, npoints))
            return false;
    }
    else
    {
        if (!loadDisplazNativePly(fileName, ply.get(), fields, offset, npoints))
            return false;
    }
    totPoints = npoints;

    // Compute bounding box and centroid
    const V3f* P = (V3f*)fields[0].as<float>();
    V3d Psum(0);
    for (size_t i = 0; i < npoints; ++i)
    {
        Psum += P[i];
        bbox.extendBy(P[i]);
    }
    if (npoints > 0)
        centroid = (1.0/npoints) * Psum;
    centroid += offset;
    bbox.min += offset;
    bbox.max += offset;
    return true;
}
bool PointArray::loadFile(QString fileName, size_t maxPointCount)
{
    QTime loadTimer;
    loadTimer.start();
    setFileName(fileName);
    // Read file into point data fields.  Use very basic file type detection
    // based on extension.
    uint64_t totPoints = 0;
    Imath::Box3d bbox;
    V3d offset(0);
    V3d centroid(0);
    emit loadStepStarted("Reading file");
    if (fileName.endsWith(".las") || fileName.endsWith(".laz"))
    {
        if (!loadLas(fileName, maxPointCount, m_fields, offset,
                     m_npoints, totPoints, bbox, centroid))
        {
            return false;
        }
    }
    else if (fileName.endsWith(".ply"))
    {
        if (!loadPly(fileName, maxPointCount, m_fields, offset,
                     m_npoints, totPoints, bbox, centroid))
        {
            return false;
        }
    }
#if 0
    else if (fileName.endsWith(".dat"))
    {
        // Load crappy db format for debugging
        std::ifstream file(fileName.toUtf8(), std::ios::binary);
        file.seekg(0, std::ios::end);
        totPoints = file.tellg()/(4*sizeof(float));
        file.seekg(0);
        m_fields.push_back(GeomField(TypeSpec::vec3float32(), "position", totPoints));
        m_fields.push_back(GeomField(TypeSpec::float32(), "intensity", totPoints));
        float* position = m_fields[0].as<float>();
        float* intensity = m_fields[1].as<float>();
        for (size_t i = 0; i < totPoints; ++i)
        {
            file.read((char*)position, 3*sizeof(float));
            file.read((char*)intensity, 1*sizeof(float));
            bbox.extendBy(V3d(position[0], position[1], position[2]));
            position += 3;
            intensity += 1;
        }
        m_npoints = totPoints;
    }
#endif
    else
    {
        // Last resort: try loading as text
        if (!loadText(fileName, maxPointCount, m_fields, offset,
                      m_npoints, totPoints, bbox, centroid))
        {
            return false;
        }
    }
    // Search for position field
    m_positionFieldIdx = -1;
    for (size_t i = 0; i < m_fields.size(); ++i)
    {
        if (m_fields[i].name == "position" && m_fields[i].spec.count == 3)
        {
            m_positionFieldIdx = (int)i;
            break;
        }
    }
    if (m_positionFieldIdx == -1)
    {
        g_logger.error("No position field found in file %s", fileName);
        return false;
    }
    m_P = (V3f*)m_fields[m_positionFieldIdx].as<float>();
    setBoundingBox(bbox);
    setOffset(offset);
    setCentroid(centroid);
    emit loadProgress(100);
    g_logger.info("Loaded %d of %d points from file %s in %.2f seconds",
                  m_npoints, totPoints, fileName, loadTimer.elapsed()/1000.0);
    if (totPoints == 0)
    {
        m_rootNode.reset(new OctreeNode(V3f(0), 1));
        return true;
    }

    // Sort points into octree order
    emit loadStepStarted("Sorting points");
    std::unique_ptr<size_t[]> inds(new size_t[m_npoints]);
    for (size_t i = 0; i < m_npoints; ++i)
        inds[i] = i;
    // Expand the bound so that it's cubic.  Not exactly sure it's required
    // here, but cubic nodes sometimes work better - the points are better
    // distributed for LoD, splitting is unbiased, etc.
    Imath::Box3f rootBound(bbox.min - offset, bbox.max - offset);
    V3f diag = rootBound.size();
    float rootRadius = std::max(std::max(diag.x, diag.y), diag.z) / 2;
    ProgressFunc progressFunc(*this);
    m_rootNode.reset(makeTree(0, &inds[0], 0, m_npoints, &m_P[0],
                              rootBound.center(), rootRadius, progressFunc));
    // Reorder point fields into octree order
    emit loadStepStarted("Reordering fields");
    for (size_t i = 0; i < m_fields.size(); ++i)
    {
        g_logger.debug("Reordering field %d: %s", i, m_fields[i]);
        reorder(m_fields[i], inds.get(), m_npoints);
        emit loadProgress(int(100*(i+1)/m_fields.size()));
    }
    m_P = (V3f*)m_fields[m_positionFieldIdx].as<float>();
    return true;
}
// Triangulate a polygon, returning vertex indices to the triangles
//
// verts - 3D vertex positions
// outerRingInds - Indices of outer boundary vertices in `verts` array;
//                 the j'th component of the i'th polygon vertex is
//                 verts[3*outerRingInds[i]+j].
// innerRingSizes - Number of vertices in each polygon hole
// innerRingInds - Indices of vertices for all holes; the first hole has
//                 indices innerRingInds[i] for i in [0, innerRingSizes[0])
// triangleInds - Indices of triangular pieces are appended to this array.
//
// Return false if polygon couldn't be triangulated.
static bool triangulatePolygon(const std::vector<float>& verts,
                               const std::vector<GLuint>& outerRingInds,
                               const std::vector<GLuint>& innerRingSizes,
                               const std::vector<GLuint>& innerRingInds,
                               std::vector<GLuint>& triangleInds)
{
    // Figure out which dimension the bounding box is smallest along, and
    // discard it to reduce dimensionality to 2D.
    Imath::Box3d bbox;
    for (size_t i = 0; i < outerRingInds.size(); ++i)
    {
        GLuint j = 3*outerRingInds[i];
        assert(j + 2 < verts.size());
        bbox.extendBy(V3d(verts[j], verts[j+1], verts[j+2]));
    }
    V3d diag = bbox.size();
    int xind = 0;
    int yind = 1;
    if (diag.z > std::min(diag.x, diag.y))
    {
        if (diag.x < diag.y)
            xind = 2;
        else
            yind = 2;
    }
    std::list<TPPLPoly> triangles;
    if (innerRingSizes.empty())
    {
        // Simple case - no holes
        //
        // TODO: Use Triangulate_MONO, after figuring out why it's not always
        // working?
        TPPLPoly poly;
        initTPPLPoly(poly, verts, xind, yind,
                     outerRingInds.data(), (int)outerRingInds.size(), false);
        TPPLPartition polypartition;
        if (!polypartition.Triangulate_EC(&poly, &triangles))
        {
            g_logger.warning("Ignoring polygon which couldn't be triangulated.");
            return false;
        }
    }
    else
    {
        // Set up outer polygon boundary and holes.
        std::list<TPPLPoly> inputPolys;
        inputPolys.resize(1 + innerRingSizes.size());
        auto polyIter = inputPolys.begin();
        initTPPLPoly(*polyIter, verts, xind, yind,
                     outerRingInds.data(), (int)outerRingInds.size(), false);
        ++polyIter;
        for (size_t i = 0, j = 0; i < innerRingSizes.size(); ++i, ++polyIter)
        {
            size_t count = innerRingSizes[i];
            if (j + count > innerRingInds.size())
            {
                g_logger.warning("Ignoring polygon with inner ring %d of %d too large",
                                 i, innerRingSizes.size());
                return false;
            }
            initTPPLPoly(*polyIter, verts, xind, yind,
                         innerRingInds.data() + j, (int)count, true);
            j += count;
        }
        // Triangulate
        std::list<TPPLPoly> noHolePolys;
        TPPLPartition polypartition;
        polypartition.RemoveHoles(&inputPolys, &noHolePolys);
        if (!polypartition.Triangulate_EC(&noHolePolys, &triangles))
        {
            g_logger.warning("Ignoring polygon which couldn't be triangulated.");
            return false;
        }
    }
    for (auto tri = triangles.begin(); tri != triangles.end(); ++tri)
    {
        triangleInds.push_back((*tri)[0].id);
        triangleInds.push_back((*tri)[1].id);
        triangleInds.push_back((*tri)[2].id);
    }
    return true;
}
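// Illustrative usage sketch for triangulatePolygon() above (not part of the
// original source; assumes the same translation unit, so GLuint and the
// polypartition types are already in scope). A unit square in the XY plane
// with no holes decomposes into two triangles, i.e. six appended indices.
static void triangulatePolygonExample()
{
    std::vector<float> verts = {
        0.0f, 0.0f, 0.0f,
        1.0f, 0.0f, 0.0f,
        1.0f, 1.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
    };
    std::vector<GLuint> outerRing = { 0, 1, 2, 3 };
    std::vector<GLuint> holeSizes;  // no holes
    std::vector<GLuint> holeInds;
    std::vector<GLuint> tris;
    if (triangulatePolygon(verts, outerRing, holeSizes, holeInds, tris))
    {
        // tris now holds 6 indices describing the two triangles.
    }
}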
bool PointArray::loadLas(QString fileName, size_t maxPointCount,
                         std::vector<GeomField>& fields, V3d& offset,
                         size_t& npoints, uint64_t& totalPoints,
                         Imath::Box3d& bbox, V3d& centroid)
{
    V3d Psum(0);
#ifdef DISPLAZ_USE_PDAL
    // Open file
    if (!pdal::FileUtils::fileExists(fileName.toLatin1().constData()))
    {
        g_logger.info("File \"%s\" does not exist", fileName.toStdString());
        return false;
    }
    std::unique_ptr<pdal::PipelineManager> manager(new pdal::PipelineManager);
    pdal::StageFactory factory;
    std::string driver = factory.inferReaderDriver(fileName.toStdString());
    manager->addReader(driver);
    pdal::Stage* reader = static_cast<pdal::Reader*>(manager->getStage());
    pdal::Options options;
    pdal::Option fname("filename", fileName.toStdString());
    options.add(fname);
    reader->setOptions(options);
    manager->execute();
    pdal::PointBufferSet pbSet = manager->buffers();
    pdal::PointContext context = manager->context();
    bool hasColor = context.hasDim(pdal::Dimension::Id::Red);
    pdal::QuickInfo quickinfo = reader->preview();
    // Figure out how much to decimate the point cloud.
    totalPoints = quickinfo.m_pointCount;
    size_t decimate = totalPoints == 0 ? 1 : 1 + (totalPoints - 1) / maxPointCount;
    if (decimate > 1)
    {
        g_logger.info("Decimating \"%s\" by factor of %d",
                      fileName.toStdString(), decimate);
    }
    npoints = (totalPoints + decimate - 1) / decimate;
    pdal::BOX3D pdal_bounds = quickinfo.m_bounds;
    offset = V3d(0.5*(pdal_bounds.minx + pdal_bounds.maxx),
                 0.5*(pdal_bounds.miny + pdal_bounds.maxy),
                 0.5*(pdal_bounds.minz + pdal_bounds.maxz));
    // Attempt to place all data on the same vertical scale, but allow
    // other offsets if the magnitude of z is too large (and we would
    // therefore lose noticeable precision by storing the data as floats)
    if (fabs(offset.z) < 10000)
        offset.z = 0;
    // Allocate all arrays
    fields.push_back(GeomField(TypeSpec::vec3float32(), "position", npoints));
    fields.push_back(GeomField(TypeSpec::uint16_i(), "intensity", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "returnNumber", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "numberOfReturns", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "pointSourceId", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "classification", npoints));
    // Output iterators for the output arrays
    V3f* position           = (V3f*)fields[0].as<float>();
    uint16_t* intensity     = fields[1].as<uint16_t>();
    uint8_t* returnNumber   = fields[2].as<uint8_t>();
    uint8_t* numReturns     = fields[3].as<uint8_t>();
    uint8_t* pointSourceId  = fields[4].as<uint8_t>();
    uint8_t* classification = fields[5].as<uint8_t>();
    uint16_t* color = 0;
    if (hasColor)
    {
        fields.push_back(GeomField(TypeSpec(TypeSpec::Uint,2,3,TypeSpec::Color),
                                   "color", npoints));
        color = fields.back().as<uint16_t>();
    }
    size_t readCount = 0;
    size_t storeCount = 0;
    size_t nextDecimateBlock = 1;
    size_t nextStore = 1;
    for (auto st = pbSet.begin(); st != pbSet.end(); ++st)
    // while (size_t numRead = chunkIter->read(buf))
    {
        pdal::PointBufferPtr buf = *st;
        for (size_t i = 0; i < buf->size(); ++i)
        {
            ++readCount;
            V3d P = V3d(buf->getFieldAs<double>(pdal::Dimension::Id::X, i),
                        buf->getFieldAs<double>(pdal::Dimension::Id::Y, i),
                        buf->getFieldAs<double>(pdal::Dimension::Id::Z, i));
            // V3d P = V3d(xDim.applyScaling(buf.getField<int32_t>(xDim, i)),
            //             yDim.applyScaling(buf.getField<int32_t>(yDim, i)),
            //             zDim.applyScaling(buf.getField<int32_t>(zDim, i)));
            bbox.extendBy(P);
            Psum += P;
            if (readCount < nextStore)
                continue;
            ++storeCount;
            // Store the point
            *position++ = P - offset;
            *intensity++ = buf->getFieldAs<uint16_t>(pdal::Dimension::Id::Intensity, i);
            *returnNumber++ = buf->getFieldAs<uint8_t>(pdal::Dimension::Id::ReturnNumber, i);
            *numReturns++ = buf->getFieldAs<uint8_t>(pdal::Dimension::Id::NumberOfReturns, i);
            *pointSourceId++ = buf->getFieldAs<uint8_t>(pdal::Dimension::Id::PointSourceId, i);
            *classification++ = buf->getFieldAs<uint8_t>(pdal::Dimension::Id::Classification, i);
            // Extract point RGB
            if (hasColor)
            {
                *color++ = buf->getFieldAs<uint16_t>(pdal::Dimension::Id::Red, i);
                *color++ = buf->getFieldAs<uint16_t>(pdal::Dimension::Id::Green, i);
                *color++ = buf->getFieldAs<uint16_t>(pdal::Dimension::Id::Blue, i);
            }
            // Figure out which point will be the next stored point.
            nextDecimateBlock += decimate;
            nextStore = nextDecimateBlock;
            if (decimate > 1)
            {
                // Randomize selected point within block to avoid repeated patterns
                nextStore += (qrand() % decimate);
                if (nextDecimateBlock <= totalPoints && nextStore > totalPoints)
                    nextStore = totalPoints;
            }
        }
        emit loadProgress(100*readCount/totalPoints);
    }
#else
    LASreadOpener lasReadOpener;
#ifdef _WIN32
    // Hack: liblas doesn't like forward slashes as path separators on windows
    fileName = fileName.replace('/', '\\');
#endif
    lasReadOpener.set_file_name(fileName.toLatin1().constData());
    std::unique_ptr<LASreader> lasReader(lasReadOpener.open());
    if (!lasReader)
    {
        g_logger.error("Couldn't open file \"%s\"", fileName);
        return false;
    }
    //std::ofstream dumpFile("points.txt");
    // Figure out how much to decimate the point cloud.
    totalPoints = std::max<uint64_t>(lasReader->header.extended_number_of_point_records,
                                     lasReader->header.number_of_point_records);
    size_t decimate = totalPoints == 0 ? 1 : 1 + (totalPoints - 1) / maxPointCount;
    if (decimate > 1)
    {
        g_logger.info("Decimating \"%s\" by factor of %d",
                      fileName.toStdString(), decimate);
    }
    npoints = (totalPoints + decimate - 1) / decimate;
    offset = V3d(lasReader->header.min_x, lasReader->header.min_y, 0);
    // Attempt to place all data on the same vertical scale, but allow other
    // offsets if the magnitude of z is too large (and we would therefore lose
    // noticeable precision by storing the data as floats)
    if (fabs(lasReader->header.min_z) > 10000)
        offset.z = lasReader->header.min_z;
    fields.push_back(GeomField(TypeSpec::vec3float32(), "position", npoints));
    fields.push_back(GeomField(TypeSpec::uint16_i(), "intensity", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "returnNumber", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "numberOfReturns", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "pointSourceId", npoints));
    fields.push_back(GeomField(TypeSpec::uint8_i(), "classification", npoints));
    if (totalPoints == 0)
    {
        g_logger.warning("File %s has zero points", fileName);
        return true;
    }
    // Iterate over all points & pull in the data.
    V3f* position           = (V3f*)fields[0].as<float>();
    uint16_t* intensity     = fields[1].as<uint16_t>();
    uint8_t* returnNumber   = fields[2].as<uint8_t>();
    uint8_t* numReturns     = fields[3].as<uint8_t>();
    uint8_t* pointSourceId  = fields[4].as<uint8_t>();
    uint8_t* classification = fields[5].as<uint8_t>();
    uint64_t readCount = 0;
    uint64_t nextDecimateBlock = 1;
    uint64_t nextStore = 1;
    size_t storeCount = 0;
    if (!lasReader->read_point())
        return false;
    const LASpoint& point = lasReader->point;
    uint16_t* color = 0;
    if (point.have_rgb)
    {
        fields.push_back(GeomField(TypeSpec(TypeSpec::Uint,2,3,TypeSpec::Color),
                                   "color", npoints));
        color = fields.back().as<uint16_t>();
    }
    do
    {
        // Read a point from the las file
        ++readCount;
        if (readCount % 10000 == 0)
            emit loadProgress(100*readCount/totalPoints);
        V3d P = V3d(point.get_x(), point.get_y(), point.get_z());
        bbox.extendBy(P);
        Psum += P;
        if (readCount < nextStore)
            continue;
        ++storeCount;
        // Store the point
        *position++ = P - offset;
        // float intens = float(point.scan_angle_rank) / 40;
        *intensity++ = point.intensity;
        *returnNumber++ = point.return_number;
#       if LAS_TOOLS_VERSION >= 140315
        *numReturns++ = point.number_of_returns;
#       else
        *numReturns++ = point.number_of_returns_of_given_pulse;
#       endif
        *pointSourceId++ = point.point_source_ID;
        // Put flags back in classification byte to avoid memory bloat
        *classification++ = point.classification | (point.synthetic_flag << 5) |
                            (point.keypoint_flag << 6) | (point.withheld_flag << 7);
        // Extract point RGB
        if (color)
        {
            *color++ = point.rgb[0];
            *color++ = point.rgb[1];
            *color++ = point.rgb[2];
        }
        // Figure out which point will be the next stored point.
        nextDecimateBlock += decimate;
        nextStore = nextDecimateBlock;
        if (decimate > 1)
        {
            // Randomize selected point within block to avoid repeated patterns
            nextStore += (qrand() % decimate);
            if (nextDecimateBlock <= totalPoints && nextStore > totalPoints)
                nextStore = totalPoints;
        }
    } while (lasReader->read_point());
    lasReader->close();
#endif
    if (readCount < totalPoints)
    {
        g_logger.warning("Expected %d points in file \"%s\", got %d",
                         totalPoints, fileName, readCount);
        npoints = storeCount;
        // Shrink all fields to fit - these will have wasted space at the end,
        // but that will be fixed during reordering.
        for (size_t i = 0; i < fields.size(); ++i)
            fields[i].size = npoints;
        totalPoints = readCount;
    }
    if (totalPoints > 0)
        centroid = (1.0/totalPoints)*Psum;
    return true;
}
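// Standalone sketch (illustrative names, not from the original source;
// assumes the same translation unit for uint64_t and std::rand) of the
// decimation scheme used in loadLas() above: the input is treated as
// consecutive blocks of `decimate` points and one point per block is kept,
// at a randomised offset within the block so that regularly ordered inputs
// don't produce banding patterns. The original additionally clamps the last
// store index to totalPoints so the final block always contributes a point.
struct BlockDecimator
{
    uint64_t decimate;
    uint64_t nextDecimateBlock = 1;
    uint64_t nextStore = 1;

    // Call with readCount = 1, 2, 3, ...; returns true if this point
    // should be stored.
    bool keep(uint64_t readCount)
    {
        if (readCount < nextStore)
            return false;
        nextDecimateBlock += decimate;
        nextStore = nextDecimateBlock;
        if (decimate > 1)
            nextStore += std::rand() % decimate;  // stands in for qrand()
        return true;
    }
};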