Code example #1
File: LASFilter.cpp  Project: imight/trunk
CC_FILE_ERROR LASFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, CCVector3d* coordinatesShift/*=0*/)
{
	//opening file
	std::ifstream ifs;
	ifs.open(filename, std::ios::in | std::ios::binary);

	if (ifs.fail())
		return CC_FERR_READING;

	liblas::Reader* reader = 0;
	unsigned nbOfPoints = 0;
	std::vector<std::string> dimensions;

	try
	{
		reader = new liblas::Reader(liblas::ReaderFactory().CreateWithStream(ifs));	//using factory for automatic and transparent
		//handling of compressed/uncompressed files
		liblas::Header const& header = reader->GetHeader();

		ccLog::PrintDebug(QString("[LAS FILE] %1 - signature: %2").arg(filename).arg(header.GetFileSignature().c_str()));

		//get fields present in file
		dimensions = header.GetSchema().GetDimensionNames();

		//and of course the number of points
		nbOfPoints = header.GetPointRecordsCount();
	}
	catch (...)
	{
		delete reader;
		ifs.close();
		return CC_FERR_READING;
	}

	if (nbOfPoints==0)
	{
		//strange file ;)
		delete reader;
		ifs.close();
		return CC_FERR_NO_LOAD;
	}

	//dialog to choose the fields to load
	if (!s_lasOpenDlg)
		s_lasOpenDlg = QSharedPointer<LASOpenDlg>(new LASOpenDlg());
	s_lasOpenDlg->setDimensions(dimensions);
	if (alwaysDisplayLoadDialog && !s_lasOpenDlg->autoSkipMode() && !s_lasOpenDlg->exec())
	{
		delete reader;
		ifs.close();
		return CC_FERR_CANCELED_BY_USER;
	}
	bool ignoreDefaultFields = s_lasOpenDlg->ignoreDefaultFieldsCheckBox->isChecked();

	//RGB color
	liblas::Color rgbColorMask; //(0,0,0) on construction
	if (s_lasOpenDlg->doLoad(LAS_RED))
		rgbColorMask.SetRed(~0);
	if (s_lasOpenDlg->doLoad(LAS_GREEN))
		rgbColorMask.SetGreen(~0);
	if (s_lasOpenDlg->doLoad(LAS_BLUE))
		rgbColorMask.SetBlue(~0);
	bool loadColor = (rgbColorMask[0] || rgbColorMask[1] || rgbColorMask[2]);

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,nbOfPoints);
	pdlg.setMethodTitle("Open LAS file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(nbOfPoints)));
	pdlg.start();

	//number of points read from the beginning of the current cloud part
	unsigned pointsRead = 0;
	CCVector3d Pshift(0,0,0);

	//by default we read colors as 8-bit integers and switch to 16 bits if they turn out not to be (16 bits is the standard!)
	unsigned char colorCompBitDec = 0;
	colorType rgb[3] = {0,0,0};

	ccPointCloud* loadedCloud = 0;
	std::vector<LasField> fieldsToLoad;

	//if the file is too big, we will chunk it into multiple parts
	unsigned int fileChunkPos = 0;
	unsigned int fileChunkSize = 0;

	while (true)
	{
		//if we reach the end of the file, or the max. cloud size limit (in which case we create a new chunk)
		bool newPointAvailable = (nprogress.oneStep() && reader->ReadNextPoint());

		if (!newPointAvailable || pointsRead == fileChunkPos+fileChunkSize)
		{
			if (loadedCloud)
			{
				if (loadedCloud->size())
				{
					bool thisChunkHasColors = loadedCloud->hasColors();
					loadedCloud->showColors(thisChunkHasColors);
					if (loadColor && !thisChunkHasColors)
						ccLog::Warning("[LAS FILE] Color field was all black! We ignored it...");

					while (!fieldsToLoad.empty())
					{
						LasField& field = fieldsToLoad.back();
						if (field.sf)
						{
							field.sf->computeMinAndMax();

							if (field.type == LAS_CLASSIFICATION
								|| field.type == LAS_RETURN_NUMBER
								|| field.type == LAS_NUMBER_OF_RETURNS)
							{
								int cMin = (int)field.sf->getMin();
								int cMax = (int)field.sf->getMax();
								field.sf->setColorRampSteps(std::min<int>(cMax-cMin+1,256));
								//classifSF->setMinSaturation(cMin);
							}
							else if (field.type == LAS_INTENSITY)
							{
								field.sf->setColorScale(ccColorScalesManager::GetDefaultScale(ccColorScalesManager::GREY));
							}

							int sfIndex = loadedCloud->addScalarField(field.sf);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
							field.sf->release();
							field.sf=0;
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All '%1' values were the same (%2)! We ignored them...").arg(LAS_FIELD_NAMES[field.type]).arg(field.firstValue));
						}

						fieldsToLoad.pop_back();
					}

					//if we have reserved too much memory
					if (loadedCloud->size() < loadedCloud->capacity())
						loadedCloud->resize(loadedCloud->size());

					QString chunkName("unnamed - Cloud");
					unsigned n = container.getChildrenNumber();
					if (n!=0) //if we have more than one cloud, we append an index
					{
						if (n==1)  //we must also update the first one!
							container.getChild(0)->setName(chunkName+QString(" #1"));
						chunkName += QString(" #%1").arg(n+1);
					}
					loadedCloud->setName(chunkName);

					container.addChild(loadedCloud);
					loadedCloud=0;
				}
				else
				{
					//empty cloud?!
					delete loadedCloud;
					loadedCloud=0;
				}
			}

			if (!newPointAvailable)
				break; //end of the file (or cancel requested)

			//otherwise, we must create a new cloud
			fileChunkPos = pointsRead;
			fileChunkSize = std::min(nbOfPoints-pointsRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
			loadedCloud = new ccPointCloud();
			if (!loadedCloud->reserveThePointsTable(fileChunkSize))
			{
				ccLog::Warning("[LASFilter::loadFile] Not enough memory!");
				delete loadedCloud;
				delete reader;
				ifs.close();
				return CC_FERR_NOT_ENOUGH_MEMORY;
			}
			loadedCloud->setGlobalShift(Pshift);

			//DGM: from now on, we only enable scalar fields when we detect a valid value!
			if (s_lasOpenDlg->doLoad(LAS_CLASSIFICATION))
					fieldsToLoad.push_back(LasField(LAS_CLASSIFICATION,0,0,255)); //unsigned char: between 0 and 255
			if (s_lasOpenDlg->doLoad(LAS_CLASSIF_VALUE))
				fieldsToLoad.push_back(LasField(LAS_CLASSIF_VALUE,0,0,31)); //5 bits: between 0 and 31
			if (s_lasOpenDlg->doLoad(LAS_CLASSIF_SYNTHETIC))
				fieldsToLoad.push_back(LasField(LAS_CLASSIF_SYNTHETIC,0,0,1)); //1 bit: 0 or 1
			if (s_lasOpenDlg->doLoad(LAS_CLASSIF_KEYPOINT))
				fieldsToLoad.push_back(LasField(LAS_CLASSIF_KEYPOINT,0,0,1)); //1 bit: 0 or 1
			if (s_lasOpenDlg->doLoad(LAS_CLASSIF_WITHHELD))
				fieldsToLoad.push_back(LasField(LAS_CLASSIF_WITHHELD,0,0,1)); //1 bit: 0 or 1
			if (s_lasOpenDlg->doLoad(LAS_INTENSITY))
				fieldsToLoad.push_back(LasField(LAS_INTENSITY,0,0,65535)); //16 bits: between 0 and 65535
			if (s_lasOpenDlg->doLoad(LAS_TIME))
				fieldsToLoad.push_back(LasField(LAS_TIME,0,0,-1.0)); //8 bytes (double)
			if (s_lasOpenDlg->doLoad(LAS_RETURN_NUMBER))
				fieldsToLoad.push_back(LasField(LAS_RETURN_NUMBER,1,1,7)); //3 bits: between 1 and 7
			if (s_lasOpenDlg->doLoad(LAS_NUMBER_OF_RETURNS))
				fieldsToLoad.push_back(LasField(LAS_NUMBER_OF_RETURNS,1,1,7)); //3 bits: between 1 and 7
			if (s_lasOpenDlg->doLoad(LAS_SCAN_DIRECTION))
				fieldsToLoad.push_back(LasField(LAS_SCAN_DIRECTION,0,0,1)); //1 bit: 0 or 1
			if (s_lasOpenDlg->doLoad(LAS_FLIGHT_LINE_EDGE))
				fieldsToLoad.push_back(LasField(LAS_FLIGHT_LINE_EDGE,0,0,1)); //1 bit: 0 or 1
			if (s_lasOpenDlg->doLoad(LAS_SCAN_ANGLE_RANK))
				fieldsToLoad.push_back(LasField(LAS_SCAN_ANGLE_RANK,0,-90,90)); //signed char: between -90 and +90
			if (s_lasOpenDlg->doLoad(LAS_USER_DATA))
				fieldsToLoad.push_back(LasField(LAS_USER_DATA,0,0,255)); //unsigned char: between 0 and 255
			if (s_lasOpenDlg->doLoad(LAS_POINT_SOURCE_ID))
				fieldsToLoad.push_back(LasField(LAS_POINT_SOURCE_ID,0,0,65535)); //16 bits: between 0 and 65535
		}

		assert(newPointAvailable);
		const liblas::Point& p = reader->GetPoint();

		//first point: check for 'big' coordinates
		if (pointsRead == 0)
		{
			CCVector3d P( p.GetX(),p.GetY(),p.GetZ() );
			bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift);
			if (shiftAlreadyEnabled)
				Pshift = *coordinatesShift;
			bool applyAll = false;
			if (	sizeof(PointCoordinateType) < 8
				&&	ccCoordinatesShiftManager::Handle(P.u,0,alwaysDisplayLoadDialog,shiftAlreadyEnabled,Pshift,0,applyAll))
			{
				loadedCloud->setGlobalShift(Pshift);
				ccLog::Warning("[LASFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z);

				//we save coordinates shift information
				if (applyAll && coordinatesShiftEnabled && coordinatesShift)
				{
					*coordinatesShiftEnabled = true;
					*coordinatesShift = Pshift;
				}
			}
		}

		CCVector3 P(static_cast<PointCoordinateType>(p.GetX()+Pshift.x),
					static_cast<PointCoordinateType>(p.GetY()+Pshift.y),
					static_cast<PointCoordinateType>(p.GetZ()+Pshift.z));
		loadedCloud->addPoint(P);

		//color field
		if (loadColor)
		{
			//Warning: LAS colors are stored on 16 bits!
			liblas::Color col = p.GetColor();
			col[0] &= rgbColorMask[0];
			col[1] &= rgbColorMask[1];
			col[2] &= rgbColorMask[2];

			//if we haven't reserved a color field yet, we first check that the color is not black
			bool pushColor = true;
			if (!loadedCloud->hasColors())
			{
				//if the color is not black, we are sure it's a valid color field!
				if (col[0] || col[1] || col[2])
				{
					if (loadedCloud->reserveTheRGBTable())
					{
						//we must set the color (black) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							loadedCloud->addRGBColor(ccColor::black);
					}
					else
					{
						ccLog::Warning("[LAS FILE] Not enough memory: color field will be ignored!");
						loadColor = false; //no need to retry with the other chunks anyway
						pushColor = false;
					}
				}
				else //otherwise we ignore it for the moment (we'll add it later if necessary)
				{
					pushColor = false;
				}
			}

			//do we need to push this color?
			if (pushColor)
			{
				//we check whether the color components are coded on 16 bits (standard) or only on 8 bits (it happens!)
				if (colorCompBitDec==0)
				{
					if (	(col[0] & 0xFF00)
						||  (col[1] & 0xFF00)
						||  (col[2] & 0xFF00))
					{
						//the color components are on 16 bits!
						ccLog::Print("[LAS FILE] Color components are coded on 16 bits");
						colorCompBitDec = 8;
						//we fix all the previously read colors
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							loadedCloud->setPointColor(i,ccColor::black); //255 >> 8 = 0!
					}
				}

				rgb[0]=(colorType)(col[0]>>colorCompBitDec);
				rgb[1]=(colorType)(col[1]>>colorCompBitDec);
				rgb[2]=(colorType)(col[2]>>colorCompBitDec);

				loadedCloud->addRGBColor(rgb);
			}
		}
		
		//additional fields
		for (std::vector<LasField>::iterator it = fieldsToLoad.begin(); it != fieldsToLoad.end(); ++it)
		{
			double value = 0.0;
			switch (it->type)
			{
			case LAS_X:
			case LAS_Y:
			case LAS_Z:
				assert(false);
				break;
			case LAS_INTENSITY:
				value = (double)p.GetIntensity();
				break;
			case LAS_RETURN_NUMBER:
				value = (double)p.GetReturnNumber();
				break;
			case LAS_NUMBER_OF_RETURNS:
				value = (double)p.GetNumberOfReturns();
				break;
			case LAS_SCAN_DIRECTION:
				value = (double)p.GetScanDirection();
				break;
			case LAS_FLIGHT_LINE_EDGE:
				value = (double)p.GetFlightLineEdge();
				break;
			case LAS_CLASSIFICATION:
				value = (double)p.GetClassification().GetClass();
				break;
			case LAS_SCAN_ANGLE_RANK:
				value = (double)p.GetScanAngleRank();
				break;
			case LAS_USER_DATA:
				value = (double)p.GetUserData();
				break;
			case LAS_POINT_SOURCE_ID:
				value = (double)p.GetPointSourceID();
				break;
			case LAS_RED:
			case LAS_GREEN:
			case LAS_BLUE:
				assert(false);
				break;
			case LAS_TIME:
				value = p.GetTime();
				break;
			case LAS_CLASSIF_VALUE:
				value = (double)(p.GetClassification().GetClass() & 31); //5 bits
				break;
			case LAS_CLASSIF_SYNTHETIC:
				value = (double)(p.GetClassification().GetClass() & 32); //bit #6
				break;
			case LAS_CLASSIF_KEYPOINT:
				value = (double)(p.GetClassification().GetClass() & 64); //bit #7
				break;
			case LAS_CLASSIF_WITHHELD:
				value = (double)(p.GetClassification().GetClass() & 128); //bit #8
				break;
			case LAS_INVALID:
			default:
				assert(false);
				break;
			}

			if (it->sf)
			{
				ScalarType s = static_cast<ScalarType>(value);
				it->sf->addElement(s);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size() == 1)
				{
					it->firstValue = value;
				}
				
				if (!ignoreDefaultFields || value != it->firstValue || it->firstValue != it->defaultValue)
				{
					it->sf = new ccScalarField(it->getName());
					if (it->sf->reserve(fileChunkSize))
					{
						it->sf->link();
						//we must set the value (it->firstValue) of all the previously skipped points
						ScalarType firstS = static_cast<ScalarType>(it->firstValue);
						for (unsigned i=0; i<loadedCloud->size()-1; ++i)
							it->sf->addElement(firstS);

						ScalarType s = static_cast<ScalarType>(value);
						it->sf->addElement(s);
					}
					else
					{
						ccLog::Warning(QString("[LAS FILE] Not enough memory: '%1' field will be ignored!").arg(LAS_FIELD_NAMES[it->type]));
						it->sf->release();
						it->sf = 0;
					}
				}
			}
		}

		++pointsRead;
	}

	if (reader)
		delete reader;
	reader=0;
	ifs.close();

	return CC_FERR_NO_ERROR;
}
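
For reference, the liblas access pattern that this loader is built on (shared with code example #7 below) can be reduced to a short, self-contained sketch. It only reuses the calls already visible above; the umbrella header name is an assumption and error handling is left out.

#include <fstream>
#include <liblas/liblas.hpp> //assumed umbrella header for the liblas C++ API

//Count the points actually present in a LAS/LAZ file (illustration only)
static unsigned countLasPoints(const char* filename)
{
	std::ifstream ifs(filename, std::ios::in | std::ios::binary);
	//the factory transparently handles compressed and uncompressed files
	liblas::Reader reader = liblas::ReaderFactory().CreateWithStream(ifs);
	const liblas::Header& header = reader.GetHeader();
	const unsigned expected = header.GetPointRecordsCount(); //count declared in the header

	unsigned read = 0;
	while (read < expected && reader.ReadNextPoint())
	{
		const liblas::Point& p = reader.GetPoint();
		(void)p.GetX(); (void)p.GetY(); (void)p.GetZ(); //double-precision coordinates
		++read;
	}
	return read; //may be smaller than 'expected' if the file is truncated
}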
Code example #2
File: PTXFilter.cpp  Project: coolshahabaz/trunk
CC_FILE_ERROR PTXFilter::loadFile(	QString filename,
									ccHObject& container,
									LoadParameters& parameters)
{
	//open ASCII file for reading
	QFile file(filename);
	if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
	{
		return CC_FERR_READING;
	}

	QTextStream inFile(&file);

	CCVector3d PshiftTrans(0,0,0);
	CCVector3d PshiftCloud(0,0,0);

	CC_FILE_ERROR result = CC_FERR_NO_LOAD;
	ScalarType minIntensity = 0;
	ScalarType maxIntensity = 0;

	//progress dialog
	ccProgressDialog pdlg(true, parameters.parentWidget);
	pdlg.setMethodTitle(QObject::tr("Loading PTX file"));
	pdlg.setAutoClose(false);

	//progress dialog (for normals computation)
	ccProgressDialog normalsProgressDlg(true, parameters.parentWidget);
	normalsProgressDlg.setAutoClose(false);

	for (unsigned cloudIndex = 0; result == CC_FERR_NO_ERROR || result == CC_FERR_NO_LOAD; cloudIndex++)
	{
		unsigned width = 0, height = 0;
		ccGLMatrixd sensorTransD, cloudTransD;

		//read header
		{
			QString line = inFile.readLine();
			if (line.isNull() && container.getChildrenNumber() != 0) //end of file?
				break;

			//read the width (number of columns) and the height (number of rows) on the first two lines
			//(DGM: we transpose the matrix right away)
			bool ok;
			height = line.toUInt(&ok);
			if (!ok)
				return CC_FERR_MALFORMED_FILE;
			line = inFile.readLine();
			width = line.toUInt(&ok);
			if (!ok)
				return CC_FERR_MALFORMED_FILE;

			ccLog::Print(QString("[PTX] Scan #%1 - grid size: %2 x %3").arg(cloudIndex+1).arg(height).arg(width));

			//read sensor transformation matrix
			for (int i=0; i<4; ++i)
			{
				line = inFile.readLine();
				QStringList tokens = line.split(" ",QString::SkipEmptyParts);
				if (tokens.size() != 3)
					return CC_FERR_MALFORMED_FILE;

				double* colDest = 0;
				if (i == 0)
				{
					//Translation
					colDest = sensorTransD.getTranslation();
				}
				else
				{
					//X, Y and Z axis
					colDest = sensorTransD.getColumn(i-1);
				}

				for (int j=0; j<3; ++j)
				{
					assert(colDest);
					colDest[j] = tokens[j].toDouble(&ok);
					if (!ok)
						return CC_FERR_MALFORMED_FILE;
				}
			}
			//make the transform a little bit cleaner (necessary as it's read from ASCII!)
			CleanMatrix(sensorTransD);

			//read cloud transformation matrix
			for (int i=0; i<4; ++i)
			{
				line = inFile.readLine();
				QStringList tokens = line.split(" ",QString::SkipEmptyParts);
				if (tokens.size() != 4)
					return CC_FERR_MALFORMED_FILE;

				double* col = cloudTransD.getColumn(i);
				for (int j=0; j<4; ++j)
				{
					col[j] = tokens[j].toDouble(&ok);
					if (!ok)
						return CC_FERR_MALFORMED_FILE;
				}
			}
			//make the transform a little bit cleaner (necessary as it's read from ASCII!)
			CleanMatrix(cloudTransD);

			//handle Global Shift directly on the first cloud's translation!
			if (cloudIndex == 0)
			{
				if (HandleGlobalShift(cloudTransD.getTranslationAsVec3D(),PshiftTrans,parameters))
				{
					ccLog::Warning("[PTXFilter::loadFile] Cloud has be recentered! Translation: (%.2f,%.2f,%.2f)",PshiftTrans.x,PshiftTrans.y,PshiftTrans.z);
				}
			}

			//'remove' global shift from the sensor and cloud transformation matrices
			cloudTransD.setTranslation(cloudTransD.getTranslationAsVec3D() + PshiftTrans);
			sensorTransD.setTranslation(sensorTransD.getTranslationAsVec3D() + PshiftTrans);
		}

		//now we can read the grid cells
		ccPointCloud* cloud = new ccPointCloud();
		if (container.getChildrenNumber() == 0)
		{
			cloud->setName("unnamed - Cloud");
		}
		else
		{
			if (container.getChildrenNumber() == 1)
				container.getChild(0)->setName("unnamed - Cloud 1"); //update previous cloud name!

			cloud->setName(QString("unnamed - Cloud %1").arg(container.getChildrenNumber()+1));
		}

		unsigned gridSize = width * height;
		if (!cloud->reserve(gridSize))
		{
			result = CC_FERR_NOT_ENOUGH_MEMORY;
			delete cloud;
			cloud = 0;
			break;
		}

		//set global shift
		cloud->setGlobalShift(PshiftTrans);

		//intensities
		ccScalarField* intensitySF = new ccScalarField(CC_PTX_INTENSITY_FIELD_NAME);
		if (!intensitySF->reserve(static_cast<unsigned>(gridSize)))
		{
			ccLog::Warning("[PTX] Not enough memory to load intensities!");
			intensitySF->release();
			intensitySF = 0;
		}

		//grid structure
		ccPointCloud::Grid::Shared grid(new ccPointCloud::Grid);
		grid->w = width;
		grid->h = height;
		bool hasIndexGrid = true;
		try
		{
			grid->indexes.resize(gridSize,-1); //-1 means no cell/point
		}
		catch (const std::bad_alloc&)
		{
			ccLog::Warning("[PTX] Not enough memory to load the grid structure");
			hasIndexGrid = false;
		}

		//read points
		{
			CCLib::NormalizedProgress nprogress(&pdlg, gridSize);
			pdlg.setInfo(qPrintable(QString("Number of cells: %1").arg(gridSize)));
			pdlg.start();

			bool firstPoint = true;
			bool hasColors = false;
			bool loadColors = false;
			bool loadGridColors = false;
			size_t gridIndex = 0;

			for (unsigned j=0; j<height; ++j)
			{
				for (unsigned i=0; i<width; ++i, ++gridIndex)
				{
					QString line = inFile.readLine();
					QStringList tokens = line.split(" ",QString::SkipEmptyParts);

					if (firstPoint)
					{
						hasColors = (tokens.size() == 7);
						if (hasColors)
						{
							loadColors = cloud->reserveTheRGBTable();
							if (!loadColors)
							{
								ccLog::Warning("[PTX] Not enough memory to load RGB colors!");
							}
							else if (hasIndexGrid)
							{
								//we also load the colors into the grid (as invalid/missing points can have colors!)
								try
								{
									grid->colors.resize(gridSize, ccColor::Rgb(0, 0, 0));
									loadGridColors = true;
								}
								catch (const std::bad_alloc&)
								{
									ccLog::Warning("[PTX] Not enough memory to load the grid colors");
								}
							}
						}
					}
					if ((hasColors && tokens.size() != 7) || (!hasColors && tokens.size() != 4))
					{
						result = CC_FERR_MALFORMED_FILE;
						//early stop
						j = height;
						break;
					}

					double values[4];
					for (int v=0; v<4; ++v)
					{
						bool ok;
						values[v] = tokens[v].toDouble(&ok);
						if (!ok)
						{
							result = CC_FERR_MALFORMED_FILE;
							//early stop
							j = height;
							break;
						}
					}

					//we skip "empty" cells
					bool pointIsValid = (CCVector3d::fromArray(values).norm2() != 0);
					if (pointIsValid)
					{
						const double* Pd = values;
						//first point: check for 'big' coordinates
						if (firstPoint)
						{
							if (cloudIndex == 0 && !cloud->isShifted()) //in case the trans. matrix was ok!
							{
								CCVector3d P(Pd);
								if (HandleGlobalShift(P,PshiftCloud,parameters))
								{
									cloud->setGlobalShift(PshiftCloud);
									ccLog::Warning("[PTXFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",PshiftCloud.x,PshiftCloud.y,PshiftCloud.z);
								}
							}
							firstPoint = false;
						}

						//update index grid
						if (hasIndexGrid)
						{
							grid->indexes[gridIndex] = static_cast<int>(cloud->size()); // = index (default value = -1, means no point)
						}

						//add point
						cloud->addPoint(CCVector3(	static_cast<PointCoordinateType>(Pd[0] + PshiftCloud.x),
													static_cast<PointCoordinateType>(Pd[1] + PshiftCloud.y),
													static_cast<PointCoordinateType>(Pd[2] + PshiftCloud.z)) );

						//add intensity
						if (intensitySF)
						{
							intensitySF->addElement(static_cast<ScalarType>(values[3]));
						}
					}

					//color
					if (loadColors && (pointIsValid || loadGridColors))
					{
						ccColor::Rgb color;
						for (int c=0; c<3; ++c)
						{
							bool ok;
							unsigned temp = tokens[4+c].toUInt(&ok);
							ok &= (temp <= 255);
							if (ok)
							{
								color.rgb[c] = static_cast<unsigned char>(temp);
							}
							else
							{
								result = CC_FERR_MALFORMED_FILE;
								//early stop
								j = height;
								break;
							}
						}

						if (pointIsValid)
						{
							cloud->addRGBColor(color.rgb);
						}
						if (loadGridColors)
						{
							assert(!grid->colors.empty());
							grid->colors[gridIndex] = color;
						}
					}

					if (!nprogress.oneStep())
					{
						result = CC_FERR_CANCELED_BY_USER;
						break;
					}
				}
			}
		}

		//is there at least one valid point in this grid?
		if (cloud->size() == 0)
		{
			delete cloud;
			cloud = 0;
			if (intensitySF)
				intensitySF->release();

			ccLog::Warning(QString("[PTX] Scan #%1 is empty?!").arg(cloudIndex+1));
		}
		else
		{
			if (result == CC_FERR_NO_LOAD)
				result = CC_FERR_NO_ERROR; //to make clear that we have loaded at least something!
			
			cloud->resize(cloud->size());
			if (intensitySF)
			{
				assert(intensitySF->currentSize() == cloud->size());
				intensitySF->resize(cloud->size());
				intensitySF->computeMinAndMax();
				int intensitySFIndex = cloud->addScalarField(intensitySF);

				//keep track of the min and max intensity
				if (container.getChildrenNumber() == 0)
				{
					minIntensity = intensitySF->getMin();
					maxIntensity = intensitySF->getMax();
				}
				else
				{
					minIntensity = std::min(minIntensity,intensitySF->getMin());
					maxIntensity = std::max(maxIntensity,intensitySF->getMax());
				}

				cloud->showSF(true);
				cloud->setCurrentDisplayedScalarField(intensitySFIndex);
			}

			ccGBLSensor* sensor = 0;
			if (hasIndexGrid && result != CC_FERR_CANCELED_BY_USER)
			{
				//determine best sensor parameters (mainly yaw and pitch steps)
				ccGLMatrix cloudToSensorTrans((sensorTransD.inverse() * cloudTransD).data());
				sensor = ccGriddedTools::ComputeBestSensor(cloud, grid, &cloudToSensorTrans);
			}

			//we apply the transformation
			ccGLMatrix cloudTrans(cloudTransD.data());
			cloud->applyGLTransformation_recursive(&cloudTrans);
			
			if (sensor)
			{
				ccGLMatrix sensorTrans(sensorTransD.data());
				sensor->setRigidTransformation(sensorTrans); //after cloud->applyGLTransformation_recursive!
				cloud->addChild(sensor);
			}

			//scan grid
			if (hasIndexGrid)
			{
				grid->validCount = static_cast<unsigned>(cloud->size());
				grid->minValidIndex = 0;
				grid->maxValidIndex = grid->validCount-1;
				grid->sensorPosition = sensorTransD;
				cloud->addGrid(grid);

				//don't compute normals unless the user explicitly asked for it
				if (parameters.autoComputeNormals)
				{
					cloud->computeNormalsWithGrids(LS, 2, true, &normalsProgressDlg);
				}
			}

			cloud->setVisible(true);
			cloud->showColors(cloud->hasColors());
			cloud->showNormals(cloud->hasNormals());

			container.addChild(cloud);

#ifdef QT_DEBUG
			//break;
#endif
		}
	}

	//update scalar fields saturation (globally!)
	{
		bool validIntensityRange = true;
		if (minIntensity < 0 || maxIntensity > 1.0)
		{
			ccLog::Warning("[PTX] Intensity values are invalid (they should all fall in [0 ; 1])");
			validIntensityRange = false;
		}

		for (unsigned i=0; i<container.getChildrenNumber(); ++i)
		{
			ccHObject* obj = container.getChild(i);
			assert(obj && obj->isA(CC_TYPES::POINT_CLOUD));
			CCLib::ScalarField* sf = static_cast<ccPointCloud*>(obj)->getScalarField(0);
			if (sf)
			{
				ccScalarField* ccSF = static_cast<ccScalarField*>(sf);
				ccSF->setColorScale(ccColorScalesManager::GetDefaultScale(validIntensityRange ? ccColorScalesManager::ABS_NORM_GREY : ccColorScalesManager::GREY));
				ccSF->setSaturationStart(0/*minIntensity*/);
				ccSF->setSaturationStop(maxIntensity);
			}
		}
	}

	return result;
}
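
The per-scan header that the reading loop above expects is simple enough to isolate in a small sketch. The helper below is hypothetical (it is not part of PTXFilter); it just mirrors how the code above consumes the first two lines and what follows them.

#include <QTextStream>

//Hypothetical helper: read the grid size of one PTX scan, exactly as above
//(the first value is stored as 'height', the second as 'width' - the grid is
//transposed right away)
static bool readPtxGridSize(QTextStream& inFile, unsigned& height, unsigned& width)
{
	bool ok = false;
	height = inFile.readLine().toUInt(&ok);
	if (!ok)
		return false;
	width = inFile.readLine().toUInt(&ok);
	return ok;
	//next come 4 lines of 3 values (sensor position, then X/Y/Z axes),
	//4 lines of 4 values (cloud transformation), and finally width*height
	//cells of "x y z intensity" or "x y z intensity r g b"
}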
Code example #3
File: MAFilter.cpp  Project: fredericoal/trunk
CC_FILE_ERROR MAFilter::saveToFile(ccHObject* entity, QString filename)
{
	if (!entity || filename.isEmpty())
		return CC_FERR_BAD_ARGUMENT;

	//the mesh to save
	ccGenericMesh* theMesh = ccHObjectCaster::ToGenericMesh(entity);
	if (!theMesh)
	{
		ccLog::Error("[MA] This filter can only save one mesh at a time!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}
	//and its vertices
	ccGenericPointCloud* theCloud = theMesh->getAssociatedCloud();

	unsigned numberOfTriangles = theMesh->size();
	unsigned numberOfVertexes = theCloud->size();

	if (numberOfTriangles == 0 || numberOfVertexes == 0)
	{
		ccLog::Error("Mesh is empty!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	bool hasColors = false;
	if (theCloud->isA(CC_TYPES::POINT_CLOUD))
		hasColors = static_cast<ccPointCloud*>(theCloud)->hasColors();

	//and its scalar field
	/*CCLib::ScalarField* sf = 0;
	if (theCloud->isA(CC_TYPES::POINT_CLOUD))
	sf = static_cast<ccPointCloud*>(theCloud)->getCurrentDisplayedScalarField();

	if (!sf)
		ccLog::Warning("No displayed scalar field! Values will all be 0!\n");

	//*/

	//open ASCII file for writing
	FILE* fp = fopen(qPrintable(filename) , "wt");

	if (!fp)
		return CC_FERR_WRITING;

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	unsigned palierModifier = (hasColors ? 1 : 0);
	CCLib::NormalizedProgress nprogress(&pdlg,unsigned(float((2+palierModifier)*numberOfTriangles+(3+palierModifier)*numberOfVertexes)));
	pdlg.setMethodTitle("Save MA file");
	char buffer[256];
	sprintf(buffer,"Triangles = %u",numberOfTriangles);
	pdlg.setInfo(buffer);
	pdlg.start();

	//we extract the (short) filename from the whole path
	QString baseFilename = QFileInfo(filename).fileName();

	//header
	if (fprintf(fp,"//Maya ASCII 7.0 scene\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"//Name: %s\n",qPrintable(baseFilename)) < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"//Last modified: Sat, Mai 10, 2008 00:00:00 PM\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"requires maya \"4.0\";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"currentUnit -l %s -a degree -t film;\n","centimeter") < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	//for multiple meshes handling (does not work yet)
	unsigned char currentMesh = 0;

	//transformation node
	if (fprintf(fp,"createNode transform -n \"Mesh%i\";\n",currentMesh+1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	//main node
	if (fprintf(fp,"createNode mesh -n \"MeshShape%i\" -p \"Mesh%i\";\n",currentMesh+1,currentMesh+1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	if (fprintf(fp,"\tsetAttr -k off \".v\";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	if (fprintf(fp,"\tsetAttr \".uvst[0].uvsn\" -type \"string\" \"map1\";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"\tsetAttr \".cuvs\" -type \"string\" \"map1\";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (hasColors)
	{
		if (fprintf(fp,"\tsetAttr \".dcol\" yes;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
	}
	if (fprintf(fp,"\tsetAttr \".dcc\" -type \"string\" \"Ambient+Diffuse\";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"\tsetAttr \".ccls\" -type \"string\" \"colorSet%i\";\n",currentMesh+1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"\tsetAttr \".clst[0].clsn\" -type \"string\" \"colorSet%i\";\n",currentMesh+1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (hasColors)
	{
		if (fprintf(fp,"\tsetAttr \".ndt\" 0;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".tgsp\" 1;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		//insert a secondary node
		if (fprintf(fp,"createNode mesh -n \"polySurfaceShape%i\" -p \"Mesh%i\";\n",currentMesh+1,currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		if (fprintf(fp,"\tsetAttr -k off \".v\";\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".io\" yes;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".uvst[0].uvsn\" -type \"string\" \"map1\";\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".cuvs\" -type \"string\" \"map1\";\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".dcol\" yes;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".dcc\" -type \"string\" \"Ambient+Diffuse\";\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".ccls\" -type \"string\" \"colorSet%i\";\n",currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".clst[0].clsn\" -type \"string\" \"colorSet%i\";\n",currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}
	}

	//save vertexes
	if (fprintf(fp,"\tsetAttr -s %u \".vt[0:%u]\"\n",numberOfVertexes,numberOfVertexes-1) < 0)
	{
		fclose(fp);
		return CC_FERR_WRITING;
	}
	{
		for (unsigned i=0; i<numberOfVertexes; ++i)
		{
			const CCVector3* P = theCloud->getPoint(i);
			CCVector3d Pglobal = theCloud->toGlobal3d<PointCoordinateType>(*P);
			if (fprintf(fp,(i+1==numberOfVertexes ? "\t\t%f %f %f;\n" : "\t\t%f %f %f\n"),
							Pglobal.x,
							Pglobal.y,
							Pglobal.z) < 0)
			{
				fclose(fp);
				return CC_FERR_WRITING;
			}

			nprogress.oneStep();
		}
	}

	//save "edges"
	edge** theEdges = new edge*[numberOfVertexes];
	memset(theEdges,0,sizeof(edge*)*numberOfVertexes);
	unsigned ind[3],a,b;
	int currentEdgeIndex=0,lastEdgeIndexPushed=-1;

	int hard=0; //Maya edges can be "hard" or "soft" ...
	{
		theMesh->placeIteratorAtBegining();
		for (unsigned i=0;i<numberOfTriangles;++i)
		{
			const CCLib::TriangleSummitsIndexes* tsi = theMesh->getNextTriangleIndexes(); //DGM: getNextTriangleIndexes is faster for mesh groups!

			ind[0] = tsi->i1;
			ind[1] = tsi->i2;
			ind[2] = tsi->i3;

			uchar k,l;
			for (k=0; k<3; ++k)
			{
				l=(k<2 ? k+1 : 0);
				a = (ind[k]<ind[l] ? ind[k] : ind[l]);
				b = (a==ind[k] ? ind[l] : ind[k]);

				currentEdgeIndex = -1;
				edge* e = theEdges[a];
				while (e)
				{
					if (e->theOtherPoint == b)
					{
						currentEdgeIndex = e->edgeIndex;
						break;
					}
					e = e->nextEdge;
				}

				if (currentEdgeIndex < 0) //create a new edge
				{
					edge* newEdge = new edge;
					newEdge->nextEdge = NULL;
					newEdge->theOtherPoint = b;
					newEdge->positif = (a==ind[k]);
					//newEdge->edgeIndex = ++lastEdgeIndexPushed; //don't write the edge right now
					newEdge->edgeIndex = 0;
					++lastEdgeIndexPushed;
					//currentEdgeIndex = lastEdgeIndexPushed;

					//don't forget the node!
					if (theEdges[a])
					{
						e = theEdges[a];
						while (e->nextEdge) e=e->nextEdge;
						e->nextEdge = newEdge;
					}
					else theEdges[a]=newEdge;

					/*if (fprintf(fp,"\n \t\t%i %i %i",a,b,hard) < 0)
						return CC_FERR_WRITING;*/
				}
			}

			nprogress.oneStep();
		}
	}

	//now write the edges
	{
		unsigned numberOfEdges = unsigned(lastEdgeIndexPushed+1);
		if (fprintf(fp,"\tsetAttr -s %u \".ed[0:%u]\"",numberOfEdges,numberOfEdges-1) < 0)
		{
			fclose(fp);
			return CC_FERR_WRITING;
		}

		lastEdgeIndexPushed = 0;
		for (unsigned i=0; i<numberOfVertexes; ++i)
		{
			edge* e = theEdges[i];
			while (e)
			{
				e->edgeIndex = lastEdgeIndexPushed++;
				if (fprintf(fp,"\n \t\t%u %u %i",i,e->theOtherPoint,hard) < 0)
				{
					fclose(fp);
					return CC_FERR_WRITING;
				}
				e = e->nextEdge;
			}

			nprogress.oneStep();
		}
	}

	if (fprintf(fp,";\n") < 0)
	{
		fclose(fp);
		return CC_FERR_WRITING;
	}

	//write faces
	if (fprintf(fp,"\tsetAttr -s %u \".fc[0:%u]\" -type \"polyFaces\"\n",numberOfTriangles,numberOfTriangles-1) < 0)
	{
		fclose(fp);
		return CC_FERR_WRITING;
	}

	theMesh->placeIteratorAtBegining();
	{
		for (unsigned i=0; i<numberOfTriangles; ++i)
		{
			if (fprintf(fp,"\t\tf 3") < 0)
			{
				fclose(fp);
				return CC_FERR_WRITING;
			}

			CCLib::TriangleSummitsIndexes* tsi = theMesh->getNextTriangleIndexes(); //DGM: getNextTriangleIndexes is faster for mesh groups!
			ind[0] = tsi->i1;
			ind[1] = tsi->i2;
			ind[2] = tsi->i3;

			for (uchar k=0; k<3; ++k)
			{
				uchar l = (k<2 ? k+1 : 0);
				a = (ind[k]<ind[l] ? ind[k] : ind[l]);
				b = (a==ind[k] ? ind[l] : ind[k]);

				edge* e = theEdges[a];
				while (e->theOtherPoint != b)
					e = e->nextEdge;

				if (fprintf(fp," %i",((e->positif && a==ind[k]) || (!e->positif && a==ind[l]) ? e->edgeIndex : -(e->edgeIndex+1))) < 0)
				{
					fclose(fp);
					return CC_FERR_WRITING;
				}
			}

			if (fprintf(fp,(i+1==numberOfTriangles ? ";\n" : "\n")) < 0)
			{
				fclose(fp);
				return CC_FERR_WRITING;
			}

			nprogress.oneStep();
		}
	}

	//free memory
	{
		for (unsigned i=0; i<numberOfVertexes; ++i)
		{
			if (theEdges[i])
			{
				edge* e = theEdges[i]->nextEdge;
				while (e)
				{
					edge* nextE = e->nextEdge;
					delete e;
					e = nextE;
				}
				delete theEdges[i];
			}

			nprogress.oneStep();
		}
	}

	//bonus track
	if (	fprintf(fp,"\tsetAttr \".cd\" -type \"dataPolyComponent\" Index_Data Edge 0 ;\n") < 0
		||	fprintf(fp,"\tsetAttr \".ndt\" 0;\n") < 0
		||	fprintf(fp,"\tsetAttr \".tgsp\" 1;\n") < 0 )
	{
		fclose(fp);
		return CC_FERR_WRITING;
	}

	//vertex colors
	if (hasColors)
	{
		assert(theCloud->isA(CC_TYPES::POINT_CLOUD));
		ccPointCloud* pc = static_cast<ccPointCloud*>(theCloud);

		if (fprintf(fp,"createNode polyColorPerVertex -n \"polyColorPerVertex%i\";\n",currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		if (fprintf(fp,"\tsetAttr \".uopa\" yes;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		if (fprintf(fp,"\tsetAttr -s %u \".vclr\";\n",numberOfVertexes) < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		//association of each vertex with the faces it belongs to
		faceIndexes** theFacesIndexes = new faceIndexes*[numberOfVertexes];
		memset(theFacesIndexes,0,sizeof(faceIndexes*)*numberOfVertexes);
		theMesh->placeIteratorAtBegining();
		{
			for (unsigned i=0; i<numberOfTriangles; ++i)
			{
				CCLib::TriangleSummitsIndexes* tsi = theMesh->getNextTriangleIndexes(); //DGM: getNextTriangleIndexes is faster for mesh groups!
				ind[0] = tsi->i1;
				ind[1] = tsi->i2;
				ind[2] = tsi->i3;

				for (uchar j=0; j<3; ++j)
				{
					if (!theFacesIndexes[ind[j]])
					{
						faceIndexes* f = new faceIndexes;
						f->faceIndex = i;
						f->nextFace = NULL;
						theFacesIndexes[ind[j]] = f;
					}
					else
					{
						faceIndexes* f = theFacesIndexes[ind[j]];
						while (f->nextFace)
							f = f->nextFace;
						f->nextFace = new faceIndexes;
						f->nextFace->faceIndex = i;
						f->nextFace->nextFace = NULL;
					}
				}

				nprogress.oneStep();
			}
		}

		//for each vertex
		{
			float col[3];
			float coef = 1.0f/static_cast<float>(MAX_COLOR_COMP);
			for (unsigned i=0; i<numberOfVertexes; ++i)
			{
				const colorType* c = pc->getPointColor(i);
				col[0] = static_cast<float>(c[0]) * coef;
				col[1] = static_cast<float>(c[1]) * coef;
				col[2] = static_cast<float>(c[2]) * coef;

				//count the number of faces this vertex belongs to
				int nf = 0;
				faceIndexes* f = theFacesIndexes[i];
				while (f)
				{
					++nf;
					f = f->nextFace;
				}

				if (nf > 0)
				{
					if (fprintf(fp,"\tsetAttr -s %i \".vclr[%u].vfcl\";\n",nf,i) < 0)
						{fclose(fp);return CC_FERR_WRITING;}

					faceIndexes *oldf, *f = theFacesIndexes[i];
					while (f)
					{
						if (fprintf(fp,"\tsetAttr \".vclr[%u].vfcl[%i].frgb\" -type \"float3\" %f %f %f;\n",i,f->faceIndex,col[0],col[1],col[2]) < 0)
						{
							fclose(fp);
							return CC_FERR_WRITING;
						}

						oldf = f;
						f = f->nextFace;
						delete oldf;
					}
					theFacesIndexes[i] = NULL;
				}

				nprogress.oneStep();
			}
		}
		delete[] theFacesIndexes;

		if (fprintf(fp,"\tsetAttr \".cn\" -type \"string\" \"colorSet%i\";\n",currentMesh+1) < 0)
		{
			fclose(fp);
			return CC_FERR_WRITING;
		}
	}

	//Maya connections
	if (hasColors)
	{
		if (	fprintf(fp,"connectAttr \"polyColorPerVertex%i.out\" \"MeshShape%i.i\";\n",currentMesh+1,currentMesh+1) < 0
			||	fprintf(fp,"connectAttr \"polySurfaceShape%i.o\" \"polyColorPerVertex%i.ip\";\n",currentMesh+1,currentMesh+1) < 0 )
		{
			fclose(fp);
			return CC_FERR_WRITING;
		}
	}
	
	if (fprintf(fp,"connectAttr \"MeshShape%i.iog\" \":initialShadingGroup.dsm\" -na;\n",currentMesh+1) < 0)
	{
		fclose(fp);
		return CC_FERR_WRITING;
	}

	//end of file
	if (fprintf(fp,"//End of %s\n",qPrintable(baseFilename)) < 0)
	{
		fclose(fp);
		return CC_FERR_WRITING;
	}

	fclose(fp);

	return CC_FERR_NO_ERROR;
}
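
The edge bookkeeping above relies on an 'edge' record whose definition is not part of this excerpt; each slot theEdges[a] holds a singly-linked list of the edges whose lowest-index endpoint is 'a'. A sketch of that record, with field types inferred from the way the listing uses it:

//Sketch only - the real definition lives elsewhere in MAFilter.cpp
struct edge
{
	unsigned theOtherPoint; //higher-index endpoint of the edge
	bool     positif;       //true if the edge was first met as (a -> b) with a == ind[k]
	int      edgeIndex;     //index assigned when the ".ed" block is written
	edge*    nextEdge;      //next edge sharing the same lowest-index endpoint
};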
Code example #4
File: SoiFilter.cpp  Project: jebd/trunk
CC_FILE_ERROR SoiFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
    //open the file
	FILE *fp = fopen(filename, "rt");
    if (!fp)
        return CC_FERR_READING;

	std::string line;
	line.resize(MAX_ASCII_FILE_LINE_LENGTH);
	unsigned nbScansTotal = 0;
	unsigned nbPointsTotal = 0;

	//we read the first line
	char* eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);
    char* pEnd;

    //header
	while ((strcmp((char*)line.substr(0,4).c_str(),"#CC#") != 0)&&(eof != NULL))
	{
		if (strcmp(line.substr(0,4).c_str(),"#NP#")==0)
		{
			std::string numPoints (line,4,line.size()-4);
			nbPointsTotal=strtol(numPoints.c_str(),&pEnd,0);
			//ccConsole::Print("[SoiFilter::loadFile] Total number of points: %i\n",nbPointsTotal);
		}
		else if (strcmp(line.substr(0,4).c_str(),"#NS#")==0)
		{
			std::string numScans (line,4,line.size()-4);
			nbScansTotal=strtol(numScans.c_str(),&pEnd,0);
			//ccConsole::Print("[SoiFilter::loadFile] Total number of scans: %i\n",nbScansTotal);
		}

		eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);
	}

	if ((nbScansTotal == 0)||(nbPointsTotal == 0))
	{
		ccConsole::Warning("[SoiFilter::loadFile] No points or scans defined in this file!");
		fclose(fp);
		return CC_FERR_NO_LOAD;
	}


	//Progress dialog
	ccProgressDialog pdlg(false); //cancel is not supported
	pdlg.setMethodTitle("Open SOI file");
	char buffer[256];
	sprintf(buffer,"%i scans / %i points\n",nbScansTotal,nbPointsTotal);
	CCLib::NormalizedProgress nprogress(&pdlg,nbPointsTotal);
	pdlg.setInfo(buffer);
	pdlg.start();

    //Scan by scan
	for (unsigned k=0;k<nbScansTotal;k++)
	{
		char* eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);

        //we only look for points (we ignore the rest)
		while ((strcmp(line.substr(0,4).c_str(),"#pt#")!=0)&&(eof != NULL))
			eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);

		unsigned nbOfPoints = 0;

		if (strcmp(line.substr(0,4).c_str(),"#pt#")==0)
		{
			std::string numPoints(line,4,line.size()-4);
			nbOfPoints=strtol(numPoints.c_str(),&pEnd,0);
			//ccConsole::Print("[SoiFilter::loadFile] Scan %i - points: %i\n",k+1,nbOfPoints);
		}
		else
		{
			ccConsole::Warning("[SoiFilter::loadFile] Can't find marker '#pt#'!\n");
			fclose(fp);
			return CC_FERR_WRONG_FILE_TYPE;
		}

		//create the point list
		char name[64];
		sprintf(name,"unnamed - Scan #%i",k);
		ccPointCloud* loadedCloud = new ccPointCloud(name);

		if (!loadedCloud)
		{
            fclose(fp);
            return CC_FERR_NOT_ENOUGH_MEMORY;
		}

		if (nbOfPoints>0)
		{
            loadedCloud->reserveThePointsTable(nbOfPoints);
			loadedCloud->reserveTheRGBTable();
			loadedCloud->showColors(true);
		}
		else
		{
			ccConsole::Warning("[SoiFilter::loadFile] Scan #%i is empty!\n",k);
			delete loadedCloud;
			continue;
		}

		CCVector3 P;
		int c = 0;

		//we can read points now
		for (unsigned i=0;i<nbOfPoints;i++)
		{
			fscanf(fp,"%f %f %f %i",&P.x,&P.y,&P.z,&c);

			loadedCloud->addPoint(P);
			loadedCloud->addGreyColor(colorType(c<<3)); //<<2 ? <<3 ? we lack some info. here ...

			nprogress.oneStep();
		}

		container.addChild(loadedCloud);
	}

	fclose(fp);

    return CC_FERR_NO_ERROR;
}
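
The SOI header is driven by 4-character markers: '#NP#' gives the total number of points, '#NS#' the number of scans, '#CC#' ends the global header, and '#pt#' opens each scan block with its point count. A hypothetical helper showing the marker parsing that the loader performs with substr/strtol:

#include <cstdlib>
#include <cstring>

//Hypothetical helper (not part of SoiFilter): extract the number that follows
//a 4-character marker such as "#NP#", "#NS#" or "#pt#"
static bool readSoiMarkerValue(const char* line, const char* marker, unsigned& value)
{
	if (strncmp(line, marker, 4) != 0)
		return false;
	char* pEnd = 0;
	value = static_cast<unsigned>(strtol(line + 4, &pEnd, 0));
	return true;
}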
Code example #5
File: PVFilter.cpp  Project: eimix/trunk
CC_FILE_ERROR PVFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
	//ccConsole::Print("[PVFilter::loadFile] Opening binary file '%s'...\n",filename);

	//opening file
	FILE *fp = fopen(filename, "rb");
    if (!fp)
        return CC_FERR_READING;

	CCLib::CCMiscTools::fseek64(fp,0,2);		                //we reach the end of the file
	__int64 fileSize = CCLib::CCMiscTools::ftell64(fp);		//file size query
	CCLib::CCMiscTools::fseek64(fp,0,0);						//back to the beginning

    //we read the number of points
	unsigned nbOfPoints = (unsigned) (fileSize  / (__int64)(4*sizeof(float)));

	//ccConsole::Print("[PVFilter::loadFile] Points: %i\n",nbOfPoints);

    //if the file is too big, it will be chunked into multiple parts
	unsigned fileChunkPos = 0;
	unsigned fileChunkSize = ccMin(nbOfPoints,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);

	//new cloud
    char name[64],k=0;
	sprintf(name,"unnamed - Cloud #%i",k);
	ccPointCloud* loadedCloud = new ccPointCloud(name);
    loadedCloud->reserveThePointsTable(fileChunkSize);
	loadedCloud->enableScalarField();

    float rBuff[3];

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,nbOfPoints);
	pdlg.setMethodTitle("Open PV file");
	char buffer[256];
	sprintf(buffer,"Points: %u",nbOfPoints);
	pdlg.setInfo(buffer);
	pdlg.start();

    //number of points read from the beginning of the current cloud part
    //WARNING: different from lineCounter
	unsigned pointsRead=0;

	for (unsigned i=0;i<nbOfPoints;i++)
	{
        //if we reach the max. cloud size limit, we create a new chunk
		if (pointsRead == fileChunkPos+fileChunkSize)
		{
			loadedCloud->getCurrentInScalarField()->computeMinAndMax();
			int sfIdx = loadedCloud->getCurrentInScalarFieldIndex();
			loadedCloud->setCurrentDisplayedScalarField(sfIdx);
			loadedCloud->showSF(sfIdx>=0);
            container.addChild(loadedCloud);
			fileChunkPos = pointsRead;
			fileChunkSize = ccMin(nbOfPoints-pointsRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
            sprintf(name,"unnamed - Cloud #%i",++k);
			loadedCloud = new ccPointCloud(name);
            loadedCloud->reserveThePointsTable(fileChunkSize);
			loadedCloud->enableScalarField();
		}

        //we read the 3 coordinates of the point
		if (fread(rBuff,4,3,fp) == 3)
		{
		    //conversion to CCVector3
		    CCVector3 P = CCVector3(PointCoordinateType(rBuff[0]),
                                    PointCoordinateType(rBuff[1]),
                                    PointCoordinateType(rBuff[2]));
			loadedCloud->addPoint(P);
		}
		else
		{
            fclose(fp);
			return CC_FERR_READING;
		}

        //then the scalar value
		if (fread(rBuff,4,1,fp) == 1)
		{
		    //conversion to DistanceType
		    DistanceType d = DistanceType(rBuff[0]);
		    loadedCloud->setPointScalarValue(pointsRead,d);
		}
		else
		{
            fclose(fp);
			return CC_FERR_READING;
		}

		++pointsRead;

		if (!nprogress.oneStep())
		{
			loadedCloud->resize(i+1-fileChunkPos);
			break;
		}
    }

    loadedCloud->getCurrentInScalarField()->computeMinAndMax();
	int sfIdx = loadedCloud->getCurrentInScalarFieldIndex();
	loadedCloud->setCurrentDisplayedScalarField(sfIdx);
	loadedCloud->showSF(sfIdx>=0);

    container.addChild(loadedCloud);

    fclose(fp);

	return CC_FERR_NO_ERROR;
}
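
The PV format, as this loader assumes it, is a raw binary stream of 4 consecutive 32-bit floats per point: x, y, z and a scalar value (the point count is simply the file size divided by 16). A hypothetical helper for one record, which also shows the fread check the loader intends (fread returns an unsigned element count, so it must be compared against the requested count rather than tested with '>= 0'):

#include <cstdio>

//Hypothetical helper (not part of PVFilter): read one x/y/z/scalar record
static bool readPvRecord(FILE* fp, float xyz[3], float& scalar)
{
	if (fread(xyz, sizeof(float), 3, fp) != 3)
		return false;
	return fread(&scalar, sizeof(float), 1, fp) == 1;
}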
Code example #6
File: AsciiFilter.cpp  Project: johnjsb/trunk
CC_FILE_ERROR AsciiFilter::loadCloudFromFormatedAsciiFile(	const QString& filename,
															ccHObject& container,
															const AsciiOpenDlg::Sequence& openSequence,
															char separator,
															unsigned approximateNumberOfLines,
															qint64 fileSize,
															unsigned maxCloudSize,
															unsigned skipLines,
															LoadParameters& parameters)
{
	//we may have to "slice" clouds when opening them if they are too big!
	maxCloudSize = std::min(maxCloudSize,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
	unsigned cloudChunkSize = std::min(maxCloudSize,approximateNumberOfLines);
	unsigned cloudChunkPos = 0;
	unsigned chunkRank = 1;

	//we initialize the loading accelerator structure and point cloud
	int maxPartIndex = -1;
	cloudAttributesDescriptor cloudDesc = prepareCloud(openSequence, cloudChunkSize, maxPartIndex, separator, chunkRank);

	if (!cloudDesc.cloud)
		return CC_FERR_NOT_ENOUGH_MEMORY;

	//we re-open the file (ASCII mode)
	QFile file(filename);
	if (!file.open(QFile::ReadOnly))
	{
		//we clear already initialized data
		clearStructure(cloudDesc);
		return CC_FERR_READING;
	}
	QTextStream stream(&file);

	//we skip lines as defined on input
	{
		for (unsigned i=0; i<skipLines; ++i)
		{
			stream.readLine();
		}
	}

	//progress indicator
	ccProgressDialog pdlg(true);
	CCLib::NormalizedProgress nprogress(&pdlg,approximateNumberOfLines);
	pdlg.setMethodTitle(qPrintable(QString("Open ASCII file [%1]").arg(filename)));
	pdlg.setInfo(qPrintable(QString("Approximate number of points: %1").arg(approximateNumberOfLines)));
	pdlg.start();

	//buffers
	ScalarType D = 0;
	CCVector3d P(0,0,0);
	CCVector3d Pshift(0,0,0);
	CCVector3 N(0,0,0);
	ccColor::Rgb col;

	//other useful variables
	unsigned linesRead = 0;
	unsigned pointsRead = 0;

	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	//main process
	unsigned nextLimit = /*cloudChunkPos+*/cloudChunkSize;
	QString currentLine = stream.readLine();
	while (!currentLine.isNull())
	{
		++linesRead;

		//comment
		if (currentLine.startsWith("//"))
		{
			currentLine = stream.readLine();
			continue;
		}

		if (currentLine.size() == 0)
		{
			ccLog::Warning("[AsciiFilter::Load] Line %i is corrupted (empty)!",linesRead);
			currentLine = stream.readLine();
			continue;
		}

		//if we have reached the max. number of points per cloud
		if (pointsRead == nextLimit)
		{
			ccLog::PrintDebug("[ASCII] Point %i -> end of chunk (%i points)",pointsRead,cloudChunkSize);

			//we re-evaluate the average line size
			{
				double averageLineSize = static_cast<double>(file.pos())/(pointsRead+skipLines);
				double newNbOfLinesApproximation = std::max(1.0, static_cast<double>(fileSize)/averageLineSize - static_cast<double>(skipLines));

				//if the approximation is smaller than the actual count, we add 2% by default
				if (newNbOfLinesApproximation <= pointsRead)
				{
					newNbOfLinesApproximation = std::max(static_cast<double>(cloudChunkPos+cloudChunkSize)+1.0,static_cast<double>(pointsRead) * 1.02);
				}
				approximateNumberOfLines = static_cast<unsigned>(ceil(newNbOfLinesApproximation));
				ccLog::PrintDebug("[ASCII] New approximate nb of lines: %i",approximateNumberOfLines);
			}

			//we try to resize actual clouds
			if (cloudChunkSize < maxCloudSize || approximateNumberOfLines-cloudChunkPos <= maxCloudSize)
			{
				ccLog::PrintDebug("[ASCII] We choose to enlarge existing clouds");

				cloudChunkSize = std::min(maxCloudSize,approximateNumberOfLines-cloudChunkPos);
				if (!cloudDesc.cloud->reserve(cloudChunkSize))
				{
					ccLog::Error("Not enough memory! Process stopped ...");
					result = CC_FERR_NOT_ENOUGH_MEMORY;
					break;
				}
			}
			else //otherwise we have to create new clouds
			{
				ccLog::PrintDebug("[ASCII] We choose to instantiate new clouds");

				//we store (and resize) actual cloud
				if (!cloudDesc.cloud->resize(cloudChunkSize))
					ccLog::Warning("Memory reallocation failed ... some memory may have been wasted ...");
				if (!cloudDesc.scalarFields.empty())
				{
					for (unsigned k=0; k<cloudDesc.scalarFields.size(); ++k)
						cloudDesc.scalarFields[k]->computeMinAndMax();
					cloudDesc.cloud->setCurrentDisplayedScalarField(0);
					cloudDesc.cloud->showSF(true);
				}
				//we add this cloud to the output container
				container.addChild(cloudDesc.cloud);
				cloudDesc.reset();

				//and create new one
				cloudChunkPos = pointsRead;
				cloudChunkSize = std::min(maxCloudSize,approximateNumberOfLines-cloudChunkPos);
				cloudDesc = prepareCloud(openSequence, cloudChunkSize, maxPartIndex, separator, ++chunkRank);
				if (!cloudDesc.cloud)
				{
					ccLog::Error("Not enough memory! Process stopped ...");
					break;
				}
				cloudDesc.cloud->setGlobalShift(Pshift);
			}

			//we update the progress info
			nprogress.scale(approximateNumberOfLines,100,true);
			pdlg.setInfo(qPrintable(QString("Approximate number of points: %1").arg(approximateNumberOfLines)));

			nextLimit = cloudChunkPos+cloudChunkSize;
		}

		//we split current line
		QStringList parts = currentLine.split(separator,QString::SkipEmptyParts);

		int nParts = parts.size();
		if (nParts > maxPartIndex)
		{
			//(X,Y,Z)
			if (cloudDesc.xCoordIndex >= 0)
				P.x = parts[cloudDesc.xCoordIndex].toDouble();
			if (cloudDesc.yCoordIndex >= 0)
				P.y = parts[cloudDesc.yCoordIndex].toDouble();
			if (cloudDesc.zCoordIndex >= 0)
				P.z = parts[cloudDesc.zCoordIndex].toDouble();

			//first point: check for 'big' coordinates
			if (pointsRead == 0)
			{
				if (HandleGlobalShift(P,Pshift,parameters))
				{
					cloudDesc.cloud->setGlobalShift(Pshift);
					ccLog::Warning("[ASCIIFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift.x,Pshift.y,Pshift.z);
				}
			}

			//add point
			cloudDesc.cloud->addPoint(CCVector3::fromArray((P+Pshift).u));

			//Normal vector
			if (cloudDesc.hasNorms)
			{
				if (cloudDesc.xNormIndex >= 0)
					N.x = static_cast<PointCoordinateType>(parts[cloudDesc.xNormIndex].toDouble());
				if (cloudDesc.yNormIndex >= 0)
					N.y = static_cast<PointCoordinateType>(parts[cloudDesc.yNormIndex].toDouble());
				if (cloudDesc.zNormIndex >= 0)
					N.z = static_cast<PointCoordinateType>(parts[cloudDesc.zNormIndex].toDouble());
				cloudDesc.cloud->addNorm(N);
			}

			//Colors
			if (cloudDesc.hasRGBColors)
			{
				if (cloudDesc.iRgbaIndex >= 0)
				{
					const uint32_t rgb = parts[cloudDesc.iRgbaIndex].toInt();
					col.r = ((rgb >> 16) & 0x0000ff);
					col.g = ((rgb >> 8 ) & 0x0000ff);
					col.b = ((rgb      ) & 0x0000ff);

				}
				else if (cloudDesc.fRgbaIndex >= 0)
				{
					const float rgbf = parts[cloudDesc.fRgbaIndex].toFloat();
					const uint32_t rgb = (uint32_t)(*((uint32_t*)&rgbf));
					col.r = ((rgb >> 16) & 0x0000ff);
					col.g = ((rgb >> 8 ) & 0x0000ff);
					col.b = ((rgb      ) & 0x0000ff);
				}
				else
				{
					if (cloudDesc.redIndex >= 0)
					{
						float multiplier = cloudDesc.hasFloatRGBColors[0] ? static_cast<float>(ccColor::MAX) : 1.0f;
						col.r = static_cast<ColorCompType>(parts[cloudDesc.redIndex].toFloat() * multiplier);
					}
					if (cloudDesc.greenIndex >= 0)
					{
						float multiplier = cloudDesc.hasFloatRGBColors[1] ? static_cast<float>(ccColor::MAX) : 1.0f;
						col.g = static_cast<ColorCompType>(parts[cloudDesc.greenIndex].toFloat() * multiplier);
					}
					if (cloudDesc.blueIndex >= 0)
					{
						float multiplier = cloudDesc.hasFloatRGBColors[2] ? static_cast<float>(ccColor::MAX) : 1.0f;
						col.b = static_cast<ColorCompType>(parts[cloudDesc.blueIndex].toFloat() * multiplier);
					}
				}
				cloudDesc.cloud->addRGBColor(col.rgb);
			}
			else if (cloudDesc.greyIndex >= 0)
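
The packed-colour branches above (iRgbaIndex / fRgbaIndex) decode a single integer column, or a float reinterpreted as one, that stores the colour as 0x00RRGGBB. A standalone sketch of that decoding (hypothetical helper, same shifts and masks as the code above):

#include <cstdint>

//Hypothetical helper mirroring the packed-RGB decoding above
static void unpackRgb(uint32_t rgb, unsigned char& r, unsigned char& g, unsigned char& b)
{
	r = static_cast<unsigned char>((rgb >> 16) & 0xFF);
	g = static_cast<unsigned char>((rgb >>  8) & 0xFF);
	b = static_cast<unsigned char>( rgb        & 0xFF);
}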
Code example #7
File: LASFilter.cpp  Project: vivzqs/trunk
CC_FILE_ERROR LASFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
	//opening file
	std::ifstream ifs;
	ifs.open(filename, std::ios::in | std::ios::binary);

	if (ifs.fail())
		return CC_FERR_READING;

	liblas::Reader* reader = 0;
	unsigned nbOfPoints = 0;
	std::vector<std::string> dimensions;

	try
	{
		reader = new liblas::Reader(liblas::ReaderFactory().CreateWithStream(ifs));	//using factory for automatic and transparent
		//handling of compressed/uncompressed files
		liblas::Header const& header = reader->GetHeader();

#ifdef _DEBUG
		//ccConsole::Print("[LAS FILE] %s - signature: %s",filename,header.GetFileSignature().c_str());
#endif

		//get fields present in file
		dimensions = header.GetSchema().GetDimensionNames();

		//and of course the number of points
		nbOfPoints = header.GetPointRecordsCount();
	}
	catch (...)
	{
		delete reader;
		ifs.close();
		return CC_FERR_READING;
	}

	if (nbOfPoints==0)
	{
		//strange file ;)
		delete reader;
		ifs.close();
		return CC_FERR_NO_LOAD;
	}

	liblas::Color rgbColorMask; //(0,0,0) on construction
	bool hasClassif = false;
	bool hasIntensity = false;
	bool hasTime = false;
	bool hasReturnNumber = false;
	for (unsigned k=0;k<dimensions.size();++k)
	{
		QString dim = QString(dimensions[k].c_str()).toUpper();
		bool handled = true;
		if (dim == "RED")
			rgbColorMask.SetRed(~0);
		else if (dim == "BLUE")
			rgbColorMask.SetBlue(~0);
		else if (dim == "GREEN")
			rgbColorMask.SetGreen(~0);
		else if (dim == "CLASSIFICATION")
			hasClassif = true;
		else if (dim == "TIME")
			hasTime = true;
		else if (dim == "INTENSITY")
			hasIntensity = true;
		else if (dim == "RETURN NUMBER")
			hasReturnNumber = true;
		else if (dim != "X" && dim != "Y" && dim != "Z")
			handled = false;

		ccConsole::Print(QString("[LAS FILE] Found dimension '%1' (%2)").arg(dimensions[k].c_str()).arg(handled ? "handled" : "not handled"));
	}
	bool hasColor = (rgbColorMask[0] || rgbColorMask[1] || rgbColorMask[2]);

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,nbOfPoints);
	pdlg.setMethodTitle("Open LAS file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(nbOfPoints)));
	pdlg.start();

	//number of points read from the beginning of the current cloud part
	unsigned pointsRead = 0;
	double Pshift[3] = {0.0,0.0,0.0};

	//by default we read colors as 8-bit integers and switch to 16 bits if they turn out not to be (16 bits is the standard!)
	unsigned char colorCompBitDec = 0;
	colorType rgb[3] = {0,0,0};

	ccPointCloud* loadedCloud = 0;

	ccScalarField* classifSF = 0;
	uint8_t firstClassifValue = 0;

	ccScalarField* timeSF = 0;
	double firstTime = 0.0;

	ccScalarField* intensitySF = 0;
	uint16_t firstIntensity = 0;

	ccScalarField* returnNumberSF = 0;
	uint16_t firstReturnNumber = 0;

	//if the file is too big, we will chunk it into multiple parts
	unsigned int fileChunkPos = 0;
	unsigned int fileChunkSize = 0;

	while (true)
	{
		//if we reach the end of the file, or the max. cloud size limit (in which case we create a new chunk)
		bool newPointAvailable = (nprogress.oneStep() && reader->ReadNextPoint());

		if (!newPointAvailable || pointsRead == fileChunkPos+fileChunkSize)
		{
			if (loadedCloud)
			{
				if (loadedCloud->size())
				{
					bool thisChunkHasColors = loadedCloud->hasColors();
					loadedCloud->showColors(thisChunkHasColors);
					if (hasColor && !thisChunkHasColors)
						ccLog::Warning("[LAS FILE] Color field was all black! We ignored it...");

					if (hasClassif)
					{
						if (classifSF)
						{
							classifSF->computeMinAndMax();
							int cMin = (int)classifSF->getMin();
							int cMax = (int)classifSF->getMax();
							classifSF->setColorRampSteps(cMax-cMin);
							//classifSF->setMinSaturation(cMin);
							int sfIndex = loadedCloud->addScalarField(classifSF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All classification values were the same (%1)! We ignored them...").arg(firstClassifValue));
						}
					}

					if (hasIntensity)
					{
						if (intensitySF)
						{
							intensitySF->computeMinAndMax();
							intensitySF->setColorScale(ccColorScalesManager::GetDefaultScale(ccColorScalesManager::GREY));
							int sfIndex = loadedCloud->addScalarField(intensitySF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All intensities were the same (%1)! We ignored them...").arg(firstIntensity));
						}
					}

					if (hasTime)
					{
						if (timeSF)
						{
							timeSF->computeMinAndMax();
							int sfIndex = loadedCloud->addScalarField(timeSF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All timestamps were the same (%1)! We ignored them...").arg(firstTime));
						}
					}

					if (hasReturnNumber)
					{
						if (returnNumberSF)
						{
							returnNumberSF->computeMinAndMax();
							int rMin = (int)returnNumberSF->getMin();
							int rMax = (int)returnNumberSF->getMax();
							returnNumberSF->setColorRampSteps(rMax-rMin);
							int sfIndex = loadedCloud->addScalarField(returnNumberSF);
							if (!loadedCloud->hasDisplayedScalarField())
							{
								loadedCloud->setCurrentDisplayedScalarField(sfIndex);
								loadedCloud->showSF(!thisChunkHasColors);
							}
						}
						else
						{
							ccLog::Warning(QString("[LAS FILE] All return numbers were the same (%1)! We ignored them...").arg(firstReturnNumber));
						}
					}

					//if we have reserved too much memory
					if (loadedCloud->size() < loadedCloud->capacity())
						loadedCloud->resize(loadedCloud->size());

					QString chunkName("unnamed - Cloud");
					unsigned n = container.getChildrenNumber();
					if (n!=0) //if we have more than one cloud, we append an index
					{
						if (n==1)  //we must also update the first one!
							container.getChild(0)->setName(chunkName+QString(" #1"));
						chunkName += QString(" #%1").arg(n+1);
					}
					loadedCloud->setName(chunkName);

					container.addChild(loadedCloud);
					loadedCloud=0;
				}
				else
				{
					//empty cloud?!
					delete loadedCloud;
					loadedCloud=0;
				}

				if (classifSF)
					classifSF->release();
				classifSF=0;
				if (intensitySF)
					intensitySF->release();
				intensitySF=0;
				if (returnNumberSF)
					returnNumberSF->release();
				returnNumberSF=0;
				if (timeSF)
					timeSF->release();
				timeSF=0;
			}

			if (!newPointAvailable)
				break; //end of the file (or cancel requested)

			//otherwise, we must create a new cloud
			fileChunkPos = pointsRead;
			fileChunkSize = std::min(nbOfPoints-pointsRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
			loadedCloud = new ccPointCloud();
			if (!loadedCloud->reserveThePointsTable(fileChunkSize))
			{
				ccLog::Warning("[LASFilter::loadFile] Not enough memory!");
				delete loadedCloud;
				delete reader;
				ifs.close();
				return CC_FERR_NOT_ENOUGH_MEMORY;
			}
			loadedCloud->setOriginalShift(Pshift[0],Pshift[1],Pshift[2]);

			//DGM: from now on, we only enable scalar fields when we detect a valid value!
			if (hasClassif)
			{
				assert(!classifSF);
				firstClassifValue = 0;
			}

			if (hasTime)
			{
				assert(!timeSF);
				firstTime = 0.0;
			}

			if (hasIntensity)
			{
				assert(!intensitySF);
				firstIntensity=0;
			}

			if (hasReturnNumber)
			{
				assert(!returnNumberSF);
				firstReturnNumber = 0;
			}
		}

		assert(newPointAvailable);
		const liblas::Point& p = reader->GetPoint();

		//first point: check for 'big' coordinates
		if (pointsRead==0)
		{
			double P[3]={p.GetX(),p.GetY(),p.GetZ()};
			bool shiftAlreadyEnabled = (coordinatesShiftEnabled && *coordinatesShiftEnabled && coordinatesShift);
			if (shiftAlreadyEnabled)
				memcpy(Pshift,coordinatesShift,sizeof(double)*3);
			bool applyAll=false;
			if (ccCoordinatesShiftManager::Handle(P,0,alwaysDisplayLoadDialog,shiftAlreadyEnabled,Pshift,0,applyAll))
			{
				loadedCloud->setOriginalShift(Pshift[0],Pshift[1],Pshift[2]);
				ccConsole::Warning("[LASFilter::loadFile] Cloud has been recentered! Translation: (%.2f,%.2f,%.2f)",Pshift[0],Pshift[1],Pshift[2]);

				//we save coordinates shift information
				if (applyAll && coordinatesShiftEnabled && coordinatesShift)
				{
					*coordinatesShiftEnabled = true;
					coordinatesShift[0] = Pshift[0];
					coordinatesShift[1] = Pshift[1];
					coordinatesShift[2] = Pshift[2];
				}
			}
		}

		CCVector3 P(p.GetX()+Pshift[0],p.GetY()+Pshift[1],p.GetZ()+Pshift[2]);
		loadedCloud->addPoint(P);

		//color field
		if (hasColor)
		{
			//Warning: LAS colors are stored on 16 bits!
			liblas::Color col = p.GetColor();
			col[0] &= rgbColorMask[0];
			col[1] &= rgbColorMask[1];
			col[2] &= rgbColorMask[2];

			//if we haven't reserved a color field yet, we first check that the color is not black
			bool pushColor = true;
			if (!loadedCloud->hasColors())
			{
				//if the color is not black, we are sure it's a valid color field!
				if (col[0] || col[1] || col[2])
				{
					if (loadedCloud->reserveTheRGBTable())
					{
						//we must set the color (black) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							loadedCloud->addRGBColor(ccColor::black);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: color field will be ignored!");
						hasColor = false; //no need to retry with the other chunks anyway
						pushColor = false;
					}
				}
				else //otherwise we ignore it for the moment (we'll add it later if necessary)
				{
					pushColor = false;
				}
			}

			//do we need to push this color?
			if (pushColor)
			{
				//we test if the color components are on 16 bits (standard) or only on 8 bits (it happens ;)
				if (colorCompBitDec==0)
				{
					if (	(col[0] & 0xFF00)
						||  (col[1] & 0xFF00)
						||  (col[2] & 0xFF00))
					{
						//the color components are on 16 bits!
						ccLog::Print("[LAS FILE] Color components are coded on 16 bits");
						colorCompBitDec = 8;
						//we fix all the previously read colors
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							loadedCloud->setPointColor(i,ccColor::black); //255 >> 8 = 0!
					}
				}

				rgb[0]=(colorType)(col[0]>>colorCompBitDec);
				rgb[1]=(colorType)(col[1]>>colorCompBitDec);
				rgb[2]=(colorType)(col[2]>>colorCompBitDec);

				loadedCloud->addRGBColor(rgb);
			}
		}

		if (hasClassif)
		{
			uint8_t intValue = p.GetClassification().GetClass();
			if (classifSF)
			{
				classifSF->addElement(intValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size()==1)
				{
					firstClassifValue = intValue;
				}
				
				if (intValue != firstClassifValue || firstClassifValue > 1) //0 = Created, never classified, 1 = Unclassified
				{
					classifSF = new ccScalarField(CC_LAS_CLASSIFICATION_FIELD_NAME);
					if (classifSF->reserve(fileChunkSize))
					{
						classifSF->link();
						//we must set the classification value (firstClassifValue) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							classifSF->addElement(firstClassifValue);
						classifSF->addElement(intValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: classificaiton field will be ignored!");
						hasClassif = false; //no need to retry with the other chunks anyway
						classifSF->release();
						classifSF=0;
					}
				}
			}
		}

		if (hasTime)
		{
			double timeValue = p.GetTime();

			if (timeSF)
			{
				timeSF->addElement(timeValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size()==1)
				{
					firstTime = timeValue;
				}
				else if (timeValue != firstTime)
				{
					timeSF = new ccScalarField(CC_SCAN_TIME_FIELD_NAME);
					if (timeSF->reserve(fileChunkSize))
					{
						timeSF->link();
						//we must set the timestamp value (firstTime) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							timeSF->addElement(firstTime);
						timeSF->addElement(timeValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: 'time' field will be ignored!");
						hasTime = false; //no need to retry with the other chunks anyway
						timeSF->release();
						timeSF=0;
					}
				}
			}
		}

		if (hasIntensity)
		{
			uint16_t intValue = p.GetIntensity();
			if (intensitySF)
			{
				intensitySF->addElement(intValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size()==1)
				{
					firstIntensity = intValue;
				}
				
				if (intValue != firstIntensity || (firstIntensity != 0 && firstIntensity != 65535))
				{
					intensitySF = new ccScalarField(CC_SCAN_INTENSITY_FIELD_NAME);
					if (intensitySF->reserve(fileChunkSize))
					{
						intensitySF->link();
						//we must set the intensity (firstIntensity) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							intensitySF->addElement(firstIntensity);
						intensitySF->addElement(intValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: intensity field will be ignored!");
						hasIntensity = false; //no need to retry with the other chunks anyway
						intensitySF->release();
						intensitySF=0;
					}
				}
			}
		}

		if (hasReturnNumber)
		{
			uint16_t intValue = p.GetReturnNumber();
			if (returnNumberSF)
			{
				returnNumberSF->addElement(intValue);
			}
			else
			{
				//first point? we track its value
				if (loadedCloud->size()==1)
				{
					firstReturnNumber = intValue;
				}
				
				if (intValue != firstReturnNumber)
				{
					returnNumberSF = new ccScalarField(CC_SCAN_RETURN_INDEX_FIELD_NAME);
					if (returnNumberSF->reserve(fileChunkSize))
					{
						returnNumberSF->link();
						//we must set the return index (firstReturnNumber) of all the previously skipped points
						for (unsigned i=0;i<loadedCloud->size()-1;++i)
							returnNumberSF->addElement(firstReturnNumber);
						returnNumberSF->addElement(intValue);
					}
					else
					{
						ccConsole::Warning("[LAS FILE] Not enough memory: return number field will be ignored!");
						hasReturnNumber = false; //no need to retry with the other chunks anyway
						returnNumberSF->release();
						returnNumberSF=0;
					}
				}
			}

		}

		++pointsRead;
	}

	if (reader)
		delete reader;
	reader=0;
	ifs.close();

	return CC_FERR_NO_ERROR;
}
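Note on the color handling above: LAS stores RGB on 16 bits, but many files actually contain 8-bit values, which is why the loader inspects the high byte before deciding on a bit shift. Below is a self-contained sketch of that heuristic; the helper names detect16BitColors and toDisplayColor are made up for illustration and are not CloudCompare functions.

#include <cstdint>
#include <cstdio>

//Illustrative sketch (not CloudCompare code): decide whether 16-bit LAS color
//components really carry 16-bit data, then convert them to 8-bit display colors.
static bool detect16BitColors(const uint16_t col[3])
{
	//if any component uses its upper byte, the file really stores 16-bit colors
	return (col[0] & 0xFF00) || (col[1] & 0xFF00) || (col[2] & 0xFF00);
}

static void toDisplayColor(const uint16_t in[3], uint8_t out[3], unsigned bitShift)
{
	//bitShift is 8 for genuine 16-bit colors, 0 for 8-bit values stored in 16-bit fields
	for (int i = 0; i < 3; ++i)
		out[i] = static_cast<uint8_t>(in[i] >> bitShift);
}

int main()
{
	const uint16_t raw[3] = { 65280, 32768, 255 };
	unsigned shift = detect16BitColors(raw) ? 8 : 0;
	uint8_t rgb[3];
	toDisplayColor(raw, rgb, shift);
	printf("shift=%u -> (%u,%u,%u)\n", shift, (unsigned)rgb[0], (unsigned)rgb[1], (unsigned)rgb[2]);
	return 0;
}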
Code example #8
0
File: PNFilter.cpp Project: kod3r/trunk
CC_FILE_ERROR PNFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
	//opening file
	QFile in(filename);
	if (!in.open(QIODevice::ReadOnly))
		return CC_FERR_READING;

    //we deduce the number of points from the file size
	qint64 fileSize = in.size();
	qint64 singlePointSize = 6*sizeof(float);
	//check that size is ok
	if (fileSize == 0)
		return CC_FERR_NO_LOAD;
	if ((fileSize % singlePointSize) != 0)
		return CC_FERR_MALFORMED_FILE;
	unsigned numberOfPoints = (unsigned) (fileSize  / singlePointSize);

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Open PN file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(numberOfPoints)));
	pdlg.start();

	ccPointCloud* loadedCloud = 0;
    //if the file is too big, it will be chunked into multiple parts
	unsigned chunkIndex = 0;
	unsigned fileChunkPos = 0;
	unsigned fileChunkSize = 0;
    //number of points read for the current cloud part
	unsigned pointsRead = 0;
	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	for (unsigned i=0;i<numberOfPoints;i++)
	{
        //if we reach the max. cloud size limit, we create a new chunk
		if (pointsRead == fileChunkPos+fileChunkSize)
		{
			if (loadedCloud)
				container.addChild(loadedCloud);
			fileChunkPos = pointsRead;
			fileChunkSize = std::min<unsigned>(numberOfPoints-pointsRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
			loadedCloud = new ccPointCloud(QString("unnamed - Cloud #%1").arg(++chunkIndex));
            if (!loadedCloud || !loadedCloud->reserveThePointsTable(fileChunkSize) || !loadedCloud->reserveTheNormsTable())
			{
				result = CC_FERR_NOT_ENOUGH_MEMORY;
				if (loadedCloud)
					delete loadedCloud;
				loadedCloud=0;
				break;
			}
			loadedCloud->showNormals(true);
		}

        //we read the 3 coordinates of the point
		float rBuff[3];
		if (in.read((char*)rBuff,3*sizeof(float))>=0)
		{
		    //conversion to CCVector3
		    CCVector3 P((PointCoordinateType)rBuff[0],
						(PointCoordinateType)rBuff[1],
						(PointCoordinateType)rBuff[2]);
			loadedCloud->addPoint(P);
		}
		else
		{
			result = CC_FERR_READING;
			break;
		}

        //then the 3 components of the normal vector
		if (in.read((char*)rBuff,3*sizeof(float))>=0)
		{
		    //conversion to PointCoordinateType[3]
			PointCoordinateType N[3] = {(PointCoordinateType)rBuff[0],
										(PointCoordinateType)rBuff[1],
										(PointCoordinateType)rBuff[2]};
			loadedCloud->addNorm(N);
		}
		else
		{
			//add fake normal for consistency then break
			loadedCloud->addNorm(s_defaultNorm);
			result = CC_FERR_READING;
			break;
		}

		++pointsRead;

		if (!nprogress.oneStep())
		{
			result = CC_FERR_CANCELED_BY_USER;
			break;
		}
    }

	in.close();

	if (loadedCloud)
	{
		if (loadedCloud->size() < loadedCloud->capacity())
			loadedCloud->resize(loadedCloud->size());
		container.addChild(loadedCloud);
	}

	return result;
}
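The record-count check above (the file size must be an exact multiple of one six-float record) and the splitting of the result into fixed-size chunks can be summarized independently of CloudCompare. In this sketch MAX_POINTS_PER_CLOUD is an assumed stand-in for CC_MAX_NUMBER_OF_POINTS_PER_CLOUD.

#include <algorithm>
#include <cstdint>
#include <cstdio>

//Illustrative sketch of the PN record-count and chunking arithmetic.
//MAX_POINTS_PER_CLOUD is an assumed stand-in for CC_MAX_NUMBER_OF_POINTS_PER_CLOUD.
static const unsigned MAX_POINTS_PER_CLOUD = 128000000;

int main()
{
	const int64_t fileSize = 7200;                      //example size in bytes
	const int64_t singlePointSize = 6 * sizeof(float);  //XYZ + normal, all floats

	if (fileSize == 0 || (fileSize % singlePointSize) != 0)
	{
		printf("empty or malformed file\n");
		return 1;
	}

	unsigned numberOfPoints = static_cast<unsigned>(fileSize / singlePointSize);

	//split the total into chunks, exactly as the loader above does
	for (unsigned pointsRead = 0; pointsRead < numberOfPoints; )
	{
		unsigned chunkSize = std::min(numberOfPoints - pointsRead, MAX_POINTS_PER_CLOUD);
		printf("chunk of %u points starting at %u\n", chunkSize, pointsRead);
		pointsRead += chunkSize;
	}
	return 0;
}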
Code example #9
0
File: PNFilter.cpp Project: kod3r/trunk
CC_FILE_ERROR PNFilter::saveToFile(ccHObject* entity, const char* filename)
{
	if (!entity || !filename)
        return CC_FERR_BAD_ARGUMENT;

	ccHObject::Container clouds;
	if (entity->isKindOf(CC_POINT_CLOUD))
        clouds.push_back(entity);
    else
        entity->filterChildren(clouds, true, CC_POINT_CLOUD);

    if (clouds.empty())
    {
        ccLog::Error("No point cloud in input selection!");
        return CC_FERR_BAD_ENTITY_TYPE;
    }
    else if (clouds.size()>1)
    {
        ccLog::Error("Can't save more than one cloud per PN file!");
        return CC_FERR_BAD_ENTITY_TYPE;
    }

    //the cloud to save
    ccGenericPointCloud* theCloud = ccHObjectCaster::ToGenericPointCloud(clouds[0]);
	unsigned numberOfPoints = theCloud->size();

	if (numberOfPoints==0)
	{
        ccLog::Error("Cloud is empty!");
        return CC_FERR_BAD_ENTITY_TYPE;
	}

    //open binary file for writing
	QFile out(filename);
	if (!out.open(QIODevice::WriteOnly))
		return CC_FERR_WRITING;

    //Has the cloud been recentered?
	const double* shift = theCloud->getOriginalShift();
	if (fabs(shift[0])+fabs(shift[1])+fabs(shift[2])>0.0)
        ccLog::Warning(QString("[PNFilter::save] Can't recenter cloud '%1' when saving it in a PN file!").arg(theCloud->getName()));

	bool hasNorms = theCloud->hasNormals();
	if (!hasNorms)
        ccLog::Warning(QString("[PNFilter::save] Cloud '%1' has no normal (we will save points with a default normal)!").arg(theCloud->getName()));
	float norm[3] = {(float)s_defaultNorm[0], (float)s_defaultNorm[1], (float)s_defaultNorm[2]};

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Save PN file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(numberOfPoints)));
	pdlg.start();

	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	for (unsigned i=0;i<numberOfPoints;i++)
	{
		//write point
		{
			const CCVector3* P = theCloud->getPoint(i);
			
			//conversion to float
			float wBuff[3] = {(float)P->x, (float)P->y, (float)P->z};
			if (out.write((const char*)wBuff,3*sizeof(float))<0)
			{
				result = CC_FERR_WRITING;
				break;
			}
		}
			
		//write normal
		if (hasNorms)
		{
            const PointCoordinateType* N = theCloud->getPointNormal(i);
			//conversion to float
            norm[0] = (float)N[0];
            norm[1] = (float)N[1];
            norm[2] = (float)N[2];
		}
		if (out.write((const char*)norm,3*sizeof(float))<0)
		{
			result = CC_FERR_WRITING;
			break;
		}

		if (!nprogress.oneStep())
		{
			result = CC_FERR_CANCELED_BY_USER;
			break;
		}
	}

	out.close();

	return result;
}
Code example #10
0
File: ccGriddedTools.cpp Project: cnyinfei/trunk
bool ccGriddedTools::ComputeNormals(ccPointCloud* cloud,
									const std::vector<int>& indexGrid,
									int width,
									int height,
									bool* canceledByUser/*=0*/,
									int kernelWidth/*=3*/ )
{
	//init
	bool result = true;
	if (canceledByUser)
		*canceledByUser = false;

	//try to compute normals
	if (cloud->reserveTheNormsTable())
	{
		//progress dialog
		ccProgressDialog pdlg(true);
		CCLib::NormalizedProgress nprogress(&pdlg,cloud->size());
		pdlg.setMethodTitle("Compute normals");
		pdlg.setInfo(qPrintable(QString("Number of points: %1").arg(cloud->size())));
		pdlg.start();

		const int* _indexGrid = &(indexGrid[0]);
		CCLib::ReferenceCloud knn(cloud);
		
		//neighborhood 'half-width' (total width = 1 + 2*kernelWidth) 
		//max number of neighbours: (1+2*kernelWidth)^2
		knn.reserve((1+2*kernelWidth)*(1+2*kernelWidth));

		//for each grid cell
		for (int j=0; j<height; ++j)
		{
			for (int i=0; i<width; ++i, ++_indexGrid)
			{
				if (*_indexGrid >= 0)
				{
					unsigned pointIndex = static_cast<unsigned>(*_indexGrid);
					//add the point itself
					knn.clear(false);
					knn.addPointIndex(pointIndex); //warning: indexes are shifted (0 = no point)
					const CCVector3* P = cloud->getPoint(pointIndex);

					//look for neighbors
					for (int v=std::max(0,j-kernelWidth); v<std::min<int>(height,j+kernelWidth); ++v)
					{
						if (v != j)
						{
							for (int u=std::max(0,i-kernelWidth); u<std::min<int>(width,i+kernelWidth); ++u)
							{
								if (u != i)
								{
									int indexN = indexGrid[v*width + u];
									if (indexN >= 0)
									{
										//we don't consider points whose depth differs too much from the central point's
										const CCVector3* Pn = cloud->getPoint(static_cast<unsigned>(indexN));
										if (fabs(Pn->z - P->z) <= std::max(fabs(Pn->x - P->x),fabs(Pn->y - P->y)))
											knn.addPointIndex(static_cast<unsigned>(indexN)); //warning: indexes are shifted (0 = no point)
									}
								}
							}
						}
					}

					CCVector3 N(0,0,1);
					if (knn.size() >= 3)
					{
						CCLib::Neighbourhood Z(&knn);

						//compute normal with quadratic func. (if we have enough points)
						if (false/*knn.size() >= 6*/)
						{
							uchar hfDims[3];
							const PointCoordinateType* h = Z.getHeightFunction(hfDims);
							if (h)
							{
								const CCVector3* gv = Z.getGravityCenter();
								assert(gv);

								const uchar& iX = hfDims[0];
								const uchar& iY = hfDims[1];
								const uchar& iZ = hfDims[2];

								PointCoordinateType lX = P->u[iX] - gv->u[iX];
								PointCoordinateType lY = P->u[iY] - gv->u[iY];

								N.u[iX] = h[1] + (2 * h[3] * lX) + (h[4] * lY);
								N.u[iY] = h[2] + (2 * h[5] * lY) + (h[4] * lX);
								N.u[iZ] = -PC_ONE;

								N.normalize();
							}
						}
						else
#define USE_LSQ_PLANE
#ifdef USE_LSQ_PLANE
						{
							//compute normal with best fit plane
							const CCVector3* _N = Z.getLSQPlaneNormal();
							if (_N)
								N = *_N;
						}
#else
						{
							//compute normals with a 2.5D triangulation
							CCLib::GenericIndexedMesh* theMesh = Z.triangulateOnPlane();
							if (theMesh)
							{
								unsigned faceCount = theMesh->size();
								N.z = 0;

								//for all triangles
								theMesh->placeIteratorAtBegining();
								for (unsigned j=0; j<faceCount; ++j)
								{
									const CCLib::TriangleSummitsIndexes* tsi = theMesh->getNextTriangleIndexes();
									//we check whether the central point is one of the triangle's vertices
									if (tsi->i1 == 0 || tsi->i2 == 0|| tsi->i3 == 0)
									{
										const CCVector3 *A = knn.getPoint(tsi->i1);
										const CCVector3 *B = knn.getPoint(tsi->i2);
										const CCVector3 *C = knn.getPoint(tsi->i3);

										CCVector3 no = (*B - *A).cross(*C - *A);
										//no.normalize();
										N += no;
									}
								}

								delete theMesh;
								theMesh = 0;

								//normalize the 'mean' vector
								N.normalize();
							}
						}
#endif
					}

					//check normal vector sign
					CCVector3 viewVector = *P /*- cloudTrans.getTranslationAsVec3D()*/; //clouds are still in their local coordinate system!
					if (viewVector.dot(N) > 0)
						N *= -PC_ONE;
					cloud->addNorm(N);

					//progress
					if (!nprogress.oneStep())
					{
						result = false;
						if (canceledByUser)
							*canceledByUser = true;
						ccLog::Warning("[ComputeNormals] Process canceled by user!");
						//early stop
						j = height;
						break;
					}
				}
			}
		}

		if (!result)
		{
			cloud->unallocateNorms();
		}
	}
	else
	{
		ccLog::Warning("[ComputeNormals] Not enough memory!");
	}

	return result;
}
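The neighbour-gathering loops above clamp a window of half-width kernelWidth against the grid borders before testing each candidate's depth. The sketch below isolates just that clamping logic; it uses inclusive bounds on both sides (a slight simplification of the loops above, which exclude the far row/column) and is not part of ccGriddedTools.

#include <algorithm>
#include <cstdio>
#include <vector>

//Illustrative sketch (not ccGriddedTools code): collect the valid grid indexes
//inside a window around cell (i,j), clamped to the grid borders.
static std::vector<int> collectNeighbours(const std::vector<int>& indexGrid,
                                          int width, int height,
                                          int i, int j, int kernelWidth)
{
	std::vector<int> neighbours;
	neighbours.reserve((1 + 2 * kernelWidth) * (1 + 2 * kernelWidth));

	for (int v = std::max(0, j - kernelWidth); v <= std::min(height - 1, j + kernelWidth); ++v)
		for (int u = std::max(0, i - kernelWidth); u <= std::min(width - 1, i + kernelWidth); ++u)
			if (indexGrid[v * width + u] >= 0) //negative index = empty cell
				neighbours.push_back(indexGrid[v * width + u]);

	return neighbours;
}

int main()
{
	//3x3 grid with one empty cell (-1)
	std::vector<int> grid = { 0, 1, 2, 3, -1, 5, 6, 7, 8 };
	std::vector<int> n = collectNeighbours(grid, 3, 3, 1, 1, 1);
	printf("found %zu neighbours\n", n.size());
	return 0;
}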
Code example #11
0
File: ObjFilter.cpp Project: ORNis/CloudCompare
CC_FILE_ERROR ObjFilter::saveToFile(ccHObject* entity, QString filename, SaveParameters& parameters)
{
	if (!entity)
		return CC_FERR_BAD_ARGUMENT;

	if (!entity->isKindOf(CC_TYPES::MESH))
	{
		ccLog::Warning("[OBJ] This filter can only save one mesh (optionally with sub-meshes) at a time!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	//mesh
	ccGenericMesh* mesh = ccHObjectCaster::ToGenericMesh(entity);
	if (!mesh || mesh->size() == 0)
	{
		ccLog::Warning("[OBJ] Input mesh is empty!");
		return CC_FERR_NO_SAVE;
	}

	//vertices
	ccGenericPointCloud* vertices = mesh->getAssociatedCloud();
	if (!vertices || vertices->size() == 0)
	{
		ccLog::Warning("[OBJ] Input mesh has no vertices?!");
		return CC_FERR_NO_SAVE;
	}
	unsigned nbPoints = vertices->size();

	//try to open file for saving
	QFile file(filename);
	if (!file.open(QFile::Text | QFile::WriteOnly))
		return CC_FERR_WRITING;

	//progress
	ccProgressDialog pdlg(true);
	unsigned numberOfTriangles = mesh->size();
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfTriangles);
	pdlg.setMethodTitle(qPrintable(QString("Saving mesh [%1]").arg(mesh->getName())));
	pdlg.setInfo(qPrintable(QString("Triangles: %1").arg(numberOfTriangles)));
	pdlg.start();
	
	QTextStream stream(&file);
	stream.setRealNumberPrecision(sizeof(PointCoordinateType) == 4 ? 8 : 12);

	stream << "#OBJ Generated by CloudCompare (TELECOM PARISTECH/EDF R&D)" << endl;
	if (file.error() != QFile::NoError)
		return CC_FERR_WRITING;

	for (unsigned i=0; i<nbPoints; ++i)
	{
		const CCVector3* P = vertices->getPoint(i);
		CCVector3d Pglobal = vertices->toGlobal3d<PointCoordinateType>(*P);
		stream << "v " << Pglobal.x << " " << Pglobal.y << " " << Pglobal.z << endl;
		if (file.error() != QFile::NoError)
			return CC_FERR_WRITING;
	}

	//normals
	bool withTriNormals = mesh->hasTriNormals();
	bool withVertNormals = vertices->hasNormals();
	bool withNormals = withTriNormals || withVertNormals;
	if (withNormals)
	{
		//per-triangle normals
		if (withTriNormals)
		{
			NormsIndexesTableType* normsTable = mesh->getTriNormsTable();
			if (normsTable)
			{
				for (unsigned i=0; i<normsTable->currentSize(); ++i)
				{
					const CCVector3& normalVec = ccNormalVectors::GetNormal(normsTable->getValue(i));
					stream << "vn " << normalVec.x << " " << normalVec.y << " " << normalVec.z << endl;
					if (file.error() != QFile::NoError)
						return CC_FERR_WRITING;
				}
			}
			else
			{
				assert(false);
				withTriNormals = false;
			}
		}
		//per-vertex normals
		else //if (withVertNormals)
		{
			for (unsigned i=0; i<nbPoints; ++i)
			{
				const CCVector3& normalVec = vertices->getPointNormal(i);
				stream << "vn " << normalVec.x << " " << normalVec.y << " " << normalVec.z << endl;
				if (file.error() != QFile::NoError)
					return CC_FERR_WRITING;
			}
		}
	}

	//materials
	const ccMaterialSet* materials = mesh->getMaterialSet();
	bool withMaterials = (materials && mesh->hasMaterials());
	if (withMaterials)
	{
		//save mtl file
		QStringList errors;
		QString baseName = QFileInfo(filename).baseName();
		if (materials->saveAsMTL(QFileInfo(filename).absolutePath(),baseName,errors))
		{
			stream << "mtllib " << baseName << ".mtl" << endl;
			if (file.error() != QFile::NoError)
				return CC_FERR_WRITING;
		}
		else
		{
			materials = 0;
			withMaterials = false;
		}

		//display potential 'errors'
		for (int i=0; i<errors.size(); ++i)
		{
			ccLog::Warning(QString("[OBJ][Material file writer] ")+errors[i]);
		}
	}

	//save texture coordinates
	bool withTexCoordinates = withMaterials && mesh->hasPerTriangleTexCoordIndexes();
	if (withTexCoordinates)
	{
		TextureCoordsContainer* texCoords = mesh->getTexCoordinatesTable();
		if (texCoords)
		{
			for (unsigned i=0; i<texCoords->currentSize(); ++i)
			{
				const float* tc = texCoords->getValue(i);
				stream << "vt " << tc[0] << " " << tc[1] << endl;
				if (file.error() != QFile::NoError)
					return CC_FERR_WRITING;
			}
		}
		else
		{
			withTexCoordinates = false;
		}
	}

	ccHObject::Container subMeshes;
	//look for sub-meshes
	mesh->filterChildren(subMeshes,false,CC_TYPES::SUB_MESH);
	//check that the number of facets is the same as the full mesh!
	{
		unsigned faceCount = 0;
		for (ccHObject::Container::const_iterator it = subMeshes.begin(); it != subMeshes.end(); ++it)
			faceCount += static_cast<ccSubMesh*>(*it)->size();

		//if there's no face (i.e. no sub-mesh) or fewer faces than the full mesh, we save the full mesh!
		if (faceCount < mesh->size())
		{
			subMeshes.clear();
			subMeshes.push_back(mesh);
		}
	}

	//mesh or sub-meshes
	unsigned indexShift = 0;
	for (ccHObject::Container::const_iterator it = subMeshes.begin(); it != subMeshes.end(); ++it)
	{
		ccGenericMesh* st = static_cast<ccGenericMesh*>(*it);

		stream << "g " << (st->getName().isNull() ? "mesh" : st->getName()) << endl;
		if (file.error() != QFile::NoError)
			return CC_FERR_WRITING;

		unsigned triNum = st->size();
		st->placeIteratorAtBegining();

		int lastMtlIndex = -1;
		int t1 = -1, t2 = -1, t3 = -1;

		for (unsigned i=0; i<triNum; ++i)
		{
			if (withMaterials)
			{
				int mtlIndex = mesh->getTriangleMtlIndex(indexShift+i);
				if (mtlIndex != lastMtlIndex)
				{
					if (mtlIndex >= 0 && mtlIndex < static_cast<int>(materials->size()))
					{
						ccMaterial::CShared mat = materials->at(mtlIndex);
						stream << "usemtl " << mat->getName() << endl;
					}
					else
					{
						stream << "usemtl " << endl;
					}
					if (file.error() != QFile::NoError)
						return CC_FERR_WRITING;
					lastMtlIndex = mtlIndex;
				}

				if (withTexCoordinates)
				{
					mesh->getTriangleTexCoordinatesIndexes(indexShift+i,t1,t2,t3);
					if (t1 >= 0) ++t1;
					if (t2 >= 0) ++t2;
					if (t3 >= 0) ++t3;
				}
			}

			const CCLib::VerticesIndexes* tsi = st->getNextTriangleVertIndexes();
			//for per-triangle normals
			unsigned i1 = tsi->i1 + 1;
			unsigned i2 = tsi->i2 + 1;
			unsigned i3 = tsi->i3 + 1;

			stream << "f";
			if (withNormals)
			{
				int n1 = static_cast<int>(i1);
				int n2 = static_cast<int>(i2);
				int n3 = static_cast<int>(i3);
				if (withTriNormals)
				{
					st->getTriangleNormalIndexes(i,n1,n2,n3);
					if (n1 >= 0) ++n1;
					if (n2 >= 0) ++n2;
					if (n3 >= 0) ++n3;
				}

				if (withTexCoordinates)
				{
					stream << " " << i1 << "/" << t1 << "/" << n1;
					stream << " " << i2 << "/" << t2 << "/" << n2;
					stream << " " << i3 << "/" << t3 << "/" << n3;
				}
				else
				{
					stream << " " << i1 << "//" << n1;
					stream << " " << i2 << "//" << n2;
					stream << " " << i3 << "//" << n3;
				}
			}
			else
			{
				if (withTexCoordinates)
				{
					stream << " " << i1 << "/" << t1;
					stream << " " << i2 << "/" << t2;
					stream << " " << i3 << "/" << t3;
				}
				else
				{
					stream << " " << i1;
					stream << " " << i2;
					stream << " " << i3;
				}
			}
			stream << endl;

			if (file.error() != QFile::NoError)
				return CC_FERR_WRITING;

			if (!nprogress.oneStep()) //cancel requested
				return CC_FERR_CANCELED_BY_USER;
		}

		stream << "#" << triNum << " faces" << endl;
		if (file.error() != QFile::NoError)
			return CC_FERR_WRITING;

		indexShift += triNum;
	}

	return CC_FERR_NO_ERROR;
}
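OBJ face records are 1-based and use different separators depending on which attributes are present ("f v/t/n", "f v//n", "f v/t", "f v"), which is exactly what the branching at the end of the writer above produces. A minimal standalone sketch of that formatting; faceLine is an illustrative helper, not part of ObjFilter.

#include <cstdio>
#include <string>

//Illustrative sketch: build one OBJ face line for a triangle, using the same
//v / v/t / v//n / v/t/n conventions as the writer above. Indexes are 1-based.
static std::string faceLine(const unsigned v[3], const int* t, const int* n)
{
	std::string line = "f";
	char buf[64];
	for (int k = 0; k < 3; ++k)
	{
		if (t && n)
			snprintf(buf, sizeof(buf), " %u/%d/%d", v[k], t[k], n[k]);
		else if (n)
			snprintf(buf, sizeof(buf), " %u//%d", v[k], n[k]);
		else if (t)
			snprintf(buf, sizeof(buf), " %u/%d", v[k], t[k]);
		else
			snprintf(buf, sizeof(buf), " %u", v[k]);
		line += buf;
	}
	return line;
}

int main()
{
	const unsigned v[3] = { 1, 2, 3 };
	const int n[3] = { 1, 1, 1 };
	printf("%s\n", faceLine(v, 0, n).c_str()); //prints: f 1//1 2//1 3//1
	return 0;
}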
Code example #12
0
File: PNFilter.cpp Project: asmaloney/trunk
CC_FILE_ERROR PNFilter::loadFile(const QString& filename, ccHObject& container, LoadParameters& parameters)
{
	//opening file
	QFile in(filename);
	if (!in.open(QIODevice::ReadOnly))
		return CC_FERR_READING;

	//we deduce the number of points from the file size
	qint64 fileSize = in.size();
	qint64 singlePointSize = 6*sizeof(float);
	//check that size is ok
	if (fileSize == 0)
		return CC_FERR_NO_LOAD;
	if ((fileSize % singlePointSize) != 0)
		return CC_FERR_MALFORMED_FILE;
	unsigned numberOfPoints = static_cast<unsigned>(fileSize  / singlePointSize);

	//progress dialog
	QScopedPointer<ccProgressDialog> pDlg(0);
	if (parameters.parentWidget)
	{
		pDlg.reset(new ccProgressDialog(true, parameters.parentWidget)); //cancel available
		pDlg->setMethodTitle(QObject::tr("Open PN file"));
		pDlg->setInfo(QObject::tr("Points: %L1").arg( numberOfPoints ));
		pDlg->start();
	}
	CCLib::NormalizedProgress nprogress(pDlg.data(), numberOfPoints);

	ccPointCloud* loadedCloud = 0;
	//if the file is too big, it will be chunked into multiple parts
	unsigned chunkIndex = 0;
	unsigned fileChunkPos = 0;
	unsigned fileChunkSize = 0;
	//number of points read for the current cloud part
	unsigned pointsRead = 0;
	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	for (unsigned i = 0; i < numberOfPoints; i++)
	{
		//if we reach the max. cloud size limit, we create a new chunk
		if (pointsRead == fileChunkPos + fileChunkSize)
		{
			if (loadedCloud)
				container.addChild(loadedCloud);
			fileChunkPos = pointsRead;
			fileChunkSize = std::min<unsigned>(numberOfPoints - pointsRead, CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
			loadedCloud = new ccPointCloud(QString("unnamed - Cloud #%1").arg(++chunkIndex));
			if (!loadedCloud || !loadedCloud->reserveThePointsTable(fileChunkSize) || !loadedCloud->reserveTheNormsTable())
			{
				result = CC_FERR_NOT_ENOUGH_MEMORY;
				if (loadedCloud)
					delete loadedCloud;
				loadedCloud = 0;
				break;
			}
			loadedCloud->showNormals(true);
		}

		//we read the 3 coordinates of the point
		float rBuff[3];
		if (in.read((char*)rBuff, 3 * sizeof(float)) >= 0)
		{
			//conversion to CCVector3
			CCVector3 P = CCVector3::fromArray(rBuff);
			loadedCloud->addPoint(P);
		}
		else
		{
			result = CC_FERR_READING;
			break;
		}

		//then the 3 components of the normal vector
		if (in.read((char*)rBuff,3*sizeof(float))>=0)
		{
			loadedCloud->addNorm(CCVector3::fromArray(rBuff));
		}
		else
		{
			//add fake normal for consistency then break
			loadedCloud->addNorm(s_defaultNorm);
			result = CC_FERR_READING;
			break;
		}

		++pointsRead;

		if (pDlg && !nprogress.oneStep())
		{
			result = CC_FERR_CANCELED_BY_USER;
			break;
		}
	}

	in.close();

	if (loadedCloud)
	{
		loadedCloud->shrinkToFit();
		container.addChild(loadedCloud);
	}

	return result;
}
Code example #13
0
File: UltFilter.cpp Project: dshean/trunk
CC_FILE_ERROR UltFilter::loadFile(const char* filename, ccHObject& container, bool alwaysDisplayLoadDialog/*=true*/, bool* coordinatesShiftEnabled/*=0*/, double* coordinatesShift/*=0*/)
{
	//ccConsole::Print("[BinFilter::loadFile] Opening binary file '%s'...\n",filename);

	assert(filename);

    //file size
    long size = QFileInfo(filename).size();

    if ( size == 0 || ((size % sizeof(MarkersFrame)) != 0))
		return CC_FERR_MALFORMED_FILE;

    //number of transformations in file
    long count = size / sizeof(MarkersFrame);
	ccConsole::Print("[TransBuffer] Found %i trans. in file '%s'",count,filename);
	if (count<1)
		return CC_FERR_NO_LOAD;

	ccPointCloud* cloud = new ccPointCloud();
	if (!cloud->reserve(count) || !cloud->enableScalarField())
	{
		delete cloud;
		return CC_FERR_NOT_ENOUGH_MEMORY;
	}

    ccProgressDialog pdlg(true);
    pdlg.setMethodTitle("Open Ult File");
	CCLib::NormalizedProgress nprogress(&pdlg,count);
	pdlg.reset();
	pdlg.setInfo(qPrintable(QString("Transformations: %1").arg(count)));
	pdlg.start();
	QApplication::processEvents();

    FILE* fp = fopen(filename,"rb");
    if (!fp)
	{
		delete cloud;
        return CC_FERR_READING;
	}

	//which marker is the reference?
	QMessageBox::StandardButton tibiaIsRef = QMessageBox::question(0, "Choose reference", "Tibia as reference (yes)? Or femur (no)? Or none (no to all)", QMessageBox::Yes | QMessageBox::No | QMessageBox::NoToAll, QMessageBox::Yes );
	MARKER_ROLE referenceRole = MARKER_LOCALIZER;
	if (tibiaIsRef == QMessageBox::Yes)
		referenceRole = MARKER_TIBIA;
	else if (tibiaIsRef == QMessageBox::No)
		referenceRole = MARKER_FEMUR;

	//To apply a predefined pointer tip
	//CCVector3 tip(0,0,0);
	CCVector3 tip(-90.07f, -17.68f, 18.29f);

	MarkersFrame currentframe;
	MarkerState& currentMarker = currentframe.states[MARKER_POINTER];
	MarkerState* referenceMarker = 0;
	if (referenceRole != MARKER_LOCALIZER)
		referenceMarker = currentframe.states+referenceRole;

	unsigned MarkersFrameSize = sizeof(MarkersFrame);
	unsigned realCount=0;
	for (long i=0;i<count;++i)
	{
		if (fread(&currentframe,MarkersFrameSize,1,fp)==0)
		{
			fclose(fp);
			delete cloud;
			return CC_FERR_READING;
		}

		if (currentMarker.visible && (!referenceMarker || referenceMarker->visible))
		{
			CCVector3 P(tip);
			ccGLMatrix trans = currentMarker.pos;
			if (referenceMarker)
				trans = referenceMarker->pos.inverse() * trans;
			trans.apply(P);

			cloud->addPoint(P);
			cloud->setPointScalarValue(realCount,currentMarker.pos.timestamp);
			++realCount;
		}

		if (!nprogress.oneStep())
			break;
	}

	fclose(fp);

	if (realCount==0)
	{
		delete cloud;
		return CC_FERR_NO_LOAD;
	}

	cloud->resize(realCount);
    //we update scalar field
	CCLib::ScalarField* sf = cloud->getCurrentInScalarField();
    if (sf)
    {
        sf->setPositive(true);
        sf->computeMinAndMax();
        cloud->setCurrentDisplayedScalarField(cloud->getCurrentInScalarFieldIndex());
    }

	container.addChild(cloud);

	return CC_FERR_NO_ERROR;
}
Code example #14
0
File: PVFilter.cpp Project: eimix/trunk
CC_FILE_ERROR PVFilter::saveToFile(ccHObject* entity, const char* filename)
{
	if (!entity || !filename)
        return CC_FERR_BAD_ARGUMENT;

	ccHObject::Container clouds;
	if (entity->isKindOf(CC_POINT_CLOUD))
        clouds.push_back(entity);
    else
        entity->filterChildren(clouds, true, CC_POINT_CLOUD);

    if (clouds.empty())
    {
        ccConsole::Error("No point cloud in input selection!");
        return CC_FERR_BAD_ENTITY_TYPE;
    }
    else if (clouds.size()>1)
    {
        ccConsole::Error("Can't save more than one cloud per PV file!");
        return CC_FERR_BAD_ENTITY_TYPE;
    }

    //the cloud to save
    ccGenericPointCloud* theCloud = static_cast<ccGenericPointCloud*>(clouds[0]);
    //and its scalar field
	CCLib::ScalarField* sf = 0;
	if (theCloud->isA(CC_POINT_CLOUD))
	    sf = static_cast<ccPointCloud*>(theCloud)->getCurrentDisplayedScalarField();

    if (!sf)
        ccConsole::Warning("No displayed scalar field! Values will all be 0!\n");

    unsigned numberOfPoints = theCloud->size();
	if (numberOfPoints==0)
	{
        ccConsole::Error("Cloud is empty!");
        return CC_FERR_BAD_ENTITY_TYPE;
	}

    //open binary file for writing
	FILE* theFile = fopen(filename , "wb");
	if (!theFile)
        return CC_FERR_WRITING;

    //Has the cloud been recentered?
	const double* shift = theCloud->getOriginalShift();
	if (fabs(shift[0])+fabs(shift[1])+fabs(shift[2])>0.0)
        ccConsole::Warning(QString("[PVFilter::save] Can't recenter cloud %1 on PV file save!").arg(theCloud->getName()));

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Save PV file");
	char buffer[256];
	sprintf(buffer,"Points: %i",numberOfPoints);
	pdlg.setInfo(buffer);
	pdlg.start();

	float wBuff[3];
	float val=0.0;

	for (unsigned i=0;i<numberOfPoints;i++)
	{
	    //conversion to float
	    const CCVector3* P = theCloud->getPoint(i);
	    wBuff[0]=float(P->x);
	    wBuff[1]=float(P->y);
	    wBuff[2]=float(P->z);

		if (fwrite(wBuff,sizeof(float),3,theFile) != 3) //fwrite returns the number of items actually written
			{fclose(theFile);return CC_FERR_WRITING;}

		if (sf)
            val = (float)sf->getValue(i);

        if (fwrite(&val,sizeof(float),1,theFile) != 1)
            {fclose(theFile);return CC_FERR_WRITING;}

		if (!nprogress.oneStep())
			break;
	}

	fclose(theFile);

	return CC_FERR_NO_ERROR;
}
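The PV format written above is simply a flat sequence of records, each made of three coordinate floats followed by one scalar float. Below is a minimal sketch of that layout, independent of CloudCompare; the output file name is hypothetical. Note that fwrite reports the number of items written, so error checks compare against that count.

#include <cstdio>

//Illustrative sketch of the PV record layout written above: 3 coordinate floats
//followed by one scalar float per point, in native byte order ("example.pv" is
//a hypothetical output name).
int main()
{
	FILE* fp = fopen("example.pv", "wb");
	if (!fp)
		return 1;

	const float points[2][3] = { {0.f, 0.f, 0.f}, {1.f, 2.f, 3.f} };
	const float scalars[2]   = { 0.5f, 1.5f };

	for (int i = 0; i < 2; ++i)
	{
		//fwrite returns the number of items written, so we compare against 3 and 1
		if (fwrite(points[i], sizeof(float), 3, fp) != 3 ||
			fwrite(&scalars[i], sizeof(float), 1, fp) != 1)
		{
			fclose(fp);
			return 1;
		}
	}

	fclose(fp);
	return 0;
}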
Code example #15
0
File: LASFilter.cpp Project: imight/trunk
CC_FILE_ERROR LASFilter::saveToFile(ccHObject* entity, const char* filename)
{
	if (!entity || !filename)
		return CC_FERR_BAD_ARGUMENT;

	ccHObject::Container clouds;
	if (entity->isKindOf(CC_POINT_CLOUD))
		clouds.push_back(entity);
	else
		entity->filterChildren(clouds, true, CC_POINT_CLOUD);

	if (clouds.empty())
	{
		ccLog::Error("No point cloud in input selection!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}
	else if (clouds.size()>1)
	{
		ccLog::Error("Can't save more than one cloud per LAS file!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	//the cloud to save
	ccGenericPointCloud* theCloud = ccHObjectCaster::ToGenericPointCloud(clouds[0]);
	unsigned numberOfPoints = theCloud->size();

	if (numberOfPoints==0)
	{
		ccLog::Error("Cloud is empty!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	//colors
	bool hasColor = theCloud->hasColors();

	//additional fields (as scalar fields)
	std::vector<LasField> fieldsToSave;

	if (theCloud->isA(CC_POINT_CLOUD))
	{
		ccPointCloud* pc = static_cast<ccPointCloud*>(theCloud);

		//Match cloud SFs with official LAS fields
		{
			//official LAS fields
			std::vector<LasField> lasFields;
			{
				lasFields.push_back(LasField(LAS_CLASSIFICATION,0,0,255)); //unsigned char: between 0 and 255
				lasFields.push_back(LasField(LAS_CLASSIF_VALUE,0,0,31)); //5 bits: between 0 and 31
				lasFields.push_back(LasField(LAS_CLASSIF_SYNTHETIC,0,0,1)); //1 bit: 0 or 1
				lasFields.push_back(LasField(LAS_CLASSIF_KEYPOINT,0,0,1)); //1 bit: 0 or 1
				lasFields.push_back(LasField(LAS_CLASSIF_WITHHELD,0,0,1)); //1 bit: 0 or 1
				lasFields.push_back(LasField(LAS_INTENSITY,0,0,65535)); //16 bits: between 0 and 65535
				lasFields.push_back(LasField(LAS_TIME,0,0,-1.0)); //8 bytes (double)
				lasFields.push_back(LasField(LAS_RETURN_NUMBER,1,1,7)); //3 bits: between 1 and 7
				lasFields.push_back(LasField(LAS_NUMBER_OF_RETURNS,1,1,7)); //3 bits: between 1 and 7
				lasFields.push_back(LasField(LAS_SCAN_DIRECTION,0,0,1)); //1 bit: 0 or 1
				lasFields.push_back(LasField(LAS_FLIGHT_LINE_EDGE,0,0,1)); //1 bit: 0 or 1
				lasFields.push_back(LasField(LAS_SCAN_ANGLE_RANK,0,-90,90)); //signed char: between -90 and +90
				lasFields.push_back(LasField(LAS_USER_DATA,0,0,255)); //unsigned char: between 0 and 255
				lasFields.push_back(LasField(LAS_POINT_SOURCE_ID,0,0,65535)); //16 bits: between 0 and 65535
			}

			//we are going to check now the existing cloud SFs
			for (unsigned i=0; i<pc->getNumberOfScalarFields(); ++i)
			{
				ccScalarField* sf = static_cast<ccScalarField*>(pc->getScalarField(i));
				//find an equivalent in official LAS fields
				QString sfName = QString(sf->getName()).toUpper();
				bool outBounds = false;
				for (size_t j=0; j<lasFields.size(); ++j)
				{
					//if the name matches
					if (sfName == QString(lasFields[j].getName()).toUpper())
					{
						//check bounds
						if (sf->getMin() < lasFields[j].minValue || (lasFields[j].maxValue != -1.0 && sf->getMax() > lasFields[j].maxValue)) //outbounds?
						{
							ccLog::Warning(QString("[LASFilter] Found a '%1' scalar field, but its values outbound LAS specifications (%2-%3)...").arg(sf->getName()).arg(lasFields[j].minValue).arg(lasFields[j].maxValue));
							outBounds = true;
						}
						else
						{
							//we add the SF to the list of saved fields
							fieldsToSave.push_back(lasFields[j]);
							fieldsToSave.back().sf = sf;
						}
						break;
					}
				}
				
				//no correspondence was found?
				if (!outBounds && (fieldsToSave.empty() || fieldsToSave.back().sf != sf))
				{
					ccLog::Warning(QString("[LASFilter] Found a '%1' scalar field, but it doesn't match with any of the official LAS fields... we will ignore it!").arg(sf->getName()));
				}
			}
		}
	}

	//open binary file for writing
	std::ofstream ofs;
	ofs.open(filename, std::ios::out | std::ios::binary);

	if (ofs.fail())
		return CC_FERR_WRITING;

	liblas::Writer* writer = 0;
	try
	{
		liblas::Header header;

		//LAZ support based on extension!
		if (QFileInfo(filename).suffix().toUpper() == "LAZ")
		{
			header.SetCompressed(true);
		}

		//header.SetDataFormatId(liblas::ePointFormat3);
		ccBBox bBox = theCloud->getBB();
		if (bBox.isValid())
		{
			CCVector3d bbMin = theCloud->toGlobal3d<PointCoordinateType>(bBox.minCorner());
			CCVector3d bbMax = theCloud->toGlobal3d<PointCoordinateType>(bBox.maxCorner());
			
			header.SetMin(	bbMin.x, bbMin.y, bbMin.z );
			header.SetMax(	bbMax.x, bbMax.y, bbMax.z );
			CCVector3 diag = bBox.getDiagVec();

			//Set offset & scale, as point coordinates will be stored as boost::int32_t values (they must fit in 32 bits)
			//int_value = (double_value-offset)/scale
			header.SetOffset(	bbMin.x, bbMin.y, bbMin.z );
			
			header.SetScale(1.0e-9 * std::max<double>(diag.x,ZERO_TOLERANCE), //result must fit in 32bits?!
							1.0e-9 * std::max<double>(diag.y,ZERO_TOLERANCE),
							1.0e-9 * std::max<double>(diag.z,ZERO_TOLERANCE));
		}
		header.SetPointRecordsCount(numberOfPoints);

		//DGM FIXME: doesn't seem to do anything;)
		//if (!hasColor) //we must remove the colors dimensions!
		//{
		//	liblas::Schema schema = header.GetSchema();
		//	boost::optional< liblas::Dimension const& > redDim = schema.GetDimension("Red");
		//	if (redDim)
		//		schema.RemoveDimension(redDim.get());
		//	boost::optional< liblas::Dimension const& > greenDim = schema.GetDimension("Green");
		//	if (greenDim)
		//		schema.RemoveDimension(greenDim.get());
		//	boost::optional< liblas::Dimension const& > blueDim = schema.GetDimension("Blue");
		//	if (blueDim)
		//		schema.RemoveDimension(blueDim.get());
		//	header.SetSchema(schema);
		//}
		//header.SetDataFormatId(Header::ePointFormat1);

		writer = new liblas::Writer(ofs, header);
	}
	catch (...)
	{
		return CC_FERR_WRITING;
	}

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Save LAS file");
	char buffer[256];
	sprintf(buffer,"Points: %u",numberOfPoints);
	pdlg.setInfo(buffer);
	pdlg.start();

	//liblas::Point point(boost::shared_ptr<liblas::Header>(new liblas::Header(writer->GetHeader())));
	liblas::Point point(&writer->GetHeader());
	liblas::Classification classif = point.GetClassification();

	for (unsigned i=0; i<numberOfPoints; i++)
	{
		const CCVector3* P = theCloud->getPoint(i);
		{
			CCVector3d Pglobal = theCloud->toGlobal3d<PointCoordinateType>(*P);
			point.SetCoordinates(Pglobal.x, Pglobal.y, Pglobal.z);
		}
		
		if (hasColor)
		{
			const colorType* rgb = theCloud->getPointColor(i);
			point.SetColor(liblas::Color(rgb[0]<<8,rgb[1]<<8,rgb[2]<<8)); //DGM: LAS colors are stored on 16 bits!
		}

		//additional fields
		for (std::vector<LasField>::const_iterator it = fieldsToSave.begin(); it != fieldsToSave.end(); ++it)
		{
			assert(it->sf);
			switch(it->type)
			{
			case LAS_X:
			case LAS_Y:
			case LAS_Z:
				assert(false);
				break;
			case LAS_INTENSITY:
				point.SetIntensity((boost::uint16_t)it->sf->getValue(i));
				break;
			case LAS_RETURN_NUMBER:
				point.SetReturnNumber((boost::uint16_t)it->sf->getValue(i));
				break;
			case LAS_NUMBER_OF_RETURNS:
				point.SetNumberOfReturns((boost::uint16_t)it->sf->getValue(i));
				break;
			case LAS_SCAN_DIRECTION:
				point.SetScanDirection((boost::uint16_t)it->sf->getValue(i));
				break;
			case LAS_FLIGHT_LINE_EDGE:
				point.SetFlightLineEdge((boost::uint16_t)it->sf->getValue(i));
				break;
			case LAS_CLASSIFICATION:
				{
					boost::uint32_t val = (boost::uint32_t)it->sf->getValue(i);
					classif.SetClass(val & 31);		//first 5 bits
					classif.SetSynthetic(val & 32); //6th bit
					classif.SetKeyPoint(val & 64);	//7th bit
					classif.SetWithheld(val & 128);	//8th bit
				}
				break;
			case LAS_SCAN_ANGLE_RANK:
				point.SetScanAngleRank((boost::uint8_t)it->sf->getValue(i));
				break;
			case LAS_USER_DATA:
				point.SetUserData((boost::uint8_t)it->sf->getValue(i));
				break;
			case LAS_POINT_SOURCE_ID:
				point.SetPointSourceID((boost::uint16_t)it->sf->getValue(i));
				break;
			case LAS_RED:
			case LAS_GREEN:
			case LAS_BLUE:
				assert(false);
				break;
			case LAS_TIME:
				point.SetTime((double)it->sf->getValue(i));
				break;
			case LAS_CLASSIF_VALUE:
				classif.SetClass((boost::uint32_t)it->sf->getValue(i));
				break;
			case LAS_CLASSIF_SYNTHETIC:
				classif.SetSynthetic((boost::uint32_t)it->sf->getValue(i));
				break;
			case LAS_CLASSIF_KEYPOINT:
				classif.SetKeyPoint((boost::uint32_t)it->sf->getValue(i));
				break;
			case LAS_CLASSIF_WITHHELD:
				classif.SetWithheld((boost::uint32_t)it->sf->getValue(i));
				break;
			case LAS_INVALID:
			default:
				assert(false);
				break;
			}
		}

		//set classification (it's mandatory anyway ;)
		point.SetClassification(classif);

		writer->WritePoint(point);

		if (!nprogress.oneStep())
			break;
	}

	delete writer;
	ofs.close();

	return CC_FERR_NO_ERROR;
}
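The LAS_CLASSIFICATION case above packs a single scalar value into the LAS 1.x classification byte: the class code in the low 5 bits and the synthetic/key-point/withheld flags in bits 5 to 7. A standalone sketch of that decomposition follows; decodeClassification is an illustrative helper, not a liblas or CloudCompare function.

#include <cstdint>
#include <cstdio>

//Illustrative sketch: split a combined classification value into the LAS 1.x
//classification byte layout (5-bit class code + 3 flag bits), mirroring the
//"val & 31 / 32 / 64 / 128" masks used above.
struct LasClassif
{
	uint8_t classCode; //0..31
	bool    synthetic;
	bool    keyPoint;
	bool    withheld;
};

static LasClassif decodeClassification(uint32_t val)
{
	LasClassif c;
	c.classCode = static_cast<uint8_t>(val & 31); //low 5 bits
	c.synthetic = (val & 32) != 0;                //bit 5
	c.keyPoint  = (val & 64) != 0;                //bit 6
	c.withheld  = (val & 128) != 0;               //bit 7
	return c;
}

int main()
{
	LasClassif c = decodeClassification(2 + 64); //class 2 (ground) with the key-point flag set
	printf("class=%u synthetic=%d keypoint=%d withheld=%d\n",
		   (unsigned)c.classCode, (int)c.synthetic, (int)c.keyPoint, (int)c.withheld);
	return 0;
}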
Code example #16
0
File: PNFilter.cpp Project: fredericoal/trunk
CC_FILE_ERROR PNFilter::saveToFile(ccHObject* entity, QString filename)
{
	if (!entity || filename.isEmpty())
		return CC_FERR_BAD_ARGUMENT;

	//the cloud to save
	ccGenericPointCloud* theCloud = ccHObjectCaster::ToGenericPointCloud(entity);
	if (!theCloud)
	{
		ccLog::Warning("[PN] This filter can only save one cloud at a time!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}
	unsigned numberOfPoints = theCloud->size();

	if (numberOfPoints == 0)
	{
		ccLog::Warning("[PN] Input cloud is empty!");
		return CC_FERR_NO_SAVE;
	}

	//open binary file for writing
	QFile out(filename);
	if (!out.open(QIODevice::WriteOnly))
		return CC_FERR_WRITING;

	//Has the cloud been recentered?
	if (theCloud->isShifted())
		ccLog::Warning(QString("[PNFilter::save] Can't recenter or rescale cloud '%1' when saving it in a PN file!").arg(theCloud->getName()));

	bool hasNorms = theCloud->hasNormals();
	if (!hasNorms)
		ccLog::Warning(QString("[PNFilter::save] Cloud '%1' has no normal (we will save points with a default normal)!").arg(theCloud->getName()));
	float norm[3] = {(float)s_defaultNorm[0], (float)s_defaultNorm[1], (float)s_defaultNorm[2]};

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Save PN file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(numberOfPoints)));
	pdlg.start();

	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	for (unsigned i=0; i<numberOfPoints; i++)
	{
		//write point
		{
			const CCVector3* P = theCloud->getPoint(i);
			
			//conversion to float
			Vector3Tpl<float> Pfloat = Vector3Tpl<float>::fromArray(P->u);
			if (out.write(reinterpret_cast<const char*>(Pfloat.u),3*sizeof(float)) < 0)
			{
				result = CC_FERR_WRITING;
				break;
			}
		}
			
		//write normal
		if (hasNorms)
		{
			const CCVector3& N = theCloud->getPointNormal(i);
			//conversion to float
			norm[0] = static_cast<float>(N.x);
			norm[1] = static_cast<float>(N.y);
			norm[2] = static_cast<float>(N.z);
		}
		if (out.write(reinterpret_cast<const char*>(norm),3*sizeof(float)) < 0)
		{
			result = CC_FERR_WRITING;
			break;
		}

		if (!nprogress.oneStep())
		{
			result = CC_FERR_CANCELED_BY_USER;
			break;
		}
	}

	out.close();

	return result;
}
Code example #17
0
File: MAFilter.cpp Project: jebd/trunk
CC_FILE_ERROR MAFilter::saveToFile(ccHObject* entity, const char* filename)
{
	if (!entity || !filename)
        return CC_FERR_BAD_ARGUMENT;

	ccHObject::Container meshes;
	if (entity->isKindOf(CC_MESH))
        meshes.push_back(entity);
    else
        entity->filterChildren(meshes, true, CC_MESH);

    if (meshes.empty())
    {
        ccConsole::Error("No mesh in input selection!");
        return CC_FERR_BAD_ENTITY_TYPE;
    }
    else if (meshes.size()>1)
    {
        ccConsole::Error("Can't save more than one mesh per MA file!");
        return CC_FERR_BAD_ENTITY_TYPE;
    }

    //we extract the (short) filename from the whole path
	QString baseFilename = QFileInfo(filename).fileName();

    //the mesh to save
    ccGenericMesh* theMesh = static_cast<ccGenericMesh*>(meshes[0]);
    //and its vertices
    ccGenericPointCloud* theCloud = theMesh->getAssociatedCloud();

	unsigned numberOfTriangles = theMesh->size();
	unsigned numberOfVertexes = theCloud->size();

	if (numberOfTriangles==0 || numberOfVertexes==0)
	{
        ccConsole::Error("Mesh is empty!");
        return CC_FERR_BAD_ENTITY_TYPE;
	}

	bool hasColors = false;
	if (theCloud->isA(CC_POINT_CLOUD))
	    hasColors = static_cast<ccPointCloud*>(theCloud)->hasColors();

    //and its scalar field
	/*CCLib::ScalarField* sf = 0;
	if (theCloud->isA(CC_POINT_CLOUD))
	    sf = static_cast<ccPointCloud*>(theCloud)->getCurrentDisplayedScalarField();

    if (!sf)
        ccConsole::Warning("No displayed scalar field! Values will all be 0!\n");

	//*/

    //open ASCII file for writing
	FILE* fp = fopen(filename , "wt");

	if (!fp)
        return CC_FERR_WRITING;

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
    unsigned palierModifier = (hasColors ? 1 : 0);
	CCLib::NormalizedProgress nprogress(&pdlg,unsigned(float((2+palierModifier)*numberOfTriangles+(3+palierModifier)*numberOfVertexes)));
	pdlg.setMethodTitle("Save MA file");
	char buffer[256];
    sprintf(buffer,"Triangles = %i",numberOfTriangles);
	pdlg.setInfo(buffer);
	pdlg.start();

	//header
	if (fprintf(fp,"//Maya ASCII 7.0 scene\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"//Name: %s\n",qPrintable(baseFilename)) < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"//Last modified: Sat, Mai 10, 2008 00:00:00 PM\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"requires maya \"4.0\";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"currentUnit -l %s -a degree -t film;\n","centimeter") < 0)
		{fclose(fp);return CC_FERR_WRITING;}


    //for handling multiple meshes (does not work yet)
    unsigned char currentMesh = 0;

	//TRANSFORM NODE
	if (fprintf(fp,"createNode transform -n \"Mesh%i\";\n",currentMesh+1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}

    //NOEUD "PRINCIPAL"
	if (fprintf(fp,"createNode mesh -n \"MeshShape%i\" -p \"Mesh%i\";\n",currentMesh+1,currentMesh+1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	if (fprintf(fp,"\tsetAttr -k off \".v\";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	if (fprintf(fp,"\tsetAttr \".uvst[0].uvsn\" -type \"string\" \"map1\";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"\tsetAttr \".cuvs\" -type \"string\" \"map1\";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (hasColors)
	{
		if (fprintf(fp,"\tsetAttr \".dcol\" yes;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
	}
	if (fprintf(fp,"\tsetAttr \".dcc\" -type \"string\" \"Ambient+Diffuse\";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"\tsetAttr \".ccls\" -type \"string\" \"colorSet%i\";\n",currentMesh+1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"\tsetAttr \".clst[0].clsn\" -type \"string\" \"colorSet%i\";\n",currentMesh+1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (hasColors)
	{
		if (fprintf(fp,"\tsetAttr \".ndt\" 0;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".tgsp\" 1;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		//WE INSERT A "SECONDARY" NODE
		if (fprintf(fp,"createNode mesh -n \"polySurfaceShape%i\" -p \"Mesh%i\";\n",currentMesh+1,currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		if (fprintf(fp,"\tsetAttr -k off \".v\";\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".io\" yes;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".uvst[0].uvsn\" -type \"string\" \"map1\";\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".cuvs\" -type \"string\" \"map1\";\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".dcol\" yes;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".dcc\" -type \"string\" \"Ambient+Diffuse\";\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".ccls\" -type \"string\" \"colorSet%i\";\n",currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"\tsetAttr \".clst[0].clsn\" -type \"string\" \"colorSet%i\";\n",currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}
	}

	//write the "vertices"
	if (fprintf(fp,"\tsetAttr -s %i \".vt[0:%i]\"\n",numberOfVertexes,numberOfVertexes-1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	const double* shift = theCloud->getOriginalShift();

	unsigned i;
	for (i=0;i<numberOfVertexes;++i)
	{
		const CCVector3* P = theCloud->getPoint(i);
		if (fprintf(fp,(i+1==numberOfVertexes ? "\t\t%f %f %f;\n" : "\t\t%f %f %f\n"),
				-shift[0]+(double)P->x,
				-shift[2]+(double)P->z,
				shift[1]-(double)P->y) < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		nprogress.oneStep();
	}

	//write the "edges"
	//structure used to manage unique edge indexes
	edge** theEdges = new edge*[numberOfVertexes];
	memset(theEdges,0,sizeof(edge*)*numberOfVertexes);
	unsigned ind[3],a,b;
	int currentEdgeIndex=0,lastEdgeIndexPushed=-1;

	int hard=0; //edges in Maya can be "hard" or "soft" ...

	theMesh->placeIteratorAtBegining();
	for (i=0;i<numberOfTriangles;++i)
	{
		const CCLib::TriangleSummitsIndexes* tsi = theMesh->getNextTriangleIndexes(); //DGM: getNextTriangleIndexes is faster for mesh groups!

		ind[0] = tsi->i1;
		ind[1] = tsi->i2;
		ind[2] = tsi->i3;

		uchar k,l;
		for (k=0;k<3;++k)
		{
			l=(k<2 ? k+1 : 0);
			a = (ind[k]<ind[l] ? ind[k] : ind[l]);
			b = (a==ind[k] ? ind[l] : ind[k]);

			currentEdgeIndex = -1;
			edge* e = theEdges[a];
			while (e)
			{
				if (e->theOtherPoint == b)
				{
					currentEdgeIndex = e->edgeIndex;
					break;
				}
				e = e->nextEdge;
			}

			if (currentEdgeIndex<0) //we create a new "edge"
			{
				edge* newEdge = new edge;
				newEdge->nextEdge = NULL;
				newEdge->theOtherPoint = b;
				newEdge->positif = (a==ind[k]);
				//newEdge->edgeIndex = ++lastEdgeIndexPushed; //no! We don't write the edge now, so this is no longer true
				newEdge->edgeIndex = 0;
				++lastEdgeIndexPushed;
				//currentEdgeIndex = lastEdgeIndexPushed;

				//we must append the node at the end!!!
				if (theEdges[a])
				{
					e = theEdges[a];
					while (e->nextEdge) e=e->nextEdge;
					e->nextEdge = newEdge;
				}
				else theEdges[a]=newEdge;

				/*if (fprintf(fp,"\n \t\t%i %i %i",a,b,hard) < 0)
					return CC_FERR_WRITING;*/
			}
		}

		nprogress.oneStep();
	}

	//now actually write the edges
	unsigned numberOfEdges = unsigned(lastEdgeIndexPushed+1);
	if (fprintf(fp,"\tsetAttr -s %i \".ed[0:%i]\"",numberOfEdges,numberOfEdges-1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	lastEdgeIndexPushed=0;
	for (i=0;i<numberOfVertexes;++i)
	{
		if (theEdges[i])
		{
			edge* e = theEdges[i];
			while (e)
			{
				e->edgeIndex = lastEdgeIndexPushed++;
				if (fprintf(fp,"\n \t\t%i %i %i",i,e->theOtherPoint,hard) < 0)
					{fclose(fp);return CC_FERR_WRITING;}
				e=e->nextEdge;
			}
		}

		nprogress.oneStep();
	}

	if (fprintf(fp,";\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	//write the "faces"
	if (fprintf(fp,"\tsetAttr -s %i \".fc[0:%i]\" -type \"polyFaces\"\n",numberOfTriangles,numberOfTriangles-1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	theMesh->placeIteratorAtBegining();
	for (i=0;i<numberOfTriangles;++i)
	{
		if (fprintf(fp,"\t\tf 3") < 0) {fclose(fp);return CC_FERR_WRITING;}

		CCLib::TriangleSummitsIndexes* tsi = theMesh->getNextTriangleIndexes(); //DGM: getNextTriangleIndexes is faster for mesh groups!
		ind[0] = tsi->i1;
		ind[1] = tsi->i2;
		ind[2] = tsi->i3;

		uchar k,l;
		for (k=0;k<3;++k)
		{
			l=(k<2 ? k+1 : 0);
			a = (ind[k]<ind[l] ? ind[k] : ind[l]);
			b = (a==ind[k] ? ind[l] : ind[k]);

			edge* e = theEdges[a];
			while (e->theOtherPoint != b) e=e->nextEdge;

			if (fprintf(fp," %i",((e->positif && a==ind[k]) || (!e->positif && a==ind[l]) ? e->edgeIndex : -(e->edgeIndex+1))) < 0) {fclose(fp);return CC_FERR_WRITING;}
		}

		if (fprintf(fp,(i+1==numberOfTriangles ? ";\n" : "\n")) < 0) {fclose(fp);return CC_FERR_WRITING;}

		nprogress.oneStep();
	}

	//free the memory
	for (i=0;i<numberOfVertexes;++i)
	{
		if (theEdges[i])
		{
			edge* e = theEdges[i]->nextEdge;
			edge* nextE;
			while (e)
			{
				nextE=e->nextEdge;
				delete e;
				e=nextE;
			}
			delete theEdges[i];
		}

		nprogress.oneStep();
	}

	//a few bonus attributes (2)
	if (fprintf(fp,"\tsetAttr \".cd\" -type \"dataPolyComponent\" Index_Data Edge 0 ;\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"\tsetAttr \".ndt\" 0;\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}
	if (fprintf(fp,"\tsetAttr \".tgsp\" 1;\n") < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	//VERTEX COLORS NODE
	if (hasColors)
	{
	    assert(theCloud->isA(CC_POINT_CLOUD));
        ccPointCloud* pc = static_cast<ccPointCloud*>(theCloud);

		if (fprintf(fp,"createNode polyColorPerVertex -n \"polyColorPerVertex%i\";\n",currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		if (fprintf(fp,"\tsetAttr \".uopa\" yes;\n") < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		if (fprintf(fp,"\tsetAttr -s %i \".vclr\";\n",numberOfVertexes) < 0)
			{fclose(fp);return CC_FERR_WRITING;}

		//build a structure that associates each vertex with the faces it belongs to
		faceIndexes** theFacesIndexes = new faceIndexes*[numberOfVertexes];
		memset(theFacesIndexes,0,sizeof(faceIndexes*)*numberOfVertexes);
		theMesh->placeIteratorAtBegining();
		for (i=0;i<numberOfTriangles;++i)
		{
			CCLib::TriangleSummitsIndexes* tsi = theMesh->getNextTriangleIndexes(); //DGM: getNextTriangleIndexes is faster for mesh groups!
			ind[0] = tsi->i1;
			ind[1] = tsi->i2;
			ind[2] = tsi->i3;

			for (uchar j=0;j<3;++j)
			{
				if (!theFacesIndexes[ind[j]])
				{
					faceIndexes* f = new faceIndexes;
					f->faceIndex = i;
					f->nextFace = NULL;
					theFacesIndexes[ind[j]] = f;
				}
				else
				{
					faceIndexes* f = theFacesIndexes[ind[j]];
					while (f->nextFace) f=f->nextFace;
					f->nextFace = new faceIndexes;
					f->nextFace->faceIndex = i;
					f->nextFace->nextFace = NULL;
				}
			}

			nprogress.oneStep();
		}

		//for each vertex
		float col[3],coef=1.0/float(MAX_COLOR_COMP);
		for (i=0;i<numberOfVertexes;++i)
		{
			const colorType* c = pc->getPointColor(i);
			col[0]=float(c[0])*coef;
			col[1]=float(c[1])*coef;
			col[2]=float(c[2])*coef;

			//count the number of faces
			int nf = 0;
			faceIndexes* f = theFacesIndexes[i];
			while (f)
			{
				++nf;
				f=f->nextFace;
			}

			if (nf>0)
			{
				if (fprintf(fp,"\tsetAttr -s %i \".vclr[%i].vfcl\";\n",nf,i) < 0)
					{fclose(fp);return CC_FERR_WRITING;}

				faceIndexes *oldf,*f = theFacesIndexes[i];
				while (f)
				{
					if (fprintf(fp,"\tsetAttr \".vclr[%i].vfcl[%i].frgb\" -type \"float3\" %f %f %f;\n",i,f->faceIndex,col[0],col[1],col[2]) < 0)
					{fclose(fp);return CC_FERR_WRITING;}

					oldf = f;
					f=f->nextFace;
					delete oldf;
				}
				theFacesIndexes[i]=NULL;
			}

			nprogress.oneStep();
		}
		delete[] theFacesIndexes;

		if (fprintf(fp,"\tsetAttr \".cn\" -type \"string\" \"colorSet%i\";\n",currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}
	}

	//Maya connections
	if (hasColors)
	{
		if (fprintf(fp,"connectAttr \"polyColorPerVertex%i.out\" \"MeshShape%i.i\";\n",currentMesh+1,currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}
		if (fprintf(fp,"connectAttr \"polySurfaceShape%i.o\" \"polyColorPerVertex%i.ip\";\n",currentMesh+1,currentMesh+1) < 0)
			{fclose(fp);return CC_FERR_WRITING;}
	}
	if (fprintf(fp,"connectAttr \"MeshShape%i.iog\" \":initialShadingGroup.dsm\" -na;\n",currentMesh+1) < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	//end of the file
	if (fprintf(fp,"//End of %s\n",qPrintable(baseFilename)) < 0)
		{fclose(fp);return CC_FERR_WRITING;}

	fclose(fp);

	return CC_FERR_NO_ERROR;
}
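
The face records above encode edge direction in the sign of the edge reference: an edge used in its stored orientation is written as its index i, and an edge used backwards is written as -(i+1), so that edge 0 can still be negated. Below is a minimal, self-contained sketch of that encoding, separate from the exporter above (the helper names are made up for illustration only):

#include <cassert>
#include <cstdio>

//encode an edge reference for a Maya "polyFaces" record: 'index' is the edge index,
//'reversed' tells whether the face traverses the edge against its stored orientation
static int encodeEdgeRef(int index, bool reversed)
{
	return reversed ? -(index + 1) : index;
}

//decode such a reference back into (index, reversed)
static void decodeEdgeRef(int ref, int& index, bool& reversed)
{
	reversed = (ref < 0);
	index = reversed ? -ref - 1 : ref;
}

int main()
{
	//edge #0 traversed backwards becomes -1, edge #7 backwards becomes -8
	assert(encodeEdgeRef(0, true) == -1);
	assert(encodeEdgeRef(7, true) == -8);

	int index = 0;
	bool reversed = false;
	decodeEdgeRef(-8, index, reversed);
	printf("edge %i, reversed=%i\n", index, (int)reversed); //prints: edge 7, reversed=1
	return 0;
}
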
Code example #18
File: SoiFilter.cpp Project: Bingyu1413/trunk
CC_FILE_ERROR SoiFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters)
{
	//open the file
	FILE *fp = fopen(qPrintable(filename), "rt");
	if (!fp)
		return CC_FERR_READING;

	std::string line;
	line.resize(MAX_ASCII_FILE_LINE_LENGTH);
	unsigned nbScansTotal = 0;
	unsigned nbPointsTotal = 0;

	//we read the first line
	char* eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);
	char* pEnd;

	//header
	while ((strcmp((char*)line.substr(0,4).c_str(),"#CC#") != 0)&&(eof != NULL))
	{
		if (strcmp(line.substr(0,4).c_str(),"#NP#")==0)
		{
			std::string numPoints (line,4,line.size()-4);
			nbPointsTotal=strtol(numPoints.c_str(),&pEnd,0);
			//ccLog::Print("[SoiFilter::loadFile] Total number of points: %i",nbPointsTotal);
		}
		else if (strcmp(line.substr(0,4).c_str(),"#NS#")==0)
		{
			std::string numScans (line,4,line.size()-4);
			nbScansTotal=strtol(numScans.c_str(),&pEnd,0);
			//ccLog::Print("[SoiFilter::loadFile] Total number of scans: %i",nbScansTotal);
		}

		eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);
	}

	if ((nbScansTotal == 0)||(nbPointsTotal == 0))
	{
		ccLog::Warning("[SoiFilter::loadFile] No points or scans defined in this file!");
		fclose(fp);
		return CC_FERR_NO_LOAD;
	}

	//Progress dialog
	ccProgressDialog pdlg(false); //cancel is not supported
	pdlg.setMethodTitle("Open SOI file");
	char buffer[256];
	sprintf(buffer,"%u scans / %u points\n",nbScansTotal,nbPointsTotal);
	CCLib::NormalizedProgress nprogress(&pdlg,nbPointsTotal);
	pdlg.setInfo(buffer);
	pdlg.start();

	//Scan by scan
	for (unsigned k=0; k<nbScansTotal; k++)
	{
		char* eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);

		//we only look for points (we ignore the rest)
		while ((strcmp(line.substr(0,4).c_str(),"#pt#")!=0)&&(eof != NULL))
			eof = fgets ((char*)line.c_str(), MAX_ASCII_FILE_LINE_LENGTH , fp);

		unsigned nbOfPoints = 0;

		if (strcmp(line.substr(0,4).c_str(),"#pt#")==0)
		{
			std::string numPoints(line,4,line.size()-4);
			nbOfPoints = strtol(numPoints.c_str(),&pEnd,0);
			//ccLog::Print("[SoiFilter::loadFile] Scan %i - points: %i",k+1,nbOfPoints);
		}
		else
		{
			ccLog::Warning("[SoiFilter::loadFile] Can't find marker '#pt#'!");
			fclose(fp);
			return CC_FERR_WRONG_FILE_TYPE;
		}
		
		if (nbOfPoints == 0)
		{
			ccLog::Warning("[SoiFilter::loadFile] Scan #%i is empty!",k);
			continue;
		}

		//create the point cloud
		QString name = QString("unnamed - Scan #%1").arg(k);

		ccPointCloud* loadedCloud = new ccPointCloud(name);
		if ( !loadedCloud->reserveThePointsTable(nbOfPoints) || !loadedCloud->reserveTheRGBTable() )
		{
			fclose(fp);
			delete loadedCloud;
			return CC_FERR_NOT_ENOUGH_MEMORY;
		}
		loadedCloud->showColors(true);

		//we can read points now
		for (unsigned i=0; i<nbOfPoints; i++)
		{
			float P[3];
			int c = 0;
			fscanf(fp,"%f %f %f %i",P,P+1,P+2,&c);

			loadedCloud->addPoint(CCVector3::fromArray(P));
			loadedCloud->addGreyColor(static_cast<colorType>(c<<3)); //<<2 ? <<3 ? we lack some info. here ...

			nprogress.oneStep();
		}

		container.addChild(loadedCloud);
	}

	fclose(fp);

	return CC_FERR_NO_ERROR;
}
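
The SOI header and each scan block above are driven by 4-character markers ("#NP#", "#NS#", "#pt#") immediately followed by a number. A small stand-alone sketch of that parsing step, assuming the same marker layout (the helper name is hypothetical):

#include <cstdio>
#include <cstdlib>
#include <cstring>

//parse a line such as "#NP#123456": if it starts with the given 4-character marker,
//store the number that follows in 'value' and return true
static bool parseMarkerLine(const char* line, const char* marker, unsigned& value)
{
	if (strncmp(line, marker, 4) != 0)
		return false;
	char* end = nullptr;
	value = static_cast<unsigned>(strtoul(line + 4, &end, 0));
	return (end != line + 4);
}

int main()
{
	unsigned nbPointsTotal = 0;
	if (parseMarkerLine("#NP#123456", "#NP#", nbPointsTotal))
		printf("total number of points: %u\n", nbPointsTotal); //prints 123456
	return 0;
}
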
Code example #19
File: AsciiFilter.cpp Project: johnjsb/trunk
CC_FILE_ERROR AsciiFilter::saveToFile(ccHObject* entity, QString filename, SaveParameters& parameters)
{
	assert(entity && !filename.isEmpty());

	AsciiSaveDlg* saveDialog = GetSaveDialog(parameters.parentWidget);
	assert(saveDialog);

	//if the dialog shouldn't be shown, we'll simply take the default values!
	if (parameters.alwaysDisplaySaveDialog && saveDialog->autoShow() && !saveDialog->exec())
	{
		return CC_FERR_CANCELED_BY_USER;
	}

	if (!entity->isKindOf(CC_TYPES::POINT_CLOUD))
	{
		if (entity->isA(CC_TYPES::HIERARCHY_OBJECT)) //multiple clouds?
		{
			QFileInfo fi(filename);
			QString extension = fi.suffix();
			QString baseName = fi.completeBaseName();
			QString path = fi.path();

			unsigned count = entity->getChildrenNumber();
			//we count the number of clouds first
			unsigned cloudCount = 0;
			{
				for (unsigned i=0; i<count; ++i)
				{
					ccHObject* child = entity->getChild(i);
					if (child->isKindOf(CC_TYPES::POINT_CLOUD))
						++cloudCount;
				}
			}
			
			//we can now create the corresponding file(s)
			if (cloudCount > 1)
			{
				unsigned counter = 0;
				//disable the save dialog so that it doesn't appear again!
				AsciiSaveDlg* saveDialog = GetSaveDialog();
				assert(saveDialog);

				bool autoShow = saveDialog->autoShow();
				saveDialog->setAutoShow(false);

				for (unsigned i=0; i<count; ++i)
				{
					ccHObject* child = entity->getChild(i);
					if (child->isKindOf(CC_TYPES::POINT_CLOUD))
					{
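						//one output file per cloud: any 'cloudname' token in the base name is replaced
						//by the cloud's own name, then a zero-padded index and the original extension are appended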
						QString subFilename = path+QString("/");
						subFilename += QString(baseName).replace("cloudname",child->getName(),Qt::CaseInsensitive);
						counter++;
						assert(counter <= cloudCount);
						subFilename += QString("_%1").arg(cloudCount-counter,6,10,QChar('0'));
						if (!extension.isEmpty())
							subFilename += QString(".") + extension;
						
						CC_FILE_ERROR result = saveToFile(entity->getChild(i),subFilename,parameters);
						if (result != CC_FERR_NO_ERROR)
						{
							return result;
						}
						else
						{
							ccLog::Print(QString("[ASCII] Cloud '%1' has been saved in: %2").arg(child->getName()).arg(subFilename));
						}
					}
					else
					{
						ccLog::Warning(QString("[ASCII] Entity '%1' can't be saved this way!").arg(child->getName()));
					}
				}

				//restore previous state
				saveDialog->setAutoShow(autoShow);

				return CC_FERR_NO_ERROR;
			}
		}
		else
		{
			return CC_FERR_BAD_ARGUMENT;
		}
	}

	QFile file(filename);
	if (!file.open(QFile::WriteOnly | QFile::Truncate))
		return CC_FERR_WRITING;
	QTextStream stream(&file);

	ccGenericPointCloud* cloud = ccHObjectCaster::ToGenericPointCloud(entity);

	unsigned numberOfPoints = cloud->size();
	bool writeColors = cloud->hasColors();
	bool writeNorms = cloud->hasNormals();
	std::vector<ccScalarField*> theScalarFields;
	if (cloud->isKindOf(CC_TYPES::POINT_CLOUD))
	{
		ccPointCloud* ccCloud = static_cast<ccPointCloud*>(cloud);
		for (unsigned i=0; i<ccCloud->getNumberOfScalarFields(); ++i)
			theScalarFields.push_back(static_cast<ccScalarField*>(ccCloud->getScalarField(i)));
	}
	bool writeSF = (theScalarFields.size() != 0);

	//progress dialog
	ccProgressDialog pdlg(true);
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle(qPrintable(QString("Saving cloud [%1]").arg(cloud->getName())));
	pdlg.setInfo(qPrintable(QString("Number of points: %1").arg(numberOfPoints)));
	pdlg.start();

	//output precision
	const int s_coordPrecision = saveDialog->coordsPrecision();
	const int s_sfPrecision = saveDialog->sfPrecision(); 
	const int s_nPrecision = 2+sizeof(PointCoordinateType);

	//other parameters
	bool saveColumnsHeader = saveDialog->saveColumnsNamesHeader();
	bool savePointCountHeader = saveDialog->savePointCountHeader();
	bool swapColorAndSFs = saveDialog->swapColorAndSF();
	QChar separator(saveDialog->getSeparator());
	bool saveFloatColors = saveDialog->saveFloatColors();

	if (saveColumnsHeader)
	{
		QString header("//");
		header.append(AsciiHeaderColumns::X());
		header.append(separator);
		header.append(AsciiHeaderColumns::Y());
		header.append(separator);
		header.append(AsciiHeaderColumns::Z());

		if (writeColors && !swapColorAndSFs)
		{
			header.append(separator);
			header.append(saveFloatColors ? AsciiHeaderColumns::Rf() : AsciiHeaderColumns::R());
			header.append(separator);
			header.append(saveFloatColors ? AsciiHeaderColumns::Gf() : AsciiHeaderColumns::G());
			header.append(separator);
			header.append(saveFloatColors ? AsciiHeaderColumns::Bf() : AsciiHeaderColumns::B());
		}

		if (writeSF)
		{
			//add each associated SF name
			for (std::vector<ccScalarField*>::const_iterator it = theScalarFields.begin(); it != theScalarFields.end(); ++it)
			{
				QString sfName((*it)->getName());
				sfName.replace(separator,'_');
				header.append(separator);
				header.append(sfName);
			}
		}

		if (writeColors && swapColorAndSFs)
		{
			header.append(separator);
			header.append(saveFloatColors ? AsciiHeaderColumns::Rf() : AsciiHeaderColumns::R());
			header.append(separator);
			header.append(saveFloatColors ? AsciiHeaderColumns::Gf() : AsciiHeaderColumns::G());
			header.append(separator);
			header.append(saveFloatColors ? AsciiHeaderColumns::Bf() : AsciiHeaderColumns::B());
		}

		if (writeNorms)
		{
			header.append(separator);
			header.append(AsciiHeaderColumns::Nx());
			header.append(separator);
			header.append(AsciiHeaderColumns::Ny());
			header.append(separator);
			header.append(AsciiHeaderColumns::Nz());
		}
		
		stream << header << "\n";
	}

	if (savePointCountHeader)
	{
		stream << QString::number(numberOfPoints) << "\n";
	}

	CC_FILE_ERROR result = CC_FERR_NO_ERROR;
	for (unsigned i=0; i<numberOfPoints; ++i)
	{
		//line for the current point
		QString line;

		//write current point coordinates
		const CCVector3* P = cloud->getPoint(i);
		CCVector3d Pglobal = cloud->toGlobal3d<PointCoordinateType>(*P);
		line.append(QString::number(Pglobal.x,'f',s_coordPrecision));
		line.append(separator);
		line.append(QString::number(Pglobal.y,'f',s_coordPrecision));
		line.append(separator);
		line.append(QString::number(Pglobal.z,'f',s_coordPrecision));

		QString colorLine;
		if (writeColors)
		{
			//add rgb color
			const ColorCompType* col = cloud->getPointColor(i);
			if (saveFloatColors)
			{
				colorLine.append(separator);
				colorLine.append(QString::number(static_cast<double>(col[0])/ccColor::MAX));
				colorLine.append(separator);
				colorLine.append(QString::number(static_cast<double>(col[1])/ccColor::MAX));
				colorLine.append(separator);
				colorLine.append(QString::number(static_cast<double>(col[2])/ccColor::MAX));
			}
			else
			{
				colorLine.append(separator);
				colorLine.append(QString::number(col[0]));
				colorLine.append(separator);
				colorLine.append(QString::number(col[1]));
				colorLine.append(separator);
				colorLine.append(QString::number(col[2]));
			}

			if (!swapColorAndSFs)
				line.append(colorLine);
		}

		if (writeSF)
		{
			//add each associated SF values
			for (std::vector<ccScalarField*>::const_iterator it = theScalarFields.begin(); it != theScalarFields.end(); ++it)
			{
				line.append(separator);
				double sfVal = (*it)->getGlobalShift() + (*it)->getValue(i);
				line.append(QString::number(sfVal,'f',s_sfPrecision));
			}
		}

		if (writeColors && swapColorAndSFs)
			line.append(colorLine);

		if (writeNorms)
		{
			//add normal vector
			const CCVector3& N = cloud->getPointNormal(i);
			line.append(separator);
			line.append(QString::number(N.x,'f',s_nPrecision));
			line.append(separator);
			line.append(QString::number(N.y,'f',s_nPrecision));
			line.append(separator);
			line.append(QString::number(N.z,'f',s_nPrecision));
		}

		stream << line << "\n";

		if (!nprogress.oneStep())
		{
			result = CC_FERR_CANCELED_BY_USER;
			break;
		}
	}

	return result;
}
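
The column layout written above depends on three optional groups (colors, scalar fields, normals) plus the 'swap colors and SFs' option. Here is a Qt-free sketch of the same ordering logic, using std::ostringstream instead of QString; the function and its arguments are illustrative only:

#include <cstdio>
#include <sstream>
#include <string>
#include <vector>

//assemble one ASCII line: X Y Z [RGB] [SF values] [RGB if swapped] [Nx Ny Nz]
static std::string makeAsciiLine(	const double P[3],
									const unsigned char* rgb,            //may be null
									const std::vector<double>& sfValues, //may be empty
									const float* N,                      //may be null
									bool swapColorAndSFs,
									char separator = ' ')
{
	std::ostringstream line;
	line << P[0] << separator << P[1] << separator << P[2];

	std::ostringstream colorPart;
	if (rgb)
		colorPart << separator << (int)rgb[0] << separator << (int)rgb[1] << separator << (int)rgb[2];

	if (rgb && !swapColorAndSFs)
		line << colorPart.str();
	for (double sf : sfValues)
		line << separator << sf;
	if (rgb && swapColorAndSFs)
		line << colorPart.str();

	if (N)
		line << separator << N[0] << separator << N[1] << separator << N[2];

	return line.str();
}

int main()
{
	double P[3] = {1.0, 2.0, 3.0};
	unsigned char rgb[3] = {255, 128, 0};
	float N[3] = {0.0f, 0.0f, 1.0f};
	printf("%s\n", makeAsciiLine(P, rgb, {42.5}, N, /*swapColorAndSFs=*/true).c_str());
	return 0;
}
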
Code example #20
File: PVFilter.cpp Project: fredericoal/trunk
CC_FILE_ERROR PVFilter::loadFile(QString filename, ccHObject& container, LoadParameters& parameters)
{
	//opening file
	QFile in(filename);
	if (!in.open(QIODevice::ReadOnly))
		return CC_FERR_READING;

	//we deduce the number of points from the file size
	qint64 fileSize = in.size();
	qint64 singlePointSize = 4*sizeof(float);
	//check that size is ok
	if (fileSize == 0)
		return CC_FERR_NO_LOAD;
	if ((fileSize % singlePointSize) != 0)
		return CC_FERR_MALFORMED_FILE;
	unsigned numberOfPoints = static_cast<unsigned>(fileSize  / singlePointSize);

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Open PV file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(numberOfPoints)));
	pdlg.start();

	ccPointCloud* loadedCloud = 0;
	//if the file is too big, it will be chunked into multiple parts
	unsigned chunkIndex = 0;
	unsigned fileChunkPos = 0;
	unsigned fileChunkSize = 0;
	//number of points read for the current cloud part
	unsigned pointsRead = 0;
	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	for (unsigned i=0;i<numberOfPoints;i++)
	{
		//if we reach the max. cloud size limit, we create a new chunk
		if (pointsRead == fileChunkPos+fileChunkSize)
		{
			if (loadedCloud)
			{
				int sfIdx = loadedCloud->getCurrentInScalarFieldIndex();
				if (sfIdx>=0)
				{
					CCLib::ScalarField* sf = loadedCloud->getScalarField(sfIdx);
					sf->computeMinAndMax();
					loadedCloud->setCurrentDisplayedScalarField(sfIdx);
					loadedCloud->showSF(true);
				}
				container.addChild(loadedCloud);
			}
			fileChunkPos = pointsRead;
			fileChunkSize = std::min<unsigned>(numberOfPoints-pointsRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
			loadedCloud = new ccPointCloud(QString("unnamed - Cloud #%1").arg(++chunkIndex));
			if (!loadedCloud || !loadedCloud->reserveThePointsTable(fileChunkSize) || !loadedCloud->enableScalarField())
			{
				result = CC_FERR_NOT_ENOUGH_MEMORY;
				if (loadedCloud)
					delete loadedCloud;
				loadedCloud=0;
				break;
			}
		}

		//we read the 3 coordinates of the point
		float rBuff[3];
		if (in.read((char*)rBuff,3*sizeof(float))>=0)
		{
			//conversion to CCVector3
			CCVector3 P((PointCoordinateType)rBuff[0],
						(PointCoordinateType)rBuff[1],
						(PointCoordinateType)rBuff[2]);
			loadedCloud->addPoint(P);
		}
		else
		{
			result = CC_FERR_READING;
			break;
		}

		//then the scalar value
		if (in.read((char*)rBuff,sizeof(float))>=0)
		{
			loadedCloud->setPointScalarValue(pointsRead,(ScalarType)rBuff[0]);
		}
		else
		{
			//add fake scalar value for consistency then break
			loadedCloud->setPointScalarValue(pointsRead,0);
			result = CC_FERR_READING;
			break;
		}

		++pointsRead;

		if (!nprogress.oneStep())
		{
			result = CC_FERR_CANCELED_BY_USER;
			break;
		}
	}

	in.close();

	if (loadedCloud)
	{
		if (loadedCloud->size() < loadedCloud->capacity())
			loadedCloud->resize(loadedCloud->size());
		int sfIdx = loadedCloud->getCurrentInScalarFieldIndex();
		if (sfIdx>=0)
		{
			CCLib::ScalarField* sf = loadedCloud->getScalarField(sfIdx);
			sf->computeMinAndMax();
			loadedCloud->setCurrentDisplayedScalarField(sfIdx);
			loadedCloud->showSF(true);
		}
		container.addChild(loadedCloud);
	}

	return result;
}
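
This loader (like the BIN loaders further down) deduces the record count from the file size and splits large files into clouds of bounded size. A bare sketch of that arithmetic, with an illustrative record size and per-cloud limit:

#include <algorithm>
#include <cstdint>
#include <cstdio>

int main()
{
	//a PV record is 4 floats: x, y, z and one scalar value
	const std::int64_t recordSize = 4 * sizeof(float);
	const std::int64_t fileSize = 1000000 * recordSize;	//illustrative file size
	const unsigned maxPointsPerCloud = 128000;			//illustrative per-cloud limit

	if (fileSize == 0 || (fileSize % recordSize) != 0)
	{
		printf("empty or malformed file\n");
		return 1;
	}

	unsigned numberOfPoints = static_cast<unsigned>(fileSize / recordSize);

	//walk through the chunks exactly like the reading loop does
	for (unsigned chunkStart = 0; chunkStart < numberOfPoints; )
	{
		unsigned chunkSize = std::min(numberOfPoints - chunkStart, maxPointsPerCloud);
		printf("chunk: points [%u, %u)\n", chunkStart, chunkStart + chunkSize);
		chunkStart += chunkSize;
	}
	return 0;
}
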
Code example #21
File: LASFilter.cpp Project: vivzqs/trunk
CC_FILE_ERROR LASFilter::saveToFile(ccHObject* entity, const char* filename)
{
	if (!entity || !filename)
		return CC_FERR_BAD_ARGUMENT;

	ccHObject::Container clouds;
	if (entity->isKindOf(CC_POINT_CLOUD))
		clouds.push_back(entity);
	else
		entity->filterChildren(clouds, true, CC_POINT_CLOUD);

	if (clouds.empty())
	{
		ccConsole::Error("No point cloud in input selection!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}
	else if (clouds.size()>1)
	{
		ccConsole::Error("Can't save more than one cloud per LAS file!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	//the cloud to save
	ccGenericPointCloud* theCloud = static_cast<ccGenericPointCloud*>(clouds[0]);
	unsigned numberOfPoints = theCloud->size();

	if (numberOfPoints==0)
	{
		ccConsole::Error("Cloud is empty!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}

	//colors
	bool hasColor = theCloud->hasColors();

	//additional fields (as scalar fields)
	CCLib::ScalarField* classifSF = 0;
	CCLib::ScalarField* intensitySF = 0;
	CCLib::ScalarField* timeSF = 0;
	CCLib::ScalarField* returnNumberSF = 0;

	if (theCloud->isA(CC_POINT_CLOUD))
	{
		ccPointCloud* pc = static_cast<ccPointCloud*>(theCloud);

		//Classification
		{
			int sfIdx = pc->getScalarFieldIndexByName(CC_LAS_CLASSIFICATION_FIELD_NAME);
			if (sfIdx>=0)
			{
				classifSF = pc->getScalarField(sfIdx);
				assert(classifSF);
				if ((int)classifSF->getMin()<0 || (int)classifSF->getMax()>255) //outside the unsigned char range?
				{
					ccConsole::Warning("[LASFilter] Found a 'Classification' scalar field, but its values are outside the LAS specification (0-255)...");
					classifSF = 0;
				}
			}
		}
		//Classification end

		//intensity (as a scalar field)
		{
			int sfIdx = pc->getScalarFieldIndexByName(CC_SCAN_INTENSITY_FIELD_NAME);
			if (sfIdx>=0)
			{
				intensitySF = pc->getScalarField(sfIdx);
				assert(intensitySF);
				if ((int)intensitySF->getMin()<0 || (int)intensitySF->getMax()>65535) //outside the unsigned short range?
				{
					ccConsole::Warning("[LASFilter] Found an 'Intensity' scalar field, but its values are outside the LAS specification (0-65535)...");
					intensitySF = 0;
				}
			}
		}
		//Intensity end

		//Time (as a scalar field)
		{
			int sfIdx = pc->getScalarFieldIndexByName(CC_SCAN_TIME_FIELD_NAME);
			if (sfIdx>=0)
			{
				timeSF = pc->getScalarField(sfIdx);
				assert(timeSF);
			}
		}
		//Time end

		//Return number (as a scalar field)
		{
			int sfIdx = pc->getScalarFieldIndexByName(CC_SCAN_RETURN_INDEX_FIELD_NAME);
			if (sfIdx>=0)
			{
				returnNumberSF = pc->getScalarField(sfIdx);
				assert(returnNumberSF);
				if ((int)returnNumberSF->getMin()<0 || (int)returnNumberSF->getMax()>7) //outside the 3-bit range?
				{
					ccConsole::Warning("[LASFilter] Found a 'Return number' scalar field, but its values are outside the LAS specification (0-7)...");
					returnNumberSF = 0;
				}
			}
		}
		//Return number end
	}

	//open binary file for writing
	std::ofstream ofs;
	ofs.open(filename, std::ios::out | std::ios::binary);

	if (ofs.fail())
		return CC_FERR_WRITING;

	const double* shift = theCloud->getOriginalShift();

	liblas::Writer* writer = 0;
	try
	{
		liblas::Header header;

		//LAZ support based on extension!
		if (QFileInfo(filename).suffix().toUpper() == "LAZ")
		{
			header.SetCompressed(true);
		}

		//header.SetDataFormatId(liblas::ePointFormat3);
		ccBBox bBox = theCloud->getBB();
		if (bBox.isValid())
		{
			header.SetMin(-shift[0]+(double)bBox.minCorner().x,-shift[1]+(double)bBox.minCorner().y,-shift[2]+(double)bBox.minCorner().z);
			header.SetMax(-shift[0]+(double)bBox.maxCorner().x,-shift[1]+(double)bBox.maxCorner().y,-shift[2]+(double)bBox.maxCorner().z);
			CCVector3 diag = bBox.getDiagVec();

			//Set offset & scale, as points will be stored as 32-bit integer values:
			//int_value = (double_value-offset)/scale
			header.SetOffset(-shift[0]+(double)bBox.minCorner().x,-shift[1]+(double)bBox.minCorner().y,-shift[2]+(double)bBox.minCorner().z);
			header.SetScale(1.0e-9*std::max<double>(diag.x,ZERO_TOLERANCE), //result must fit in 32bits?!
				1.0e-9*std::max<double>(diag.y,ZERO_TOLERANCE),
				1.0e-9*std::max<double>(diag.z,ZERO_TOLERANCE));
		}
		header.SetPointRecordsCount(numberOfPoints);
		//header.SetDataFormatId(Header::ePointFormat1);

		writer = new liblas::Writer(ofs, header);
	}
	catch (...)
	{
		return CC_FERR_WRITING;
	}

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Save LAS file");
	char buffer[256];
	sprintf(buffer,"Points: %i",numberOfPoints);
	pdlg.setInfo(buffer);
	pdlg.start();

	//liblas::Point point(boost::shared_ptr<liblas::Header>(new liblas::Header(writer->GetHeader())));
	liblas::Point point(&writer->GetHeader());

	for (unsigned i=0; i<numberOfPoints; i++)
	{
		const CCVector3* P = theCloud->getPoint(i);
		{
			double x=-shift[0]+(double)P->x;
			double y=-shift[1]+(double)P->y;
			double z=-shift[2]+(double)P->z;
			point.SetCoordinates(x, y, z);
		}
		if (hasColor)
		{
			const colorType* rgb = theCloud->getPointColor(i);
			point.SetColor(liblas::Color(rgb[0]<<8,rgb[1]<<8,rgb[2]<<8)); //DGM: LAS colors are stored on 16 bits!
		}

		if (classifSF)
		{
			liblas::Classification classif;
			classif.SetClass((boost::uint32_t)classifSF->getValue(i));
			point.SetClassification(classif);
		}

		if (intensitySF)
		{
			point.SetIntensity((boost::uint16_t)intensitySF->getValue(i));
		}

		if (timeSF)
		{
			point.SetTime((double)timeSF->getValue(i));
		}

		if (returnNumberSF)
		{
			point.SetReturnNumber((boost::uint16_t)returnNumberSF->getValue(i));
			point.SetNumberOfReturns((boost::uint16_t)returnNumberSF->getMax());
		}

		writer->WritePoint(point);

		if (!nprogress.oneStep())
			break;
	}

	delete writer;
	//ofs.close();

	return CC_FERR_NO_ERROR;
}
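
The offset/scale pair chosen above controls the quantization: LAS stores each coordinate as a 32-bit integer with int_value = (double_value - offset) / scale, so with scale = 1e-9 * diagonal the stored values stay within roughly [0, 1e9], well inside the signed 32-bit range. A small round-trip sketch of that arithmetic (the bounding box values are illustrative):

#include <cmath>
#include <cstdint>
#include <cstdio>

int main()
{
	//illustrative bounding box along one axis
	const double minCorner = 1000.0;
	const double maxCorner = 1250.0;
	const double diag = maxCorner - minCorner;

	const double offset = minCorner;
	const double scale = 1.0e-9 * diag;	//same rule as in the writer above

	//quantize a coordinate the way a LAS writer does...
	const double x = 1117.123456;
	const std::int32_t stored = static_cast<std::int32_t>(std::floor((x - offset) / scale + 0.5));

	//...and reconstruct it the way a LAS reader does
	const double restored = offset + stored * scale;

	printf("stored=%d restored=%.9f error=%.3e\n", (int)stored, restored, std::fabs(restored - x));
	return 0;
}
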
Code example #22
File: PVFilter.cpp Project: fredericoal/trunk
CC_FILE_ERROR PVFilter::saveToFile(ccHObject* entity, QString filename)
{
	if (!entity || filename.isEmpty())
		return CC_FERR_BAD_ARGUMENT;

	//the cloud to save
	ccGenericPointCloud* theCloud = ccHObjectCaster::ToGenericPointCloud(entity);
	if (!theCloud)
	{
		ccLog::Warning("[PV] This filter can only save one cloud at a time!");
		return CC_FERR_BAD_ENTITY_TYPE;
	}
	unsigned numberOfPoints = theCloud->size();

	if (numberOfPoints == 0)
	{
		ccLog::Warning("[PV] Input cloud is empty!");
		return CC_FERR_NO_SAVE;
	}

	//open binary file for writing
	QFile out(filename);
	if (!out.open(QIODevice::WriteOnly))
		return CC_FERR_WRITING;

	//Has the cloud been recentered?
	if (theCloud->isShifted())
		ccLog::Warning(QString("[PVFilter::save] Can't recenter or rescale cloud '%1' when saving it in a PV file!").arg(theCloud->getName()));

	//for point clouds with multiple SFs, we must set the currently displayed one as 'input' SF
	//if (theCloud->isA(CC_TYPES::POINT_CLOUD))
	//{
	//	ccPointCloud* pc = static_cast<ccPointCloud*>(theCloud);
	//	pc->setCurrentInScalarField(pc->getCurrentDisplayedScalarFieldIndex());
	//}
	bool hasSF = theCloud->hasDisplayedScalarField();
	if (!hasSF)
		ccLog::Warning(QString("[PVFilter::save] Cloud '%1' has no displayed scalar field (we will save points with a default scalar value)!").arg(theCloud->getName()));
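		//(in that case every point gets the NaN placeholder value defined just below)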

	float val = std::numeric_limits<float>::quiet_NaN();

	//progress dialog
	ccProgressDialog pdlg(true); //cancel available
	CCLib::NormalizedProgress nprogress(&pdlg,numberOfPoints);
	pdlg.setMethodTitle("Save PV file");
	pdlg.setInfo(qPrintable(QString("Points: %1").arg(numberOfPoints)));
	pdlg.start();

	CC_FILE_ERROR result = CC_FERR_NO_ERROR;

	for (unsigned i=0; i<numberOfPoints; i++)
	{
		//write point
		{
			const CCVector3* P = theCloud->getPoint(i);
			
			//conversion to float
			float wBuff[3] = {(float)P->x, (float)P->y, (float)P->z};
			if (out.write((const char*)wBuff,3*sizeof(float)) < 0)
			{
				result = CC_FERR_WRITING;
				break;
			}
		}
			
		//write scalar value
		if (hasSF)
			val = static_cast<float>(theCloud->getPointScalarValue(i));
		if (out.write((const char*)&val,sizeof(float)) < 0)
		{
			result = CC_FERR_WRITING;
			break;
		}

		if (!nprogress.oneStep())
		{
			result = CC_FERR_CANCELED_BY_USER;
			break;
		}
	}

	out.close();

	return result;
}
Code example #23
File: BinFilter.cpp Project: FrankHXW/trunk
CC_FILE_ERROR BinFilter::LoadFileV1(QFile& in, ccHObject& container, unsigned nbScansTotal, const LoadParameters& parameters)
{
	ccLog::Print("[BIN] Version 1.0");

	if (nbScansTotal > 99)
	{
		if (QMessageBox::question(0, QString("Oups"), QString("Hum, do you really expect %1 point clouds?").arg(nbScansTotal), QMessageBox::Yes, QMessageBox::No) == QMessageBox::No)
			return CC_FERR_WRONG_FILE_TYPE;
	}
	else if (nbScansTotal == 0)
	{
		return CC_FERR_NO_LOAD;
	}

	ccProgressDialog pdlg(true, parameters.parentWidget);
	pdlg.setMethodTitle(QObject::tr("Open Bin file (old style)"));

	for (unsigned k=0; k<nbScansTotal; k++)
	{
		HeaderFlags header;
		unsigned nbOfPoints = 0;
		if (ReadEntityHeader(in, nbOfPoints, header) < 0)
		{
			return CC_FERR_READING;
		}

		//Console::print("[BinFilter::loadModelFromBinaryFile] Entity %i : %i points, color=%i, norms=%i, dists=%i\n",k,nbOfPoints,color,norms,distances);

		if (nbOfPoints == 0)
		{
			//Console::print("[BinFilter::loadModelFromBinaryFile] nothing to do!\n");
			continue;
		}

		//progress for this cloud
		CCLib::NormalizedProgress nprogress(&pdlg, nbOfPoints);
		if (parameters.alwaysDisplayLoadDialog)
		{
			pdlg.reset();
			pdlg.setInfo(QObject::tr("cloud %1/%2 (%3 points)").arg(k + 1).arg(nbScansTotal).arg(nbOfPoints));
			pdlg.start();
			QApplication::processEvents();
		}

		//Cloud name
		char cloudName[256] = "unnamed";
		if (header.name)
		{
			for (int i=0; i<256; ++i)
			{
				if (in.read(cloudName+i,1) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the cloud name!\n");
					return CC_FERR_READING;
				}
				if (cloudName[i] == 0)
				{
					break;
				}
			}
			//we force the end of the name in case it is too long!
			cloudName[255] = 0;
		}
		else
		{
			sprintf(cloudName,"unnamed - Cloud #%u",k);
		}

		//Scalar field name
		char sfName[1024] = "unnamed";
		if (header.sfName)
		{
			for (int i=0; i<1024; ++i)
			{
				if (in.read(sfName+i,1) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the cloud name!\n");
					return CC_FERR_READING;
				}
				if (sfName[i] == 0)
					break;
			}
			//we force the end of the name in case it is too long!
			sfName[1023] = 0;
		}
		else
		{
			strcpy(sfName,"Loaded scalar field");
		}
		
		//Creation
		ccPointCloud* loadedCloud = new ccPointCloud(cloudName);
		if (!loadedCloud)
			return CC_FERR_NOT_ENOUGH_MEMORY;

		unsigned fileChunkPos = 0;
		unsigned fileChunkSize = std::min(nbOfPoints,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);

		loadedCloud->reserveThePointsTable(fileChunkSize);
		if (header.colors)
		{
			loadedCloud->reserveTheRGBTable();
			loadedCloud->showColors(true);
		}
		if (header.normals)
		{
			loadedCloud->reserveTheNormsTable();
			loadedCloud->showNormals(true);
		}
		if (header.scalarField)
			loadedCloud->enableScalarField();

		unsigned lineRead = 0;
		int parts = 0;

		const ScalarType FORMER_HIDDEN_POINTS = (ScalarType)-1.0;
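		//old-style BIN files flagged hidden points with this special scalar value;
		//it is remapped to NAN_VALUE once the whole scalar field has been read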

		//read the file
		for (unsigned i=0; i<nbOfPoints; ++i)
		{
			if (lineRead == fileChunkPos+fileChunkSize)
			{
				if (header.scalarField)
					loadedCloud->getCurrentInScalarField()->computeMinAndMax();

				container.addChild(loadedCloud);
				fileChunkPos = lineRead;
				fileChunkSize = std::min(nbOfPoints-lineRead,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
				char partName[64];
				++parts;
				sprintf(partName,"%s.part_%i",cloudName,parts);
				loadedCloud = new ccPointCloud(partName);
				loadedCloud->reserveThePointsTable(fileChunkSize);

				if (header.colors)
				{
					loadedCloud->reserveTheRGBTable();
					loadedCloud->showColors(true);
				}
				if (header.normals)
				{
					loadedCloud->reserveTheNormsTable();
					loadedCloud->showNormals(true);
				}
				if (header.scalarField)
					loadedCloud->enableScalarField();
			}

			float Pf[3];
			if (in.read((char*)Pf,sizeof(float)*3) < 0)
			{
				//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity point !\n",k);
				return CC_FERR_READING;
			}
			loadedCloud->addPoint(CCVector3::fromArray(Pf));

			if (header.colors)
			{
				unsigned char C[3];
				if (in.read((char*)C,sizeof(ColorCompType)*3) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity colors !\n",k);
					return CC_FERR_READING;
				}
				loadedCloud->addRGBColor(C);
			}

			if (header.normals)
			{
				CCVector3 N;
				if (in.read((char*)N.u,sizeof(float)*3) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity norms !\n",k);
					return CC_FERR_READING;
				}
				loadedCloud->addNorm(N);
			}

			if (header.scalarField)
			{
				double D;
				if (in.read((char*)&D,sizeof(double)) < 0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity distance!\n",k);
					return CC_FERR_READING;
				}
				ScalarType d = static_cast<ScalarType>(D);
				loadedCloud->setPointScalarValue(i,d);
			}

			lineRead++;
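			//a cancel request truncates the current cloud and forces both loops
			//to terminate by pushing the counters past their bounds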

			if (parameters.alwaysDisplayLoadDialog && !nprogress.oneStep())
			{
				loadedCloud->resize(i+1-fileChunkPos);
				k=nbScansTotal;
				i=nbOfPoints;
			}
		}

		if (parameters.alwaysDisplayLoadDialog)
		{
			pdlg.stop();
			QApplication::processEvents();
		}

		if (header.scalarField)
		{
			CCLib::ScalarField* sf = loadedCloud->getCurrentInScalarField();
			assert(sf);
			sf->setName(sfName);

			//replace HIDDEN_VALUES by NAN_VALUES
			for (unsigned i=0; i<sf->currentSize(); ++i)
			{
				if (sf->getValue(i) == FORMER_HIDDEN_POINTS)
					sf->setValue(i,NAN_VALUE);
			}
			sf->computeMinAndMax();

			loadedCloud->setCurrentDisplayedScalarField(loadedCloud->getCurrentInScalarFieldIndex());
			loadedCloud->showSF(true);
		}

		container.addChild(loadedCloud);
	}

	return CC_FERR_NO_ERROR;
}
Code example #24
File: BinFilter.cpp Project: eimix/trunk
CC_FILE_ERROR BinFilter::loadFileV1(QFile& in, ccHObject& container, unsigned nbScansTotal)
{
	if (nbScansTotal>99)
	{
		if (QMessageBox::question(0, QString("Oups"), QString("Hum, do you really expect %1 point clouds?").arg(nbScansTotal))==QMessageBox::No)
			return CC_FERR_WRONG_FILE_TYPE;
	}
	else if (nbScansTotal==0)
	{
		return CC_FERR_NO_LOAD;
	}

	ccProgressDialog pdlg(true);
	pdlg.setMethodTitle("Open Bin file (old style)");

	for (unsigned k=0;k<nbScansTotal;k++)
	{
		HeaderFlags header;
		unsigned nbOfPoints=0;
		if (ReadEntityHeader(in,nbOfPoints,header) < 0)
			return CC_FERR_READING;

		//Console::print("[BinFilter::loadModelFromBinaryFile] Entity %i : %i points, color=%i, norms=%i, dists=%i\n",k,nbOfPoints,color,norms,distances);

		//progress for this cloud
		CCLib::NormalizedProgress nprogress(&pdlg,nbOfPoints);
		pdlg.reset();
		char buffer[256];
		sprintf(buffer,"cloud %i/%i (%i points)",k+1,nbScansTotal,nbOfPoints);
		pdlg.setInfo(buffer);
		pdlg.start();
		QApplication::processEvents();

		if (nbOfPoints==0)
		{
			//Console::print("[BinFilter::loadModelFromBinaryFile] nothing to do!\n");
			continue;
		}

		//Cloud name
		char cloudName[256]="unnamed";
		if (header.name)
		{
			for (int i=0;i<256;++i)
			{
				if (in.read(cloudName+i,1)<0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the cloud name!\n");
					return CC_FERR_READING;
				}
				if (cloudName[i]==0)
					break;
			}
			//we force the end of the name in case it is too long!
			cloudName[255]=0;
		}
		else
		{
			sprintf(cloudName,"unnamed - Cloud #%i",k);
		}

		//Scalar field name
		char sfName[1024]="unnamed";
		if (header.sfName)
		{
			for (int i=0;i<1024;++i)
			{
				if (in.read(sfName+i,1)<0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the cloud name!\n");
					return CC_FERR_READING;
				}
				if (sfName[i]==0)
					break;
			}
			//we force the end of the name in case it is too long!
			sfName[1023]=0;
		}
		else
		{
			strcpy(sfName,"Loaded scalar field");
		}
		//Creation
		ccPointCloud* loadedCloud = new ccPointCloud(cloudName);
		if (!loadedCloud)
			return CC_FERR_NOT_ENOUGH_MEMORY;

		unsigned fileChunkPos = 0;
		unsigned fileChunkSize = ccMin(nbOfPoints,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);

		loadedCloud->reserveThePointsTable(fileChunkSize);
		if (header.colors)
		{
			loadedCloud->reserveTheRGBTable();
			loadedCloud->showColors(true);
		}
		if (header.normals)
		{
			loadedCloud->reserveTheNormsTable();
			loadedCloud->showNormals(true);
		}
		if (header.scalarField)
			loadedCloud->enableScalarField();

		CCVector3 P;
		unsigned char C[3];
		double D;

		//does the associated scalar field contain genuine negative values?
		bool negSF = false;

		unsigned lineReaded=0;
		int parts = 0;

		//read the file
		for (unsigned i=0;i<nbOfPoints;++i)
		{
			if (lineReaded == fileChunkPos+fileChunkSize)
			{
				if (header.scalarField)
					loadedCloud->getCurrentInScalarField()->computeMinAndMax();

				container.addChild(loadedCloud);
				fileChunkPos = lineReaded;
				fileChunkSize = ccMin(nbOfPoints-lineReaded,CC_MAX_NUMBER_OF_POINTS_PER_CLOUD);
				char partName[64];
				++parts;
				sprintf(partName,"%s.part_%i",cloudName,parts);
				loadedCloud = new ccPointCloud(partName);
				loadedCloud->reserveThePointsTable(fileChunkSize);

				if (header.colors)
				{
					loadedCloud->reserveTheRGBTable();
					loadedCloud->showColors(true);
				}
				if (header.normals)
				{
					loadedCloud->reserveTheNormsTable();
					loadedCloud->showNormals(true);
				}
				if (header.scalarField)
					loadedCloud->enableScalarField();
			}

			if (in.read((char*)P.u,sizeof(float)*3)<0)
			{
				//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity point !\n",k);
				return CC_FERR_READING;
			}
			loadedCloud->addPoint(P);

			if (header.colors)
			{
				if (in.read((char*)C,sizeof(colorType)*3)<0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity colors !\n",k);
					return CC_FERR_READING;
				}
				loadedCloud->addRGBColor(C);
			}

			if (header.normals)
			{
				if (in.read((char*)P.u,sizeof(float)*3)<0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity norms !\n",k);
					return CC_FERR_READING;
				}
				loadedCloud->addNorm(P.u);
			}

			if (header.scalarField)
			{
				if (in.read((char*)&D,sizeof(double))<0)
				{
					//Console::print("[BinFilter::loadModelFromBinaryFile] Error reading the %ith entity distance !\n",k);
					return CC_FERR_READING;
				}
				DistanceType d = (DistanceType)D;
				//if there are negative values, we check whether they are special codes (HIDDEN_VALUE, etc.)
				//or genuine negative values (in which case the SF is not strictly positive)
				if (d<0.0 && !negSF)
				{
					//we must test if the value is a particular one
					if (d != HIDDEN_VALUE &&
						d != OUT_VALUE &&
						d != SEGMENTED_VALUE)
						negSF = true;
				}
				loadedCloud->setPointScalarValue(i,d);
			}

			lineReaded++;

			if (!nprogress.oneStep())
			{
				loadedCloud->resize(i+1-fileChunkPos);
				k=nbScansTotal;
				i=nbOfPoints;
			}
		}

		if (header.scalarField)
		{
			CCLib::ScalarField* sf = loadedCloud->getCurrentInScalarField();
			assert(sf);
			sf->setName(sfName);
			sf->setPositive(!negSF);
			sf->computeMinAndMax();

			loadedCloud->setCurrentDisplayedScalarField(loadedCloud->getCurrentInScalarFieldIndex());
			loadedCloud->showSF(true);
		}

		container.addChild(loadedCloud);
	}

	return CC_FERR_NO_ERROR;
}