Code Example #1
File: CameraModel.cpp Project: AndriiDSD/rtabmap
CameraModel::CameraModel(
		const std::string & name,
		double fx,
		double fy,
		double cx,
		double cy,
		const Transform & localTransform,
		double Tx,
		const cv::Size & imageSize) :
		name_(name),
		imageSize_(imageSize),
		K_(cv::Mat::eye(3, 3, CV_64FC1)),
		localTransform_(localTransform)
{
	UASSERT_MSG(fx > 0.0, uFormat("fx=%f", fx).c_str());
	UASSERT_MSG(fy > 0.0, uFormat("fy=%f", fy).c_str());
	UASSERT_MSG(cx >= 0.0, uFormat("cx=%f", cx).c_str());
	UASSERT_MSG(cy >= 0.0, uFormat("cy=%f", cy).c_str());
	UASSERT(!localTransform.isNull());
	if(Tx != 0.0)
	{
		P_ = cv::Mat::eye(3, 4, CV_64FC1);
		P_.at<double>(0,0) = fx;
		P_.at<double>(1,1) = fy;
		P_.at<double>(0,2) = cx;
		P_.at<double>(1,2) = cy;
		P_.at<double>(0,3) = Tx;
	}

	K_.at<double>(0,0) = fx;
	K_.at<double>(1,1) = fy;
	K_.at<double>(0,2) = cx;
	K_.at<double>(1,2) = cy;
}
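
Note: the constructor above fills the standard pinhole intrinsic matrix K_ and, when a stereo baseline is involved (Tx != 0), the 3x4 projection matrix P_. A minimal standalone sketch of the same layout follows (plain OpenCV, assumed example values; the relation Tx = -fx * baseline follows the OpenCV/ROS stereo convention and is an assumption here, not taken from the project):

#include <opencv2/core.hpp>
#include <iostream>

int main()
{
	// Assumed example intrinsics (not from the project above).
	double fx = 525.0, fy = 525.0, cx = 319.5, cy = 239.5;
	double baseline = 0.075; // meters, hypothetical stereo rig

	// K: 3x3 intrinsic matrix, as filled at the end of the constructor.
	cv::Mat K = cv::Mat::eye(3, 3, CV_64FC1);
	K.at<double>(0,0) = fx;
	K.at<double>(1,1) = fy;
	K.at<double>(0,2) = cx;
	K.at<double>(1,2) = cy;

	// P: 3x4 projection matrix; the (0,3) entry Tx is nonzero only for the
	// right camera of a rectified stereo pair.
	cv::Mat P = cv::Mat::eye(3, 4, CV_64FC1);
	P.at<double>(0,0) = fx;
	P.at<double>(1,1) = fy;
	P.at<double>(0,2) = cx;
	P.at<double>(1,2) = cy;
	P.at<double>(0,3) = -fx * baseline; // Tx

	std::cout << "K=\n" << K << "\nP=\n" << P << std::endl;
	return 0;
}
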
Code Example #2
CameraModel::CameraModel(
		const std::string & name,
		double fx,
		double fy,
		double cx,
		double cy,
		const Transform & localTransform,
		double Tx) :
		name_(name),
		K_(cv::Mat::eye(3, 3, CV_64FC1)),
		D_(cv::Mat::zeros(1, 5, CV_64FC1)),
		R_(cv::Mat::eye(3, 3, CV_64FC1)),
		P_(cv::Mat::eye(3, 4, CV_64FC1)),
		localTransform_(localTransform)
{
	UASSERT_MSG(fx >= 0.0, uFormat("fx=%f", fx).c_str());
	UASSERT_MSG(fy >= 0.0, uFormat("fy=%f", fy).c_str());
	UASSERT_MSG(cx >= 0.0, uFormat("cx=%f", cx).c_str());
	UASSERT_MSG(cy >= 0.0, uFormat("cy=%f", cy).c_str());
	P_.at<double>(0,0) = fx;
	P_.at<double>(1,1) = fy;
	P_.at<double>(0,2) = cx;
	P_.at<double>(1,2) = cy;
	P_.at<double>(0,3) = Tx;

	K_.at<double>(0,0) = fx;
	K_.at<double>(1,1) = fy;
	K_.at<double>(0,2) = cx;
	K_.at<double>(1,2) = cy;
}
Code Example #3
File: Signature.cpp Project: konanrobot/Rtabmap_IMU
void Signature::addLink(const Link & link)
{
	UDEBUG("Add link %d to %d (type=%d)", link.to(), this->id(), (int)link.type());
	UASSERT_MSG(link.from() == this->id(), uFormat("%d->%d for signature %d (type=%d)", link.from(), link.to(), this->id(), link.type()).c_str());
	UASSERT_MSG(link.to() != this->id(), uFormat("%d->%d for signature %d (type=%d)", link.from(), link.to(), this->id(), link.type()).c_str());
	std::pair<std::map<int, Link>::iterator, bool> pair = _links.insert(std::make_pair(link.to(), link));
	UASSERT_MSG(pair.second, uFormat("Link %d (type=%d) already added to signature %d!", link.to(), link.type(), this->id()).c_str());
	_linksModified = true;
}
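
Note: UASSERT and UASSERT_MSG come from the utilite logging/exception facilities used throughout these examples; their exact behavior (throwing vs. aborting) depends on the build. The following is only a hypothetical stand-in with the same calling convention, useful for reading the snippets outside the project:

#include <cstdio>
#include <stdexcept>

// Hypothetical stand-in, NOT the utilite implementation: check the condition
// and throw with file/line context plus the caller-supplied message.
#define MY_UASSERT_MSG(cond, msg) \
	do { \
		if(!(cond)) { \
			char buf[512]; \
			std::snprintf(buf, sizeof(buf), "%s:%d: assertion \"%s\" failed: %s", \
					__FILE__, __LINE__, #cond, (msg)); \
			throw std::runtime_error(buf); \
		} \
	} while(0)
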
Code Example #4
// generate color stereo input
void generateColorStereoInput(TriclopsContext const &context,
	FlyCapture2::Image const &grabbedImage,
	ImageContainer &imageCont,
	TriclopsColorStereoPair &stereoPair)
{
	Fc2Triclops::ErrorType fc2TriclopsError;
	TriclopsError te;

	TriclopsColorImage triclopsImageContainer[2];
	FlyCapture2::Image *tmpImage = imageCont.tmp;
	FlyCapture2::Image *unprocessedImage = imageCont.unprocessed;

	// Convert the pixel interleaved raw data to de-interleaved and color processed data
	fc2TriclopsError = Fc2Triclops::unpackUnprocessedRawOrMono16Image(
		grabbedImage,
		true /*assume little endian*/,
		tmpImage[0],
		tmpImage[1]);

	UASSERT_MSG(fc2TriclopsError == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)fc2TriclopsError).c_str());

	// preprocess color image
	for (int i = 0; i < 2; ++i) {
		FlyCapture2::Error fc2Error;
		fc2Error = tmpImage[i].SetColorProcessing(FlyCapture2::HQ_LINEAR);
		UASSERT_MSG(fc2Error == FlyCapture2::PGRERROR_OK, fc2Error.GetDescription());

		// convert preprocessed color image to BGRU format
		fc2Error = tmpImage[i].Convert(FlyCapture2::PIXEL_FORMAT_BGRU,
			&unprocessedImage[i]);
		UASSERT_MSG(fc2Error == FlyCapture2::PGRERROR_OK, fc2Error.GetDescription());
	}

	// create triclops image for right and left lens
	for (size_t i = 0; i < 2; ++i) {
		TriclopsColorImage *image = &triclopsImageContainer[i];
		te = triclopsLoadColorImageFromBuffer(
			reinterpret_cast<TriclopsColorPixel *>(unprocessedImage[i].GetData()),
			unprocessedImage[i].GetRows(),
			unprocessedImage[i].GetCols(),
			unprocessedImage[i].GetStride(),
			image);
		UASSERT_MSG(te == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)te).c_str());
	}

	// create stereo input from the triclops images constructed above
	// pack image data into a TriclopsColorStereoPair structure
	te = triclopsBuildColorStereoPairFromBuffers(
		context,
		&triclopsImageContainer[1],
		&triclopsImageContainer[0],
		&stereoPair);
	UASSERT_MSG(te == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)te).c_str());
}
Code Example #5
File: UAudioCaptureMic.cpp Project: matlabbe/utilite
void UAudioCaptureMic::mainLoopEnd()
{
    UDEBUG("");
    FMOD_RESULT result;
    FMOD_BOOL isRecording;
    result = UAudioSystem::isRecording(_driver, &isRecording); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));

    if(isRecording)
    {
        result = UAudioSystem::recordStop(_driver); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
    }
    UAudioCapture::mainLoopEnd();
}
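
Note: the one-line pattern "result = call(); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));" recurs in every FMOD snippet on this page. A hedged sketch of factoring it into a helper (checkFmod is a hypothetical name; FMOD_ErrorString comes from fmod_errors.h):

#include <fmod.h>
#include <fmod_errors.h>

// Hypothetical helper: assert-on-error wrapper for FMOD calls.
static void checkFmod(FMOD_RESULT result)
{
	UASSERT_MSG(result == FMOD_OK, FMOD_ErrorString(result));
}

// Usage, equivalent to the inline pattern above:
//   checkFmod(UAudioSystem::recordStop(_driver));
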
Code Example #6
QStringList ParametersToolBox::resetPage(int index)
{
	QStringList paramChanged;
	const QObjectList & children = stackedWidget_->widget(index)->children().first()->children().first()->children();
	for(int j=0; j<children.size();++j)
	{
		QString key = children.at(j)->objectName();
		// ignore working memory
		QString group = key.split("/").first();
		if(parameters_.find(key.toStdString())!=parameters_.end())
		{
			UASSERT_MSG(parameters_.find(key.toStdString()) != parameters_.end(), uFormat("key=%s", key.toStdString().c_str()).c_str());
			std::string value = Parameters::getDefaultParameters().at(key.toStdString());
			parameters_.at(key.toStdString()) = value;

			if(qobject_cast<QComboBox*>(children.at(j)))
			{
				if(((QComboBox*)children.at(j))->currentIndex() != QString::fromStdString(value).split(':').first().toInt())
				{
					((QComboBox*)children.at(j))->setCurrentIndex(QString::fromStdString(value).split(':').first().toInt());
					paramChanged.append(key);
				}
			}
			else if(qobject_cast<QSpinBox*>(children.at(j)))
			{
				if(((QSpinBox*)children.at(j))->value() != uStr2Int(value))
				{
					((QSpinBox*)children.at(j))->setValue(uStr2Int(value));
					paramChanged.append(key);
				}
			}
			else if(qobject_cast<QDoubleSpinBox*>(children.at(j)))
			{
				if(((QDoubleSpinBox*)children.at(j))->value() != uStr2Double(value))
				{
					((QDoubleSpinBox*)children.at(j))->setValue(uStr2Double(value));
					paramChanged.append(key);
				}
			}
			else if(qobject_cast<QCheckBox*>(children.at(j)))
			{
				if(((QCheckBox*)children.at(j))->isChecked() != uStr2Bool(value))
				{
					((QCheckBox*)children.at(j))->setChecked(uStr2Bool(value));
					paramChanged.append(key);
				}
			}
			else if(qobject_cast<QLineEdit*>(children.at(j)))
			{
				if(((QLineEdit*)children.at(j))->text().compare(QString::fromStdString(value)) != 0)
				{
					((QLineEdit*)children.at(j))->setText(QString::fromStdString(value));
					paramChanged.append(key);
				}
			}
		}
	}
	return paramChanged;
}
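
Note: qobject_cast already returns a typed pointer (or null), so the test-then-C-cast pattern above can be merged into one declaration. A sketch of the QComboBox branch in that style (resetComboBox is a hypothetical helper, not part of the project):

#include <QComboBox>
#include <QString>

// Sketch: merge the qobject_cast test and the cast into one declaration.
static bool resetComboBox(QObject * child, const QString & value)
{
	if(QComboBox * combo = qobject_cast<QComboBox*>(child))
	{
		int index = value.split(':').first().toInt();
		if(combo->currentIndex() != index)
		{
			combo->setCurrentIndex(index);
			return true; // parameter changed
		}
	}
	return false;
}
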
Code Example #7
File: UAudioCaptureMic.cpp Project: matlabbe/utilite
void UAudioCaptureMic::mainLoopBegin()
{
	_lastRecordPos = 0;
	if(_sound)
	{
		FMOD_RESULT result;
		result = UAudioSystem::recordStart(_driver, _sound, true); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
	}
}
Code Example #8
File: UAudioCaptureMic.cpp Project: matlabbe/utilite
std::vector<std::string> UAudioCaptureMic::getRecordDrivers()
{
    /*
        Enumerate record devices
    */
    FMOD_RESULT result;
    int numdrivers = 0;
    std::vector<std::string> listDrivers;

    result = UAudioSystem::getRecordNumDrivers(&numdrivers); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));

    for (int count=0; count < numdrivers; count++)
    {
        char name[256] = {0};

        result = UAudioSystem::getRecordDriverInfo(count, name, 256, 0); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));

        listDrivers.push_back(name);
    }
    return listDrivers;
}
Code Example #9
File: Link.cpp Project: dyouakim/rtabmap
void Link::setInfMatrix(const cv::Mat & infMatrix) {
	UASSERT(infMatrix.cols == 6 && infMatrix.rows == 6 && infMatrix.type() == CV_64FC1);
	UASSERT_MSG(uIsFinite(infMatrix.at<double>(0,0)) && infMatrix.at<double>(0,0)>0, "Translational information should not be null! (set to 1 if unknown)");
	UASSERT_MSG(uIsFinite(infMatrix.at<double>(1,1)) && infMatrix.at<double>(1,1)>0, "Translational information should not be null! (set to 1 if unknown)");
	UASSERT_MSG(uIsFinite(infMatrix.at<double>(2,2)) && infMatrix.at<double>(2,2)>0, "Translational information should not be null! (set to 1 if unknown)");
	UASSERT_MSG(uIsFinite(infMatrix.at<double>(3,3)) && infMatrix.at<double>(3,3)>0, "Rotational information should not be null! (set to 1 if unknown)");
	UASSERT_MSG(uIsFinite(infMatrix.at<double>(4,4)) && infMatrix.at<double>(4,4)>0, "Rotational information should not be null! (set to 1 if unknown)");
	UASSERT_MSG(uIsFinite(infMatrix.at<double>(5,5)) && infMatrix.at<double>(5,5)>0, "Rotational information should not be null! (set to 1 if unknown)");
	infMatrix_ = infMatrix;
}
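
Note: the assertions above require a 6x6 CV_64FC1 matrix with a finite, strictly positive diagonal (x, y, z, roll, pitch, yaw). Per the assertion messages, identity is the conventional "unknown" value; a minimal sketch:

#include <opencv2/core.hpp>

// Identity = "information unknown, but valid": finite and strictly positive
// on all six diagonal entries, so it passes every check above.
cv::Mat makeDefaultInfMatrix()
{
	return cv::Mat::eye(6, 6, CV_64FC1);
}
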
Code Example #10
void ParametersToolBox::updateParameter(const std::string & key, const std::string & value)
{
	QString group = QString::fromStdString(key).split("/").first();
	if(!ignoredGroups_.contains(group))
	{
		UASSERT_MSG(parameters_.find(key) != parameters_.end(), uFormat("key=\"%s\"", key.c_str()).c_str());
		parameters_.at(key) = value;
		QWidget * widget = this->findChild<QWidget*>(key.c_str());
		QString type = QString::fromStdString(Parameters::getType(key));
		if(type.compare("string") == 0)
		{
			QString valueQt = QString::fromStdString(value);
			if(valueQt.contains(';'))
			{
				// It's a list, just change the index
				QStringList splitted = valueQt.split(':');
				((QComboBox*)widget)->setCurrentIndex(splitted.first().toInt());
			}
			else
			{
				((QLineEdit*)widget)->setText(valueQt);
			}
		}
		else if(type.compare("int") == 0)
		{
			((QSpinBox*)widget)->setValue(uStr2Int(value));
		}
		else if(type.compare("uint") == 0)
		{
			((QSpinBox*)widget)->setValue(uStr2Int(value));
		}
		else if(type.compare("double") == 0)
		{
			((QDoubleSpinBox*)widget)->setValue(uStr2Double(value));
		}
		else if(type.compare("float") == 0)
		{
			((QDoubleSpinBox*)widget)->setValue(uStr2Float(value));
		}
		else if(type.compare("bool") == 0)
		{
			((QCheckBox*)widget)->setChecked(uStr2Bool(value));
		}
	}
}
Code Example #11
File: UAudioCaptureMic.cpp Project: matlabbe/utilite
void UAudioCaptureMic::close()
{
	if(_sound)
	{
		if(_fp)
		{
			// Write back the wav header now that we know its length.
			int channels, bits;
			float rate;
			FMOD_Sound_GetFormat(_sound, 0, 0, &channels, &bits);
			FMOD_Sound_GetDefaults(_sound, &rate, 0, 0, 0);
			UWav::writeWavHeader(_fp, _dataLength, rate, channels, bits);
			fclose(_fp);
			_fp = 0;

			if(_encodeToMp3)
			{
#ifdef BUILT_WITH_LAME
				// Encode to mp3
				UMp3Encoder mp3Encoder;

				// Rename the wav file
				std::string tempFileName = _fileName;
				tempFileName.append("TMP");
				UFile::rename(_fileName, tempFileName);

				if(mp3Encoder.encode(tempFileName, _fileName) == 0)
				{
					//Erase the wav file
					UFile::erase(tempFileName);
				}
#endif
			}
		}
		FMOD_RESULT result;
		result = FMOD_Sound_Release(_sound); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
		_sound = 0;
	}
	if(_fp)
	{
		fclose(_fp);
		_fp = 0;
	}
}
Code Example #12
File: DBDriver.cpp Project: FNicolai/rtabmap
void DBDriver::loadNodeData(std::list<Signature *> & signatures, bool loadMetricData) const
{
	// Don't look in the trash; we assume that if we want to load
	// the data of a signature, it is not in the trash! Print an error if so.
	_trashesMutex.lock();
	if(_trashSignatures.size())
	{
		for(std::list<Signature *>::iterator iter=signatures.begin(); iter!=signatures.end(); ++iter)
		{
			UASSERT(*iter != 0);
			UASSERT_MSG(!uContains(_trashSignatures, (*iter)->id()), uFormat("Signature %d should not be used when transferred to trash!!!!", (*iter)->id()).c_str());
		}
	}
	_trashesMutex.unlock();

	_dbSafeAccessMutex.lock();
	this->loadNodeDataQuery(signatures, loadMetricData);
	_dbSafeAccessMutex.unlock();
}
Code Example #13
File: SensorData.cpp Project: Huangyan9188/rtabmap
// Metric constructor + 2d depth
SensorData::SensorData(const cv::Mat & laserScan,
		  int laserScanMaxPts,
		  const cv::Mat & image,
		  const cv::Mat & depthOrRightImage,
		  float fx,
		  float fyOrBaseline,
		  float cx,
		  float cy,
		  const Transform & localTransform,
		  const Transform & pose,
		  float poseRotVariance,
		  float poseTransVariance,
		  int id,
		  double stamp,
		  const std::vector<unsigned char> & userData) :
	_image(image),
	_id(id),
	_stamp(stamp),
	_depthOrRightImage(depthOrRightImage),
	_laserScan(laserScan),
	_fx(fx),
	_fyOrBaseline(fyOrBaseline),
	_cx(cx),
	_cy(cy),
	_pose(pose),
	_localTransform(localTransform),
	_poseRotVariance(poseRotVariance),
	_poseTransVariance(poseTransVariance),
	_laserScanMaxPts(laserScanMaxPts),
	_userData(userData)
{
	UASSERT(_laserScan.empty() || _laserScan.type() == CV_32FC2);
	UASSERT(image.empty() ||
			image.type() == CV_8UC1 || // Mono
			image.type() == CV_8UC3);  // RGB
	UASSERT(depthOrRightImage.empty() ||
			depthOrRightImage.type() == CV_32FC1 || // Depth in meter
			depthOrRightImage.type() == CV_16UC1 || // Depth in millimetre
			depthOrRightImage.type() == CV_8U);     // Right stereo image
	UASSERT(!_localTransform.isNull());
	UASSERT_MSG(uIsFinite(_poseRotVariance) && _poseRotVariance>0 && uIsFinite(_poseTransVariance) && _poseTransVariance>0, "Rotational and translational variances should not be null! (set to 1 if unknown)");
}
Code Example #14
cv::Mat uncompressData(const unsigned char * bytes, unsigned long size)
{
	cv::Mat data;
	if(bytes && size>=3*sizeof(int))
	{
		//last 3 int elements are matrix size and type
		int height = *((int*)&bytes[size-3*sizeof(int)]);
		int width = *((int*)&bytes[size-2*sizeof(int)]);
		int type = *((int*)&bytes[size-1*sizeof(int)]);

		// If a dimension exceeds this range, the data is probably in the wrong format.
		UASSERT_MSG(height>=0 && height<10000 &&
				    width>=0 && width<10000,
				    uFormat("size=%d, height=%d width=%d type=%d", size, height, width, type).c_str());

		data = cv::Mat(height, width, type);
		uLongf totalUncompressed = uLongf(data.total())*uLongf(data.elemSize());

		int errCode = uncompress(
						(Bytef*)data.data,
						&totalUncompressed,
						(const Bytef*)bytes,
						uLong(size));

		if(errCode == Z_MEM_ERROR)
		{
			UERROR("Z_MEM_ERROR : Insufficient memory.");
		}
		else if(errCode == Z_BUF_ERROR)
		{
			UERROR("Z_BUF_ERROR : The buffer dest was not large enough to hold the uncompressed data.");
		}
		else if(errCode == Z_DATA_ERROR)
		{
			UERROR("Z_DATA_ERROR : The compressed data (referenced by source) was corrupted.");
		}
	}
	return data;
}
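
Note: uncompressData expects the producer to append rows, cols, and type as three trailing ints after the zlib stream. A hedged sketch of the matching writer side (in the spirit of rtabmap's compressData, not copied from it):

#include <opencv2/core.hpp>
#include <zlib.h>
#include <vector>

std::vector<unsigned char> compressDataSketch(const cv::Mat & data)
{
	std::vector<unsigned char> bytes;
	if(!data.empty())
	{
		// cv::Mat::data is only a single block if the matrix is continuous.
		cv::Mat continuous = data.isContinuous() ? data : data.clone();
		uLong srcLen = uLong(continuous.total()) * uLong(continuous.elemSize());
		uLongf dstLen = compressBound(srcLen);
		bytes.resize(dstLen + 3*sizeof(int));

		if(compress(bytes.data(), &dstLen, continuous.data, srcLen) == Z_OK)
		{
			bytes.resize(dstLen + 3*sizeof(int));
			// Trailing header: rows, cols, type, read back by uncompressData.
			int * header = (int*)&bytes[dstLen];
			header[0] = continuous.rows;
			header[1] = continuous.cols;
			header[2] = continuous.type();
		}
		else
		{
			bytes.clear();
		}
	}
	return bytes;
}
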
Code Example #15
File: CameraThread.cpp Project: qinhuaping/rtabmap
void CameraThread::postUpdate(SensorData * dataPtr, CameraInfo * info) const
{
	UASSERT(dataPtr!=0);
	SensorData & data = *dataPtr;
	if(_colorOnly && !data.depthRaw().empty())
	{
		data.setDepthOrRightRaw(cv::Mat());
	}

	if(_distortionModel && !data.depthRaw().empty())
	{
		UTimer timer;
		if(_distortionModel->getWidth() == data.depthRaw().cols &&
		   _distortionModel->getHeight() == data.depthRaw().rows)
		{
			cv::Mat depth = data.depthRaw().clone(); // make sure we are not modifying data in cached signatures.
			_distortionModel->undistort(depth);
			data.setDepthOrRightRaw(depth);
		}
		else
		{
			UERROR("Distortion model size is %dx%d but dpeth image is %dx%d!",
					_distortionModel->getWidth(), _distortionModel->getHeight(),
					data.depthRaw().cols, data.depthRaw().rows);
		}
		if(info) info->timeUndistortDepth = timer.ticks();
	}

	if(_bilateralFiltering && !data.depthRaw().empty())
	{
		UTimer timer;
		data.setDepthOrRightRaw(util2d::fastBilateralFiltering(data.depthRaw(), _bilateralSigmaS, _bilateralSigmaR));
		if(info) info->timeBilateralFiltering = timer.ticks();
	}

	if(_imageDecimation>1 && !data.imageRaw().empty())
	{
		UDEBUG("");
		UTimer timer;
		if(!data.depthRaw().empty() &&
		   !(data.depthRaw().rows % _imageDecimation == 0 && data.depthRaw().cols % _imageDecimation == 0))
		{
			UERROR("Decimation of depth images should be exact (decimation=%d, size=(%d,%d))! "
				   "Images won't be resized.", _imageDecimation, data.depthRaw().cols, data.depthRaw().rows);
		}
		else
		{
			data.setImageRaw(util2d::decimate(data.imageRaw(), _imageDecimation));
			data.setDepthOrRightRaw(util2d::decimate(data.depthOrRightRaw(), _imageDecimation));
			std::vector<CameraModel> models = data.cameraModels();
			for(unsigned int i=0; i<models.size(); ++i)
			{
				if(models[i].isValidForProjection())
				{
					models[i] = models[i].scaled(1.0/double(_imageDecimation));
				}
			}
			data.setCameraModels(models);
			StereoCameraModel stereoModel = data.stereoCameraModel();
			if(stereoModel.isValidForProjection())
			{
				stereoModel.scale(1.0/double(_imageDecimation));
				data.setStereoCameraModel(stereoModel);
			}
		}
		if(info) info->timeImageDecimation = timer.ticks();
	}
	if(_mirroring && !data.imageRaw().empty() && data.cameraModels().size() == 1)
	{
		UDEBUG("");
		UTimer timer;
		cv::Mat tmpRgb;
		cv::flip(data.imageRaw(), tmpRgb, 1);
		data.setImageRaw(tmpRgb);
		UASSERT_MSG(data.cameraModels().size() <= 1 && !data.stereoCameraModel().isValidForProjection(), "Only single RGBD cameras are supported for mirroring.");
		if(data.cameraModels().size() && data.cameraModels()[0].cx())
		{
			CameraModel tmpModel(
					data.cameraModels()[0].fx(),
					data.cameraModels()[0].fy(),
					float(data.imageRaw().cols) - data.cameraModels()[0].cx(),
					data.cameraModels()[0].cy(),
					data.cameraModels()[0].localTransform());
			data.setCameraModel(tmpModel);
		}
		if(!data.depthRaw().empty())
		{
			cv::Mat tmpDepth;
			cv::flip(data.depthRaw(), tmpDepth, 1);
			data.setDepthOrRightRaw(tmpDepth);
		}
		if(info) info->timeMirroring = timer.ticks();
	}
	if(_stereoToDepth && !data.imageRaw().empty() && data.stereoCameraModel().isValidForProjection() && !data.rightRaw().empty())
	{
		UDEBUG("");
		UTimer timer;
		cv::Mat depth = util2d::depthFromDisparity(
				_stereoDense->computeDisparity(data.imageRaw(), data.rightRaw()),
				data.stereoCameraModel().left().fx(),
				data.stereoCameraModel().baseline());
		// set Tx for stereo bundle adjustment (when used)
		CameraModel model = CameraModel(
				data.stereoCameraModel().left().fx(),
				data.stereoCameraModel().left().fy(),
				data.stereoCameraModel().left().cx(),
				data.stereoCameraModel().left().cy(),
				data.stereoCameraModel().localTransform(),
				-data.stereoCameraModel().baseline()*data.stereoCameraModel().left().fx());
		data.setCameraModel(model);
		data.setDepthOrRightRaw(depth);
		data.setStereoCameraModel(StereoCameraModel());
		if(info) info->timeDisparity = timer.ticks();
	}
	if(_scanFromDepth &&
		data.cameraModels().size() &&
		data.cameraModels().at(0).isValidForProjection() &&
		!data.depthRaw().empty())
	{
		UDEBUG("");
		if(data.laserScanRaw().empty())
		{
			UASSERT(_scanDecimation >= 1);
			UTimer timer;
			pcl::IndicesPtr validIndices(new std::vector<int>);
			pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = util3d::cloudFromSensorData(
					data,
					_scanDecimation,
					_scanMaxDepth,
					_scanMinDepth,
					validIndices.get());
			float maxPoints = (data.depthRaw().rows/_scanDecimation)*(data.depthRaw().cols/_scanDecimation);
			cv::Mat scan;
			const Transform & baseToScan = data.cameraModels()[0].localTransform();
			if(validIndices->size())
			{
				if(_scanVoxelSize>0.0f)
				{
					cloud = util3d::voxelize(cloud, validIndices, _scanVoxelSize);
					float ratio = float(cloud->size()) / float(validIndices->size());
					maxPoints = ratio * maxPoints;
				}
				else if(!cloud->is_dense)
				{
					pcl::PointCloud<pcl::PointXYZ>::Ptr denseCloud(new pcl::PointCloud<pcl::PointXYZ>);
					pcl::copyPointCloud(*cloud, *validIndices, *denseCloud);
					cloud = denseCloud;
				}

				if(cloud->size())
				{
					if(_scanNormalsK>0)
					{
						Eigen::Vector3f viewPoint(baseToScan.x(), baseToScan.y(), baseToScan.z());
						pcl::PointCloud<pcl::Normal>::Ptr normals = util3d::computeNormals(cloud, _scanNormalsK, viewPoint);
						pcl::PointCloud<pcl::PointNormal>::Ptr cloudNormals(new pcl::PointCloud<pcl::PointNormal>);
						pcl::concatenateFields(*cloud, *normals, *cloudNormals);
						scan = util3d::laserScanFromPointCloud(*cloudNormals, baseToScan.inverse());
					}
					else
					{
						scan = util3d::laserScanFromPointCloud(*cloud, baseToScan.inverse());
					}
				}
			}
			data.setLaserScanRaw(scan, LaserScanInfo((int)maxPoints, _scanMaxDepth, baseToScan));
			if(info) info->timeScanFromDepth = timer.ticks();
		}
		else
		{
			UWARN("Option to create laser scan from depth image is enabled, but "
				  "there is already a laser scan in the captured sensor data. Scan from "
				  "depth will not be created.");
		}
	}
}
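
Note: decimating an image by a factor d scales the pinhole intrinsics linearly, which is what models[i].scaled(1.0/double(_imageDecimation)) does above. A quick worked example with assumed VGA intrinsics (the exact half-pixel center convention is ignored here):

#include <cstdio>

int main()
{
	const double fx = 525.0, fy = 525.0, cx = 319.5, cy = 239.5; // assumed values
	const int d = 2; // decimation factor
	std::printf("scaled: fx=%g fy=%g cx=%g cy=%g\n", fx/d, fy/d, cx/d, cy/d);
	return 0;
}
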
Code Example #16
SensorData CameraStereoFlyCapture2::captureImage(CameraInfo * info)
{
	SensorData data;
#ifdef RTABMAP_FLYCAPTURE2
	if(camera_ && triclopsCtx_ && camera_->IsConnected())
	{
		// grab image from camera.
		// this image contains both right and left images
		FlyCapture2::Image grabbedImage;
		if(camera_->RetrieveBuffer(&grabbedImage) == FlyCapture2::PGRERROR_OK)
		{
			// right and left image extracted from grabbed image
			ImageContainer imageCont;

			TriclopsColorStereoPair colorStereoInput;
			generateColorStereoInput(triclopsCtx_, grabbedImage, imageCont, colorStereoInput);

			// rectify images
			TriclopsError triclops_status = triclopsColorRectify(triclopsCtx_, &colorStereoInput);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());

			// get images
			cv::Mat left,right;
			TriclopsColorImage color_image;
			triclops_status = triclopsGetColorImage(triclopsCtx_, TriImg_RECTIFIED_COLOR, TriCam_LEFT, &color_image);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());
			cv::cvtColor(cv::Mat(color_image.nrows, color_image.ncols, CV_8UC4, color_image.data), left, CV_RGBA2RGB);
			triclops_status = triclopsGetColorImage(triclopsCtx_, TriImg_RECTIFIED_COLOR, TriCam_RIGHT, &color_image);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());
			cv::cvtColor(cv::Mat(color_image.nrows, color_image.ncols, CV_8UC4, color_image.data), right, CV_RGBA2GRAY);

			// Set calibration stuff
			float fx, cy, cx, baseline;
			triclopsGetFocalLength(triclopsCtx_, &fx);
			triclopsGetImageCenter(triclopsCtx_, &cy, &cx);
			triclopsGetBaseline(triclopsCtx_, &baseline);
			cx *= left.cols;
			cy *= left.rows;

			StereoCameraModel model(
				fx,
				fx,
				cx,
				cy,
				baseline,
				this->getLocalTransform(),
				left.size());
			data = SensorData(left, right, model, this->getNextSeqID(), UTimer::now());

			// Compute disparity
			/*triclops_status = triclopsStereo(triclopsCtx_);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());

			TriclopsImage16 disparity_image;
			triclops_status = triclopsGetImage16(triclopsCtx_, TriImg16_DISPARITY, TriCam_REFERENCE, &disparity_image);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());
			cv::Mat depth(disparity_image.nrows, disparity_image.ncols, CV_32FC1);
			int pixelinc = disparity_image.rowinc / 2;
			float x, y;
			for (int i = 0, k = 0; i < disparity_image.nrows; i++) {
				unsigned short *row = disparity_image.data + i * pixelinc;
				float *rowOut = (float *)depth.row(i).data;
				for (int j = 0; j < disparity_image.ncols; j++, k++) {
					unsigned short disparity = row[j];

					// do not save invalid points
					if (disparity < 0xFFF0) {
						// convert the 16 bit disparity value to floating point x,y,z
						triclopsRCD16ToXYZ(triclopsCtx_, i, j, disparity, &x, &y, &rowOut[j]);
					}
				}
			}
			CameraModel model(fx, fx, cx, cy, this->getLocalTransform(), 0, left.size());
			data = SensorData(left, depth, model, this->getNextSeqID(), UTimer::now());
			*/
		}
	}

#else
	UERROR("CameraStereoFlyCapture2: RTAB-Map is not built with Triclops support!");
#endif
	return data;
}
Code Example #17
File: Odometry.cpp Project: hylrh2008/rtabmap
Transform Odometry::process(SensorData & data, OdometryInfo * info)
{
	if(_pose.isNull())
	{
		_pose.setIdentity(); // initialized
	}

	UASSERT(!data.imageRaw().empty());

	if(!data.stereoCameraModel().isValidForProjection() &&
	   (data.cameraModels().size() == 0 || !data.cameraModels()[0].isValidForProjection()))
	{
		UERROR("Rectified images required! Calibrate your camera.");
		return Transform();
	}

	double dt = previousStamp_>0.0f?data.stamp() - previousStamp_:0.0;
	Transform guess = dt && guessFromMotion_?Transform::getIdentity():Transform();
	UASSERT_MSG(dt>0.0 || (dt == 0.0 && previousVelocityTransform_.isNull()), uFormat("dt=%f previous transform=%s", dt, previousVelocityTransform_.prettyPrint().c_str()).c_str());
	if(!previousVelocityTransform_.isNull())
	{
		if(guessFromMotion_)
		{
			if(_filteringStrategy == 1)
			{
				// use Kalman predict transform
				float vx,vy,vz, vroll,vpitch,vyaw;
				predictKalmanFilter(dt, &vx,&vy,&vz,&vroll,&vpitch,&vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				float vx,vy,vz, vroll,vpitch,vyaw;
				previousVelocityTransform_.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);
				guess = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
		}
		else if(_filteringStrategy == 1)
		{
			predictKalmanFilter(dt);
		}
	}

	UTimer time;
	Transform t;
	if(_imageDecimation > 1)
	{
		// Decimation of images with calibrations
		SensorData decimatedData = data;
		decimatedData.setImageRaw(util2d::decimate(decimatedData.imageRaw(), _imageDecimation));
		decimatedData.setDepthOrRightRaw(util2d::decimate(decimatedData.depthOrRightRaw(), _imageDecimation));
		std::vector<CameraModel> cameraModels = decimatedData.cameraModels();
		for(unsigned int i=0; i<cameraModels.size(); ++i)
		{
			cameraModels[i] = cameraModels[i].scaled(1.0/double(_imageDecimation));
		}
		decimatedData.setCameraModels(cameraModels);
		StereoCameraModel stereoModel = decimatedData.stereoCameraModel();
		if(stereoModel.isValidForProjection())
		{
			stereoModel.scale(1.0/double(_imageDecimation));
		}
		decimatedData.setStereoCameraModel(stereoModel);

		// compute transform
		t = this->computeTransform(decimatedData, guess, info);

		// transform back the keypoints in the original image
		std::vector<cv::KeyPoint> kpts = decimatedData.keypoints();
		double log2value = log(double(_imageDecimation))/log(2.0);
		for(unsigned int i=0; i<kpts.size(); ++i)
		{
			kpts[i].pt.x *= _imageDecimation;
			kpts[i].pt.y *= _imageDecimation;
			kpts[i].size *= _imageDecimation;
			kpts[i].octave += log2value;
		}
		data.setFeatures(kpts, decimatedData.descriptors());

		if(info)
		{
			UASSERT(info->newCorners.size() == info->refCorners.size());
			for(unsigned int i=0; i<info->newCorners.size(); ++i)
			{
				info->refCorners[i].x *= _imageDecimation;
				info->refCorners[i].y *= _imageDecimation;
				info->newCorners[i].x *= _imageDecimation;
				info->newCorners[i].y *= _imageDecimation;
			}
			for(std::multimap<int, cv::KeyPoint>::iterator iter=info->words.begin(); iter!=info->words.end(); ++iter)
			{
				iter->second.pt.x *= _imageDecimation;
				iter->second.pt.y *= _imageDecimation;
				iter->second.size *= _imageDecimation;
				iter->second.octave += log2value;
			}
		}
	}
	else
	{
		t = this->computeTransform(data, guess, info);
	}

	if(info)
	{
		info->timeEstimation = time.ticks();
		info->lost = t.isNull();
		info->stamp = data.stamp();
		info->interval = dt;
		info->transform = t;

		if(!data.groundTruth().isNull())
		{
			if(!previousGroundTruthPose_.isNull())
			{
				info->transformGroundTruth = previousGroundTruthPose_.inverse() * data.groundTruth();
			}
			previousGroundTruthPose_ = data.groundTruth();
		}
	}

	if(!t.isNull())
	{
		_resetCurrentCount = _resetCountdown;

		float vx,vy,vz, vroll,vpitch,vyaw;
		t.getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);

		// transform to velocity
		if(dt)
		{
			vx /= dt;
			vy /= dt;
			vz /= dt;
			vroll /= dt;
			vpitch /= dt;
			vyaw /= dt;
		}

		if(_force3DoF || !_holonomic || particleFilters_.size() || _filteringStrategy==1)
		{
			if(_filteringStrategy == 1)
			{
				if(previousVelocityTransform_.isNull())
				{
					// reset Kalman
					if(dt)
					{
						initKalmanFilter(t, vx,vy,vz,vroll,vpitch,vyaw);
					}
					else
					{
						initKalmanFilter(t);
					}
				}
				else
				{
					// Kalman filtering
					updateKalmanFilter(vx,vy,vz,vroll,vpitch,vyaw);
				}
			}
			else if(particleFilters_.size())
			{
				// Particle filtering
				UASSERT(particleFilters_.size()==6);
				if(previousVelocityTransform_.isNull())
				{
					particleFilters_[0]->init(vx);
					particleFilters_[1]->init(vy);
					particleFilters_[2]->init(vz);
					particleFilters_[3]->init(vroll);
					particleFilters_[4]->init(vpitch);
					particleFilters_[5]->init(vyaw);
				}
				else
				{
					vx = particleFilters_[0]->filter(vx);
					vy = particleFilters_[1]->filter(vy);
					vyaw = particleFilters_[5]->filter(vyaw);

					if(!_holonomic)
					{
						// arc trajectory around ICR
						float tmpY = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
						if(fabs(tmpY) < fabs(vy) || (tmpY<=0 && vy >=0) || (tmpY>=0 && vy<=0))
						{
							vy = tmpY;
						}
						else
						{
							vyaw = (atan(vx/vy)*2.0f-CV_PI)*-1;
						}
					}

					if(!_force3DoF)
					{
						vz = particleFilters_[2]->filter(vz);
						vroll = particleFilters_[3]->filter(vroll);
						vpitch = particleFilters_[4]->filter(vpitch);
					}
				}

				if(info)
				{
					info->timeParticleFiltering = time.ticks();
				}

				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}
			else if(!_holonomic)
			{
				// arc trajectory around ICR
				vy = vyaw!=0.0f ? vx / tan((CV_PI-vyaw)/2.0f) : 0.0f;
				if(_force3DoF)
				{
					vz = 0.0f;
					vroll = 0.0f;
					vpitch = 0.0f;
				}
			}

			if(dt)
			{
				t = Transform(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
			}
			else
			{
				t = Transform(vx, vy, vz, vroll, vpitch, vyaw);
			}

			if(info)
			{
				info->transformFiltered = t;
			}
		}

		previousStamp_ = data.stamp();
		previousVelocityTransform_.setNull();
		if(dt)
		{
			previousVelocityTransform_ = Transform(vx, vy, vz, vroll, vpitch, vyaw);
		}

		if(info)
		{
			distanceTravelled_ += t.getNorm();
			info->distanceTravelled = distanceTravelled_;
		}

		return _pose *= t; // updated
	}
	else if(_resetCurrentCount > 0)
	{
		UWARN("Odometry lost! Odometry will be reset after next %d consecutive unsuccessful odometry updates...", _resetCurrentCount);

		--_resetCurrentCount;
		if(_resetCurrentCount == 0)
		{
			UWARN("Odometry automatically reset to latest pose!");
			this->reset(_pose);
		}
	}

	previousVelocityTransform_.setNull();
	previousStamp_ = 0;

	return Transform();
}
Code Example #18
File: CameraThread.cpp Project: AndriiDSD/rtabmap
void CameraThread::mainLoop()
{
	UDEBUG("");
	CameraInfo info;
	SensorData data = _camera->takeImage(&info);

	if(!data.imageRaw().empty())
	{
		if(_colorOnly && !data.depthRaw().empty())
		{
			data.setDepthOrRightRaw(cv::Mat());
		}
		if(_imageDecimation>1 && !data.imageRaw().empty())
		{
			UDEBUG("");
			UTimer timer;
			if(!data.depthRaw().empty() &&
			   !(data.depthRaw().rows % _imageDecimation == 0 && data.depthRaw().cols % _imageDecimation == 0))
			{
				UERROR("Decimation of depth images should be exact (decimation=%d, size=(%d,%d))! "
					   "Images won't be resized.", _imageDecimation, data.depthRaw().cols, data.depthRaw().rows);
			}
			else
			{
				data.setImageRaw(util2d::decimate(data.imageRaw(), _imageDecimation));
				data.setDepthOrRightRaw(util2d::decimate(data.depthOrRightRaw(), _imageDecimation));
				std::vector<CameraModel> models = data.cameraModels();
				for(unsigned int i=0; i<models.size(); ++i)
				{
					if(models[i].isValidForProjection())
					{
						models[i] = models[i].scaled(1.0/double(_imageDecimation));
					}
				}
				data.setCameraModels(models);
				StereoCameraModel stereoModel = data.stereoCameraModel();
				if(stereoModel.isValidForProjection())
				{
					stereoModel.scale(1.0/double(_imageDecimation));
					data.setStereoCameraModel(stereoModel);
				}
			}
			info.timeImageDecimation = timer.ticks();
		}
		if(_mirroring && data.cameraModels().size() == 1)
		{
			UDEBUG("");
			UTimer timer;
			cv::Mat tmpRgb;
			cv::flip(data.imageRaw(), tmpRgb, 1);
			data.setImageRaw(tmpRgb);
			UASSERT_MSG(data.cameraModels().size() <= 1 && !data.stereoCameraModel().isValidForProjection(), "Only single RGBD cameras are supported for mirroring.");
			if(data.cameraModels().size() && data.cameraModels()[0].cx())
			{
				CameraModel tmpModel(
						data.cameraModels()[0].fx(),
						data.cameraModels()[0].fy(),
						float(data.imageRaw().cols) - data.cameraModels()[0].cx(),
						data.cameraModels()[0].cy(),
						data.cameraModels()[0].localTransform());
				data.setCameraModel(tmpModel);
			}
			if(!data.depthRaw().empty())
			{
				cv::Mat tmpDepth;
				cv::flip(data.depthRaw(), tmpDepth, 1);
				data.setDepthOrRightRaw(tmpDepth);
			}
			info.timeMirroring = timer.ticks();
		}
		if(_stereoToDepth && data.stereoCameraModel().isValidForProjection() && !data.rightRaw().empty())
		{
			UDEBUG("");
			UTimer timer;
			cv::Mat depth = util2d::depthFromDisparity(
					_stereoDense->computeDisparity(data.imageRaw(), data.rightRaw()),
					data.stereoCameraModel().left().fx(),
					data.stereoCameraModel().baseline());
			data.setCameraModel(data.stereoCameraModel().left());
			data.setDepthOrRightRaw(depth);
			data.setStereoCameraModel(StereoCameraModel());
			info.timeDisparity = timer.ticks();
			UDEBUG("Computing disparity = %f s", info.timeDisparity);
		}
		if(_scanFromDepth &&
			data.cameraModels().size() &&
			data.cameraModels().at(0).isValidForProjection() &&
			!data.depthRaw().empty())
		{
			UDEBUG("");
			if(data.laserScanRaw().empty())
			{
				UASSERT(_scanDecimation >= 1);
				UTimer timer;
				pcl::IndicesPtr validIndices(new std::vector<int>);
				pcl::PointCloud<pcl::PointXYZ>::Ptr cloud = util3d::cloudFromSensorData(data, _scanDecimation, _scanMaxDepth, _scanMinDepth, validIndices.get());
				float maxPoints = (data.depthRaw().rows/_scanDecimation)*(data.depthRaw().cols/_scanDecimation);
				if(_scanVoxelSize>0.0f)
				{
					cloud = util3d::voxelize(cloud, validIndices, _scanVoxelSize);
					float ratio = float(cloud->size()) / float(validIndices->size());
					maxPoints = ratio * maxPoints;
				}
				else if(!cloud->is_dense)
				{
					pcl::PointCloud<pcl::PointXYZ>::Ptr denseCloud(new pcl::PointCloud<pcl::PointXYZ>);
					pcl::copyPointCloud(*cloud, *validIndices, *denseCloud);
					cloud = denseCloud;
				}

				cv::Mat scan;
				if(_scanNormalsK>0)
				{
					scan = util3d::laserScanFromPointCloud(*util3d::computeNormals(cloud, _scanNormalsK));
				}
				else
				{
					scan = util3d::laserScanFromPointCloud(*cloud);
				}
				data.setLaserScanRaw(scan, (int)maxPoints, _scanMaxDepth);
				info.timeScanFromDepth = timer.ticks();
				UDEBUG("Computing scan from depth = %f s", info.timeScanFromDepth);
			}
			else
			{
				UWARN("Option to create laser scan from depth image is enabled, but "
					  "there is already a laser scan in the captured sensor data. Scan from "
					  "depth will not be created.");
			}
		}

		info.cameraName = _camera->getSerial();
		this->post(new CameraEvent(data, info));
	}
	else if(!this->isKilled())
	{
		UWARN("no more images...");
		this->kill();
		this->post(new CameraEvent());
	}
}
Code Example #19
File: BayesFilter.cpp Project: aijim/3d_icgm
cv::Mat BayesFilter::generatePrediction(const Memory * memory, const std::vector<int> & ids) const
{
	if(!_fullPredictionUpdate && !_prediction.empty())
	{
		return updatePrediction(_prediction, memory, uKeys(_posterior), ids);
	}
	UDEBUG("");

	UASSERT(memory &&
		   _predictionLC.size() >= 2 &&
		   ids.size());

	UTimer timer;
	timer.start();
	UTimer timerGlobal;
	timerGlobal.start();

	std::map<int, int> idToIndexMap;
	for(unsigned int i=0; i<ids.size(); ++i)
	{
		UASSERT_MSG(ids[i] != 0, "Signature id is null ?!?");
		idToIndexMap.insert(idToIndexMap.end(), std::make_pair(ids[i], i));
	}

	//int rows = prediction.rows;
	cv::Mat prediction = cv::Mat::zeros(ids.size(), ids.size(), CV_32FC1);
	int cols = prediction.cols;

	// Each prior is a column vector
	UDEBUG("_predictionLC.size()=%d",_predictionLC.size());
	std::set<int> idsDone;

	for(unsigned int i=0; i<ids.size(); ++i)
	{
		if(idsDone.find(ids[i]) == idsDone.end())
		{
			if(ids[i] > 0)
			{
				// Set high values (gaussians curves) to loop closure neighbors

				// ADD prob for each neighbors
				std::map<int, int> neighbors = memory->getNeighborsId(ids[i], _predictionLC.size()-1, 0);
				std::list<int> idsLoopMargin;
				//filter neighbors in STM
				for(std::map<int, int>::iterator iter=neighbors.begin(); iter!=neighbors.end();)
				{
					if(memory->isInSTM(iter->first))
					{
						neighbors.erase(iter++);
					}
					else
					{
						if(iter->second == 0)
						{
							idsLoopMargin.push_back(iter->first);
						}
						++iter;
					}
				}

				// should at least have 1 id in idsLoopMargin
				if(idsLoopMargin.size() == 0)
				{
					UFATAL("No 0 margin neighbor for signature %d !?!?", ids[i]);
				}

				// same neighbor tree for loop signatures (margin = 0)
				for(std::list<int>::iterator iter = idsLoopMargin.begin(); iter!=idsLoopMargin.end(); ++iter)
				{
					float sum = 0.0f; // sum values added
					sum += this->addNeighborProb(prediction, i, neighbors, idToIndexMap);
					idsDone.insert(*iter);
					this->normalize(prediction, i, sum, ids[0]<0);
				}
			}
			else
			{
				// Set the virtual place prior
				if(_virtualPlacePrior > 0)
				{
					if(cols>1) // The first must be the virtual place
					{
						((float*)prediction.data)[i] = _virtualPlacePrior;
						float val = (1.0-_virtualPlacePrior)/(cols-1);
						for(int j=1; j<cols; j++)
						{
							((float*)prediction.data)[i + j*cols] = val;
						}
					}
					else if(cols>0)
					{
						((float*)prediction.data)[i] = 1;
					}
				}
				else
				{
					// Only for some tests...
					// when _virtualPlacePrior=0, set all priors to the same value
					if(cols>1)
					{
						float val = 1.0/cols;
						for(int j=0; j<cols; j++)
						{
							((float*)prediction.data)[i + j*cols] = val;
						}
					}
					else if(cols>0)
					{
						((float*)prediction.data)[i] = 1;
					}
				}
			}
		}
	}

	ULOGGER_DEBUG("time = %fs", timerGlobal.ticks());

	return prediction;
}
Code Example #20
File: UAudioCaptureMic.cpp Project: matlabbe/utilite
/*********************************
RecordFichier function
Records a WAV file
*********************************/
void UAudioCaptureMic::mainLoop()
{
	if(!_sound)
	{
		UERROR("Recorder is not initialized.");
		this->kill();
		return;
	}
    FMOD_RESULT result;
    void *ptr1 = 0, *ptr2 = 0;
    int blockLength;
    unsigned int len1 = 0, len2 = 0;

    unsigned int recordPos = 0;

	result = UAudioSystem::getRecordPosition(_driver, &recordPos); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
	if (recordPos != _lastRecordPos)
	{
		blockLength = (int)recordPos - (int)_lastRecordPos;
		if (blockLength < 0)
		{
			blockLength += _soundLength;
		}

		// * exinfo.numchannels * 2 = stereo 16bit.  1 sample = 4 bytes.
		// Lock the sound to get access to the raw data.
		FMOD_Sound_Lock(_sound, _lastRecordPos * channels() * bytesPerSample(), blockLength * channels() * bytesPerSample(), &ptr1, &ptr2, &len1, &len2);
		{
			if (ptr1 && len1)    //    Write it to disk.
			{
				if(_fp)
				{
					//write to file
					_dataLength += fwrite(ptr1, 1, len1, _fp);
				}

				// push back in the frames buffer
				pushBackSamples(ptr1, len1);
			}

			if (ptr2 && len2)    //    Write it to disk.
			{
				if(_fp)
				{
					//write to file
					_dataLength += fwrite(ptr2, 1, len2, _fp);
				}

				// push back in the frames buffer
				pushBackSamples(ptr2, len2);
			}
		}
		//Unlock the sound to allow FMOD to use it again.
		FMOD_Sound_Unlock(_sound, ptr1, ptr2, len1, len2);

		_lastRecordPos = recordPos;
	}

    UAudioSystem::update();

    uSleep(10);

    // If we are recording to a file, make sure to stop 
    // when the maximum file size is reached
    if (_fp && _maxFileSize != 0 && int(_dataLength) + frameLength()*bytesPerSample() > _maxFileSize)
    {
        UWARN("Recording max memory reached (%d Mb)... stopped", _maxFileSize/1000000);
        this->kill();
    }
}
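
Note: the blockLength computation above handles the record cursor wrapping around the circular sound buffer. A tiny standalone sketch of that wrap-around delta (ringDelta is a hypothetical name):

// Samples written since the last read, in a ring buffer of 'length' samples.
static inline int ringDelta(unsigned int pos, unsigned int last, unsigned int length)
{
	int d = int(pos) - int(last);
	return d < 0 ? d + int(length) : d;
}
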
Code Example #21
File: UAudioCaptureMic.cpp Project: matlabbe/utilite
bool UAudioCaptureMic::init()
{
	this->close();
	bool ok = UAudioCapture::init();
	if(ok)
	{
		std::string::size_type loc;

		if(_fileName.size())
		{
			loc = _fileName.find( ".mp3", 0 );
			if( loc != std::string::npos )
			{
#ifdef BUILT_WITH_LAME
				_encodeToMp3 = true;
#else
				_fileName.append(".wav");
				UERROR("Cannot write to a mp3, saving to a wav instead (%s)", _fileName.c_str());
#endif
			}
			_fp = fopen(_fileName.c_str(), "wb");
		}

		FMOD_RESULT result;
		FMOD_BOOL isRecording = false;
		result = UAudioSystem::isRecording(_driver, &isRecording); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
		if(isRecording)
		{
			result = UAudioSystem::recordStop(_driver); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
		}

		_dataLength = 0;
		_soundLength = 0;
		_lastRecordPos = 0;
		FMOD_CREATESOUNDEXINFO exinfo;
		memset(&exinfo, 0, sizeof(FMOD_CREATESOUNDEXINFO));
		exinfo.cbsize           = sizeof(FMOD_CREATESOUNDEXINFO);
		exinfo.numchannels      = channels();
		if(bytesPerSample() == 1)
		{
			exinfo.format           = FMOD_SOUND_FORMAT_PCM8;
		}
		else if(bytesPerSample() == 2)
		{
			exinfo.format           = FMOD_SOUND_FORMAT_PCM16;
		}
		else if(bytesPerSample() == 3)
		{
			exinfo.format           = FMOD_SOUND_FORMAT_PCM24;
		}
		else if(bytesPerSample() == 4)
		{
			exinfo.format           = FMOD_SOUND_FORMAT_PCM32;
		}
		exinfo.defaultfrequency = (int)fs();
		exinfo.length           = exinfo.defaultfrequency * bytesPerSample() * exinfo.numchannels * 2; // 2 -> for two seconds

		result = UAudioSystem::createSound(0, FMOD_2D | FMOD_SOFTWARE | FMOD_OPENUSER, &exinfo, &_sound); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
		if(_fp)
		{
			int channels, bits;
			float rate;
			FMOD_Sound_GetFormat(_sound, 0, 0, &channels, &bits);
			FMOD_Sound_GetDefaults(_sound, &rate, 0, 0, 0);
			UWav::writeWavHeader(_fp, _dataLength, rate, channels, bits);        // Write out the wav header. The length will be 0 since it is unknown at this point.
		}

		result = FMOD_Sound_GetLength(_sound, &_soundLength, FMOD_TIMEUNIT_PCM); UASSERT_MSG(result==FMOD_OK, FMOD_ErrorString(result));
	}
	return ok;
}
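
Note: the exinfo.length line sizes the circular recording buffer to two seconds of audio. As a worked example with assumed values (44.1 kHz, 16-bit, stereo): 44100 * 2 bytes * 2 channels * 2 s = 352800 bytes.

#include <cstdio>

int main()
{
	const int fs = 44100, bytesPerSample = 2, channels = 2, seconds = 2; // assumed
	std::printf("ring buffer = %d bytes\n", fs * bytesPerSample * channels * seconds); // 352800
	return 0;
}
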