Example #1
SensorData CameraOpenNICV::captureImage(CameraInfo * info)
{
	SensorData data;
	if(_capture.isOpened())
	{
		_capture.grab();
		cv::Mat depth, rgb;
		_capture.retrieve(depth, CV_CAP_OPENNI_DEPTH_MAP );
		_capture.retrieve(rgb, CV_CAP_OPENNI_BGR_IMAGE );

		depth = depth.clone();
		rgb = rgb.clone();

		UASSERT(_depthFocal>0.0f);
		if(!rgb.empty() && !depth.empty())
		{
			CameraModel model(
					_depthFocal, //fx
					_depthFocal, //fy
					float(rgb.cols/2) - 0.5f,  //cx
					float(rgb.rows/2) - 0.5f,  //cy
					this->getLocalTransform(),
					0,
					rgb.size());
			data = SensorData(rgb, depth, model, this->getNextSeqID(), UTimer::now());
		}
	}
	else
	{
		ULOGGER_WARN("The camera must be initialized before requesting an image.");
	}
	return data;
}
Example #2
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    _sensors.push_back(Sensor(SensorData('s', CUSTOM_SENSOR01, 0, 0), 25.5));
    _sensors.push_back(Sensor(SensorData('s', CUSTOM_SENSOR02, 0, 0), 27.5));
    _sensors.push_back(Sensor(SensorData('s', CUSTOM_SENSOR03, 0, 0), 28.5));

    for (const Sensor &sensor : _sensors) {
        ui->comboBox_SensorId->addItem(QString::number(sensor.sensorData.byte01),
                                       sensor.sensorData.byte01);
        _sensorIds.push_back(sensor.sensorData.byte01);
    }

    _tcpClient = new TcpClient(CUSTOM_IPV4ADDRESS,
                               CUSTOM_PORT,
                               _sensorIds,
                               CUSTOM_QUERYINTERVAL,
                               this);

    bool isOk;
    // TcpClient: Connections
    isOk = connect(_tcpClient, SIGNAL(dataReceived(quint8, qreal)),
                   this, SLOT(onDataReceived(quint8, qreal)));
    Q_ASSERT(isOk);
    Q_UNUSED(isOk);

    isOk = connect(_tcpClient, SIGNAL(dataReceived(SensorData)),
                   this, SLOT(onDataReceived(SensorData)));
    Q_ASSERT(isOk);
    Q_UNUSED(isOk);

    // GUI: Connections
    isOk = connect(ui->pushButton_TemperatureSet, SIGNAL(clicked()),
                   this, SLOT(setTemperatureDesired()));
    Q_ASSERT(isOk);
    Q_UNUSED(isOk);

    // Note: because this slot follows the on_<objectName>_<signalName>() naming
    // convention, setupUi() already auto-connects it via connectSlotsByName(),
    // so this explicit connect() makes the slot fire twice per index change.
    isOk = connect(ui->comboBox_SensorId, SIGNAL(currentIndexChanged(int)),
                   this, SLOT(on_comboBox_SensorId_currentIndexChanged(int)));
    Q_ASSERT(isOk);
    Q_UNUSED(isOk);

    _tcpClient->start();
}
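The string-based connect() calls above only resolve at run time if MainWindow actually declares matching slots. Below is a minimal header sketch of those declarations; the slot signatures come from the constructor above, while the include names and container types are assumptions.

// mainwindow.h (sketch; project-specific header names are hypothetical)
#include <QMainWindow>
#include <QVector>
#include "sensordata.h"
#include "sensor.h"
#include "tcpclient.h"

namespace Ui { class MainWindow; }

class MainWindow : public QMainWindow
{
    Q_OBJECT
public:
    explicit MainWindow(QWidget *parent = 0);
    ~MainWindow();

private slots:
    void onDataReceived(quint8 sensorId, qreal value); // single reading from TcpClient
    void onDataReceived(SensorData data);              // full record; SensorData would need
                                                       // qRegisterMetaType() for queued connections
    void setTemperatureDesired();
    void on_comboBox_SensorId_currentIndexChanged(int index);

private:
    Ui::MainWindow *ui;
    QVector<Sensor> _sensors;      // container types are assumptions
    QVector<quint8> _sensorIds;
    TcpClient *_tcpClient;
};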
Example #3
SensorDataSet SensorDB::ExecuteSQL_SelectFromSensorDataTable(std::string sqlcommand)
{
    SensorDataSet ds;
    ChannelList channelist;
    int channel_num;
    int datatype_id;
    int operator_id;
    int device_id;
    int position_id;
    int activity_id;
    int activitybeginframe_id;
    int activityendframe_id;
    double samplerate;
    QDateTime createtime;
    if(db.isOpen()){
        // run the query on this connection explicitly rather than on the default connection
        QSqlQuery query(db);
        QString sqlcmd = string2qstring(sqlcommand);
        if(query.exec(sqlcmd)){
            while(query.next()){
                datatype_id = query.value("DataTypeID").toInt();
                activity_id = query.value("ActivityID").toInt();
                device_id = query.value("DeviceID").toInt();
                operator_id = query.value("OperatorID").toInt();
                position_id = query.value("PositionID").toInt();
                activitybeginframe_id = query.value("ActivityBeginFrameID").toInt();
                activityendframe_id = query.value("ActivityEndFrameID").toInt();
                samplerate = query.value("SampleRate").toDouble();
                createtime = query.value("CreateTime").toDateTime();
                channel_num = query.value("TotalChannelNum").toInt();
                channelist.clear();
                for(int i=1;i<=channel_num;i++){
                    // channel columns are named "channel_1", "channel_2", ...;
                    // stop at the first empty one
                    string ch = "channel_"+int2string(i);
                    if(query.value(string2qstring(ch.c_str())).isNull()){
                        break;
                    }
                    //qDebug() << query.value(string2qstring(ch.c_str())).toString();
                    channelist.push_back(Channel(query.value(string2qstring(ch.c_str())).toString().toStdString()));
                    //qDebug() << string2qstring((channelist[channelist.size()-1].ToString()));
                }
                ds.PushBackSensorData(SensorData(channelist,channel_num,datatype_id,operator_id,device_id,position_id,
                                      activity_id,activitybeginframe_id,activityendframe_id, samplerate,createtime));
            }
        }
        else{
            qDebug()<<query.lastError();
        }
    }
    else{
        qDebug()<<"Database is not open";
    }
    return ds;
}
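A hypothetical call site for the method above; the table name is inferred from the method name, the WHERE clause is purely illustrative, and it assumes the SensorDB instance already has an open database connection:

SensorDB sensorDb;
SensorDataSet walkingData = sensorDb.ExecuteSQL_SelectFromSensorDataTable(
    "SELECT * FROM SensorDataTable WHERE ActivityID = 3");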
Example #4
/**
 * @brief Initializes data values and configures the sensor with the desired modules
 * @param _device XsDevice from the Xsens API
 * @param argc   command-line argument count, forwarded to parseOptions()
 * @param argv   command-line arguments, forwarded to parseOptions()
 */
mtiG::mtiG(XsDevice * _device, int argc, char ** argv):device(_device){

	ros::param::get("~override", override_settings);
	parseOptions(argc, argv);

	if(override_settings){
		// configures the Xsens device with mSettings
		ROS_DEBUG("OVERRIDE MODE");
		configure();
	}else{
		// uses the current device settings - intended for use in conjunction with the Windows GUI tool
		ROS_DEBUG("UNALTERED MODE");
	}
	readSettings();
	printSettings();
	sensorData = SensorData(mSettings);
	messageMaker = new MessageMaker(sensorData);
	// Advertise all messages published in this node, uses mSettings
	advertise();
}
Example #5
TEST(KMeansCorrelation,TestSetup)
{
   // Test plane data
   std::vector<SensorData> planes;

   // Create mock data
   planes.push_back(SensorData("", 4.0, 0.0, 9.0, 7.0, 2.0, 4.0, adsb, 5, 0.0));
   planes.push_back(SensorData("", 0.0, 9.0, 0.0, 4.0, 10.0, 9.0, tcas, 0, 0.0));
   planes.push_back(SensorData("", 0.0, 4.0, 0.0, 4.0, 5.0, 9.0, adsb, 3, 0.0));
   planes.push_back(SensorData("", 0.0, 0.0, 0.0, 4.0, 5.0, 2.0, adsb, 4, 0.0));
   planes.push_back(SensorData("", 0.0, 9.0, 0.0, 4.0, 10.0, 9.0, tcas, 1, 0.0));
   planes.push_back(SensorData("", 0.0, 4.0, 0.0, 4.0, 5.0, 9.0, adsb, 2, 0.0));

   std::cout << "There are " << planes.size() << " plane inputs." << std::endl;

   // Call correlation
   KMeansCorrelation corr;
   std::vector<CorrelatedData> result = corr.correlate(planes);

   // Placeholder assertion: this test always fails until real expectations are added.
   ASSERT_EQ(0, 1);
}
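As noted in the comment above, the assertion is a placeholder. A sketch of checks that could replace it, assuming only that correlate() groups duplicate reports rather than creating extra tracks:

   EXPECT_FALSE(result.empty());             // some correlated output was produced
   EXPECT_LE(result.size(), planes.size());  // grouping cannot yield more tracks than inputs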
Example #6
SensorData CameraStereoFlyCapture2::captureImage(CameraInfo * info)
{
	SensorData data;
#ifdef RTABMAP_FLYCAPTURE2
	if(camera_ && triclopsCtx_ && camera_->IsConnected())
	{
		// grab image from camera.
		// this image contains both the right and left images
		FlyCapture2::Image grabbedImage;
		if(camera_->RetrieveBuffer(&grabbedImage) == FlyCapture2::PGRERROR_OK)
		{
			// right and left image extracted from grabbed image
			ImageContainer imageCont;

			TriclopsColorStereoPair colorStereoInput;
			generateColorStereoInput(triclopsCtx_, grabbedImage, imageCont, colorStereoInput);

			// rectify images
			TriclopsError triclops_status = triclopsColorRectify(triclopsCtx_, &colorStereoInput);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());

			// get images
			cv::Mat left,right;
			TriclopsColorImage color_image;
			triclops_status = triclopsGetColorImage(triclopsCtx_, TriImg_RECTIFIED_COLOR, TriCam_LEFT, &color_image);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());
			cv::cvtColor(cv::Mat(color_image.nrows, color_image.ncols, CV_8UC4, color_image.data), left, CV_RGBA2RGB);
			triclops_status = triclopsGetColorImage(triclopsCtx_, TriImg_RECTIFIED_COLOR, TriCam_RIGHT, &color_image);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());
			cv::cvtColor(cv::Mat(color_image.nrows, color_image.ncols, CV_8UC4, color_image.data), right, CV_RGBA2GRAY);

			// Set calibration stuff
			float fx, cy, cx, baseline;
			triclopsGetFocalLength(triclopsCtx_, &fx);
			triclopsGetImageCenter(triclopsCtx_, &cy, &cx);
			triclopsGetBaseline(triclopsCtx_, &baseline);
			cx *= left.cols;
			cy *= left.rows;

			StereoCameraModel model(
				fx,
				fx,
				cx,
				cy,
				baseline,
				this->getLocalTransform(),
				left.size());
			data = SensorData(left, right, model, this->getNextSeqID(), UTimer::now());

			// Compute disparity
			/*triclops_status = triclopsStereo(triclopsCtx_);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());

			TriclopsImage16 disparity_image;
			triclops_status = triclopsGetImage16(triclopsCtx_, TriImg16_DISPARITY, TriCam_REFERENCE, &disparity_image);
			UASSERT_MSG(triclops_status == Fc2Triclops::ERRORTYPE_OK, uFormat("Error: %d", (int)triclops_status).c_str());
			cv::Mat depth(disparity_image.nrows, disparity_image.ncols, CV_32FC1);
			int pixelinc = disparity_image.rowinc / 2;
			float x, y;
			for (int i = 0, k = 0; i < disparity_image.nrows; i++) {
				unsigned short *row = disparity_image.data + i * pixelinc;
				float *rowOut = (float *)depth.row(i).data;
				for (int j = 0; j < disparity_image.ncols; j++, k++) {
					unsigned short disparity = row[j];

					// do not save invalid points
					if (disparity < 0xFFF0) {
						// convert the 16 bit disparity value to floating point x,y,z
						triclopsRCD16ToXYZ(triclopsCtx_, i, j, disparity, &x, &y, &rowOut[j]);
					}
				}
			}
			CameraModel model(fx, fx, cx, cy, this->getLocalTransform(), 0, left.size());
			data = SensorData(left, depth, model, this->getNextSeqID(), UTimer::now());
			*/
		}
	}

#else
	UERROR("CameraStereoFlyCapture2: RTAB-Map is not built with Triclops support!");
#endif
	return data;
}
Example #7
SensorData DBReader::getNextData()
{
	SensorData data;
	if(_dbDriver)
	{
		float frameRate = _frameRate;
		if(frameRate>0.0f)
		{
			int sleepTime = (1000.0f/frameRate - 1000.0f*_timer.getElapsedTime());
			if(sleepTime > 2)
			{
				uSleep(sleepTime-2);
			}

			// Add precision at the cost of a small overhead
			while(_timer.getElapsedTime() < 1.0/double(frameRate)-0.000001)
			{
				//
			}

			double slept = _timer.getElapsedTime();
			_timer.start();
			UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(frameRate));
		}

		if(!this->isKilled() && _currentId != _ids.end())
		{
			cv::Mat imageBytes;
			cv::Mat depthBytes;
			cv::Mat laserScanBytes;
			int mapId;
			float fx,fy,cx,cy;
			Transform localTransform, pose;
			float variance = 1.0f;
			_dbDriver->getNodeData(*_currentId, imageBytes, depthBytes, laserScanBytes, fx, fy, cx, cy, localTransform);
			if(!_odometryIgnored)
			{
				_dbDriver->getPose(*_currentId, pose, mapId);
				std::map<int, Link> links;
				_dbDriver->loadLinks(*_currentId, links, Link::kNeighbor);
				if(links.size())
				{
					// assume the first is the backward neighbor, take its variance
					variance = links.begin()->second.variance();
				}
			}
			int seq = *_currentId;
			++_currentId;
			if(imageBytes.empty())
			{
				// use the saved id: _currentId has already been advanced (and may be past the end)
				UWARN("No image loaded from the database for id=%d!", seq);
			}

			util3d::CompressionThread ctImage(imageBytes, true);
			util3d::CompressionThread ctDepth(depthBytes, true);
			util3d::CompressionThread ctLaserScan(laserScanBytes, false);
			ctImage.start();
			ctDepth.start();
			ctLaserScan.start();
			ctImage.join();
			ctDepth.join();
			ctLaserScan.join();
			data = SensorData(
					ctLaserScan.getUncompressedData(),
					ctImage.getUncompressedData(),
					ctDepth.getUncompressedData(),
					fx,fy,cx,cy,
					localTransform,
					pose,
					variance,
					seq);
		}
	}
	else
	{
		UERROR("Not initialized...");
	}
	return data;
}
Example #8
SensorData DBReader::getNextData()
{
	SensorData data;
	if(_dbDriver)
	{
		float frameRate = _frameRate;
		if(frameRate>0.0f)
		{
			int sleepTime = (1000.0f/frameRate - 1000.0f*_timer.getElapsedTime());
			if(sleepTime > 2)
			{
				uSleep(sleepTime-2);
			}

			// Add precision at the cost of a small overhead
			while(_timer.getElapsedTime() < 1.0/double(frameRate)-0.000001)
			{
				//
			}

			double slept = _timer.getElapsedTime();
			_timer.start();
			UDEBUG("slept=%fs vs target=%fs", slept, 1.0/double(frameRate));
		}

		if(!this->isKilled() && _currentId != _ids.end())
		{
			cv::Mat imageBytes;
			cv::Mat depthBytes;
			cv::Mat laserScanBytes;
			int mapId;
			float fx,fy,cx,cy;
			Transform localTransform, pose;
			float rotVariance = 1.0f;
			float transVariance = 1.0f;
			std::vector<unsigned char> userData;
			_dbDriver->getNodeData(*_currentId, imageBytes, depthBytes, laserScanBytes, fx, fy, cx, cy, localTransform);

			// info
			int weight;
			std::string label;
			double stamp;
			_dbDriver->getNodeInfo(*_currentId, pose, mapId, weight, label, stamp, userData);

			if(!_odometryIgnored)
			{
				std::map<int, Link> links;
				_dbDriver->loadLinks(*_currentId, links, Link::kNeighbor);
				if(links.size())
				{
					// assume the first is the backward neighbor, take its variance
					rotVariance = links.begin()->second.rotVariance();
					transVariance = links.begin()->second.transVariance();
				}
			}
			else
			{
				pose.setNull();
			}

			int seq = *_currentId;
			++_currentId;
			if(imageBytes.empty())
			{
				// use the saved id: _currentId has already been advanced (and may be past the end)
				UWARN("No image loaded from the database for id=%d!", seq);
			}

			rtabmap::CompressionThread ctImage(imageBytes, true);
			rtabmap::CompressionThread ctDepth(depthBytes, true);
			rtabmap::CompressionThread ctLaserScan(laserScanBytes, false);
			ctImage.start();
			ctDepth.start();
			ctLaserScan.start();
			ctImage.join();
			ctDepth.join();
			ctLaserScan.join();
			data = SensorData(
					ctLaserScan.getUncompressedData(),
					ctImage.getUncompressedData(),
					ctDepth.getUncompressedData(),
					fx,fy,cx,cy,
					localTransform,
					pose,
					rotVariance,
					transVariance,
					seq,
					stamp,
					userData);
			UDEBUG("Laser=%d RGB/Left=%d Depth=%d Right=%d",
					data.laserScan().empty()?0:1,
					data.image().empty()?0:1,
					data.depth().empty()?0:1,
					data.rightImage().empty()?0:1);
		}
	}
	else
	{
		UERROR("Not initialized...");
	}
	return data;
}