bool ROSnode::Prepare() {
  seq = 0;

  // Retrieve parameters
  if (Handle.getParam("device", dev)) {
    ROS_INFO("Node %s: retrieved parameter device.", ros::this_node::getName().c_str());
  } else {
    ROS_FATAL("Node %s: unable to retrieve parameter device.", ros::this_node::getName().c_str());
    return false;
  }

  if (Handle.getParam("rate", rate)) {
    ROS_INFO("Node %s: retrieved parameter rate.", ros::this_node::getName().c_str());
  } else {
    ROS_WARN("Node %s: unable to retrieve parameter rate. Setting default value to 20", ros::this_node::getName().c_str());
    rate = 20;
  }

  PublisherMag = Handle.advertise<sensor_msgs::MagneticField>("mag", 50);
  PublisherImu = Handle.advertise<sensor_msgs::Imu>("imu", 50);

  driver.setFrequency(rate);

  // Open and initialize the device
  if (!driver.open(dev) ||
      !driver.setReadingMode(xsens_imu::CAL_AND_ORI_DATA) ||
      !driver.setTimeout(100))
    return false;

  ROS_INFO("Node %s ready to run.", ros::this_node::getName().c_str());
  return true;
}
void GenericHWInterface::loadURDF(ros::NodeHandle &nh, std::string param_name) {
  std::string urdf_string;
  urdf_model_ = new urdf::Model();

  // Search and wait for robot_description on the param server
  while (urdf_string.empty() && ros::ok()) {
    std::string search_param_name;
    if (nh.searchParam(param_name, search_param_name)) {
      ROS_INFO_STREAM_NAMED("generic_hw_interface",
                            "Waiting for model URDF on the ROS param server at location: "
                            << nh.getNamespace() << search_param_name);
      nh.getParam(search_param_name, urdf_string);
    } else {
      ROS_INFO_STREAM_NAMED("generic_hw_interface",
                            "Waiting for model URDF on the ROS param server at location: "
                            << nh.getNamespace() << param_name);
      nh.getParam(param_name, urdf_string);
    }
    usleep(100000);
  }

  if (!urdf_model_->initString(urdf_string))
    ROS_ERROR_STREAM_NAMED("generic_hw_interface", "Unable to load URDF model");
  else
    ROS_DEBUG_STREAM_NAMED("generic_hw_interface", "Received URDF from param server");
}
bool RTKRobotArm::_initialize(const ros::NodeHandle& n) {
  bool ret = true;

  // Path to RTK format data folder. Keep robots here!
  if (!n.getParam("data_path", data_path)) {
    ROS_ERROR("Must provide data_path!");
    ret = false;
  } else {
    ROS_INFO_STREAM("Data Folder: " << data_path);
  }

  // RTK format robot name
  if (!n.getParam("robot_name", rob_name)) {
    ROS_ERROR("Must provide robot_name!");
    ret = false;
  } else {
    ROS_INFO_STREAM("Robot: " << rob_name);
  }

  // EE link name in the structure.xml
  if (!n.getParam("ee_link", ee_link)) {
    ROS_ERROR("Must provide ee_link!");
    ret = false;
  } else {
    ROS_INFO_STREAM("EELink: " << ee_link);
  }

  if (!n.getParam("max_jvel_deg", max_jvel)) {
    max_jvel = DEFAULT_MAX_JVEL_DEG;
  }
  ROS_INFO_STREAM("Max. Joint Vel. (deg): " << max_jvel);

  return ret;
}
bool loadRobotModel(ros::NodeHandle node_handle, urdf::Model &robot_model, std::string &root_name,
                    std::string &tip_name, std::string &xml_string) {
  std::string urdf_xml, full_urdf_xml;
  node_handle.param("urdf_xml", urdf_xml, std::string("robot_description"));
  node_handle.searchParam(urdf_xml, full_urdf_xml);

  TiXmlDocument xml;
  ROS_DEBUG("Reading xml file from parameter server\n");
  std::string result;
  if (node_handle.getParam(full_urdf_xml, result)) {
    xml.Parse(result.c_str());
  } else {
    ROS_FATAL("Could not load the xml from parameter server: %s\n", urdf_xml.c_str());
    return false;
  }
  xml_string = result;

  TiXmlElement *root_element = xml.RootElement();
  TiXmlElement *root = xml.FirstChildElement("robot");
  if (!root || !root_element) {
    ROS_FATAL("Could not parse the xml from %s\n", urdf_xml.c_str());
    exit(1);
  }
  robot_model.initXml(root);

  if (!node_handle.getParam("root_name", root_name)) {
    ROS_FATAL("No root name found on parameter server");
    return false;
  }
  if (!node_handle.getParam("tip_name", tip_name)) {
    ROS_FATAL("No tip name found on parameter server");
    return false;
  }
  return true;
}
void CalcMin::chatterCallback1(const fts_Omega160::fts msg) {
  int data1;
  int data2;
  int data3;
  int data4;
  int stop;

  n.setParam("/stop", 0);

  req.data = std::vector<int>(8);
  req.data[4] = msg.Fx;
  req.data[5] = msg.Fy;
  req.data[6] = msg.Fz;

  if (n.getParam("/data1", data1)) {
    req.data[0] = data1;
  }
  if (n.getParam("/data2", data2)) {
    req.data[1] = data2;
  }
  if (n.getParam("/data3", data3)) {
    req.data[2] = data3;
  }
  if (n.getParam("/data4", data4)) {
    req.data[3] = data4;
  }
  if (n.getParam("/stop", stop)) {
    req.data[7] = stop;
  }

  pub = n.advertise<std_msgs::Int32MultiArray>("/min_pub", 1000);
  pub.publish(req);
}
geometry_msgs::PoseStamped getCamPose(ros::NodeHandle n) {
  geometry_msgs::PoseStamped cam_pose;

  // Set orientation
  cam_pose.pose.orientation.w = 1;

  if (!n.getParam("/suturo_manipulation_tf_publisher/cam_frame", cam_pose.header.frame_id)) {
    ROS_ERROR_STREAM("Failed to get frame for Cam.");
  }
  if (!n.getParam("/suturo_manipulation_tf_publisher/cam_x", cam_pose.pose.position.x)) {
    ROS_ERROR_STREAM("Failed to get x coordinate for Cam.");
  }
  if (!n.getParam("/suturo_manipulation_tf_publisher/cam_y", cam_pose.pose.position.y)) {
    ROS_ERROR_STREAM("Failed to get y coordinate for Cam.");
  }
  if (!n.getParam("/suturo_manipulation_tf_publisher/cam_z", cam_pose.pose.position.z)) {
    ROS_ERROR_STREAM("Failed to get z coordinate for Cam.");
  }

  return cam_pose;
}
void global_path::getparameters(ros::NodeHandle n) {
  if (n.getParam("dbname", s_dbname))
    ROS_INFO("Got param: %s", s_dbname.c_str());
  else
    ROS_ERROR("Failed to get param 'dbname'");

  if (n.getParam("user", s_user))
    ROS_INFO("Got param: %s", s_user.c_str());
  else
    ROS_ERROR("Failed to get param 'user'");

  if (n.getParam("password", s_password))
    ROS_INFO("Got param: %s", s_password.c_str());
  else
    ROS_ERROR("Failed to get param 'password'");

  if (n.getParam("hostaddr", s_hostaddr))
    ROS_INFO("Got param: %s", s_hostaddr.c_str());
  else
    ROS_ERROR("Failed to get param 'hostaddr'");

  if (n.getParam("port", s_port))
    ROS_INFO("Got param: %d", s_port);
  else
    ROS_ERROR("Failed to get param 'port'");
}
void IkJointControllerClass::getParams(ros::NodeHandle& n) {
  ROS_INFO("getting gains params and joint name params");

  XmlRpc::XmlRpcValue joint_names;  // array of 7 joint names
  if (!n.getParam("joints", joint_names)) {
    ROS_ERROR("No joints given. (namespace: %s)", n.getNamespace().c_str());
  }
  if ((joint_names.getType() != XmlRpc::XmlRpcValue::TypeArray) || (joint_names.size() != 7)) {
    ROS_ERROR("Malformed joint specification. (namespace: %s)", n.getNamespace().c_str());
  }

  std::string joint_param_ns;
  for (int i = 0; i < 7; i++) {
    XmlRpc::XmlRpcValue &name_value = joint_names[i];
    if (name_value.getType() != XmlRpc::XmlRpcValue::TypeString) {
      ROS_ERROR("Array of joint names should contain all strings. (namespace: %s)", n.getNamespace().c_str());
    }
    joint_names_[i] = (std::string)name_value;

    joint_param_ns = std::string("gains/") + joint_names_[i];
    n.getParam(joint_param_ns + "/p", p_[i]);
    n.getParam(joint_param_ns + "/d", d_[i]);
    ROS_INFO("NEW YAML got %d-th joint name %s with (p,d) gains parameters: (%f,%f)",
             i, joint_names_[i].c_str(), p_[i], d_[i]);
  }
}
void CollisionPluginLoader::setupScene(ros::NodeHandle& nh, const planning_scene::PlanningScenePtr& scene) {
  if (!scene)
    return;

  std::string param_name;
  std::string collision_detector_name;
  if (nh.searchParam("collision_detector", param_name)) {
    nh.getParam(param_name, collision_detector_name);
  } else if (nh.hasParam("/move_group/collision_detector")) {
    // Check for existence in the move_group namespace,
    // mainly so rviz plugins get the same collision detector.
    nh.getParam("/move_group/collision_detector", collision_detector_name);
  } else {
    return;
  }

  if (collision_detector_name == "") {
    // This is not a valid name for a collision detector plugin
    return;
  }

  activate(collision_detector_name, scene, true);
  ROS_INFO_STREAM("Using collision detector: " << scene->getActiveCollisionDetectorName().c_str());
}
bool spin() {
  ROS_INFO("camera node is running.");
  h264Server.start();

  while (node.ok()) {
    // Process any pending service callbacks
    ros::spinOnce();

    std::string newResolution;
    node.getParam("resolution", newResolution);
    int newWidth = std::stoi(newResolution.substr(0, newResolution.find('x')));
    int newHeight = std::stoi(newResolution.substr(newResolution.find('x') + 1));

    std::string newVideoDevice;
    node.getParam("video_device", newVideoDevice);

    bool newDetectorEnabled;
    node.getParam("detector_enabled", newDetectorEnabled);

    if (newVideoDevice != videoDevice ||
        newDetectorEnabled != detectorEnabled ||
        newWidth != cameraWidth ||
        newHeight != cameraHeight) {
      initCameraAndEncoders();
    }

    if (!sendPreview()) {
      // Sleep and hope the camera recovers
      usleep(1000 * 1000);
    }

    // Run at 1kHz
    usleep(1000);
  }

  h264Server.stop();
  return true;
}
void Adventurer::init() {
  // temporary strings
  std::string sSubscribeName, sRightWheel, sLeftWheel;

  // try to retrieve the name of the topic on which orders are received;
  // if the parameter is not found, fall back to a default value
  if (!m_NodeHandle.getParam("sub_name", sSubscribeName))
    sSubscribeName = "adventurer_order";  // default value

  // create the subscriber that receives the orders;
  // we pass the callback (order_cb) and the object that will call it (this)
  m_OrderSubscriber = m_NodeHandle.subscribe(sSubscribeName, 1, &Adventurer::order_cb, this);

  // create the publisher used to send orders to the robot
  m_WheelsPublisher = m_NodeHandle.advertise<nxt_msgs::JointCommand>("joint_command", 1);

  // same default-value pattern as above
  if (!m_NodeHandle.getParam("right_wheel", sRightWheel))
    sRightWheel = "motor_r";
  if (!m_NodeHandle.getParam("left_wheel", sLeftWheel))
    sLeftWheel = "motor_l";

  m_sRightWheel = sRightWheel;
  m_sLeftWheel = sLeftWheel;
}
void getParams(const ros::NodeHandle &nh) {
  // avg accel update period (number of msgs until next localization update)
  if (nh.getParam("update_period", UPDATE_PERIOD)) {
    ROS_INFO("Set %s/update_period to %d", nh.getNamespace().c_str(), UPDATE_PERIOD);
  } else {
    if (nh.hasParam("update_period"))
      ROS_WARN("%s/update_period must be an integer. Setting default value: %d",
               nh.getNamespace().c_str(), DEFAULT_LOC_NODE_UPDATE_PERIOD);
    else
      ROS_WARN("No value set for %s/update_period. Setting default value: %d",
               nh.getNamespace().c_str(), DEFAULT_LOC_NODE_UPDATE_PERIOD);
    UPDATE_PERIOD = DEFAULT_LOC_NODE_UPDATE_PERIOD;
  }

  // frequency at which this node publishes
  if (nh.getParam("publish_freq", PUBLISH_FREQ)) {
    ROS_INFO("Set %s/publish_freq to %d", nh.getNamespace().c_str(), PUBLISH_FREQ);
  } else {
    if (nh.hasParam("publish_freq"))
      ROS_WARN("%s/publish_freq must be an integer. Setting default value: %d",
               nh.getNamespace().c_str(), DEFAULT_LOC_NODE_PUBLISH_FREQ);
    else
      ROS_WARN("No value set for %s/publish_freq. Setting default value: %d",
               nh.getNamespace().c_str(), DEFAULT_LOC_NODE_PUBLISH_FREQ);
    PUBLISH_FREQ = DEFAULT_LOC_NODE_PUBLISH_FREQ;
  }

  // number of states from coax_filter this node will buffer before it begins to drop them
  if (nh.getParam("fstate_msg_buffer", FSTATE_MSG_BUFFER)) {
    ROS_INFO("Set %s/fstate_msg_buffer to %d", nh.getNamespace().c_str(), FSTATE_MSG_BUFFER);
  } else {
    if (nh.hasParam("fstate_msg_buffer"))
      ROS_WARN("%s/fstate_msg_buffer must be an integer. Setting default value: %d",
               nh.getNamespace().c_str(), DEFAULT_LOC_NODE_FSTATE_MSG_BUFFER);
    else
      ROS_WARN("No value set for %s/fstate_msg_buffer. Setting default value: %d",
               nh.getNamespace().c_str(), DEFAULT_LOC_NODE_FSTATE_MSG_BUFFER);
    FSTATE_MSG_BUFFER = DEFAULT_LOC_NODE_FSTATE_MSG_BUFFER;
  }

  // number of messages this node will queue for publishing before dropping old data
  if (nh.getParam("msg_queue", MSG_QUEUE)) {
    ROS_INFO("Set %s/msg_queue to %d", nh.getNamespace().c_str(), MSG_QUEUE);
  } else {
    if (nh.hasParam("msg_queue"))
      ROS_WARN("%s/msg_queue must be an integer. Setting default value: %d",
               nh.getNamespace().c_str(), DEFAULT_LOC_NODE_MSG_QUEUE);
    else
      ROS_WARN("No value set for %s/msg_queue. Setting default value: %d",
               nh.getNamespace().c_str(), DEFAULT_LOC_NODE_MSG_QUEUE);
    MSG_QUEUE = DEFAULT_LOC_NODE_MSG_QUEUE;
  }
}
void fillCameraInfoMessage(ros::NodeHandle nh, std::string base_name, sensor_msgs::CameraInfo& camera_info_msg) {
  // Read calibration data and fill the camera_info message:
  int width, height;
  nh.param(base_name + "/image_width", width, 640);
  nh.param(base_name + "/image_height", height, 480);
  camera_info_msg.width = width;
  camera_info_msg.height = height;

  std::string distortion_model;
  if (not nh.getParam(base_name + "/distortion_model", distortion_model)) {
    ROS_ERROR("Stereo calibration parameters not found!");
  }
  camera_info_msg.distortion_model = distortion_model;

  std::vector<float> camera_matrix;
  if (not nh.getParam(base_name + "/camera_matrix/data", camera_matrix)) {
    ROS_ERROR("Stereo calibration parameters not found!");
  }
  for (unsigned int i = 0; i < camera_matrix.size(); i++) {
    camera_info_msg.K[i] = camera_matrix[i];
  }

  std::vector<float> rectification_matrix;
  if (not nh.getParam(base_name + "/rectification_matrix/data", rectification_matrix)) {
    ROS_ERROR("Stereo calibration parameters not found!");
  }
  for (unsigned int i = 0; i < rectification_matrix.size(); i++) {
    camera_info_msg.R[i] = rectification_matrix[i];
  }

  std::vector<float> projection_matrix;
  if (not nh.getParam(base_name + "/projection_matrix/data", projection_matrix)) {
    ROS_ERROR("Stereo calibration parameters not found!");
  }
  for (unsigned int i = 0; i < projection_matrix.size(); i++) {
    camera_info_msg.P[i] = projection_matrix[i];
  }

  std::vector<float> distortion_coefficients;
  if (not nh.getParam(base_name + "/distortion_coefficients/data", distortion_coefficients)) {
    ROS_ERROR("Stereo calibration parameters not found!");
  }
  for (unsigned int i = 0; i < distortion_coefficients.size(); i++) {
    camera_info_msg.D.push_back(distortion_coefficients[i]);
  }
}
bool Kinematics::init() {
  // Get URDF XML
  std::string urdf_xml, full_urdf_xml;
  nh.param("urdf_xml", urdf_xml, std::string("robot_description"));
  nh.searchParam(urdf_xml, full_urdf_xml);
  ROS_DEBUG("Reading xml file from parameter server");
  std::string result;
  if (!nh.getParam(full_urdf_xml, result)) {
    ROS_FATAL("Could not load the xml from parameter server: %s", urdf_xml.c_str());
    return false;
  }

  std::string kinematics_service_name;
  if (!nh_private.getParam("kinematics_service_name", kinematics_service_name)) {
    ROS_FATAL("GenericIK: kinematics service name not found on parameter server");
    return false;
  }

  // Get root and tip names from the parameter server
  if (!nh.getParam(kinematics_service_name + "/manipulator/root_name", root_name)) {
    ROS_FATAL("GenericIK: No root name found on parameter server");
    return false;
  }
  if (!nh.getParam(kinematics_service_name + "/manipulator/tip_name", tip_name)) {
    ROS_FATAL("GenericIK: No tip name found on parameter server");
    return false;
  }

  // Load and read models
  if (!loadModel(result)) {
    ROS_FATAL("Could not load models!");
    return false;
  }

  // Get solver parameters
  int maxIterations;
  double epsilon;
  nh_private.param("maxIterations", maxIterations, 1000);
  nh_private.param("epsilon", epsilon, 1e-2);  // .01

  // Build solvers
  fk_solver = new KDL::ChainFkSolverPos_recursive(chain);
  ik_solver_vel = new KDL::ChainIkSolverVel_pinv(chain);
  ik_solver_pos = new KDL::ChainIkSolverPos_NR_JL(chain, joint_min, joint_max, *fk_solver,
                                                  *ik_solver_vel, maxIterations, epsilon);

  ROS_INFO("Advertising services");
  fk_service = nh_private.advertiseService(FK_SERVICE, &Kinematics::getPositionFK, this);
  // ik_service = nh_private.advertiseService(IK_SERVICE, &Kinematics::getPositionIK, this);
  ik_service = nh_private.advertiseService(IK_SERVICE, &Kinematics::searchPositionIK, this);
  ik_solver_info_service = nh_private.advertiseService(IK_INFO_SERVICE, &Kinematics::getIKSolverInfo, this);
  fk_solver_info_service = nh_private.advertiseService(FK_INFO_SERVICE, &Kinematics::getFKSolverInfo, this);

  return true;
}
void GetTuningParameters() {
  n.getParam("KPR", KPR);
  n.getParam("KPL", KPL);
  n.getParam("KIL", KIL);
  n.getParam("KIR", KIR);
  n.getParam("KDR", KDR);
  n.getParam("KDL", KDL);
}
// Set OpenCV algorithm parameters from the ROS parameter server
void setParameters(ros::NodeHandle& nodeHandle, cv::Ptr<cv::Algorithm>&& algorithm, const std::string& base_name) {
  std::vector<cv::String> parameters;
  algorithm->getParams(parameters);

  for (const auto& param : parameters) {
    if (nodeHandle.hasParam(base_name + "/" + param)) {
      int param_type = algorithm->paramType(param);

      switch (param_type) {
        case cv::Param::INT: {
          int val;
          nodeHandle.getParam(base_name + "/" + param, val);
          algorithm->set(param, val);
          std::cout << " " << param << ": " << val << std::endl;
          break;
        }
        case cv::Param::BOOLEAN: {
          bool val;
          nodeHandle.getParam(base_name + "/" + param, val);
          algorithm->set(param, val);
          std::cout << " " << param << ": " << val << std::endl;
          break;
        }
        case cv::Param::REAL: {
          double val;
          nodeHandle.getParam(base_name + "/" + param, val);
          algorithm->set(param, val);
          std::cout << " " << param << ": " << val << std::endl;
          break;
        }
        case cv::Param::STRING: {
          std::string val;
          nodeHandle.getParam(base_name + "/" + param, val);
          algorithm->set(param, val);
          std::cout << " " << param << ": " << val << std::endl;
          break;
        }
        default:
          ROS_ERROR_STREAM("unknown/unsupported parameter type for " << param);
          break;
      }
    }
  }
}
bool CMVisionColorBlobFinder::initialize(ros::NodeHandle &node_handle) {
  uyvy_image_ = NULL;
  width_ = 0;
  height_ = 0;
  vision_ = new CMVision();
  blob_count_ = 0;
  blob_message_.blob_count = 0;

  // Get the color file. This defines what colors to track.
  if (!node_handle.getParam("/cmvision/color_file", color_filename_)) {
    ROS_ERROR("Could not find color calibration file name \"/cmvision/color_file\" in namespace: %s.",
              node_handle.getNamespace().c_str());
    return false;
  }

  // Get the level of debug output
  node_handle.param("/cmvision/debug_on", debug_on_, false);

  // Check whether mean shift is turned on
  if (!node_handle.getParam("/cmvision/mean_shift_on", mean_shift_on_)) {
    ROS_ERROR("Could not find mean shift flag \"/cmvision/mean_shift_on\" in namespace: %s.",
              node_handle.getNamespace().c_str());
    return false;
  } else {
    if (!node_handle.getParam("/cmvision/spatial_radius_pix", spatial_radius_)) {
      ROS_ERROR("Could not get spatial_radius_pix from param server \"/cmvision/spatial_radius_pix\" in namespace: %s.",
                node_handle.getNamespace().c_str());
      return false;
    }
    if (!node_handle.getParam("/cmvision/color_radius_pix", color_radius_)) {
      ROS_ERROR("Could not get color_radius_pix from param server \"/cmvision/color_radius_pix\" in namespace: %s.",
                node_handle.getNamespace().c_str());
      return false;
    }
  }

  // Subscribe to an image stream
  image_subscriber_ = node_handle.subscribe("image", 1, &CMVisionColorBlobFinder::imageCB, this);

  // Advertise our blobs
  blob_publisher_ = node_handle.advertise<cmvision::Blobs>("blobs", 1);

  if (debug_on_) {
    cvNamedWindow("Image");
  }

  return true;
}
/* Sets the HSV thresholds from the node handle. */
void initParams() {
  nh_.getParam("lowerH", hsv_vals_[0]);
  nh_.getParam("lowerS", hsv_vals_[1]);
  nh_.getParam("lowerV", hsv_vals_[2]);
  nh_.getParam("upperH", hsv_vals_[3]);
  nh_.getParam("upperS", hsv_vals_[4]);
  nh_.getParam("upperV", hsv_vals_[5]);
  nh_.getParam("Tuning", tuning_);
}
RosTopicParser::RosTopicParser(ros::NodeHandle nh, std::string topic_name) {
  _topic_name = "";
  _queue_size = 1;

  if (nh.hasParam(topic_name)) {
    nh.getParam("/" + topic_name + "/" + NAME_KEY, _topic_name);
    nh.getParam("/" + topic_name + "/" + QUEUE_SIZE_KEY, _queue_size);
    ROS_INFO("Topic %s, queue size %i", _topic_name.c_str(), _queue_size);
  } else {
    ROS_WARN("Topic not found.");
  }
}
bool setup(ros::NodeHandle nh) {
  if (!nh.hasParam("/has_parameters")) {
    return false;
  }

  nh.getParam("/rot_spread", rot_spread);
  nh.getParam("/vel_spread", vel_spread);
  nh.getParam("/updates_before_resample", updates_before_resample);
  nh.getParam("/times_to_resample", times_to_resample);
  nh.getParam("/prob_sigma", prob_sigma);
  nh.getParam("/maze_xmax", maze_xmax);
  nh.getParam("/maze_ymax", maze_ymax);

  std::vector<double> ir_positions_temp;
  for (int i = 0; i < NUM_OBSERVATIONS; i++) {
    std::string temp = "/ir_positions_";
    temp += boost::lexical_cast<std::string>(i + 1);
    nh.getParam(temp, ir_positions_temp);
    sensor_positions[i][0] = ir_positions_temp[0];
    sensor_positions[i][1] = ir_positions_temp[1];
    sensor_positions[i][2] = ir_positions_temp[2];
  }

  bool known_pos;
  nh.getParam("/known_pos", known_pos);
  if (known_pos) {
    nh.getParam("/start_x", position.x);
    nh.getParam("/start_y", position.y);
    nh.getParam("/start_theta", position.theta);
    init_known_pos(position.x, position.y, position.theta);
  } else {
    init_un_known_pos(0.0, 0.0, maze_xmax, maze_ymax);
  }

  return true;
}
/// Controller initialization in non-realtime
bool MyCartControllerClass::init(pr2_mechanism_model::RobotState *robot, ros::NodeHandle &n) {
  // Get the root and tip link names from the parameter server.
  std::string root_name, tip_name;
  if (!n.getParam("root_name", root_name)) {
    ROS_ERROR("No root name given (namespace: %s)", n.getNamespace().c_str());
    return false;
  }
  if (!n.getParam("tip_name", tip_name)) {
    ROS_ERROR("No tip name given (namespace: %s)", n.getNamespace().c_str());
    return false;
  }

  // Construct a chain from the root to the tip and prepare the kinematics.
  // Note the joints must be calibrated.
  if (!chain_.init(robot, root_name, tip_name)) {
    ROS_ERROR("MyCartController could not use the chain from '%s' to '%s'", root_name.c_str(), tip_name.c_str());
    return false;
  }

  // Store the robot handle for later use (to get time).
  robot_state_ = robot;

  // Construct the KDL solvers in non-realtime.
  chain_.toKDL(kdl_chain_);
  jnt_to_pose_solver_.reset(new KDL::ChainFkSolverPos_recursive(kdl_chain_));
  jnt_to_jac_solver_.reset(new KDL::ChainJntToJacSolver(kdl_chain_));

  // Resize (pre-allocate) the variables in non-realtime.
  q_.resize(kdl_chain_.getNrOfJoints());
  q0_.resize(kdl_chain_.getNrOfJoints());
  qdot_.resize(kdl_chain_.getNrOfJoints());
  tau_.resize(kdl_chain_.getNrOfJoints());
  J_.resize(kdl_chain_.getNrOfJoints());

  // Pick the gains.
  Kp_.vel(0) = 100.0;  Kd_.vel(0) = 1.0;  // Translation x
  Kp_.vel(1) = 100.0;  Kd_.vel(1) = 1.0;  // Translation y
  Kp_.vel(2) = 100.0;  Kd_.vel(2) = 1.0;  // Translation z
  Kp_.rot(0) = 100.0;  Kd_.rot(0) = 1.0;  // Rotation x
  Kp_.rot(1) = 100.0;  Kd_.rot(1) = 1.0;  // Rotation y
  Kp_.rot(2) = 100.0;  Kd_.rot(2) = 1.0;  // Rotation z

  return true;
}
void GetSteerJointNames(ros::NodeHandle &_nh) {
  // steer joint to get angular command
  _nh.getParam(ns_ + "front_steer", steer_jnt_name_);

  // virtual steer joint for gazebo control
  std::vector<std::string> virtual_steer_jnt_names;
  _nh.getParam(ns_ + "gazebo/virtual_front_steer", virtual_steer_jnt_names);

  virtual_steer_jnt_names_ = virtual_steer_jnt_names;
  cnt_virtual_steer_joints_ = virtual_steer_jnt_names_.size();
}
void GetWheelJointNames(ros::NodeHandle &_nh) {
  // wheel joint to get linear command
  _nh.getParam(ns_ + "rear_wheel", wheel_jnt_name_);

  // virtual wheel joint for gazebo control
  std::vector<std::string> virtual_wheel_jnt_names;
  _nh.getParam(ns_ + "gazebo/virtual_rear_wheel", virtual_wheel_jnt_names);

  virtual_wheel_jnt_names_ = virtual_wheel_jnt_names;
  cnt_virtual_wheel_joints_ = virtual_wheel_jnt_names_.size();
}
void SteerBotHardwareGazebo::GetSteerJointNames(ros::NodeHandle &_nh) {
  // steer joint to get angular command
  _nh.getParam(ns_ + "front_steer", steer_jnt_name_);

  // virtual steer joints for gazebo control
  _nh.getParam(ns_ + "virtual_front_steers", virtual_steer_jnt_names_);

  const int dof = virtual_steer_jnt_names_.size();
  virtual_steer_jnt_pos_.resize(dof);
  virtual_steer_jnt_vel_.resize(dof);
  virtual_steer_jnt_eff_.resize(dof);
  virtual_steer_jnt_pos_cmd_.resize(dof);
}
BeaconFinder(ros::NodeHandle n, vector<Beacon> beacons)
    : n(n), pnh("~"), beacons(beacons),
      img_sub(n, "/camera/rgb/image_color/compressed", 1),
      laser_sub(n, "/scan", 1),
      sync(SyncPolicy(10), laser_sub, img_sub) {
  odom_sub = n.subscribe("ass1/odom", 1, &BeaconFinder::odom_callback, this);
  beacons_pub = n.advertise<ass1::FoundBeacons>("/ass1/beacons", 1);

  // Use ApproximateTime message_filter to read both kinect image and laser.
  sync.registerCallback(boost::bind(&BeaconFinder::image_callback, this, _1, _2));

  // Get colours from params
  XmlRpc::XmlRpcValue colour_thresholds;

  n.getParam("/beacon_finder/colour_ranges/pink", colour_thresholds);
  pink_ranges[0] = (int)colour_thresholds["h_lo"];
  pink_ranges[1] = (int)colour_thresholds["h_hi"];
  pink_ranges[2] = (int)colour_thresholds["s"];
  pink_ranges[3] = (int)colour_thresholds["v"];

  n.getParam("/beacon_finder/colour_ranges/blue", colour_thresholds);
  blue_ranges[0] = (int)colour_thresholds["h_lo"];
  blue_ranges[1] = (int)colour_thresholds["h_hi"];
  blue_ranges[2] = (int)colour_thresholds["s"];
  blue_ranges[3] = (int)colour_thresholds["v"];

  n.getParam("/beacon_finder/colour_ranges/green", colour_thresholds);
  green_ranges[0] = (int)colour_thresholds["h_lo"];
  green_ranges[1] = (int)colour_thresholds["h_hi"];
  green_ranges[2] = (int)colour_thresholds["s"];
  green_ranges[3] = (int)colour_thresholds["v"];

  n.getParam("/beacon_finder/colour_ranges/yellow", colour_thresholds);
  yellow_ranges[0] = (int)colour_thresholds["h_lo"];
  yellow_ranges[1] = (int)colour_thresholds["h_hi"];
  yellow_ranges[2] = (int)colour_thresholds["s"];
  yellow_ranges[3] = (int)colour_thresholds["v"];

  for (int i = 0; i < 4; i++) {
    ROS_INFO("PINK %i = %d", i, pink_ranges[i]);
  }
  for (int i = 0; i < 4; i++) {
    ROS_INFO("BLUE %i = %d", i, blue_ranges[i]);
  }
  for (int i = 0; i < 4; i++) {
    ROS_INFO("GREEN %i = %d", i, green_ranges[i]);
  }
  for (int i = 0; i < 4; i++) {
    ROS_INFO("YELLOW %i = %d", i, yellow_ranges[i]);
  }
}
bool PoseOdomStateRecord::initialize() {
  if (!nh_private_.getParam("path_name", path_name_)) {
    ROS_FATAL("Could not find path name on parameter server");
    return false;
  }
  if (!nh_private_.getParam("motion_name", motion_name_)) {
    ROS_FATAL("Could not find motion name on parameter server");
    return false;
  }

  pose_odom_state_sub_ = nh_.subscribe<nav_msgs::Odometry>(
      "base_odometry/odom", 1, &PoseOdomStateRecord::poseOdomStateCallback, this);

  return true;
}
explicit HaarAdaNode(const ros::NodeHandle& nh) : node_(nh) {
  num_class1 = 0;
  num_class0 = 0;
  num_TP_class1 = 0;
  num_FP_class1 = 0;
  num_TP_class0 = 0;
  num_FP_class0 = 0;

  string nn = ros::this_node::getName();
  int qs;
  if (!node_.getParam(nn + "/Q_Size", qs)) {
    qs = 3;
  }

  int NS;
  if (!node_.getParam(nn + "/num_Training_Samples", NS)) {
    NS = 350;  // default sets aside very little for training
    node_.setParam(nn + "/num_Training_Samples", NS);
  }
  HAC_.setMaxSamples(NS);

  if (!node_.getParam(nn + "/RemoveOverlappingRois", remove_overlapping_rois_)) {
    remove_overlapping_rois_ = false;
  }

  // Published messages
  pub_rois_            = node_.advertise<Rois>("HaarAdaOutputRois", qs);
  pub_Color_Image_     = node_.advertise<Image>("HaarAdaColorImage", qs);
  pub_Disparity_Image_ = node_.advertise<DisparityImage>("HaarAdaDisparityImage", qs);

  // Subscribe to messages
  sub_image_.subscribe(node_, "Color_Image", qs);
  sub_disparity_.subscribe(node_, "Disparity_Image", qs);
  sub_rois_.subscribe(node_, "input_rois", qs);

  // Sync the synchronizer
  approximate_sync_.reset(new ApproximateSync(ApproximatePolicy(qs), sub_image_, sub_disparity_, sub_rois_));
  approximate_sync_->registerCallback(boost::bind(&HaarAdaNode::imageCb, this, _1, _2, _3));
}
bool JointStateTorqueSensorController::init(hardware_interface::JointStateInterface* hw,
                                            ros::NodeHandle& root_nh,
                                            ros::NodeHandle& controller_nh) {
  // Get all joint names from the hardware interface
  joint_names_ = hw->getNames();
  num_hw_joints_ = joint_names_.size();
  for (unsigned i = 0; i < num_hw_joints_; i++)
    ROS_DEBUG("Got joint %s", joint_names_[i].c_str());

  // Get publishing period
  if (!controller_nh.getParam("publish_rate", publish_rate_)) {
    ROS_ERROR("Parameter 'publish_rate' not set");
    return false;
  }

  std::string output_topic;
  if (!controller_nh.getParam("output_topic", output_topic)) {
    ROS_ERROR("Parameter 'output_topic' not specified");
    return false;
  }

  use_relative_for_nans_ = false;
  controller_nh.getParam("use_relative_for_nans", use_relative_for_nans_);
  if (use_relative_for_nans_) {
    ROS_ERROR_STREAM("using relative encoders for nans");
  } else {
    ROS_ERROR_STREAM("NOT using relative encoders for nans");
  }

  // Realtime publisher
  realtime_pub_.reset(new realtime_tools::RealtimePublisher<sensor_msgs::JointState>(root_nh, output_topic, 4));

  // Get joints and allocate message
  for (unsigned i = 0; i < num_hw_joints_; i++) {
    joint_state_.push_back(hw->getHandle(joint_names_[i]));
    realtime_pub_->msg_.name.push_back(joint_names_[i]);
    realtime_pub_->msg_.position.push_back(0.0);
    realtime_pub_->msg_.velocity.push_back(0.0);
    realtime_pub_->msg_.effort.push_back(0.0);
  }
  addExtraJoints(controller_nh, realtime_pub_->msg_);

  if (!parseJointOffsets(controller_nh)) {
    return false;
  }

  return true;
}
EmoTimeWrap() : it_(nh_) {
  // Subscribe to the input video feed and publish the output video feed
  counter = 0;
  nh_.getParam("camera_topic", subscribe_topic);
  nh_.getParam("filtered_face_locations", filtered_face_locations);

  image_sub_ = it_.subscribe(subscribe_topic, 1, &EmoTimeWrap::imageCb, this);
  face_location_sub = nh_.subscribe(filtered_face_locations, 1, &EmoTimeWrap::list_of_faces_update, this);
  //image_pub_ = it_.advertise("/emotime_node/output_video", 1);
  //emotion_pub = nh_.advertise<std_msgs::String>("emotion_states", 1000);
  faces_locations = nh_.advertise<cmt_tracker_msgs::Objects>("emo_pub_registered", 10);

  method = "svm";
  // config = "/home/lina/Desktop/emotime_final/emotime/resources/haarcascade_frontalface_cbcl1.xml";
  config = "/usr/share/opencv/haarcascades/haarcascade_frontalface_alt.xml";
  config_e = "/usr/share/opencv/haarcascades/haarcascade_eye.xml";
  size.width = 52;
  size.height = 52;
  nwidths = 1;
  nlambdas = 5;
  nthetas = 8;

  path = ros::package::getPath("emotime");
  classifier_paths.push_back(path + "/svm/anger_vs_contempt_disgust_fear_happy_neutral_sadness_surprise_feats.xml");
  classifier_paths.push_back(path + "/svm/contempt_vs_anger_disgust_fear_happy_neutral_sadness_surprise_feats.xml");
  classifier_paths.push_back(path + "/svm/disgust_vs_anger_contempt_fear_happy_neutral_sadness_surprise_feats.xml");
  classifier_paths.push_back(path + "/svm/fear_vs_anger_contempt_disgust_happy_neutral_sadness_surprise_feats.xml");
  classifier_paths.push_back(path + "/svm/happy_vs_anger_contempt_disgust_fear_neutral_sadness_surprise_feats.xml");
  classifier_paths.push_back(path + "/svm/neutral_vs_anger_contempt_disgust_fear_happy_sadness_surprise_feats.xml");
  classifier_paths.push_back(path + "/svm/sadness_vs_anger_contempt_disgust_fear_happy_neutral_surprise_feats.xml");
  classifier_paths.push_back(path + "/svm/surprise_vs_anger_contempt_disgust_fear_happy_neutral_sadness_feats.xml");
  //cout << "Length of the classifiers: " << classifier_paths.size() << std::endl;

  preprocessor = new FacePreProcessor(config, config_e, size.width, size.height, nwidths, nlambdas, nthetas);
  emodetector = new SVMEmoDetector(kCfactor, kMaxIteration, kErrorMargin);
  emodetector->init(classifier_paths);

  //text = "Funny text inside the box";
  fontFace = FONT_HERSHEY_SCRIPT_SIMPLEX;
  fontScale = 2;
  thickness = 3;
  textOrg.x = 10;
  textOrg.y = 130;
  int lineType = 8;
  bool bottomLeftOrigin = false;
  //cv::namedWindow(OPENCV_WINDOW);
}
LinactDynamixel(ros::NodeHandle& nh, int id, std::string& name)
    : LinearActuator(nh, id, 1000), m_name(name) {
  std::string controller;

  // Load the linear actuator parameters
  nh.getParam(name + "/base_length", m_base_length);
  nh.getParam(name + "/extent", m_extent);

  // m_state = nh.advertise<dynamixel_msgs::JointState>(name + "/state", 5);
  this->setName(name);

  // Here is where to set up a subscriber
  controller = name.substr(0, name.length() - 5);
  m_command = nh.subscribe(controller + "controller/command", 1000, &LinactDynamixel::command_callback, this);
}