void SpatialPerspectiveLevel2::callbackTimer(const ros::TimerEvent&) {
    static tf::TransformListener listener;

    std::string target_frame = "head_origin1";
    std::vector<std::string> frames;
    listener.getFrameStrings(frames);

    for(size_t i=0; i<frames.size(); i++) {
        std::string obj_name = frames.at(i);
        if(obj_name.find("obj_")!=std::string::npos) {
            geometry_msgs::PointStamped object_head_frame;
            try {
                geometry_msgs::PointStamped object_object_frame;
                // get object coordinates in target_frame, needed because of offset
                object_object_frame.header.frame_id = obj_name;
                object_object_frame.point.x = 0;
                object_object_frame.point.y = 0;
                object_object_frame.point.z = 0;

                // header.stamp is left at 0, so ask tf for the latest available transform
                listener.waitForTransform(obj_name, target_frame, ros::Time(0), ros::Duration(1.0));
                listener.transformPoint(target_frame, object_object_frame, object_head_frame);

                // atan2 handles x == 0 and objects behind the head correctly
                double angle = atan2(object_head_frame.point.y, object_head_frame.point.x) * 180.0 / M_PI;

                if(angle > 5.0) {
                    ROS_INFO_STREAM("Object " << obj_name << " is left of human.");
                } else if(angle < -5.0){
                    ROS_INFO_STREAM("Object " << obj_name << " is right of human.");
                } else {
                    ROS_INFO_STREAM("Object " << obj_name << " is central.");
                }
            } catch (const tf::TransformException& ex) {
                ROS_WARN_STREAM("Skip object " << obj_name << " as its point could not be transformed: " << ex.what());
                continue;
            }
        }
    }
}
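
For context, callbackTimer is a member callback that still has to be registered with a ros::Timer somewhere in the node. Below is a minimal, hypothetical sketch of such a setup; the class layout, node name and the 1 s period are assumptions for illustration and are not taken from the original source. The callback body is the one shown above.

// Hypothetical wiring sketch (node name, member names and period are illustrative only).
#include <ros/ros.h>

class SpatialPerspectiveLevel2 {
public:
    explicit SpatialPerspectiveLevel2(ros::NodeHandle& nh) {
        // run the left/right/central perspective check once per second
        timer_ = nh.createTimer(ros::Duration(1.0),
                                &SpatialPerspectiveLevel2::callbackTimer, this);
    }
    void callbackTimer(const ros::TimerEvent&); // defined above

private:
    ros::Timer timer_;
};

int main(int argc, char** argv) {
    ros::init(argc, argv, "spatial_perspective_level2");
    ros::NodeHandle nh;
    SpatialPerspectiveLevel2 node(nh);
    ros::spin();
    return 0;
}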
Example #2
void Raytracer::doRaytrace(const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr& cloud) {
    static tf::TransformListener listener;

    RayTracing<pcl::PointXYZRGB> sor;
    sor.setInputCloud (cloud);
    sor.setLeafSize (voxel_leaf_size, voxel_leaf_size, voxel_leaf_size);
    sor.initializeVoxelGrid();
    ROS_INFO_STREAM("Cloud size: " << sor.getFilteredPointCloud().size());
    if(sor.getFilteredPointCloud().empty()) {
        return;
    }
    viewer->spinOnce();

    std::string target_frame_cloud=cloud->header.frame_id;

    geometry_msgs::PointStamped head_cloud_frame;
    try {
        // get head position in target_frame, needed because of nose tip
        std::string head_frame = "head_origin1";
        geometry_msgs::PointStamped head_head_frame;
        head_head_frame.header.frame_id = head_frame;
        tf::pointTFToMsg(head_offset, head_head_frame.point);
        // the point's stamp is 0, so wait for the latest available transform
        listener.waitForTransform(head_frame, target_frame_cloud, ros::Time(0), ros::Duration(0.1));
        listener.transformPoint(target_frame_cloud, head_head_frame, head_cloud_frame);
    } catch (const tf::TransformException& ex) {
        ROS_WARN_STREAM("Head coordinate transformation could not be obtained (" << ex.what() << "), returning!");
        return;
    }

    // delete old shapes
    for (size_t i=0; i<shape_list.size(); i++) {
        viewer->removeShape(shape_list.at(i));
    }
    shape_list.clear();

    // check all frames which are known to tf
    std::vector<std::string> frames;
    listener.getFrameStrings(frames);

    for(size_t i=0; i<frames.size(); i++) {
        std::string obj_name = frames.at(i);
        if(obj_name.find("obj_")!=std::string::npos) {
            geometry_msgs::PointStamped object_cloud_frame;
            try {
                geometry_msgs::PointStamped object_object_frame;
                // get object coordinates in target_frame, needed because of offset
                object_object_frame.header.frame_id = obj_name;
                tf::pointTFToMsg(object_offset, object_object_frame.point);

                // the point's stamp is 0, so wait for the latest available transform
                listener.waitForTransform(obj_name, target_frame_cloud, ros::Time(0), ros::Duration(0.1));
                listener.transformPoint(target_frame_cloud, object_object_frame, object_cloud_frame);
            } catch (const tf::TransformException& ex) {
                ROS_WARN_STREAM("Skip object " << obj_name << " as its point could not be transformed: " << ex.what());
                continue;
            }

            // draw object in visualizer
            // get random color, but consistent for one object
            boost::hash<std::string> string_hash;
            srand(string_hash(obj_name));
            double obj_r = double(rand()) / double(RAND_MAX);
            double obj_g = double(rand()) / double(RAND_MAX);
            double obj_b = double(rand()) / double(RAND_MAX);

            viewer->addSphere(vpt::ros_to_pcl(object_cloud_frame.point),
                              obj_size,
                              obj_r, obj_g, obj_b,
                              obj_name);
            shape_list.push_back(obj_name);

            // do level 1 perspective taking
            int is_occluded=-1;
            std::vector <Eigen::Vector3i> out_ray;
            sor.rayTraceWrapper(is_occluded,
                                vpt::ros_to_eigen(object_cloud_frame.point),
                                vpt::ros_to_eigen(head_cloud_frame.point),
                                theta,
                                out_ray);

            wysiwyd::wrdac::Relation r_occluded(obj_name, "occluded");
            wysiwyd::wrdac::Relation r_visible(obj_name, "visible");

            if(is_occluded) {
                ROS_INFO_STREAM("Object " << obj_name << " is occluded from partner.");
                if(opc->isConnected()) {
                    wysiwyd::wrdac::Agent* partner = opc->addOrRetrieveEntity<wysiwyd::wrdac::Agent>("partner");
                    partner->addBelief(r_occluded);
                    partner->removeBelief(r_visible);
                    opc->commit(partner);
                }
            } else {
                ROS_INFO_STREAM("Object " << obj_name << " is visible to partner.");
                if(opc->isConnected()) {
                    wysiwyd::wrdac::Agent* partner = opc->addOrRetrieveEntity<wysiwyd::wrdac::Agent>("partner");

                    partner->addBelief(r_visible);
                    partner->removeBelief(r_occluded);

                    opc->commit(partner);
                }
            }

            double dist=(vpt::ros_to_eigen(object_cloud_frame.point)-vpt::ros_to_eigen(head_cloud_frame.point)).norm();

            ROS_INFO_STREAM("Raytrace counter: " << out_ray.size());
            ROS_INFO_STREAM("Distance to object: " << dist);
            // use a separate index so the outer frame loop counter is not shadowed
            for(size_t j=0; j<out_ray.size(); j++) {
                pcl::PointXYZ traversed_pcl = vpt::eigen_to_pcl(sor.getCentroidCoordinate(out_ray.at(j)));
                std::string traversal_sphere_name = obj_name+boost::lexical_cast<std::string>(j);

                viewer->addSphere(traversed_pcl,
                                  traversal_sphere_size,
                                  obj_r, obj_g, obj_b,
                                  traversal_sphere_name);
                shape_list.push_back(traversal_sphere_name);
            }
        }
    }
}
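
doRaytrace expects a PCL point cloud, so the class presumably subscribes to a sensor_msgs/PointCloud2 topic via pcl_ros, which converts the incoming message to the pcl::PointCloud type automatically. A minimal, hypothetical registration sketch follows; the topic name, queue size and reduced class layout are assumptions, and the real class additionally holds the viewer, offsets and OPC client used above.

// Hypothetical subscriber setup (topic name and member names are illustrative only).
#include <ros/ros.h>
#include <pcl_ros/point_cloud.h>
#include <pcl/point_types.h>

class Raytracer {
public:
    explicit Raytracer(ros::NodeHandle& nh) {
        // pcl_ros converts the incoming sensor_msgs/PointCloud2 to the
        // pcl::PointCloud type expected by doRaytrace (defined above)
        cloud_sub_ = nh.subscribe("/camera/depth_registered/points", 1,
                                  &Raytracer::doRaytrace, this);
    }
    void doRaytrace(const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr& cloud); // defined above

private:
    ros::Subscriber cloud_sub_;
};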