Code example #1
int main(int argc, const char * argv[])
{
    
    ft_data ftdata;
    if (argc < 3) {
        cout << "usage: " << argv[0] << " user_profile_dir camera_profile.yaml" << endl;
        return 0;
    }
    
    fs::path baseDirPath(argv[1]);
    ASM_Gaze_Tracker poseTracker(baseDirPath / "trackermodel.yaml", fs::path(argv[2]));

    vector<Point3f> faceCrdRefVecs;
    faceCrdRefVecs.push_back(Point3f(0,0,0));
    faceCrdRefVecs.push_back(Point3f(50,0,0));
    faceCrdRefVecs.push_back(Point3f(0,50,0));
    faceCrdRefVecs.push_back(Point3f(0,0,50));
    
    VideoCapture cam;
    cam.open(0);
    if (!cam.isOpened()) {
        cout << "failed to open camera" << endl;
        return 0;
    }
    Mat im;
    captureImage(cam, im);  // grab one frame to initialize the capture

    while(true){
        bool success = captureImage(cam, im, true);
        if (!success) {
            break;
        }
        
        // track facial features and, on success, estimate the head pose
        bool succeeded = poseTracker.featureTracking(im);
        if (succeeded)
            poseTracker.estimateFacePose();

        // project the head-coordinate axes into the image plane
        vector<Point2f> reprjCrdRefPts;
        poseTracker.projectPoints(faceCrdRefVecs, reprjCrdRefPts);
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[1], Scalar(255,0,0), 2);  // x axis, blue
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[2], Scalar(0,255,0), 2);  // y axis, green
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[3], Scalar(0,0,255), 2);  // z axis, red
        drawStringAtTopLeftCorner(im, "distance to camera: " + boost::lexical_cast<string>(poseTracker.distanceToCamera()));
        imshow("head pose", im);
        
        int c = waitKey(1);
        if (c == 'q') break;
        else if (c == 'd') poseTracker.reDetectFace();
        
    }
    
    return 0;
}
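Note: captureImage and drawStringAtTopLeftCorner are helpers from the surrounding project, not OpenCV APIs. Below is a minimal sketch of what captureImage might look like, assuming the optional third argument mirrors the frame; the default value and the mirroring behavior are guesses based on how it is called above.

#include <opencv2/opencv.hpp>
using namespace cv;

// Hypothetical reconstruction of the project helper: grab one frame
// and optionally mirror it horizontally for a selfie-style view.
bool captureImage(VideoCapture &cam, Mat &im, bool mirror = false) {
    if (!cam.read(im) || im.empty())
        return false;        // camera disconnected or stream ended
    if (mirror)
        flip(im, im, 1);     // flip around the vertical axis
    return true;
}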
Code example #2
File: test_core_pullup.cpp (project: aurelw/beholder)
int main() {

    /* the basic rig config */
    RigConfig rc;
    rc.loadFromFile("rigconfig_core_pullup.xml");

    /* camera parameter data */
    CameraParameters::Ptr cameraParameters(new CameraParameters(rc));
    cameraParameters->print();

    /* pose tracking for the camera */
    PoseTrackerKinfu::Ptr poseTracker(new PoseTrackerKinfu(rc));
    poseTracker->start();

    /* range finder with kinfu cloud provider */
    CloudProvider<pcl::PointXYZ>::Ptr rangeimageProvider = poseTracker->getKinfu();
    RangeFinder<pcl::PointXYZ>::Ptr rangeFinder(new RangeFinder<pcl::PointXYZ>(rc));
    rangeFinder->setCloudSource(rangeimageProvider);

    /* viewfinder to extract pois from the scene */
    ViewFinderRangeImage<pcl::PointXYZ> viewFinder(0.5);
    viewFinder.setRangeFinder(rangeFinder);
    viewFinder.setCameraParameters(cameraParameters);

    /* the focus motor */
    FocusMotor::Ptr focusMotor(new FocusMotorTwoPolys(rc));

    /* create a focus controller */
    FocusControllerSinglePoint::Ptr fCtrlSingle(new FocusControllerSinglePoint(
                cameraParameters, poseTracker));
    fCtrlSingle->setIdentifier("SingeTracker0");
    fCtrlSingle->setPriority(5);
    fCtrlSingle->start();

    /* focus control */
    CentralFocusControl fControl;
    fControl.setFocusMotor(focusMotor);
    fControl.addFocusController(fCtrlSingle);
    fControl.start();

    /* simulate a main loop */
    for (int i=0; i<50; i++) {
        poseTracker->getPose();
        fControl.focusDistance();
        fControl.getFocusedPoint();
        fControl.activlyControlled();
        sleep(0.55);
    }

    /* save cloud from kinfu */
    pcl::PCDWriter writer;
    CloudProvider<pcl::PointXYZ>::CloudPtr outCloud(
            new CloudProvider<pcl::PointXYZ>::Cloud(
                *rangeimageProvider->getLastCloud()));
    writer.writeASCII("kinfu_cloud_pullup.pcd", *outCloud);

    viewFinder.compute();
    pcl::RangeImagePlanar::Ptr rangeImage = viewFinder.getRangeImage();
    RangeImageWriter riWriter(rangeImage);
    riWriter.save("rangeimage_pullup.png");
    viewFinder.getMiddlePoint();

    return 0;
}
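The cloud written at the end can be inspected offline. Here is a minimal sketch, assuming the kinfu_cloud_pullup.pcd file produced above, that loads it back with PCL's standard I/O:

#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <iostream>

int main() {
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    // load the cloud dumped by the pull-up test for offline inspection
    if (pcl::io::loadPCDFile<pcl::PointXYZ>("kinfu_cloud_pullup.pcd", *cloud) == -1) {
        std::cerr << "failed to load kinfu_cloud_pullup.pcd" << std::endl;
        return 1;
    }
    std::cout << "loaded " << cloud->size() << " points" << std::endl;
    return 0;
}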
Code example #3
int main(int argc, const char * argv[])
{
    
    ft_data ftdata;
    if (argc < 3) {
        cout << "usage: " << argv[0] << " user_profile_dir camera_profile.yaml" << endl;
        return 0;
    }

    fs::path baseDirPath(argv[1]);
    ASM_Gaze_Tracker poseTracker(baseDirPath / "trackermodel.yaml", fs::path(argv[2]));

    vector<Point3f> faceCrdRefVecs;
    faceCrdRefVecs.push_back(Point3f(0,0,0));
    faceCrdRefVecs.push_back(Point3f(50,0,0));
    faceCrdRefVecs.push_back(Point3f(0,50,0));
    faceCrdRefVecs.push_back(Point3f(0,0,50));
    
    VideoCapture cam;
    cam.open(0);
    if (!cam.isOpened()) {
        cout << "failed to open camera" << endl;
        return 0;
    }
    Mat im;
    captureImage(cam, im);  // grab one frame to initialize the capture

    while(true){
        bool success = captureImage(cam, im, true);
        if (!success) {
            break;
        }
        
        // track facial features and, on success, estimate the head pose
        bool succeeded = poseTracker.featureTracking(im);
        if (succeeded)
            poseTracker.estimateFacePose();

        // keep a horizontally flipped copy of the frame
        Mat frontim, flipback;
        flip(im, flipback, 1);
        
        // project tracked feature points and head-coordinate axes into the image
        vector<Point2f> reprjCrdRefPts;
        vector<Point2f> reprjFeaturePts;
        poseTracker.projectPoints(poseTracker.facialPointsIn3D, reprjFeaturePts);
        poseTracker.projectPoints(faceCrdRefVecs, reprjCrdRefPts);
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[1], Scalar(255,0,0), 2);  // x axis, blue
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[2], Scalar(0,255,0), 2);  // y axis, green
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[3], Scalar(0,0,255), 2);  // z axis, red
        drawPoints(im, reprjFeaturePts);
        drawStringAtTopLeftCorner(im, "distance to camera: " + boost::lexical_cast<string>(poseTracker.distanceToCamera()));
        imshow("head pose", im);
        
        // warp the flipped face region toward a frontal view via homography
        vector<Point2f> transformedPoints = poseTracker.tracker.points;
        fliplr(transformedPoints, im.size());

        Mat hM = findHomography(poseTracker.facialPointsIn2D, transformedPoints, 0);
        warpPerspective(flipback(boundingRect(transformedPoints)), frontim, hM, im.size());
        imshow("front", frontim);

        
        int c = waitKey(1) % 256;
        if (c == 'q') break;
        
    }
    
    return 0;
}
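fliplr is another project helper with no direct OpenCV equivalent. Judging by how it pairs with the horizontal flip above, it presumably mirrors the x coordinates of the tracked points so they line up with the flipped image; the sketch below is a hypothetical reconstruction under that assumption.

#include <opencv2/core.hpp>
#include <vector>
using namespace cv;
using namespace std;

// Hypothetical reconstruction: mirror point x coordinates across the
// vertical center line of an image of the given size.
void fliplr(vector<Point2f> &pts, Size imageSize) {
    for (Point2f &p : pts)
        p.x = imageSize.width - 1 - p.x;
}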