void mainloop(){
	acquireData(&gd);                             // read the current sensor sample into gd
	gdnorm = gd - grelax;                         // subtract the relaxed-pose baseline
	//printData(gdnorm);
	detectGesture(gdnorm, gdiff, &gstartstate);   // update the gesture detection state
	doTheThing();                                 // respond to the detected gesture
}
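The loop above relies on globals and helpers that are not shown here (gd, grelax, gdnorm, gdiff, gstartstate, acquireData, detectGesture, doTheThing). A minimal, self-contained sketch of the same baseline-subtraction idea follows; the Vec3 type, the calibration pass, and the threshold are assumptions rather than code from the original project.

// Hypothetical sketch of the baseline-subtraction pattern used in mainloop() above.
#include <cmath>
#include <cstdio>

struct Vec3 {
    float x, y, z;
    Vec3 operator-(const Vec3& o) const { return {x - o.x, y - o.y, z - o.z}; }
    float norm() const { return std::sqrt(x * x + y * y + z * z); }
};

// Stand-in for acquireData(): a real implementation would read the sensor here.
Vec3 readSensor() { return {0.1f, 0.0f, 1.0f}; }

int main() {
    // Calibration: average a few readings while the hand is relaxed (the grelax baseline).
    const int calibSamples = 32;
    Vec3 relax{0, 0, 0};
    for (int i = 0; i < calibSamples; ++i) {
        Vec3 s = readSensor();
        relax.x += s.x / calibSamples;
        relax.y += s.y / calibSamples;
        relax.z += s.z / calibSamples;
    }

    // Main loop: subtract the baseline and flag large deviations as gesture candidates.
    const float threshold = 0.5f;   // assumed, sensor-dependent
    for (int i = 0; i < 100; ++i) {
        Vec3 normalized = readSensor() - relax;
        if (normalized.norm() > threshold)
            std::puts("gesture candidate");
    }
    return 0;
}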
Example No. 2
void testApp::processSample(int x, int y, int z){
    
    const int samplesToAverage = 4;              // combine this many raw readings per stored sample
    static int count = 0;
    static int totalX = 0, totalY = 0, totalZ = 0;   // running sums for the current average
    
    totalX += x;
    totalY += y;
    totalZ += z;
    
    count++;
    
    if (count == samplesToAverage) {
        sampleData[sampleCount][0] = quantizeAccelData(totalX / samplesToAverage);
        sampleData[sampleCount][1] = quantizeAccelData(totalY / samplesToAverage);
        sampleData[sampleCount][2] = quantizeAccelData(totalZ / samplesToAverage);
        
        sampleCount++;
        
        totalX = totalY = totalZ = 0;
        count = 0;
    }
    
    if(sampleCount > (sampleDataLength - 1)){   // sample buffer is full
        endCapture();
        
        int gesture = detectGesture();
        
        cout << "Gesture: " << gesture << endl;
    }
}
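quantizeAccelData() and the sampleData buffer are defined elsewhere in the original project. A plausible sketch of such a quantizer is shown below for context; the input range and number of levels are assumptions, not values taken from the original code.

#include <algorithm>

// Hypothetical quantizer in the spirit of quantizeAccelData(): map an averaged
// accelerometer reading onto a small number of discrete levels so the gesture
// matcher only has to compare coarse symbols.
int quantizeAccelData(int value) {
    const int minValue = -512, maxValue = 511;   // assumed raw sensor range
    const int levels = 8;                        // assumed number of bins

    int clamped = std::min(std::max(value, minValue), maxValue);
    // Linearly rescale the clamped reading into [0, levels - 1].
    return (clamped - minValue) * (levels - 1) / (maxValue - minValue);
}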
Example No. 3
// Subscriber callbacks retrieve the hand position on the depth map for the left and right hands of the currently tracked user.
void gestureTracker::l_hand_cb(const std_msgs::Int32MultiArray::ConstPtr& array)
{
    l_hand_pos_prev = l_hand_pos;

    std::vector<int>::const_iterator it = array->data.begin();
    this->l_hand_pos.x = *it;
    it++;
    this->l_hand_pos.y = *it;

    //cerr << " L: pos " << l_hand_pos.x  << " " << l_hand_pos.y << endl;

    // Ignore updates that jump more than 10 px from the previous position (likely a tracking glitch).
    if(abs(l_hand_pos_prev.x - l_hand_pos.x) > 10.0 || abs(l_hand_pos_prev.y - l_hand_pos.y) > 10.0)
        return;

    int w = 20;   // initial probe window around the hand, in pixels
    int h = 20;
    if(this->has_depth_data)
    {
        //while(this->in_use)
        //	waitKey(1);
        this->in_use = true;   // mark the shared depth image as busy while we read from it
        //Mat depth;
        //depth = this->depth_img.clone();
        //this->depth_img.copyTo(depth);

        Size max = this->depth_img.size();
        int max_x = max.width;
        int max_y = max.height;

        int tl_x = clamp(l_hand_pos.x-w/2, 1, (max_x-w)-1);
        int tl_y = clamp(l_hand_pos.y-h/2, 1, (max_y-h)-1);

        /*
        cerr << " L: " << max_x << " " << max_y << endl;
        cerr << " L: " << l_hand_pos.x << " " << l_hand_pos.y << endl;
        cerr << " L: clamp in " << l_hand_pos.x-w/2 << " " << l_hand_pos.y-h/2 << endl;
        cerr << " L: clamp in " << l_hand_pos.x-(w/2) << " " << l_hand_pos.y-(h/2) << endl;
        cerr << " L: " << tl_x << " " << tl_y << " " << w << " " << h <<endl;
        cerr << " L: first roi" << endl;
        */
        Rect roi = Rect(tl_x, tl_y, w, h);
        Scalar avg = cv::mean(this->depth_img(roi));

        //cerr << " L: first roi success" << endl;

        // Scale the ROI with distance: normalize the average depth by an empirical
        // full-scale constant so that closer hands get a larger window.
        double scale_w = avg[0] / 12750.0;
        double scale_h = avg[0] / 12750.0;

        if(scale_w <= 0 || scale_h <= 0)
        {
            this->in_use = false;   // release the busy flag before bailing out
            return;
        }

        int scaled_w = 1/scale_w * 110 + 60;
        int scaled_h = 1/scale_h * 110 + 60;

        //cerr << " L: " << avg[0] << " " << scaled_w << " " << scale_h << endl;

        // Cap the ROI size at 200 px, then smooth it over time and keep the ROI square.
        scaled_h = clamp(scaled_h, 0, 200);
        scaled_w = clamp(scaled_w, 0, 200);

        l_roi_size_filter.add(scaled_w);
        scaled_w = l_roi_size_filter.average();
        scaled_h = scaled_w;

        int tl_x_new = clamp(l_hand_pos.x-scaled_w/2, 0, (max_x-scaled_w));
        int tl_y_new = clamp(l_hand_pos.y-scaled_h/2, 0, (max_y-scaled_h));

        /*
        cerr << " L: " << tl_x_new << " " << tl_y_new << " " << scaled_w << " " << scaled_h << endl;
        cerr << " L: scaling roi" << endl;
        */
        // Crop the depth image around the hand and pass the patch to the gesture classifier.
        Rect roi_scaled = Rect(tl_x_new, tl_y_new, scaled_w, scaled_h);
        this->depth_img(roi_scaled).copyTo(l_hand_roi);

        this->in_use = false;
        //imshow("left", l_hand_roi);
        //waitKey(60);
        detectGesture(l_hand_roi, "left");
    }

}
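clamp() and l_roi_size_filter are project-local helpers whose definitions are not included in this snippet. A minimal sketch of what they might look like, based only on how they are called above, follows; the class name, window size, and exact signatures are assumptions.

#include <algorithm>
#include <cstddef>
#include <deque>
#include <numeric>

// Constrain v to [lo, hi], as used for the ROI coordinates above.
static int clamp(int v, int lo, int hi) {
    return std::max(lo, std::min(v, hi));
}

// A fixed-window moving average matching the add()/average() calls on
// l_roi_size_filter: it smooths the ROI size so it does not jitter frame to frame.
class MovingAverage {
public:
    explicit MovingAverage(std::size_t window = 8) : window_(window) {}

    void add(int value) {
        values_.push_back(value);
        if (values_.size() > window_)
            values_.pop_front();
    }

    int average() const {
        if (values_.empty())
            return 0;
        return std::accumulate(values_.begin(), values_.end(), 0) /
               static_cast<int>(values_.size());
    }

private:
    std::size_t window_;
    std::deque<int> values_;
};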