Code Example #1
    int run(int width, int height, signed char *_yuv, int* _bgra)
    {

        // decode the NV21 (YUV420sp) camera buffer into the BGRA output Mat
        Mat myuv(height + height/2, width, CV_8UC1, (unsigned char *)_yuv);
        Mat mbgra(height, width, CV_8UC4, (unsigned char *)_bgra);
        cvtColor(myuv, mbgra, CV_YUV420sp2BGR, 4);

        // process a 3-channel copy, then write the result back into the BGRA buffer
        Mat image;
        cvtColor(mbgra, image, CV_BGRA2BGR);
        int flag = run(image);               // overload of run() that takes a Mat
        cvtColor(image, mbgra, CV_BGR2BGRA);
        return flag;
    }
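The `height + height/2` row count reflects the NV21 (YUV420sp) layout handed in by the camera: a full-resolution Y plane followed by an interleaved VU plane at half the vertical resolution, i.e. 12 bits per pixel. A small sketch of the buffer sizes this function implicitly expects (helper names are hypothetical):

#include <cstddef>

// Hypothetical helpers illustrating the buffer sizes assumed by run() above.
// NV21 packs a width x height Y plane plus a width x (height/2) interleaved VU plane.
static size_t nv21BufferSize(int width, int height)
{
    return static_cast<size_t>(width) * height * 3 / 2;   // 12 bits per pixel
}

// The BGRA output written through _bgra holds one 32-bit pixel per int element.
static size_t bgraElementCount(int width, int height)
{
    return static_cast<size_t>(width) * height;           // number of ints in _bgra
}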
Code Example #2
File: picam.cpp  Project: Roboy/legs
void CameraCallback(CCamera* cam, const void* buffer, int buffer_length)
{
	// check the elapsed time and keep counting frames as long as they
	// arrive within the specified interval
	t2 = std::chrono::high_resolution_clock::now();
	time_span = std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1);
	if(time_span.count() < 10){
		fps += 1;
	}else{
		printf("fps: %f\n", fps/time_span.count());
		startMilliSecondTimer();
		fps = 0;
	}
	//printf("Elapsed time: %.3f received %d bytes of data\n", time_span.count(), buffer_length);

	// wrap the incoming NV21 buffer and convert it into the preallocated
	// RGBA buffer 'dest' (WIDTH * HEIGHT * 4 bytes, defined elsewhere)
	cv::Mat myuv(HEIGHT + HEIGHT/2, WIDTH, CV_8UC1, (unsigned char*)buffer);
	cv::Mat mrgb(HEIGHT, WIDTH, CV_8UC4, dest);
	cv::cvtColor(myuv, mrgb, CV_YUV2RGBA_NV21);
	//cv::imshow("video", mrgb);
	//cv::waitKey(1);
}
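CameraCallback relies on several names that are defined elsewhere in picam.cpp (dest, fps, t1, t2, time_span, startMilliSecondTimer, WIDTH, HEIGHT). A minimal sketch of assumed declarations, purely to make the snippet self-contained; the actual types, frame size, and duration representation in the project may differ:

#include <chrono>
#include <opencv2/opencv.hpp>

// Assumed declarations for the names used by CameraCallback above; the real
// definitions live elsewhere in picam.cpp and may differ.
#define WIDTH  320   // assumed frame size
#define HEIGHT 240

static unsigned char dest[WIDTH * HEIGHT * 4];   // RGBA output buffer wrapped by mrgb
static float fps = 0;                            // frames counted in the current timing window
static std::chrono::high_resolution_clock::time_point t1, t2;
static std::chrono::duration<double> time_span;  // floating-point duration; .count() is then in seconds

static void startMilliSecondTimer()              // restarts the timing window
{
    t1 = std::chrono::high_resolution_clock::now();
}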
Code Example #3
File: FaceCaptureView.cpp  Project: eldog/fface
JNIEXPORT jlong JNICALL Java_uk_me_eldog_fface_FaceCaptureView_findFaces
(JNIEnv* env, jclass cls, jlong cnnPointer, jint width, jint height, jbyteArray yuv, jintArray bgra)
{
    Cnn* cnn = (Cnn*) cnnPointer;

    jbyte* _yuv  = env->GetByteArrayElements(yuv, 0);
    jint*  _bgra = env->GetIntArrayElements(bgra, 0);

    // wrap the Java arrays: NV21 input, BGRA output, and a grayscale view of the Y plane
    cv::Mat myuv(height + height/2, width, CV_8UC1, (unsigned char *)_yuv);
    cv::Mat mbgra(height, width, CV_8UC4, (unsigned char *)_bgra);
    cv::Mat mgray(height, width, CV_8UC1, (unsigned char *)_yuv);   // unused below
    cv::cvtColor(myuv, mbgra, CV_YUV420sp2BGR, 4);

    // detect faces in the BGRA frame and draw the results back into the output buffer
    std::vector<cv::Rect> faces = cnn->findFaces(mbgra);
    cnn->drawRectangles(faces, mbgra);

    env->ReleaseIntArrayElements(bgra, _bgra, 0);
    env->ReleaseByteArrayElements(yuv, _yuv, 0);

    return (jlong) cnn;
} // findFaces
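The listing only shows how the Cnn object is used; an interface consistent with those calls might look like the sketch below (inferred from cnn->findFaces(mbgra) and cnn->drawRectangles(faces, mbgra), not taken from eldog/fface):

#include <vector>
#include <opencv2/opencv.hpp>

// Interface inferred from usage in findFaces above; the real class in the
// project may declare different signatures or additional members.
class Cnn
{
public:
    // return bounding boxes of the faces detected in the given BGRA frame
    std::vector<cv::Rect> findFaces(cv::Mat& frame);

    // draw the given rectangles onto the frame in place
    void drawRectangles(const std::vector<cv::Rect>& faces, cv::Mat& frame);
};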
Code Example #4
File: visionNode.cpp  Project: Roboy/mocap
void VisionNode::CameraCallback(CCamera *cam, const void *buffer, int buffer_length) {
    // convert the NV21 camera buffer into img (RGBA) and img_gray, which are
    // declared elsewhere in VisionNode
    cv::Mat myuv(HEIGHT + HEIGHT / 2, WIDTH, CV_8UC1, (unsigned char *) buffer);
    cv::cvtColor(myuv, img, CV_YUV2RGBA_NV21);
    cv::cvtColor(img, img_gray, CV_RGBA2GRAY);

    communication::MarkerPosition markerPosition;
    markerPosition.header.stamp = ros::Time::now();
    static uint next_id = 0;
    markerPosition.header.seq = next_id++;
    markerPosition.cameraID = ID;

    static uint counter = 0;
    t2 = std::chrono::high_resolution_clock::now();
    time_span = std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1);
    markerPosition.fps = (double)counter/time_span.count();
    counter++;

    if(time_span.count()>30){ // reset every 30 seconds
        counter = 0;
        t1 = std::chrono::high_resolution_clock::now();
        std_msgs::Int32 msg;
        msg.data = ID;
        cameraID_pub->publish(msg);
    }

    cv::Mat filtered_img;
    cv::threshold(img_gray, filtered_img, threshold_value, 255, 3); // 3 == THRESH_TOZERO

    // find contours in result, which hopefully correspond to a found object
    vector<vector<cv::Point>> contours;
    vector<cv::Vec4i> hierarchy;
    findContours(filtered_img, contours, hierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE,
                 cv::Point(0, 0));

    // filter out tiny useless contours
    double min_contour_area = 10;
    for (auto it = contours.begin(); it != contours.end();) {
        if (contourArea(*it) < min_contour_area) {
            it = contours.erase(it);
        }
        else {
            ++it;
        }
    }

    // collect the marker positions (enclosing-circle centers) to be published below
    vector<cv::Point2f> centers(contours.size());
    vector<float> radius(contours.size());
    for (int idx = 0; idx < contours.size(); idx++) {
        minEnclosingCircle(contours[idx], centers[idx], radius[idx]);
        communication::Vector2 pos;
        pos.x = WIDTH - centers[idx].x; // mirror the x coordinate
        pos.y = centers[idx].y;
        markerPosition.marker_position.push_back(pos);
    }
    //imshow("camera", img);
    //waitKey(1);
    markerPosition.markerVisible = contours.size();
    marker_position_pub->publish(markerPosition);

    if(publish_video_flag && counter%3==0){
        // draw the filtered contours onto the grayscale image and publish it
        for (int idx = 0; idx < contours.size(); idx++) {
            drawContours(img_gray, contours, idx, cv::Scalar(0, 0, 0), 4, 8, hierarchy, 0,
                         cv::Point());
        }
        cv_bridge::CvImage cvImage;
        img_gray.copyTo(cvImage.image);
        sensor_msgs::Image msg;
        cvImage.toImageMsg(msg);
        msg.encoding = "mono8";
        msg.header = markerPosition.header;
        video_pub->publish(msg);
    }
}
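The marker-detection core of this callback (threshold to zero, external contours, area filtering, minimum enclosing circles) can be read as a standalone routine; below is a sketch of that pipeline using the same OpenCV calls, with a hypothetical helper name and default parameters:

#include <vector>
#include <opencv2/opencv.hpp>

// Hypothetical helper distilling the detection steps used in CameraCallback:
// threshold-to-zero, external contours, area filter, minimum enclosing circles.
std::vector<cv::Point2f> detectMarkers(const cv::Mat& gray, double threshold_value,
                                       double min_contour_area = 10)
{
    cv::Mat filtered;
    cv::threshold(gray, filtered, threshold_value, 255, cv::THRESH_TOZERO);

    std::vector<std::vector<cv::Point>> contours;
    std::vector<cv::Vec4i> hierarchy;
    cv::findContours(filtered, contours, hierarchy, CV_RETR_EXTERNAL,
                     CV_CHAIN_APPROX_SIMPLE, cv::Point(0, 0));

    std::vector<cv::Point2f> centers;
    for (const auto& contour : contours) {
        if (cv::contourArea(contour) < min_contour_area)
            continue;                                   // skip tiny, noisy contours
        cv::Point2f center;
        float radius;
        cv::minEnclosingCircle(contour, center, radius);
        centers.push_back(center);
    }
    return centers;
}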