Code Example #1
    int run(int width, int height, signed char *_yuv, int* _bgra)
    {

        // Decode the NV21 (YUV420sp) camera buffer straight into the output BGRA Mat
        Mat myuv(height + height/2, width, CV_8UC1, (unsigned char *)_yuv);
        Mat mbgra(height, width, CV_8UC4, (unsigned char *)_bgra);
        cvtColor(myuv, mbgra, CV_YUV420sp2BGR, 4);

        // Drop the alpha channel for processing, then convert back for display
        Mat image;
        cvtColor(mbgra, image, CV_BGRA2BGR);
        int flag = run(image);  // run(Mat&) overload defined elsewhere in the project
        cvtColor(image, mbgra, CV_BGR2BGRA);
        return flag;
    }
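The height + height/2 row count reflects the NV21 (YUV420sp) layout that the Android camera preview delivers: a full-resolution Y (luma) plane followed by a half-height plane of interleaved V/U samples. A minimal sketch of that size arithmetic (the concrete frame dimensions are illustrative, not from the example):

#include <cstddef>

int main() {
    // Hypothetical preview size; any even width/height behaves the same way
    const int width = 640, height = 480;

    const size_t ySize  = (size_t) width * height; // one luma byte per pixel
    const size_t vuSize = ySize / 2;               // V,U interleaved at quarter resolution
    const size_t total  = ySize + vuSize;          // == width * (height + height/2)

    // A CV_8UC1 Mat with height + height/2 rows therefore spans the whole buffer,
    // while a Mat with only `height` rows over the same pointer is a free
    // grayscale view of the Y plane (as mgray does in Code Example #2).
    return total == (size_t) width * (height + height / 2) ? 0 : 1;
}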
Code Example #2
File: FaceCaptureView.cpp  Project: eldog/fface
JNIEXPORT jlong JNICALL Java_uk_me_eldog_fface_FaceCaptureView_findFaces
(JNIEnv* env, jclass cls, jlong cnnPointer, jint width, jint height, jbyteArray yuv, jintArray bgra)
{
    Cnn* cnn = (Cnn*) cnnPointer;

    jbyte* _yuv  = env->GetByteArrayElements(yuv, 0);
    jint*  _bgra = env->GetIntArrayElements(bgra, 0);

    // NV21 buffer: full-resolution Y plane followed by interleaved VU, hence height*3/2 rows
    cv::Mat myuv(height + height/2, width, CV_8UC1, (unsigned char *)_yuv);
    cv::Mat mbgra(height, width, CV_8UC4, (unsigned char *)_bgra);
    // Grayscale view over the Y plane alone (shares the buffer, no copy)
    cv::Mat mgray(height, width, CV_8UC1, (unsigned char *)_yuv);
    cv::cvtColor(myuv, mbgra, CV_YUV420sp2BGR, 4);

    // Detect faces on the BGRA frame and draw their bounding boxes in place
    std::vector<cv::Rect> faces = cnn->findFaces(mbgra);
    cnn->drawRectangles(faces, mbgra);
    env->ReleaseIntArrayElements(bgra, _bgra, 0);
    env->ReleaseByteArrayElements(yuv, _yuv, 0);

    return (jlong) cnn;
} // findFaces
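Code Example #2 threads the native Cnn object through Java as an opaque jlong handle and returns it unchanged so the caller can pass it back on the next frame. A project using this pattern typically pairs it with create/destroy entry points; the sketch below shows what those could look like (the function names and the default Cnn constructor are assumptions, not taken from eldog/fface):

JNIEXPORT jlong JNICALL Java_uk_me_eldog_fface_FaceCaptureView_createCnn
(JNIEnv* env, jclass cls)
{
    // Hand ownership to Java as an opaque handle (hypothetical entry point)
    return (jlong) new Cnn();
}

JNIEXPORT void JNICALL Java_uk_me_eldog_fface_FaceCaptureView_destroyCnn
(JNIEnv* env, jclass cls, jlong cnnPointer)
{
    // Java must not use the handle after this call (hypothetical entry point)
    delete (Cnn*) cnnPointer;
}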
Code Example #3
JNIEXPORT void JNICALL
Java_ph_edu_dlsu_orbflow_CameraActivity_process(JNIEnv *env, jobject instance,
                                                jobject pTarget, jbyteArray pSource) {
    int64_t start;
    cv::Mat srcBGR;

    cv::RNG randnum(12345); // for random color
    cv::Scalar color;

    AndroidBitmapInfo bitmapInfo;
    uint32_t *bitmapContent;

    if (AndroidBitmap_getInfo(env, pTarget, &bitmapInfo) < 0) abort();
    if (bitmapInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) abort();
    if (AndroidBitmap_lockPixels(env, pTarget, (void **) &bitmapContent) < 0) abort();

    /// Access source array data... OK
    jbyte *source = (jbyte *) env->GetPrimitiveArrayCritical(pSource, 0);
    if (source == NULL) abort();

    /// cv::Mat for YUV420sp source and output BGRA
    cv::Mat srcGray(bitmapInfo.height, bitmapInfo.width, CV_8UC1, (unsigned char *) source);
    cv::Mat src(bitmapInfo.height + bitmapInfo.height / 2, bitmapInfo.width, CV_8UC1,
                (unsigned char *) source);
    cv::Mat mbgra(bitmapInfo.height, bitmapInfo.width, CV_8UC4, (unsigned char *) bitmapContent);


/***********************************************************************************************/
    /// Native Image Processing HERE...

    start = cv::getTickCount();

    if (srcBGR.empty())
        srcBGR = cv::Mat(bitmapInfo.height, bitmapInfo.width, CV_8UC3);

    // Convert NV21 to RGB. Despite its name, srcBGR holds RGB data here; the
    // CV_BGR2BGRA step below only appends alpha without reordering channels,
    // so the RGBA_8888 bitmap ends up with correctly ordered pixels.
    cv::cvtColor(src, srcBGR, CV_YUV420sp2RGB);

    /// If previous frame doesn't exist yet, initialize to srcGray
    if (previous_gray_frame.empty()) {
        srcGray.copyTo(previous_gray_frame);
        LOGI("Initializing previous frame...");
    }

    // Lazily create the ORB detector with its default parameters:
    // create(int nfeatures=500, float scaleFactor=1.2f, int nlevels=8, int edgeThreshold=31,
    //        int firstLevel=0, int WTA_K=2, int scoreType=ORB::HARRIS_SCORE, int patchSize=31,
    //        int fastThreshold=20)
    if (!detector)
        detector = cv::ORB::create();


    // Detect ORB keypoints on the previous frame
    detector->detect(previous_gray_frame, previous_keypoints, cv::Mat());


    // Debugging
    //LOGI("previous_keypoints.size() = %d", previous_keypoints.size());

    // Convert Keypoints to Points
    keypoints2points(previous_keypoints, previous_features);

    //LOGI("previous_features.size() = %d", previous_features.size());

    // Refines the corner locations
    cornerSubPix(previous_gray_frame, previous_features,
                 cv::Size(win_size, win_size), cv::Size(-1, -1),
                 cv::TermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03));

    // Output status vector: each element is set to 1 if the flow for the
    // corresponding feature was found, 0 otherwise
    std::vector<uchar> features_found;

    // Sparse iterative Lucas-Kanade optical flow; note maxLevel is 0 here,
    // so no pyramid levels beyond the original image are used
    cv::calcOpticalFlowPyrLK(previous_gray_frame, srcGray,
                             previous_features, current_features, features_found,
                             cv::noArray(), cv::Size(win_size * 4 + 1, win_size * 4 + 1), 0,
                             cv::TermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.3));

    for (int i = 0; i < (int) previous_features.size(); i++) {
        if (features_found[i]) {
            // Randomize color and display the velocity vectors
            color = cv::Scalar(randnum.uniform(0, 255), randnum.uniform(0, 255),
                               randnum.uniform(0, 255));
            line(srcBGR, previous_features[i], current_features[i], color);
        }
    }


    LOGI("Processing took %0.2f ms.", 1000*(static_cast<uint32_t>(cv::getTickCount()) - start)/(float)cv::getTickFrequency());

    cvtColor(srcBGR, mbgra, CV_BGR2BGRA);

    // Copy the current gray frame into previous_gray_frame
    srcGray.copyTo(previous_gray_frame);

/************************************************************************************************/

    /// Release Java byte buffer and unlock backing bitmap.
    /// JNI_COMMIT copies any changes back into the Java array but keeps the
    /// buffer pinned (mode 0 would copy back and release it). Between
    /// Get/ReleasePrimitiveArrayCritical the code must not block or make
    /// other JNI calls, and should hold the array only briefly.
    env->ReleasePrimitiveArrayCritical(pSource, source, JNI_COMMIT);


    if (AndroidBitmap_unlockPixels(env, pTarget) < 0) abort();
}
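keypoints2points is a project helper that is not shown here; given how its output feeds cornerSubPix and calcOpticalFlowPyrLK, it almost certainly reduces each cv::KeyPoint to its pt coordinate. A minimal sketch of such a helper (the signature is inferred from the call site, not from the source):

#include <vector>
#include <opencv2/core.hpp>

static void keypoints2points(const std::vector<cv::KeyPoint>& keypoints,
                             std::vector<cv::Point2f>& points)
{
    // Strip KeyPoint metadata (size, angle, response), keep only coordinates
    points.clear();
    points.reserve(keypoints.size());
    for (const cv::KeyPoint& kp : keypoints)
        points.push_back(kp.pt);
    // OpenCV ships an equivalent: cv::KeyPoint::convert(keypoints, points);
}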
Code Example #4
JNIEXPORT void JNICALL
Java_ph_edu_dlsu_circles_CameraActivity_process(JNIEnv *env, jobject instance,
                                                jobject pTarget, jbyteArray pSource, jint thresh) {
    int64_t t;
    cv::Mat srcBGR;
    cv::Mat edges;

    cv::Scalar color;
    cv::RNG rng(12345); // random drawing colors; declared locally so the snippet is self-contained

    std::vector<cv::Vec3f> circles;

    AndroidBitmapInfo bitmapInfo;
    uint32_t* bitmapContent;

    if(AndroidBitmap_getInfo(env, pTarget, &bitmapInfo) < 0) abort();
    if(bitmapInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) abort();
    if(AndroidBitmap_lockPixels(env, pTarget, (void**)&bitmapContent) < 0) abort();

    /// Access source array data... OK
    jbyte* source = (jbyte*)env->GetPrimitiveArrayCritical(pSource, 0);
    if (source == NULL) abort();

    /// cv::Mat for YUV420sp source and output BGRA
    cv::Mat srcGray(bitmapInfo.height, bitmapInfo.width, CV_8UC1, (unsigned char *)source);
    cv::Mat src(bitmapInfo.height + bitmapInfo.height/2, bitmapInfo.width, CV_8UC1, (unsigned char *)source);
    cv::Mat mbgra(bitmapInfo.height, bitmapInfo.width, CV_8UC4, (unsigned char *)bitmapContent);


/***********************************************************************************************/
    /// Native Image Processing HERE...

    t = cv::getTickCount();

    if(srcBGR.empty())
        srcBGR = cv::Mat(bitmapInfo.height, bitmapInfo.width, CV_8UC3);

    // As in the previous example, srcBGR actually holds RGB here; CV_BGR2BGRA
    // below only appends alpha, matching the RGBA_8888 bitmap layout
    cv::cvtColor(src, srcBGR, CV_YUV420sp2RGB);

    // Reduce noise
    cv::GaussianBlur( srcGray, srcGray, cv::Size(9, 9), 2, 2 );

    // Detect circles: dp=1, minDist=rows/4, Canny high threshold=200,
    // accumulator threshold=thresh (clamped to at least 1), minRadius=5
    cv::HoughCircles(srcGray, circles, CV_HOUGH_GRADIENT, 1, srcGray.rows/4, 200,
                     thresh > 0 ? thresh : 1, 5);

    // Draw the circles
    for (size_t i = 0; i < circles.size(); i++) {
        cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
        int radius = cvRound(circles[i][2]);
        color = cv::Scalar(rng.uniform(0, 255), rng.uniform(0, 255), rng.uniform(0, 255));

        // draw the circle center
        cv::circle(srcBGR, center, 3, color, -1, 8, 0);

        // draw the circle outline
        cv::circle(srcBGR, center, radius, color, 3, 8, 0);
    }


    LOGI("Processing took %0.2f ms.", 1000*((float)cv::getTickCount() - t)/(float)cv::getTickFrequency());

    cvtColor(srcBGR, mbgra, CV_BGR2BGRA);


/************************************************************************************************/

    /// Release Java byte buffer and unlock backing bitmap.
    /// As above, JNI_COMMIT copies changes back into the Java array while
    /// keeping the buffer pinned; mode 0 would copy back and release it.
    env->ReleasePrimitiveArrayCritical(pSource, source, JNI_COMMIT);


    if (AndroidBitmap_unlockPixels(env, pTarget) < 0) abort();
}
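Both process() functions release the source array with JNI_COMMIT, which copies changes back without unpinning the buffer. Since pSource is input-only here and the function returns immediately afterwards, a release with mode 0 is arguably the safer choice. A minimal sketch of the pattern and its mode flags (the helper and array names are hypothetical):

#include <jni.h>
#include <cstdlib>

// Hypothetical helper illustrating the release modes for a Java byte[]
static void withCriticalArray(JNIEnv* env, jbyteArray frame)
{
    jbyte* data = (jbyte*) env->GetPrimitiveArrayCritical(frame, 0);
    if (data == NULL) abort();

    // ... short, non-blocking work on data; no other JNI calls in between ...

    // Mode 0: copy changes back (if the VM handed out a copy) and unpin the buffer
    env->ReleasePrimitiveArrayCritical(frame, data, 0);

    // The other modes:
    //   JNI_COMMIT : copy back but keep the buffer pinned; a final release with
    //                mode 0 or JNI_ABORT is still required later
    //   JNI_ABORT  : discard native changes and unpin
}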