Example #1
JNIEXPORT jintArray JNICALL Java_pris_videotest_JNIClient_detectWithReturn(
		JNIEnv * env, jclass, jintArray pixels, jint width, jint height) {
	jint * cPixels;
	cPixels = env->GetIntArrayElements(pixels, 0);

	cv::Mat imgData(height, width, CV_8UC4, (unsigned char*) cPixels);

	// Wrap the Java pixel buffer in an IplImage header; cvCreateImageHeader avoids
	// allocating a second pixel buffer that would leak once the header is overwritten below.
	IplImage *frame = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, 4);
	*frame = imgData.operator _IplImage();
	//imgData.release();

	cvSmooth(frame, frame, CV_GAUSSIAN, 3, 0, 0);     ///< Gaussian smoothing
	cv::Mat m_OriFrameMat = frame;
	cvReleaseImageHeader(&frame); ///< release only the header; the pixel data still belongs to cPixels
	resize(m_OriFrameMat, m_ResizeFrameMat,
			cv::Size(m_nVideoResizeW, m_nVideoResizeH), 0, 0, CV_INTER_LINEAR); ///< downscale: 640*480 -> m_pResizeFrame = 30*40
	//m_OriFrameMat.release();
	//cvtColor(m_ResizeFrameMat, m_GrayFrameMat, CV_BGRA2GRAY, 1); ///< convert to grayscale
	//m_ResizeFrameMat.release();
	m_pBGSubMOG2(m_ResizeFrameMat, foregroundMat, 0.001); ///< MOG2 background subtraction, learning rate 0.001
	m_ResizeFrameMat = foregroundMat;

	std::vector<jint> result(m_nVideoResizeH * m_nVideoResizeW); // avoid a non-standard variable-length array on the stack
	for (int i = 0; i < m_nVideoResizeH * m_nVideoResizeW; i++) {
		result[i] = m_ResizeFrameMat.data[i]; ///< copy the 8-bit foreground mask into the output buffer
	}
	env->ReleaseIntArrayElements(pixels, cPixels, 0);
	jintArray intArray = env->NewIntArray(m_nVideoResizeH * m_nVideoResizeW);
	env->SetIntArrayRegion(intArray, 0, m_nVideoResizeH * m_nVideoResizeW, result.data());
	return intArray;
}
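
The IplImage detour above exists only so the legacy cvSmooth call can be reused. For comparison, a minimal sketch of the same smoothing, downscaling and MOG2 foreground-extraction steps written against the C++ API alone (assuming OpenCV 2.4, where cv::BackgroundSubtractorMOG2 is still directly callable) could look like this:

#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/video/background_segm.hpp>

// Standalone sketch of the preprocessing pipeline from the example above
// (assumption: OpenCV 2.4 API; kernel size and learning rate mirror the example).
static cv::BackgroundSubtractorMOG2 bgSubMOG2;

static cv::Mat extract_foreground(const cv::Mat &rgba, int resizeW, int resizeH) {
	cv::Mat blurred, scaled, foreground;
	cv::GaussianBlur(rgba, blurred, cv::Size(3, 3), 0);        ///< 3x3 Gaussian smoothing
	cv::resize(blurred, scaled, cv::Size(resizeW, resizeH),
			0, 0, cv::INTER_LINEAR);                   ///< downscale for analysis
	bgSubMOG2(scaled, foreground, 0.001);                      ///< MOG2, learning rate 0.001
	return foreground;                                         ///< 8-bit binary foreground mask
}

The returned mask corresponds to m_ResizeFrameMat after the MOG2 step, i.e. the buffer the example copies back into the Java int array.
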
Example #2
void GUI::set_img(const cv::Mat &frame, cv::Mat &result, Controller &controller) {

    /** result == controller.current_image_to_display. */

    // Convert controller.current_image_to_process (as src) to RGB(A) controller.current_image_to_display (as dst).
    convert_image_to_gui_output_format(frame, result) ;


    if (widget_current_image_to_display != NULL) {

        delete widget_current_image_to_display ;
    }

    widget_current_image_to_display = Gtk::manage(new Gtk::Image());

    widget_current_image_to_display->clear() ;

    if (result.depth() != CV_8U) { // This shouldn't happen.

        result.assignTo(result, CV_8U) ;

    }

    // We resize the original image every time we display it.
    // This way the image is resized (if needed) only once per change,
    // instead of resizing the same image over and over.

    //controller.resize_image_to_display(result) ;

    if (controller.get_image_size_gt_layout()) {
        cv::resize(result, result, cv::Size(controller.display_image_size.first, controller.display_image_size.second), 1.0, 1.0, cv::INTER_LINEAR) ;
    }


    IplImage iplimg = _IplImage(result) ;

    widget_current_image_to_display->set(Gdk::Pixbuf::create_from_data( (const guint8 *) iplimg.imageData,
                                         Gdk::COLORSPACE_RGB,
                                         (result.channels() == 4),
                                         iplimg.depth,
                                         iplimg.width,
                                         iplimg.height,
                                         iplimg.widthStep)) ;



    widget_current_image_to_display->show() ;

    display_area.put(*widget_current_image_to_display, controller.current_image_position.first, controller.current_image_position.second) ;

}
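
For comparison, the Gdk::Pixbuf can also be built straight from the cv::Mat, skipping the IplImage wrapper. A hedged sketch, assuming result already holds 8-bit RGB(A) pixels (which convert_image_to_gui_output_format is expected to guarantee); note that Gdk::Pixbuf::create_from_data does not copy the buffer, so the Mat must outlive the pixbuf:

#include <opencv2/core/core.hpp>
#include <gdkmm/pixbuf.h>

// Wrap an 8-bit RGB or RGBA cv::Mat in a Gdk::Pixbuf without copying.
// The pixbuf only borrows the Mat's buffer, so the Mat must stay alive
// (or the pixbuf must be copied) for as long as the pixbuf is used.
static Glib::RefPtr<Gdk::Pixbuf> mat_to_pixbuf(const cv::Mat &rgb) {
	return Gdk::Pixbuf::create_from_data(rgb.data,
			Gdk::COLORSPACE_RGB,
			rgb.channels() == 4,         // has_alpha
			8,                           // bits per sample
			rgb.cols,
			rgb.rows,
			static_cast<int>(rgb.step));
}

The original call has the same lifetime requirement: iplimg.imageData points into result, which lives in the controller (see the comment at the top of set_img).
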
Example #3
JNIEXPORT jboolean JNICALL Java_pris_videotest_JNIClient_detect(JNIEnv * env,
		jclass, jbyteArray pixels, jint width, jint height) {
	jbyte * cPixels;
	cPixels = env->GetByteArrayElements(pixels, 0);

	cv::Mat imgData(height, width, CV_8UC1, (unsigned char*) cPixels);

	// Wrap the Java pixel buffer in an IplImage header; cvCreateImageHeader avoids
	// allocating a second pixel buffer that would leak once the header is overwritten below.
	IplImage *frame = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, 1);
	*frame = imgData.operator _IplImage();
	//imgData.release();

	cvSmooth(frame, frame, CV_GAUSSIAN, 3, 0, 0);     ///< Gaussian smoothing
	cv::Mat m_OriFrameMat = frame;
	cvReleaseImageHeader(&frame); ///< release only the header; the pixel data still belongs to cPixels
	resize(m_OriFrameMat, m_ResizeFrameMat,
			cv::Size(m_nVideoResizeW, m_nVideoResizeH), 0, 0, CV_INTER_LINEAR); ///< downscale: 640*480 -> m_pResizeFrame = 30*40
	//m_OriFrameMat.release();
	//cvtColor(m_ResizeFrameMat, m_GrayFrameMat, CV_BGRA2GRAY, 1); ///< convert to grayscale
	//m_ResizeFrameMat.release();
	m_pBGSubMOG2(m_ResizeFrameMat, foregroundMat, 0.001); ///< MOG2 background subtraction, learning rate 0.001
	m_ResizeFrameMat = foregroundMat;

	int i, j, k;
	k = 0;
	for (i = 0; i < m_nVideoResizeH; i++) {
		for (j = 0; j < m_nVideoResizeW; j++) {
			if (m_ResizeFrameMat.data[i * m_nVideoResizeW + j] != 0) {
				k++; ///< count the non-zero pixels of the binary foreground image
			}
		}
	}
	//m_GrayFrameMat.release();
	//delete frame;
	double k_ratio = (double) k / (double) (m_nVideoResizeW * m_nVideoResizeH);
	if (k_ratio <= 0.01) {
		env->ReleaseByteArrayElements(pixels, cPixels, 0);
		return false;
	}
	// Re-adapt the threshold when the foreground ratio jumps far away from it.
	if (k_ratio / m_rFGSegThreshold > 1.5 || k_ratio / m_rFGSegThreshold < 0.79)
		m_rFGSegThreshold = k_ratio;

	/// motion detected in this video segment
	if (k_ratio >= m_rFGSegThreshold) {
		env->ReleaseByteArrayElements(pixels, cPixels, 0);
		return true;
	}
	env->ReleaseByteArrayElements(pixels, cPixels, 0);
	return false;
}
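
The per-pixel counting loop maps directly onto cv::countNonZero. A minimal sketch of the same motion decision, assuming the foreground mask is a single-channel 8-bit image as produced by the MOG2 step above:

#include <opencv2/core/core.hpp>

// Same decision as the loop above: a frame counts as motion when the
// fraction of non-zero foreground pixels reaches the adaptive threshold.
static bool is_motion_frame(const cv::Mat &foreground, double &fgSegThreshold) {
	double ratio = (double) cv::countNonZero(foreground) / (double) foreground.total();
	if (ratio <= 0.01)
		return false;                 // less than 1% foreground: treat as no motion
	if (ratio / fgSegThreshold > 1.5 || ratio / fgSegThreshold < 0.79)
		fgSegThreshold = ratio;       // re-adapt the threshold after a large jump
	return ratio >= fgSegThreshold;
}

Here fgSegThreshold plays the role of m_rFGSegThreshold; the 0.01 floor and the 1.5/0.79 re-adaptation bounds are taken from the example.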