Beispiel #1
0
//this thread does all the work
void CAMERA_VAR3::processImages() {
	time(&start_time);
	frame_counter = 0;
	while(running) {
		process_mutex.lock();
		
		// Grab the next frame from the camera and wrap it as a cv::Mat.
		// cv::cvarrToMat wraps the IplImage buffer without copying; the
		// cv::Mat(IplImage*) constructor used before was removed in
		// OpenCV 3, and cvarrToMat matches the conversion style already
		// used elsewhere in this file.
		IplImage* image_raspi = raspiCamCvQueryFrame(capture);	//MAYBE DO THIS BETTER
		cv::Mat image = cv::cvarrToMat(image_raspi);
		
		// Detect the red object; on success draw a marker at its position.
		// Object coordinates are relative to the image centre (y pointing
		// up), hence the +cols/2 / -y+rows/2 conversion back to pixels.
		if(findObject(image, lookup_reduce_colourspace, lookup_threshold, &redObjectDetected, &redObject, &window)) {
			CAMERA_COMMON::drawObjectMarker(image, cv::Point(redObject.x+image.cols/2, -redObject.y+image.rows/2), cv::Scalar(0, 0, 0));
		}
		// Draw the current search window and a centre crosshair.
		CAMERA_COMMON::drawBox(image, cv::Point(window.x, window.y), cv::Point(window.x+window.w, window.y+window.l), cv::Scalar(255, 255, 255));
		CAMERA_COMMON::drawCrosshair(image);
		cv::imshow("Image", image);
		
		// One-shot photo request set by another thread.
		if(takePhotoThisCycle) {
			cv::imwrite(imageFileName, image);
			takePhotoThisCycle = false;
		}
		frame_counter++;
		cv::waitKey(1);	// let HighGUI process window events
		
		process_mutex.unlock();
		if(THREAD_SLEEP_TIME > 0) {
			boost::this_thread::sleep(boost::posix_time::milliseconds(THREAD_SLEEP_TIME));
		}
	}
}
int main() {
  RASPIVID_CONFIG* config = (RASPIVID_CONFIG*)malloc(sizeof(RASPIVID_CONFIG));
  config -> width = 320;
  config -> height = 240;
  config -> bitrate = 0;
  config -> framerate = 0;
  config -> monochrome = 1;

  RaspiCamCvCapture * camera = raspiCamCvCreateCameraCapture2(0, config);
  free(config);

  IplImage* image = raspiCamCvQueryFrame(camera);
  cvNamedWindow("Display", CV_WINDOW_AUTOSIZE);

  if (faceCascade.load(faceCascadePath)) {
    display(image);
  }

  return 0;
}
Beispiel #3
0
//this thread does all the work
void CAMERA_VAR5::processImages() {	
	time(&start_time);
	frame_counter = 0;
	
	char* outfile = "/home/pi/out.mjpg";
	
	while(running) {
		process_mutex.lock();
		
		IplImage* image_raspi = raspiCamCvQueryFrame(capture);	//MAYBE DO THIS BETTER
		cv::Mat image = cv::Mat(image_raspi);

//		cv::imshow("Image", image);
		frame_counter++;
		
		/*----------------------*
		 * Magic happening here *
		 *----------------------*/
		
		cv::Mat image_lowRes;
		cv::resize(image, image_lowRes, cv::Size(), 0.25, 0.25, cv::INTER_NEAREST);
		cv::VideoWriter outStream(outfile, CV_FOURCC('M','J','P','G'), 2, image_lowRes.size(), true);
		if(outStream.isOpened()) {
			outStream.write(image_lowRes);
		}
		
		/*----------------------*
		 *      Magic done      *
		 *----------------------*/
		
		cv::waitKey(1);

		
		process_mutex.unlock();
        if(THREAD_SLEEP_TIME > 0) {
            boost::this_thread::sleep(boost::posix_time::milliseconds(THREAD_SLEEP_TIME));
        }
	}
}
Beispiel #4
0
//this thread does all the work
void CAMERA_MISC::processImages() {
	time(&start_time);
	frame_counter = 0;
	while(running) {
		process_mutex.lock();
		
		IplImage* image_raspi = raspiCamCvQueryFrame(capture);	//MAYBE DO THIS BETTER
		cv::Mat image(image_raspi);
		
		if(hsvOn) {
			cv::Mat image_hsv;
			cv::cvtColor(image, image_hsv, CV_BGR2HSV);
		}
		
		if(displayOn) {
			cv::imshow("Image 320x240", image);
		}
		
		if(lowResOn) {
			cv::Mat image_lowRes;
			cv::resize(image, image_lowRes, cv::Size(), 0.5, 0.5, cv::INTER_NEAREST);
			cv::imshow("Image 160x120", image_lowRes);
		}

		if(takePhotoThisCycle) {
			//imwrite(std::string("../") + imageFileName, image);
			cv::imwrite(imageFileName, image);
			takePhotoThisCycle = false;
		}
		frame_counter++;
		cv::waitKey(1);
		
		process_mutex.unlock();
        if(THREAD_SLEEP_TIME > 0) {
            boost::this_thread::sleep(boost::posix_time::milliseconds(THREAD_SLEEP_TIME));
        }
	}
}
int main( int argc, char** argv )
{
	// Initial time measurement.  Note these are clock() ticks, not seconds.
	clock_t t0, t_load, t_loop, t_loop0;
	double dtime;
	t0 = clock();

	// Declaration of variables
	CascadeClassifier face_cascade;				// Cascade Classifier
	Mat captureFrame, grayscaleFrame;			// Captured and converted to gray Frames
	double x_face_pos, y_face_pos, area_face;	// Coordinates of the detected face
	vector< Rect_<int> > faces;					// Vector of faces
	#ifdef RASPBERRY
		RaspiCamCvCapture * captureDevice;		// Video input
	#else
		CvCapture * captureDevice;				// Video input
	#endif
	char sTmp[255];

#ifdef TRACE
	// Trace the program's working directory.
	sprintf(sTmp,"\n Directorio de ejecucion del programa: ");
	trace (sTmp);
	cout << get_ProgramDirectory();
#endif

#ifdef RECOGNITION
	// Declaration of variables for face recognition
	string people[MAX_PEOPLE];					// Each person of the model of face recognition
	int im_width, im_height;					// height, width of 1st images of the model of face recognition
	int prediction_seuil;						// Prediction limit
	Ptr<FaceRecognizer> model;					// Model of face recognition

	// Prediction limit depending on the algorithm used
	#ifdef EIGENFACES
		prediction_seuil = 10;
	#else
		prediction_seuil = 1000;
	#endif

	// Model of face recognition
	#ifdef EIGENFACES
		model = createEigenFaceRecognizer();
	#else
		model = createFisherFaceRecognizer();
	#endif

	// Read measures file
	read_measures_file(im_width, im_height, '=');

	// Read people file
	read_people_file(people, '(', ')', '=');

	// Load model
	load_model_recognition(model);
	t_load = clock();

	#ifdef DEBUG
		// NOTE(review): difftime() expects time_t seconds but is fed
		// clock_t ticks here, so print_time() receives tick counts, not
		// seconds — confirm print_time() compensates.
		dtime = difftime(t_load,t0);
		sprintf(sTmp,"\n (Face Tracking) tiempo de carga del modelo = ");
		debug(sTmp);
		cout << print_time(dtime);
	#endif

#endif

	// Video input depending on the device
	#ifdef RASPBERRY
		captureDevice = raspiCamCvCreateCameraCapture(0); // Index doesn't really matter
	#else
		captureDevice = cvCreateCameraCapture(0);
	#endif

	// Load of Haar Cascade
	if (!load_haar_cascade(face_cascade)) {	return -1;}

	// Create new window
	// NOTE(review): SHOW is presumably a macro that compiles this call in or
	// out (elsewhere it is used with #ifdef) — confirm its definition.
	SHOW cvNamedWindow("Face tracking", 1);

	do {
		t_loop0 = clock();

		#ifdef RASPBERRY
				IplImage* image = raspiCamCvQueryFrame(captureDevice);	// Get images from the video input
		#else
				IplImage* image = cvQueryFrame(captureDevice);			// Get images from the video input
		#endif
		captureFrame = cvarrToMat(image);			// Convert images to Mat
		// NOTE(review): camera frames are typically BGR; CV_RGB2GRAY swaps
		// the R/B channel weights — CV_BGR2GRAY is the usual choice here,
		// confirm intended.
		cvtColor(captureFrame, grayscaleFrame, CV_RGB2GRAY);	// Convert the image to gray scale

		// Detection and Face Recognition
		face_detection(face_cascade, grayscaleFrame, captureFrame, &faces, x_face_pos, y_face_pos, area_face);

		#ifdef RECOGNITION
				// Detection and Face Recognition
				face_recognition(people, grayscaleFrame, captureFrame, &faces, im_width, im_height, model,
								prediction_seuil, x_face_pos, y_face_pos, area_face);
		#endif

		// Display results
		#ifdef SHOW
				imshow("Face tracking", captureFrame);
		#endif

		t_loop = clock();
		#ifdef DEBUG
			// Loop time (ticks — see the note on difftime above).
			dtime = difftime(t_loop,t_loop0);
			sprintf(sTmp,"\n (Face Tracking) tiempo del bucle del reconocimiento de cara = ");
			debug(sTmp);
			cout << print_time(dtime);
		#endif
	} while(cvWaitKey(10) < 0);

	// Destroy window
	#ifdef SHOW
		cvDestroyWindow("Face tracking");
	#endif

	#ifdef TRACE
		trace ("\n");
	#endif

	// Release the camera.
	#ifdef RASPBERRY
		raspiCamCvReleaseCapture(&captureDevice);
	#else
		cvReleaseCapture(&captureDevice);
	#endif

	return 0;
}
Beispiel #6
0
int main(int argc, char* argv[])
{
	// Face-tracking pan/tilt turret: keeps the first detected face centred
	// by driving two PWM servos (yaw/pitch) through wiringPi, with optional
	// speech output via AquesTalkPi.
	// NOTE(review): this definition is truncated in this excerpt — the try
	// block opened below is never closed before the next definition begins.
#if ( USE_TALK > 0 )
	system("/home/pi/aquestalkpi/AquesTalkPi -g 60 \"まっすんどろいど を きどうします\" | aplay");
#endif

#if ( USE_TALK_TEST > 0 )
	int i=0;
	for(i=0;i<TALK_REASON_NUM;i++){
		talkReason(i);
	}

	for(i=0;i<TALK_WELCOME_NUM;i++){
		talkWelcome(i);
	}
#endif

	printf("Press Esc-Key to Exit Process.\n");
	int iRet = -1;
	
	try
	{
		// ready GPIO
		if( wiringPiSetupGpio() == -1 ){
			printf("failed to wiringPiSetupGpio()\n");
			throw 0;
		}
		// ready PWM
		pinMode(GPIO_PITCH, PWM_OUTPUT);
		pinMode(GPIO_YAW, PWM_OUTPUT);
		pwmSetMode(PWM_MODE_MS);
		pwmSetClock(400);
		pwmSetRange(1024);
				
		pinMode(GPIO_EXIT, INPUT);
		pinMode(GPIO_HALT, INPUT);
		
		//pinMode(GPIO_MONOEYE, OUTPUT);
		//digitalWrite(GPIO_MONOEYE,HIGH);

		// servoMotor GWS park hpx min25 mid74 max123
		const int servo_mid = 76;
		const int servo_min = 36; //servo_mid - 30;
		const int servo_max = 122; //servo_mid + 30;
		const double servo_min_deg = 0.0;
		const double servo_max_deg = 180.0;
		const double ratio_deg = ( servo_max_deg - servo_min_deg ) / ( servo_max - servo_min );
		
		const int pitch_limit_max = servo_max - 22;
		const int pitch_limit_min = servo_min + 34;

	RASPIVID_CONFIG * config = new RASPIVID_CONFIG();
	if(!config){
		printf("failed to create RASPIDVID_CONFIG.\n");
		return -1;
	}
	config->width=static_cast<int>(WIN_WIDTH);
	config->height=static_cast<int>(WIN_HEIGHT);
	config->bitrate=0;	// zero: leave as default
	config->framerate=0;
	config->monochrome=0;
    config->rotation=90; // using https://github.com/IsaoNakamura/robidouille.git
    
#if ( USE_WIN > 0 )
		cvNamedWindow( DISP_WIN , CV_WINDOW_AUTOSIZE );
#endif
	
		RaspiCamCvCapture* capture = NULL;
		if (argc > 1){
            //capture = raspiCamCvCreateCameraCapture2( argv[1], config );
            throw 0;
		}else{
			capture = raspiCamCvCreateCameraCapture2( 0, config );
            if(config){
                delete config;
                config = NULL;
            }
			if(!capture){
				printf("failed to create capture\n");
				throw 0;
			}
			// キャプチャサイズを設定する. (set the capture size)
			double w = WIN_WIDTH;
			double h = WIN_HEIGHT;
            raspiCamCvSetCaptureProperty (capture, RPI_CAP_PROP_FRAME_WIDTH, w);
            raspiCamCvSetCaptureProperty (capture, RPI_CAP_PROP_FRAME_HEIGHT, h);
		}
	
		// 正面顔検出器の読み込み (load the frontal-face detector)
		// NOTE(review): cvLoad's result is not NULL-checked; a missing
		// cascade file will make cvHaarDetectObjects fail later.
		CvHaarClassifierCascade* cvHCC = (CvHaarClassifierCascade*)cvLoad(CASCADE, NULL,NULL,NULL);
	
		// 検出に必要なメモリストレージを用意する (storage for the detector)
		CvMemStorage* cvMStr = cvCreateMemStorage(0);
		
		// サーボ角度を中間に設定 (centre both servos)
		pwmWrite(GPIO_YAW, servo_mid);
		pwmWrite(GPIO_PITCH, servo_mid);
		
		// スクリーン座標からカメラのピッチ角とヨー角を算出するオブジェクトを初期化
		DF::CamAngleConverter camAngCvt(	static_cast<int>(WIN_WIDTH),
											static_cast<int>(WIN_HEIGHT),
											ANGLE_DIAGONAL					);
		
		if (!camAngCvt.Initialized()) {
			printf("failed to initialize CamAngleConverter.\n");
			throw 0;
		}

		struct timeval stNow;	// 現時刻取得用 (current time)
		struct timeval stLen;	// 任意時間 (delay interval)
		struct timeval stEnd;	// 現時刻から任意時間経過した時刻 (deadline)
		timerclear(&stNow);
		timerclear(&stLen);
		timerclear(&stEnd);
		unsigned int msec = HOMING_DELAY_MSEC;
		gettimeofday(&stNow, NULL);
		stLen.tv_sec = msec / 1000;
		// NOTE(review): tv_usec holds microseconds but "msec % 1000" is a
		// millisecond remainder — the sub-second part of the delay ends up
		// 1000x too short; probably should be (msec % 1000) * 1000.
		stLen.tv_usec = msec % 1000;
		timeradd(&stNow, &stLen, &stEnd);
		
		srand(stNow.tv_usec);

		// 前値保存用のサーボ角度 (previous servo values)
		int _servo_yaw		= servo_mid;
		int _servo_pitch	= servo_mid;

		// スクリーン中心らへんの範囲 (size of the "close enough" centre zone)
		double center_area_x = 60.0;
		double center_area_y = 60.0;
		
		int homing_state = HOMING_NONE;

		int over_cnt = 0;
		int nonface_cnt = 0;
		int silent_cnt = 0;

		// メインループ (main loop)
		while(1){
			int wrk_homing_state = HOMING_NONE;
			
			IplImage* frame = raspiCamCvQueryFrame(capture);
			if(!frame){
				printf("failed to query frame.\n");
				break;
			}

			// 画像中から検出対象の情報を取得する (detect faces in the frame)
			CvSeq* face = cvHaarDetectObjects(	  frame
												, cvHCC
												, cvMStr
												, 1.2
												, 2
												, CV_HAAR_DO_CANNY_PRUNING
												, minsiz
												, minsiz
			);
			if(!face){
				printf("failed to detect objects.\n");
				break;
			}
	
			int i=0;
			//for(i = 0; i < face->total; i++) {
			if( face->total > 0 ){
				nonface_cnt = 0;
				i=0; // 最初のひとつの顔だけ追尾ターゲットにする (track only the first face)
				
				// 検出情報から顔の位置情報を取得
				CvRect* faceRect = (CvRect*)cvGetSeqElem(face, i);
				if(!faceRect){
					printf("failed to get Face-Rect.\n");
					break;
				}
				center_area_x = faceRect->width / 2.0 * CENTER_AREA_RATIO;
				center_area_y = faceRect->height / 2.0 * CENTER_AREA_RATIO;
#if ( USE_WIN > 0 )
				// スクリーン中心らへん矩形描画を行う (draw the centre zone)
				// NOTE(review): the first corner uses center_area_x for both
				// coordinates and the second corner uses center_area_y for
				// both — looks like an x/y mix-up; confirm intended.
				cvRectangle(	  frame
								, cvPoint( (WIN_WIDTH_HALF - center_area_x), (WIN_HEIGHT_HALF - center_area_x) )
								, cvPoint( (WIN_WIDTH_HALF + center_area_y), (WIN_HEIGHT_HALF +center_area_y) )
								, CV_RGB(0, 255 ,0)
								, 2
								, CV_AA
								, 0
				);
				// 取得した顔の位置情報に基づき、矩形描画を行う (outline the face)
				cvRectangle(	  frame
								, cvPoint(faceRect->x, faceRect->y)
								, cvPoint(faceRect->x + faceRect->width, faceRect->y + faceRect->height)
								, CV_RGB(255, 0 ,0)
								, 2
								, CV_AA
								, 0
				);
#endif

				// 顔のスクリーン座標を算出 (face centre in screen coordinates)
				double face_x = faceRect->x + (faceRect->width / 2.0);
				double face_y = faceRect->y + (faceRect->height / 2.0);

				if(	   face_x >= (WIN_WIDTH_HALF - center_area_x) && face_x <= (WIN_WIDTH_HALF + center_area_x)
					&& face_y >= (WIN_HEIGHT_HALF - center_area_y) && face_y <= (WIN_HEIGHT_HALF + center_area_y)	){
					wrk_homing_state = HOMING_CENTER;
				}else{
					// 顔がスクリーン中心らへんになければ処理を行う

					// 現在時刻を取得
					gettimeofday(&stNow, NULL);

					if( timercmp(&stNow, &stEnd, >) ){
						// 任意時間経てば処理を行う (only act after the delay expires)
						
						// スクリーン座標からカメラのピッチ・ヨー角を算出
						double deg_yaw		= 0.0;
						double deg_pitch	= 0.0;
						if( camAngCvt.ScreenToCameraAngle(deg_yaw, deg_pitch, face_x, face_y) != 0 ){
							continue;
						}
						printf("face(%f,%f) deg_yaw=%f deg_pitch=%f servo(%d,%d)\n",face_x,face_y,deg_yaw,deg_pitch,_servo_yaw,_servo_pitch);

						// サーボ値を入れる変数 初期値は前回の結果
						int servo_yaw	= _servo_yaw;
						int servo_pitch	= _servo_pitch;
				
						// ヨー角用サーボ制御 (yaw servo control)
						//servo_yaw = servo_mid - static_cast<int>(deg_yaw / ratio_deg); // カメラ固定だとこれでよい
						servo_yaw = _servo_yaw  - static_cast<int>(deg_yaw / ratio_deg);
						if(servo_yaw > servo_max){
							over_cnt++;
							printf("yaw is over max. cnt=%d ######## \n", over_cnt);
							// NOTE(review): unlike the "under min" branch below
							// and both pitch branches, servo_yaw is NOT clamped
							// here — "servo_yaw = servo_max;" appears missing.
						}else if(servo_yaw < servo_min){
							over_cnt++;
							printf("yaw is under min. cnt=%d ######## \n",over_cnt);
							servo_yaw = servo_min;
						}
						//printf("face_x=%f deg_yaw=%f servo_yaw=%d \n",face_x,deg_yaw,servo_yaw);
				
						// ピッチ角用サーボ制御 (pitch servo control)
						//servo_pitch = servo_mid - static_cast<int>(deg_pitch / ratio_deg); // カメラ固定だとこれでよい
						servo_pitch = _servo_pitch - static_cast<int>(deg_pitch / ratio_deg);
						if(servo_pitch > pitch_limit_max){
							over_cnt++;
							printf("pitch is over max ######## \n");
							servo_pitch = pitch_limit_max;
						}else if(servo_pitch < pitch_limit_min){
							over_cnt++;
							printf("pitch is under min ######## \n");
							servo_pitch = pitch_limit_min;
						}
						//printf("pwmWrite(%d,%d,%f)\n",servo_yaw,servo_pitch,ratio_deg);

						bool isPwmWrite = false;
						
						// SERVO_OVER_MAXフレーム分の間、サーボ角度が最大が続くのであれば、サーボ角度を中間にもどす。
						if( over_cnt > SERVO_OVER_MAX){
							servo_yaw=servo_mid;
							servo_pitch=servo_mid;
							over_cnt = 0;
						}

						// 前回と同じサーボ値ならスキップ (skip if unchanged)
						if(servo_yaw!=_servo_yaw){
							// サーボの角度設定
							printf("pwmWrite(GPIO_YAW, %d)\n",servo_yaw);
							pwmWrite(GPIO_YAW, servo_yaw);
							isPwmWrite = true;
							// 前値保存
							_servo_yaw = servo_yaw;
						}
						if(servo_pitch!=_servo_pitch){
							// サーボの角度設定
							printf("pwmWrite(GPIO_PITCH, %d)\n",servo_pitch);
							pwmWrite(GPIO_PITCH, servo_pitch);
							isPwmWrite = true;
							// 前値保存
							_servo_pitch = servo_pitch;
						}

						if( isPwmWrite ){
							// サーボの値を設定したら現時刻から任意時間プラスして、サーボの角度設定しない終了時刻を更新
							timerclear(&stEnd);
							timeradd(&stNow, &stLen, &stEnd);
							wrk_homing_state = HOMING_HOMING;
						}else{
							wrk_homing_state = HOMING_KEEP;
						}

					}else{ // if( timercmp(&stNow, &stEnd, >) )
						wrk_homing_state = HOMING_DELAY;
					}
				} // if(face_x ... ){}else
			}else{ // if( face->total > 0 )
				wrk_homing_state = HOMING_NONE;
				// NONFACE_CNT_MAXフレーム分の間、顔検出されなければ、サーボ角度を中間にもどす。
				nonface_cnt++;
				silent_cnt++;
#if ( USE_TALK > 0 )
				if( silent_cnt > SILENT_CNT ){
					silent_cnt = 0;
					//digitalWrite(GPIO_MONOEYE,HIGH);
					int talkType = rand() % TALK_REASON_NUM;
					talkReason(talkType);
					//digitalWrite(GPIO_MONOEYE,LOW);
				}
#endif
				if( nonface_cnt > NONFACE_CNT_MAX ){
					nonface_cnt = 0;
					int servo_yaw = servo_mid;
					int servo_pitch = servo_mid;
					// サーボの角度設定
					printf("pwmWrite(GPIO_YAW, %d) for non-face. \n",servo_yaw);
					pwmWrite(GPIO_YAW, servo_yaw);
					//isPwmWrite = true;
					// 前値保存
					_servo_yaw = servo_yaw;

					// サーボの角度設定
					printf("pwmWrite(GPIO_PITCH, %d) for non-face. \n",servo_pitch);
					pwmWrite(GPIO_PITCH, servo_pitch);
					//isPwmWrite = true;
					// 前値保存
					_servo_pitch = servo_pitch;
				}
			}
			
			// ホーミング状態を更新 (announce state transitions)
			if(homing_state != wrk_homing_state){
				int talkType = 0;
				switch( wrk_homing_state )
				{
				case HOMING_NONE:
					printf("[STATE] no detected face.\n");
/*
#if ( USE_TALK > 0 )
					if( homing_state != HOMING_DELAY ){
						digitalWrite(GPIO_MONOEYE,HIGH);
						talkType = rand() % TALK_REASON_NUM;
						talkReason(talkType);
						digitalWrite(GPIO_MONOEYE,LOW);
					}
#endif
*/
					break;
				case HOMING_HOMING:
					printf("[STATE] homing.\n");
#if ( USE_TALK > 0 )
					//digitalWrite(GPIO_MONOEYE,HIGH);
					talkType = rand() % TALK_WELCOME_NUM;
					talkWelcome(talkType);
					//digitalWrite(GPIO_MONOEYE,LOW);
					silent_cnt = 0;
#endif
					break;
				case HOMING_DELAY:
					printf("[STATE] delay.\n");
					break;
				case HOMING_CENTER:
					printf("[STATE] face is center.\n");
#if ( USE_TALK > 0 )
					////digitalWrite(GPIO_MONOEYE,HIGH);
					//talkType = rand() % TALK_WELCOME_NUM;
					//talkWelcome(talkType);
					////digitalWrite(GPIO_MONOEYE,LOW);
					//silent_cnt = 0;
#endif
					break;
				case HOMING_KEEP:
					printf("[STATE] keep.\n");
					break;
				default:
					break;
				}
				homing_state = wrk_homing_state;
			}

#if ( USE_WIN > 0 )
			// 画面表示更新 (refresh the display)
			cvShowImage( DISP_WIN, frame);
#endif

			// 負荷分散のためDelay (delay to reduce CPU load)
			// NOTE(review): cvWaitKey takes milliseconds; the DELAY_SEC name
			// suggests seconds — confirm the constant's unit.
			char c = cvWaitKey(DELAY_SEC);
			if( c==27 ){ // ESC-Key
				printf("exit program.\n");
#if ( USE_TALK > 0 )
				system("/home/pi/aquestalkpi/AquesTalkPi -g 60 \"ぷろぐらむを しゅうりょう します\" | aplay");
#endif
				break;
			}
			
			if( digitalRead(GPIO_EXIT) == LOW ){
				printf("exit program.\n");
#if ( USE_TALK > 0 )
				system("/home/pi/aquestalkpi/AquesTalkPi -g 60 \"ぷろぐらむを しゅうりょう します\" | aplay");
#endif
				break;
			}
			if( digitalRead(GPIO_HALT) == LOW ){
#if ( USE_TALK > 0 )
				system("/home/pi/aquestalkpi/AquesTalkPi -g 60 \"しすてむを しゃっとだうん します\" | aplay");
#endif
				printf("shutdown system.\n");
				system("sudo halt");
				break;
			}
		} // while(1)
int main(int argc, char* argv[])
{
	printf("Press Esc-Key to Exit Process.\n");

	RASPIVID_CONFIG * config = new RASPIVID_CONFIG();
	if(!config){
		printf("failed to create RASPIDVID_CONFIG.\n");
		return -1;
	}
	config->width=static_cast<int>(WIN_WIDTH);
	config->height=static_cast<int>(WIN_HEIGHT);
	config->bitrate=0;	// zero: leave as default
	config->framerate=0;
	config->monochrome=0;

	cvNamedWindow( DISP_WIN , CV_WINDOW_AUTOSIZE );
	RaspiCamCvCapture* capture = NULL;

	capture = raspiCamCvCreateCameraCapture2( 0, config );
	if(config){
		delete config;
		config = NULL;
	}
	if(!capture){
		printf("failed to create capture\n");
		return -1;
	}
	// キャプチャサイズを設定する.
	double w = WIN_WIDTH;
	double h = WIN_HEIGHT;
	raspiCamCvSetCaptureProperty (capture, RPI_CAP_PROP_FRAME_WIDTH, w);
	raspiCamCvSetCaptureProperty (capture, RPI_CAP_PROP_FRAME_HEIGHT, h);

	// 正面顔検出器の読み込み
	CvHaarClassifierCascade* cvHCC = (CvHaarClassifierCascade*)cvLoad(CASCADE, NULL,NULL,NULL);

	// 検出に必要なメモリストレージを用意する
	CvMemStorage* cvMStr = cvCreateMemStorage(0);

	while(1){
		IplImage* frame = raspiCamCvQueryFrame(capture);
		if(!frame){
			printf("failed to query frame.\n");
			break;
		}
		// 画像中から検出対象の情報を取得する
		CvSeq* face = cvHaarDetectObjects(	  frame
											, cvHCC
											, cvMStr
											, 1.2
											, 2
											, CV_HAAR_DO_CANNY_PRUNING
											, minsiz
											, minsiz
		);
		if(!face){
			printf("failed to detect objects.\n");
			break;
		}

		int i=0;
		for(i = 0; i < face->total; i++) {
			// 検出情報から顔の位置情報を取得
			CvRect* faceRect = (CvRect*)cvGetSeqElem(face, i);
			if(!faceRect){
				printf("failed to get Face-Rect.\n");
				break;
			}
			
			// 取得した顔の位置情報に基づき、矩形描画を行う
			cvRectangle(	  frame
							, cvPoint(faceRect->x, faceRect->y)
							, cvPoint(faceRect->x + faceRect->width, faceRect->y + faceRect->height)
							, CV_RGB(255, 0 ,0)
							, 2
							, CV_AA
							, 0
			);
		}
		cvShowImage( DISP_WIN, frame);
		char c = cvWaitKey(DELAY_MSEC);
		if( c==27 ){ // ESC-Key
			break;
		}
		sleep(0);
	}

	// 用意したメモリストレージを解放
	cvReleaseMemStorage(&cvMStr);

	// カスケード識別器の解放
	cvReleaseHaarClassifierCascade(&cvHCC);

	raspiCamCvReleaseCapture(&capture);
	cvDestroyWindow(DISP_WIN);

	return 0;
}
// Focus/alignment tool: shows a movable 640x480 viewport crop of the full
// 2592x1944 sensor image together with a live sharpness measurement.  Arrow
// keys move the viewport, 's'/'r' save/restore its position, 'f' flips the
// view, 'c' recentres, 'x' halts the Pi, ESC exits.
int main(int argc, char *argv[ ])
{
	char SETTINGS_FILE[]="settings.txt";

	RASPIVID_CONFIG * config = (RASPIVID_CONFIG*)malloc(sizeof(RASPIVID_CONFIG));
	if(!config) {
		return 1;	// out of memory
	}

	// Capture at the full still resolution of the v1 camera module.
	int full_width=2592;
	int full_height=1944;
	config->width=full_width;
	config->height=full_height;
	config->bitrate=0;	// zero: leave as default
	config->framerate=15;
	config->monochrome=1;

	RaspiCamCvCapture * capture = (RaspiCamCvCapture *) raspiCamCvCreateCameraCapture2(0, config);
	free(config);	// the capture keeps its own copy of the config

	CvFont font;
	double hScale=0.8;
	double vScale=0.8;
	int    lineWidth=2;

	cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX|CV_FONT_ITALIC, hScale, vScale, 0, lineWidth, 8);

	IplImage* tmp  = 0;	// scratch buffer for the scaled-down full view
	IplImage* image = 0;	// BGR image actually shown on screen

	cvNamedWindow("RaspiCamTest", 1);
	cvMoveWindow("RaspiCamTest", 100,100);

	// Viewport (crop window) offset relative to the sensor centre.
	int viewport_x = 0;
	int viewport_y = -400;
	int viewport_width = 640;
	int viewport_height = 480;

	CvScalar sharpness= {100, 5.0, 1.0, .10};

	int flip=0;		// rotate view 180 degrees (camera mounted upside down)
	int full_view=0;	// show the whole sensor instead of the crop

	// Restore the last saved viewport position, if any.
	FILE * f = fopen(SETTINGS_FILE,"r");
	if(f) {
		fscanf(f,"%d %d %d\n",&viewport_x,&viewport_y,&flip);
		fclose(f);
	}

	int exit =0;
	do {
		IplImage* big_img = raspiCamCvQueryFrame(capture);
		// Lazily allocate the display buffers once the frame depth is known.
		if(!image) {
			image = cvCreateImage( cvSize(viewport_width, viewport_height), big_img->depth,3 );
		}
		if(!tmp) {
			tmp = cvCreateImage( cvSize(viewport_width, viewport_height), big_img->depth,1 );
		}

		if(!full_view) {
			// Crop the viewport out of the full-resolution frame.
			CvRect cropRect = cvRect( (big_img->width-viewport_width)/2+viewport_x
					,(big_img->height-viewport_height)/2+viewport_y
					,viewport_width ,viewport_height );
			cvSetImageROI(big_img,cropRect);
			CvRect destRect=cvRect(0,0,viewport_width,viewport_height);
			cvSetImageROI(image,destRect);
			cvCvtColor(big_img,image,CV_GRAY2BGR);

			destRect=cvRect(0,0,viewport_width,viewport_height);
			cvSetImageROI(image,destRect);
		} else {
			// Show the whole sensor, scaled down, with the viewport outlined.
			CvRect cropRect = cvRect( 0,0, full_width, full_height );
			cvSetImageROI(big_img,cropRect);
			cvRectangle(big_img
				,cvPoint((big_img->width-viewport_width)/2+viewport_x,
				         (big_img->height-viewport_height)/2+viewport_y)
				,cvPoint((big_img->width+viewport_width)/2+viewport_x,
				         (big_img->height+viewport_height)/2+viewport_y)
				,CV_RGB( 0, 255, 0 ));

			cvResize(big_img,tmp);
			cvCvtColor(tmp,image,CV_GRAY2BGR);
		}

		if(flip) {
			cvFlip(image,0,-1);	// 180-degree rotation
		}

		sharpness = GetSharpness(big_img,image);
		double threshold=2.8;	// empirically chosen "in focus" limit

		char text[200];

		if(full_view) {
			sprintf(text , "full view" );
			cvPutText (image, text, cvPoint(05, 360), &font, cvScalar(255, 0, 0, 0));
		}
		sprintf(text , (sharpness.val[1]>threshold ? "** OK **" : "!! keep going !!" ) );
		cvPutText (image, text, cvPoint(05, 400), &font, cvScalar(255, 0, 0, 0));
		sprintf(text, "Sharpness: %.0f (%.2f) x=%d y=%d", sharpness.val[0], sharpness.val[1], viewport_x, viewport_y);
		cvPutText (image, text, cvPoint(05, 440), &font, cvScalar(255, 0, 0, 0));

		// Crosshair overlay.
		cvLine( image, cvPoint(0,240), cvPoint(639,240), CV_RGB( 255, 0, 0 ));
		cvLine( image, cvPoint(320,0), cvPoint(320,479),   CV_RGB( 255, 0, 0 ));
		cvCircle( image, cvPoint(320,240), 100,   CV_RGB( 255, 0, 0 ));

		cvShowImage("RaspiCamTest", image);

		char key = cvWaitKey(10);

		// Arrow-key codes are swapped when the view is flipped so that the
		// arrows always move the viewport in the direction seen on screen.
		if(flip) {
			switch(key)
			{
				case 83:        //left
					viewport_x -= 10;
					if(viewport_x<-(full_width-viewport_width)/2) {
						viewport_x=-(full_width-viewport_width)/2;
					}
					break;
				case 84:        //up
					viewport_y -= 10;
					if(viewport_y< -(full_height-viewport_height)/2) {
						viewport_y=-(full_height-viewport_height)/2;
					}
					break;
				case 81:        //right
					viewport_x += 10;
					if(viewport_x>  (full_width-viewport_width)/2) {
						viewport_x= (full_width-viewport_width)/2;
					}
					break;
				case 82:        //down
					viewport_y += 10;
					// BUG FIX: the original clamped viewport_x against the
					// vertical limits here; the vertical offset is the one
					// that must be clamped.
					if(viewport_y>  (full_height-viewport_height)/2) {
						viewport_y= (full_height-viewport_height)/2;
					}
					break;
			}
		} else {
			switch(key)
			{
			case 81:        //left
				viewport_x -= 10;
				if(viewport_x<-(full_width-viewport_width)/2) {
					viewport_x=-(full_width-viewport_width)/2;
				}
				break;
			case 82:        //up
				viewport_y -= 10;
				if(viewport_y< -(full_height-viewport_height)/2) {
					viewport_y=-(full_height-viewport_height)/2;
				}
				break;
			case 83:        //right
				viewport_x += 10;
				if(viewport_x>  (full_width-viewport_width)/2) {
					viewport_x= (full_width-viewport_width)/2;
				}
				break;
			case 84:        //down
				viewport_y += 10;
				// BUG FIX: same viewport_x/viewport_y clamp error as in the
				// flipped branch above.
				if(viewport_y>  (full_height-viewport_height)/2) {
					viewport_y= (full_height-viewport_height)/2;
				}
				break;
			}
		}

		switch(key)
		{
			case 's':       //save current position
			{
				FILE *f = fopen(SETTINGS_FILE,"w");
				if(f) {
					fprintf(f,"%d %d %d\n",viewport_x,viewport_y,flip);
					fclose(f);
				}
				break;
			}

			case 'r':       //read position
			{
				FILE *f = fopen(SETTINGS_FILE,"r");
				if(f) {
					fscanf(f,"%d %d %d\n",&viewport_x,&viewport_y,&flip);
					fclose(f);
				}
				break;
			}

			case 'c':       //centre the viewport
				viewport_x=viewport_y=0;
				break;

			case 'f':       //toggle flipped view
				flip = !flip;
				break;

			case 'x':       //shut the Pi down
				system("sudo halt");
				break;

			case 27:		// Esc to exit
				exit = 1;
				break;
		}

	} while (!exit);

	// Release everything (the original leaked the two work images).
	if(image) cvReleaseImage(&image);
	if(tmp) cvReleaseImage(&tmp);
	cvDestroyWindow("RaspiCamTest");
	raspiCamCvReleaseCapture(&capture);
	return 0;
}
Beispiel #9
0
 void *EXImageRecognition::RunVideoThread (void *self) {
     // Video-capture worker thread: pulls frames from either the RaspiCam or
     // a generic cv::VideoCapture, publishes them via SetNewFrame(), measures
     // the real frame rate once per second, and sleeps to pace itself to the
     // target framerate.  Runs until _videoexit becomes non-zero.
     // NOTE(review): the function's closing brace is missing in this excerpt.
     EXImageRecognition *Class = (EXImageRecognition*)self;
     
     unsigned long long TimeStamp = Class->GetMillisecondTime();
     unsigned int FPS = 0;
     
     while (Class->_videoexit == 0) {
         
         
         // Check Capture Device
         
         bool OK = false;
         if (Class->_Device == CaptureDeviceRaspiCam) {
 #if __arm__ && _OPEN_CV_RASPI
             // NOTE(review): VideoCaptureHandle is referenced without
             // "Class->" here but with it below — confirm whether this is a
             // global or a missing member access.
             if (VideoCaptureHandle != 0) {
                 OK = true;
             }
 #endif
         } else {
             if (Class->VideoCapture != 0 && Class->VideoCapture->isOpened()) {
                 OK = true;
             }
         }
         
         
         // If Good Capture Device exists
         
         if (OK == true) {
             
             
             // Get New Frame
             
             cv::Mat SourceFrame;
             if (Class->_Device == CaptureDeviceRaspiCam) {
     #if __arm__ && _OPEN_CV_RASPI
                 SourceFrame = cv::cvarrToMat(raspiCamCvQueryFrame(Class->VideoCaptureHandle));
     #endif
             } else {
                 *Class->VideoCapture >> SourceFrame;
             }
             
             Class->SetNewFrame(SourceFrame);
             
             #ifdef __APPLE__
             
             // Call Callback Method for Obj-C
             if (Class->Apple_Video_Dispatch) {
                 Class->ImageBlock();
             }
             
             #endif
             
         }
         
         
         // Calculate REAL Framerate
         
         FPS++;
         unsigned long long BufferTime = 0;
         if ((TimeStamp + 1000) <= (BufferTime = Class->GetMillisecondTime())) {
             Class->SetRealFPS(FPS);
             TimeStamp = BufferTime;
             FPS = 0;
         }
         // NOTE(review): divides by GetFramerate(); a zero framerate would
         // divide by zero — confirm callers guarantee it is non-zero.
         usleep((1000 / Class->GetFramerate()) * 1000);
     }
Beispiel #10
0
// Capture-property demo: open the Raspberry Pi camera at a size chosen by
// command-line options and overlay the reported capture properties on the
// live image until ESC is pressed.
int main(int argc, char *argv[ ]){

	RASPIVID_CONFIG * config = (RASPIVID_CONFIG*)malloc(sizeof(RASPIVID_CONFIG));
	if(!config){
		return EXIT_FAILURE;	// out of memory
	}
	
	config->width=320;
	config->height=240;
	config->bitrate=0;	// zero: leave as default
	config->framerate=0;
	config->monochrome=0;

	int opt;

	// Command-line options select the capture size and colour mode.
	while ((opt = getopt(argc, argv, "lxm")) != -1)
	{
		switch (opt)
		{
			case 'l':					// large
				config->width = 640;
				config->height = 480;
				break;
			case 'x':	   				// extra large
				config->width = 960;
				config->height = 720;
				break;
			case 'm':					// monochrome
				config->monochrome = 1;
				break;
			default:
				// The original passed a stray 'opt' argument with no matching
				// conversion specifier and documented -m under "-l".
				fprintf(stderr, "Usage: %s [-x] [-l] [-m] \n", argv[0]);
				fprintf(stderr, "-l: Large mode\n");
				fprintf(stderr, "-x: Extra large mode\n");
				fprintf(stderr, "-m: Monochrome mode\n");
				exit(EXIT_FAILURE);
		}
	}

	/*
	Could also use hard coded defaults method: raspiCamCvCreateCameraCapture(0)
	*/
	RaspiCamCvCapture * capture = (RaspiCamCvCapture *) raspiCamCvCreateCameraCapture2(0, config);
	free(config);	// the capture keeps its own copy of the config
	if(!capture){
		fprintf(stderr, "failed to create capture\n");
		return EXIT_FAILURE;
	}
	
	CvFont font;
	double hScale=0.4;
	double vScale=0.4;
	int    lineWidth=1;

	cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX|CV_FONT_ITALIC, hScale, vScale, 0, lineWidth, 8);

	cvNamedWindow("RaspiCamTest", 1);
	int exit =0;
	do {
		IplImage* image = raspiCamCvQueryFrame(capture);
		
		// Overlay the capture properties reported by the driver.
		char text[200];
		sprintf(
			text
			, "w=%.0f h=%.0f fps=%.0f bitrate=%.0f monochrome=%.0f"
			, raspiCamCvGetCaptureProperty(capture, RPI_CAP_PROP_FRAME_WIDTH)
			, raspiCamCvGetCaptureProperty(capture, RPI_CAP_PROP_FRAME_HEIGHT)
			, raspiCamCvGetCaptureProperty(capture, RPI_CAP_PROP_FPS)
			, raspiCamCvGetCaptureProperty(capture, RPI_CAP_PROP_BITRATE)
			, raspiCamCvGetCaptureProperty(capture, RPI_CAP_PROP_MONOCHROME)
		);
		cvPutText (image, text, cvPoint(05, 40), &font, cvScalar(255, 255, 0, 0));
		
		sprintf(text, "Press ESC to exit");
		cvPutText (image, text, cvPoint(05, 80), &font, cvScalar(255, 255, 0, 0));
		
		cvShowImage("RaspiCamTest", image);
		
		char key = cvWaitKey(10);
		
		switch(key)
		{
			case 27:		// Esc to exit
				exit = 1;
				break;
			case 60:		// < (less than)
				raspiCamCvSetCaptureProperty(capture, RPI_CAP_PROP_FPS, 25);	// Currently NOOP
				break;
			case 62:		// > (greater than)
				raspiCamCvSetCaptureProperty(capture, RPI_CAP_PROP_FPS, 30);	// Currently NOOP
				break;
		}
		
	} while (!exit);

	cvDestroyWindow("RaspiCamTest");
	raspiCamCvReleaseCapture(&capture);
	return 0;
}