// Kept separate from main() so that capture can be restarted when the two cameras
// lose synchronization.
bool MainLoop(uint64_t leftGUID, uint64_t rightGUID){
    unsigned long capture_time[CAMERA_NUM_MAX];
    Cam_Check left_cam, right_cam;
    left_cam.GUID  = leftGUID;
    right_cam.GUID = rightGUID;
    // KAW: workaround for compiler optimization; without these initializations the
    // camera detection below misbehaves.
    left_cam.found  = false;
    right_cam.found = false;

    dc1394_t *dc;
    dc1394error_t err;
    dc1394camera_list_t *list;
    dc1394video_frame_t *frames[CAMERA_NUM_MAX];

    // Required before calling any other dc1394 function.
    dc = dc1394_new();
    if(!dc){
        cout << "dc1394_new error" << endl;
        return -1;
    }

    err = dc1394_camera_enumerate(dc, &list);
    DC1394_ERR_RTN(err, "Failed to enumerate cameras");

    if(list->num >= 2){
        cout << "# of cameras : " << list->num << endl;
        cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
        for(unsigned i = 0; i < list->num; i++){
            if(list->ids[i].guid == left_cam.GUID){
                cout << "Found Left Camera" << endl;
                cameras[_LEFT] = dc1394_camera_new(dc, list->ids[i].guid);
                left_cam.found = true;
            }else if(list->ids[i].guid == right_cam.GUID){
                cout << "Found Right Camera" << endl;
                cameras[_RIGHT] = dc1394_camera_new(dc, list->ids[i].guid);
                right_cam.found = true;
            }
        }
        dc1394_camera_free_list(list);
        if(right_cam.found && left_cam.found){
            cout << "Found Stereo Camera!!(`・ω・´)" << endl;
            cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
        }else{
            cout << "Stereo Camera Not Found...(´・ω・`)" << endl;
            cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
            return 1;
        }
    }else{
        cout << "Need More Cameras...(´・ω・`)" << endl;
        cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
        dc1394_camera_free_list(list);
        return 1;
    }

    for(unsigned i = 0; i < CAMERA_NUM_MAX; i++){
        if(!cameras[i]){
            cout << "camera" << i << ": not found" << endl;
            return 1;
        }
    }

    // Set up each camera for capture.
    for(int i = 0; i < CAMERA_NUM_MAX; i++){
        // Operation Mode
        err = dc1394_video_set_operation_mode(cameras[i], DC1394_OPERATION_MODE_1394B);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Operation Mode");
        //cout << "Operation Mode = 1394B" << endl;

        // ISO Speed : isochronous bus transfer speed (not film ISO sensitivity)
        err = dc1394_video_set_iso_speed(cameras[i], DC1394_ISO_SPEED_800);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set ISO Speed");
        //cout << "ISO Speed = 800" << endl;

        // Video Mode : image size and color coding
        err = dc1394_video_set_mode(cameras[i], (dc1394video_mode_t) color_mode);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Video Mode");
        //cout << "Image Size = VGA (640 x 480)" << endl;

        // Frame Rate
        err = dc1394_video_set_framerate(cameras[i], (dc1394framerate_t) frame_rate);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Frame Rate");

        // Capture setup : should succeed as long as the frame rate and video mode are valid.
        err = dc1394_capture_setup(cameras[i], NUM_DMA_BUFFERS, DC1394_CAPTURE_FLAGS_DEFAULT);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Setup Cameras");

        // Transmission : start sending the video signal (ON/OFF)
        err = dc1394_video_set_transmission(cameras[i], DC1394_ON);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Start Transmission");

        // -- Feature Mode Settings -- //
        // Gain : camera sensitivity
        // err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL);
        err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_AUTO);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Feature : Gain");

        // Shutter : shutter speed
        err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL);
        // err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_AUTO);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Feature : Shutter");

        // White Balance
        // err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_WHITE_BALANCE, DC1394_FEATURE_MODE_MANUAL);
        err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_WHITE_BALANCE, DC1394_FEATURE_MODE_AUTO);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Feature : White Balance");

        // Saturation
        // err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_SATURATION, DC1394_FEATURE_MODE_MANUAL);
        err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_SATURATION, DC1394_FEATURE_MODE_AUTO);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Feature : Saturation");
    }

    // cout << "Set Up Done" << endl;
    // cout << "Transmission Starts!!" << endl;
    // cout << "Gain, Shutter, White Balance and Saturation Are Set Manually" << endl;
    // cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
    // cout << "Calibrating White Balance..." << endl;

    bool isSync = false;
    int sync_succeed = 0, sync_fail = 0;

    // Main Loop
    while(!gShutOff){
        // Left/right capture times (UNIX timestamps, truncated to milliseconds).
        double timestamps[2];
        double dif_timestamp;
        unsigned char show_frame[640*480*3];

        for(int i = 0; i < CAMERA_NUM_MAX; i++){
            if(dc1394_capture_dequeue(cameras[i], DC1394_CAPTURE_POLICY_WAIT, &frames[i]) != DC1394_SUCCESS){
                dc1394_log_error("Failed To Capture From CAM");
            }
            capture_time[i] = (unsigned long) frames[i]->timestamp;
            timestamps[i]   = (double) floor(frames[i]->timestamp / 1000.0);
        }
        //cout << "timestamps[0] : " << timestamps[_LEFT] << endl;
        //cout << "timestamps[1] : " << timestamps[_RIGHT] << endl;
        dif_timestamp = timestamps[_RIGHT] - timestamps[_LEFT];
        //cout << "dif_timestamp : " << dif_timestamp << endl;

        if(!isSync){
            if(dif_timestamp != 0){
                sync_fail++;
                cout << "Error : There seems to be a problem with synchronization" << endl;
                if(sync_fail > 60){
                    cout << "Restart!!" << endl;
                    clean_up();
                    return false;
                }else if(sync_fail > 0){
                    cout << "This Capture will restart in " << (60 - sync_fail)/15 << " seconds..." << endl;
                }
            }else{
                sync_succeed++;
                if(sync_succeed > 45){
                    isSync = true;
                    cout << "Synchronization Succeeded!!" << endl;
                }
            }
        }

        camera_control(frames[_LEFT]->image);
        // if(!isWhiteCalibrated){
        //     isWhiteCalibrated = white_balance_calibration(frames[_LEFT]->image);
        // }

        for(int i = 0; i < CAMERA_NUM_MAX; i++){
            convert_RGB_to_BGR(frames[i]->image, show_frame);
            memcpy(stereo_images.data.image[i], show_frame, sizeof(char)*WIDTH*HEIGHT*3);
            if(isShowImage){
                if(i == _LEFT){
                    Mat showImage(Size(WIDTH,HEIGHT), CV_8UC3, show_frame);
                    rectangle(showImage, Point(320-40,240-40), Point(320+40,240+40), Scalar(255,0,0), 1, 8, 0);
                    imshow("Left Camera Image", showImage);
                }
            }
        }
        stereo_images.write();

        if(isShowImage && waitKey(1) == 27){
            gShutOff = true;
        }

        if(isShowFrameRate){
            static unsigned long time_a = 0, time_b = -1;
            time_a = capture_time[0];
            // time_b is unsigned and initialized to -1 (its maximum value),
            // so nothing is printed on the very first iteration.
            if(time_a > time_b){
                // Timestamps are in microseconds, so 1,000,000 / delta gives the frame rate in Hz.
                cout << "Frame Rate : " << (1000000.0 / (double)(time_a - time_b)) << endl;
            }
            time_b = time_a;
        }

        for(int i = 0; i < CAMERA_NUM_MAX; i++){
            if(frames[i]){
                // The frame must be enqueued back into the DMA ring buffer,
                // otherwise capture eventually stalls.
                dc1394_capture_enqueue(cameras[i], frames[i]);
            }
        }
    }

    clean_up();
    return true;
}
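// ---------------------------------------------------------------------------
// clean_up() and convert_RGB_to_BGR() are called above but defined elsewhere in
// the project. The versions below are minimal sketches of the assumed behavior
// (a global cameras[CAMERA_NUM_MAX] array plus the WIDTH/HEIGHT constants used
// above); the actual implementations may differ.
// ---------------------------------------------------------------------------

// Assumed clean_up(): stop transmission and release both cameras so that
// MainLoop() can be called again after a synchronization restart.
void clean_up(void)
{
    for(int i = 0; i < CAMERA_NUM_MAX; i++){
        if(cameras[i]){
            dc1394_video_set_transmission(cameras[i], DC1394_OFF);
            dc1394_capture_stop(cameras[i]);
            dc1394_camera_free(cameras[i]);
            cameras[i] = NULL;
        }
    }
}

// Assumed convert_RGB_to_BGR(): swap the R and B channels of one WIDTH x HEIGHT
// frame so it can be wrapped in a cv::Mat (OpenCV expects BGR byte order).
void convert_RGB_to_BGR(const unsigned char *src, unsigned char *dst)
{
    for(int i = 0; i < WIDTH * HEIGHT; i++){
        dst[3*i + 0] = src[3*i + 2];    // B <- R
        dst[3*i + 1] = src[3*i + 1];    // G
        dst[3*i + 2] = src[3*i + 0];    // R <- B
    }
}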
int main (int argc, char *argv[])
{
    //- SSM -//
    initSSM();
    OBJECT.create(5.0, 1.0);
    SCAN_DATA.open(SSM_READ);
    AREA.create(5.0, 1.0);

    //- Classes and structs -//
    Step_buf raw_data_U;    // raw data
    Step_buf raw_data_D;    // raw data
    Step_buf ped_data;      // processed data
    LS3D background_U;      // background (environment) data
    LS3D background_D;      // background (environment) data

    //- Time bookkeeping -//
    double time_MovingObjects_are_detected = get_time();   // time when a moving object was last detected
    double time_MovingObjects_gets_out     = get_time();   // time when moving objects were last absent
    double time_get_background = 0.0;

    //- Initialization -//
    initialize(raw_data_U, raw_data_D, background_U, background_D);
    time_get_background = get_time();
    // raw_data.out_csv();  // for debugging

    //- Main loop -//
    while(1)
    {
        if (SCAN_DATA.readNew())
        {
            double loop_start_time = get_time();

            if (SCAN_DATA.data.det == 'U')
            {
                //- Store the newly read scan data -//
                raw_data_U.set_data(SCAN_DATA.data.det, SCAN_DATA.data.dist,
                                    SCAN_DATA.data.x, SCAN_DATA.data.y, SCAN_DATA.data.z);
                //- Compute the background difference -//
                cal_background_diff(raw_data_U, ped_data, background_U);
                //- Apply the area filter -//
                apply_area_filter(ped_data);
                //- Copy the difference data into the SSM structure -//
                OBJECT.data.det = ped_data.det[CUR_INDEX];
                for (int i = 0; i < STEP_NUM; i++)
                {
                    OBJECT.data.dist[i] = ped_data.dist[i][CUR_INDEX];
                    OBJECT.data.x[i]    = ped_data.x[i][CUR_INDEX];
                    OBJECT.data.y[i]    = ped_data.y[i][CUR_INDEX];
                    OBJECT.data.z[i]    = ped_data.z[i][CUR_INDEX];
                }
            }

            if (SCAN_DATA.data.det == 'D')
            {
                //- Store the newly read scan data -//
                raw_data_D.set_data(SCAN_DATA.data.det, SCAN_DATA.data.dist,
                                    SCAN_DATA.data.x, SCAN_DATA.data.y, SCAN_DATA.data.z);
                //- Compute the background difference -//
                cal_background_diff(raw_data_D, ped_data, background_D);
                //- Apply the area filter -//
                apply_area_filter(ped_data);
                //- Copy the difference data into the SSM structure -//
                OBJECT.data.det = ped_data.det[CUR_INDEX];
                for (int i = 0; i < STEP_NUM; i++)
                {
                    OBJECT.data.dist[i] = ped_data.dist[i][CUR_INDEX];
                    OBJECT.data.x[i]    = ped_data.x[i][CUR_INDEX];
                    OBJECT.data.y[i]    = ped_data.y[i][CUR_INDEX];
                    OBJECT.data.z[i]    = ped_data.z[i][CUR_INDEX];
                }
            }

            //- Check whether static objects exist -//
            // AREA.data.hasObjects = judge_Objects_exist(ped_data);

            //- Check whether moving objects exist -//
            AREA.data.hasMovingObjects = judge_MovingObjects_exist(ped_data);

            if (AREA.data.hasMovingObjects == true)
            {
                // Record the time when a moving object was detected.
                time_MovingObjects_are_detected = get_time();
            }else{
                // Record the time while no moving object is present.
                time_MovingObjects_gets_out = get_time();
                // Time elapsed since a moving object was last detected.
                double timelength_noMovingObjects = time_MovingObjects_gets_out - time_MovingObjects_are_detected;
                cout << "timelength_noMovingObjects = " << timelength_noMovingObjects << endl;
                // If no moving object has been seen for a while, refresh the background data.
                if (timelength_noMovingObjects > 3 && (get_time() - time_get_background) > 180)
                {
                    get_background(raw_data_U, background_U);
                    get_background(raw_data_D, background_D);
                    time_get_background             = get_time();
                    time_MovingObjects_are_detected = get_time();
                    time_MovingObjects_gets_out     = get_time();
                }
            }

            //- Write to SSM -//
            OBJECT.write();
            AREA.write();

            double loop_end_time = get_time();
            sleep_const_freq(loop_start_time, loop_end_time, FREQ);
        }else{
            usleep(10000);  // avoid pegging the CPU at 100%
        }
    }

    //- Close SSM -//
    OBJECT.release();
    SCAN_DATA.close();
    AREA.release();
    endSSM();

    return 0;
}
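// ---------------------------------------------------------------------------
// sleep_const_freq() keeps the loop above running at roughly FREQ [Hz]. It is
// defined elsewhere in the project; this is a minimal sketch assuming get_time()
// returns seconds and FREQ is the target loop frequency in Hz.
// ---------------------------------------------------------------------------
#include <unistd.h>

void sleep_const_freq(double loop_start_time, double loop_end_time, double freq)
{
    double period  = 1.0 / freq;                        // desired loop period [s]
    double elapsed = loop_end_time - loop_start_time;   // time spent on this iteration [s]
    double remain  = period - elapsed;
    if(remain > 0){
        usleep((useconds_t)(remain * 1e6));             // no sleep when the loop has overrun
    }
}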
int main (int argc, char *argv[])
{
    //- SSM -//
    initSSM();
    OBJECT.create(5.0, 1.0);
    SCAN_DATA.open(SSM_READ);

    get_background(&background_U, &background_D);
    // ofstream ofs;
    // ofs.open("background");
    // for (int i = 0; i < beam_num; i++)
    // {
    //     ofs << background_U.dist[i] << endl;
    // }
    // for (int i = 0; i < beam_num; i++)
    // {
    //     ofs << background_D.dist[i] << endl;
    // }
    // ofs.close();
    cout << "get background" << endl;

    while(1)    // data acquisition loop
    {
        if(SCAN_DATA.readNew())     // readNew
        {
            time_1 = get_time();

            if(SCAN_DATA.data.det == 'U')
            {
                OBJECT.data.det = 'U';
                for(int i = 0; i < beam_num; i++)
                {
                    dist_diff[i] = fabs(SCAN_DATA.data.dist[i] - background_U.dist[i]);
                    if(dist_diff[i] <= diff_min)
                    {
                        OBJECT.data.dist[i] = 0.0;
                        OBJECT.data.x[i]    = 0.0;
                        OBJECT.data.y[i]    = 0.0;
                        OBJECT.data.z[i]    = 0.0;
                    }else{ // (dist_diff[i] > diff_min /*&& dist_diff[i] < diff_max*/)
                        OBJECT.data.dist[i] = SCAN_DATA.data.dist[i];
                        OBJECT.data.x[i]    = SCAN_DATA.data.x[i];
                        OBJECT.data.y[i]    = SCAN_DATA.data.y[i];
                        OBJECT.data.z[i]    = SCAN_DATA.data.z[i];
                    }
                }
            }

            if(SCAN_DATA.data.det == 'D')
            {
                OBJECT.data.det = 'D';
                for(int i = 0; i < beam_num; i++)
                {
                    dist_diff[i] = fabs(SCAN_DATA.data.dist[i] - background_D.dist[i]);
                    if(dist_diff[i] <= diff_min)
                    {
                        OBJECT.data.dist[i] = 0.0;
                        OBJECT.data.x[i]    = 0.0;
                        OBJECT.data.y[i]    = 0.0;
                        OBJECT.data.z[i]    = 0.0;
                    }else{ // (dist_diff[i] > diff_min /*&& dist_diff[i] < diff_max*/)
                        OBJECT.data.dist[i] = SCAN_DATA.data.dist[i];
                        OBJECT.data.x[i]    = SCAN_DATA.data.x[i];
                        OBJECT.data.y[i]    = SCAN_DATA.data.y[i];
                        OBJECT.data.z[i]    = SCAN_DATA.data.z[i];
                    }
                }
            }

            OBJECT.write();

            time_2 = get_time();
            // Sleep for the remainder of the period; usleep() takes an unsigned value,
            // so skip it entirely when the loop has already overrun.
            double sleep_us = freq*1000000 - (time_2 - time_1)*1000000;
            if(sleep_us > 0){
                usleep((useconds_t)sleep_us);
            }
            time_3 = get_time();
            cout << "time = " << time_3 - time_1 << endl;
        }else{  // readNew
            usleep(1000);   // avoid pegging the CPU at 100%
        }
    }   // data acquisition loop

    //- Close SSM -//
    OBJECT.release();
    SCAN_DATA.close();
    endSSM();

    return 0;
}
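// ---------------------------------------------------------------------------
// get_time() is used for all loop timing above. A minimal sketch, assuming it
// returns the current UNIX time in seconds as a double; the project's own
// implementation may differ.
// ---------------------------------------------------------------------------
#include <sys/time.h>

double get_time(void)
{
    struct timeval tv;
    gettimeofday(&tv, NULL);
    return (double)tv.tv_sec + (double)tv.tv_usec * 1e-6;
}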