int main(int argc, char **argv)
{
    // SSM
    initSSM();
    OBJECT.open(SSM_READ);

    // Initialize GLUT
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA);
    glutInitWindowSize(640, 640);
    glutInitWindowPosition(0, 0);
    glutCreateWindow("Viewer");

    // Register callbacks
    glutDisplayFunc(display);
    glutReshapeFunc(reshape);
    glutMouseFunc(mouse);
    glutMotionFunc(motion);
    glutIdleFunc(idle);

    // Initialize the environment
    initEnvironment();

    // Hand control over to the GLUT main loop
    // (note: glutMainLoop() never returns, so the cleanup below is unreachable)
    glutMainLoop();

    OBJECT.close();
    endSSM();
    return 0;
}
int main()   // was declared without a return type; implicit int is invalid in C++
{
    int fd;
    struct termios oldtio, newtio;
    char buf[255];
    char buf2[255];

    // Initialize SSM
    initSSM();
    DORDER.open(SSM_READ);

    fd = open(MODEMDEVICE, O_WRONLY | O_NOCTTY);
    if (fd < 0) { perror(MODEMDEVICE); exit(-1); }

    tcgetattr(fd, &oldtio);            // save the current port settings
    bzero(&newtio, sizeof(newtio));
    newtio.c_cflag = BAUDRATE | CRTSCTS | CS8 | CLOCAL | CREAD;
    newtio.c_iflag = IGNPAR;
    newtio.c_oflag = 0;

    // set input mode (non-canonical, no echo, ...)
    newtio.c_lflag = 0;
    newtio.c_cc[VTIME] = 0;            // inter-character timer unused
    newtio.c_cc[VMIN]  = 5;            // block until 5 characters arrive

    tcflush(fd, TCIFLUSH);
    tcsetattr(fd, TCSANOW, &newtio);

    while (1) {
        DORDER.readNew();
        cout << DORDER.data.order << endl;

        buf[0]  = 'o';
        buf2[0] = 'c';
        if (DORDER.data.order >= 1) {
            write(fd, buf, 1);
        } else {
            write(fd, buf2, 1);
        }
        usleep(1000);
    }

    // unreachable: the loop above never exits
    tcsetattr(fd, TCSANOW, &oldtio);   // restore the original settings
    close(fd);
}
bool initialize(Step_buf& raw_data_U, Step_buf& raw_data_D, LS3D& background_U, LS3D& background_D)
{
    cout << "Initialization started" << endl;
    int scan_cnt_U = 0;
    int scan_cnt_D = 0;
    double loop_start_time = 0.0;
    double loop_end_time = 0.0;

    // Keep reading until BOTH buffers are full.
    // (The original condition used &&, which exits as soon as either counter
    //  reaches BUFFER_LENGTH and leaves the other buffer underfilled.)
    while (scan_cnt_U < BUFFER_LENGTH || scan_cnt_D < BUFFER_LENGTH) {
        if (SCAN_DATA.readNew()) {
            loop_start_time = get_time();

            //- Store the incoming scan -//
            if (SCAN_DATA.data.det == 'U' && scan_cnt_U < BUFFER_LENGTH) {
                raw_data_U.set_data(SCAN_DATA.data.det, SCAN_DATA.data.dist,
                                    SCAN_DATA.data.x, SCAN_DATA.data.y, SCAN_DATA.data.z);
                scan_cnt_U++;
            }
            if (SCAN_DATA.data.det == 'D' && scan_cnt_D < BUFFER_LENGTH) {
                raw_data_D.set_data(SCAN_DATA.data.det, SCAN_DATA.data.dist,
                                    SCAN_DATA.data.x, SCAN_DATA.data.y, SCAN_DATA.data.z);
                scan_cnt_D++;
            }

            loop_end_time = get_time();
            sleep_const_freq(loop_start_time, loop_end_time, FREQ);
        } else {
            usleep(1000);
        }
    }

    get_background(raw_data_U, background_U);
    get_background(raw_data_D, background_D);
    cout << "Initialization finished" << endl;
    return true;   // the function is declared bool but originally returned nothing
}
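// ---------------------------------------------------------------------------
// Editor's sketch (not from the original source): initialize() relies on a
// sleep_const_freq() helper whose implementation is not shown in this listing.
// A minimal version, assuming get_time() returns seconds as a double and FREQ
// is the target loop frequency in Hz, could look like this:
// ---------------------------------------------------------------------------
#include <unistd.h>

void sleep_const_freq(double loop_start_time, double loop_end_time, double freq)
{
    double period  = 1.0 / freq;                        // target loop period [s]
    double elapsed = loop_end_time - loop_start_time;   // time consumed this cycle
    double remain  = period - elapsed;                  // remainder of the period
    if (remain > 0.0)                                   // never pass a negative
        usleep(static_cast<useconds_t>(remain * 1e6));  // value to usleep()
}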
void display(void)
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Modelview transform
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glTranslatef(0.0, 0.0, -camera_distance);
    glRotatef(-camera_pitch, 1.0, 0.0, 0.0);
    glRotatef(-camera_yaw,   0.0, 1.0, 0.0);
    glRotatef(-camera_roll,  0.0, 0.0, 1.0);

    // Model-to-camera transform
    // (the object is assumed stationary at (0.0, 1.0, 0.0))
    glTranslatef(-1000.0, 0.0, SENSOR_HEIGHT);

    if (OBJECT.readNew()) {
        // Store the scan data
        if (OBJECT.data.det == 'U') {
            for (int i = 0; i < STEP_NUM_MAX; i++) {
                vertex_U[i][0] = OBJECT.data.x[i];
                vertex_U[i][1] = OBJECT.data.y[i];
                vertex_U[i][2] = OBJECT.data.z[i];
            }
        }
        if (OBJECT.data.det == 'D') {
            for (int i = 0; i < STEP_NUM_MAX; i++) {
                vertex_D[i][0] = OBJECT.data.x[i];
                vertex_D[i][1] = OBJECT.data.y[i];
                vertex_D[i][2] = OBJECT.data.z[i];
            }
        }

        // Draw the scan points
        glLineWidth(1.0);
        glClear(GL_COLOR_BUFFER_BIT);   // note: redundant; the full clear already ran above
        glPointSize(POINTSIZE);
        glBegin(GL_POINTS);
        for (int j = 0; j < STEP_NUM_MAX; j++) {
            glColor3d(1.0, 0.0, 0.0);
            glVertex3d(vertex_U[j][0], vertex_U[j][1], vertex_U[j][2]);
        }
        for (int j = 0; j < STEP_NUM_MAX; j++) {
            glColor3d(0.0, 0.0, 1.0);
            glVertex3d(vertex_D[j][0], vertex_D[j][1], vertex_D[j][2]);
        }
        glEnd();

        // // 1-meter area
        // glColor3d(0.0, 0.0, 0.0);
        // glBegin(GL_LINE_LOOP);
        // glVertex3d(0.0,    -1000.0, -SENSOR_HEIGHT);
        // glVertex3d(1000.0, -1000.0, -SENSOR_HEIGHT);
        // glVertex3d(1000.0,  1000.0, -SENSOR_HEIGHT);
        // glVertex3d(0.0,     1000.0, -SENSOR_HEIGHT);
        // glEnd();

        // Monitored region (spot)
        glColor3d(0.0, 0.0, 0.0);
        glBegin(GL_LINE_LOOP);
        glVertex3d(0.0,    -2200.0, -SENSOR_HEIGHT);
        glVertex3d(3000.0, -2200.0, -SENSOR_HEIGHT);
        glVertex3d(3000.0,  2200.0, -SENSOR_HEIGHT);
        glVertex3d(0.0,     2200.0, -SENSOR_HEIGHT);
        glEnd();

        // Axis guides (two-point loops are effectively line segments)
        glColor3d(0.0, 0.0, 0.0);
        glBegin(GL_LINE_LOOP);
        glVertex3d(0.0,    0.0, -SENSOR_HEIGHT);
        glVertex3d(6000.0, 0.0, -SENSOR_HEIGHT);
        glEnd();

        glColor3d(0.0, 0.0, 0.0);
        glBegin(GL_LINE_LOOP);
        glVertex3d(0.0,  4000.0, -SENSOR_HEIGHT);
        glVertex3d(0.0, -4000.0, -SENSOR_HEIGHT);
        glEnd();

        glBegin(GL_LINE_LOOP);
        glVertex3d(0.0, 0.0, 1000.0);
        glVertex3d(0.0, 0.0, -SENSOR_HEIGHT);
        glEnd();

        // // Circle outline (radius 1000)
        // glBegin(GL_LINE_LOOP);
        // float cx, cy, cz;
        // glColor3f(0.0, 0.0, 0.0);
        // for (int i = 0; i <= 180; i++) {
        //     cx = 1000.0 * sin(M_PI * (double)i / 180.0);
        //     cy = 1000.0 * cos(M_PI * (double)i / 180.0);
        //     cz = -SENSOR_HEIGHT;
        //     glVertex3f(cx, cy, cz);
        // }
        // glEnd();

        // // Circle outline (radius 2000)
        // glBegin(GL_LINE_LOOP);
        // glColor3f(0.0, 0.0, 0.0);
        // for (int i = 0; i <= 180; i++) {
        //     cx = 2000.0 * sin(M_PI * (float)i / 180.0);
        //     cy = 2000.0 * cos(M_PI * (float)i / 180.0);
        //     cz = -SENSOR_HEIGHT;
        //     glVertex3f(cx, cy, cz);
        // }
        // glEnd();

        // // Circle outline (radius 3000)
        // glBegin(GL_LINE_LOOP);
        // glColor3f(0.0, 0.0, 0.0);
        // for (int i = 0; i <= 180; i++) {
        //     cx = 3000.0 * sin(M_PI * (float)i / 180.0);
        //     cy = 3000.0 * cos(M_PI * (float)i / 180.0);
        //     cz = -SENSOR_HEIGHT;
        //     glVertex3f(cx, cy, cz);
        // }
        // glEnd();

        // Sensor
        glPushMatrix();
        glColor3d(0.0, 0.0, 0.0);        // color
        glTranslated(0.0, 0.0, 0.0);     // translation
        glutSolidSphere(100.0, 20, 20);  // args: (radius, slices around Z, stacks along Z)
        glPopMatrix();

        glutSwapBuffers();
    } // if (OBJECT.readNew())

    sleepSSM(0.005);
}
int main(int argc, char *argv[])
{
    //- SSM -//
    initSSM();
    OBJECT.create(5.0, 1.0);
    SCAN_DATA.open(SSM_READ);
    AREA.create(5.0, 1.0);

    //- Classes and structs -//
    Step_buf raw_data_U;     // raw data
    Step_buf raw_data_D;     // raw data
    Step_buf ped_data;       // processed data
    LS3D background_U;       // environment (background) data
    LS3D background_D;       // environment (background) data

    //- Timing control -//
    double time_MovingObjects_are_detected = get_time();  // time when a moving object was detected
    double time_MovingObjects_gets_out     = get_time();  // time when moving objects disappeared
    double time_get_background = 0.0;

    //- Initialization -//
    initialize(raw_data_U, raw_data_D, background_U, background_D);
    time_get_background = get_time();
    // raw_data.out_csv();  // for debugging

    //- Main loop -//
    while (1) {
        if (SCAN_DATA.readNew()) {
            double loop_start_time = get_time();

            if (SCAN_DATA.data.det == 'U') {
                //- Store the incoming scan -//
                raw_data_U.set_data(SCAN_DATA.data.det, SCAN_DATA.data.dist,
                                    SCAN_DATA.data.x, SCAN_DATA.data.y, SCAN_DATA.data.z);
                //- Compute the background difference -//
                cal_background_diff(raw_data_U, ped_data, background_U);
                //- Apply the area filter -//
                apply_area_filter(ped_data);
                //- Copy the difference data into the SSM struct -//
                OBJECT.data.det = ped_data.det[CUR_INDEX];
                for (int i = 0; i < STEP_NUM; i++) {
                    OBJECT.data.dist[i] = ped_data.dist[i][CUR_INDEX];
                    OBJECT.data.x[i]    = ped_data.x[i][CUR_INDEX];
                    OBJECT.data.y[i]    = ped_data.y[i][CUR_INDEX];
                    OBJECT.data.z[i]    = ped_data.z[i][CUR_INDEX];
                }
            }
            if (SCAN_DATA.data.det == 'D') {
                //- Store the incoming scan -//
                raw_data_D.set_data(SCAN_DATA.data.det, SCAN_DATA.data.dist,
                                    SCAN_DATA.data.x, SCAN_DATA.data.y, SCAN_DATA.data.z);
                //- Compute the background difference -//
                cal_background_diff(raw_data_D, ped_data, background_D);
                //- Apply the area filter -//
                apply_area_filter(ped_data);
                //- Copy the difference data into the SSM struct -//
                OBJECT.data.det = ped_data.det[CUR_INDEX];
                for (int i = 0; i < STEP_NUM; i++) {
                    OBJECT.data.dist[i] = ped_data.dist[i][CUR_INDEX];
                    OBJECT.data.x[i]    = ped_data.x[i][CUR_INDEX];
                    OBJECT.data.y[i]    = ped_data.y[i][CUR_INDEX];
                    OBJECT.data.z[i]    = ped_data.z[i][CUR_INDEX];
                }
            }

            //- Check whether static objects exist -//
            // AREA.data.hasObjects = judge_Objects_exist(ped_data);

            //- Check whether moving objects exist -//
            AREA.data.hasMovingObjects = judge_MovingObjects_exist(ped_data);
            if (AREA.data.hasMovingObjects == true) {
                // Record the time a moving object was detected
                time_MovingObjects_are_detected = get_time();
            } else {
                // Record the time when no moving object is present
                time_MovingObjects_gets_out = get_time();
                // Elapsed time since the last detection
                double timelength_noMovingObjects =
                    time_MovingObjects_gets_out - time_MovingObjects_are_detected;
                cout << "timelength_noMovingObjects = " << timelength_noMovingObjects << endl;

                // If no moving object has been seen for a while (3 s) and the
                // background is stale (180 s), refresh the environment data.
                if (timelength_noMovingObjects > 3 && (get_time() - time_get_background) > 180) {
                    get_background(raw_data_U, background_U);
                    get_background(raw_data_D, background_D);
                    time_get_background = get_time();
                    time_MovingObjects_are_detected = get_time();
                    time_MovingObjects_gets_out = get_time();
                }
            }

            //- Write to SSM -//
            OBJECT.write();
            AREA.write();

            double loop_end_time = get_time();
            sleep_const_freq(loop_start_time, loop_end_time, FREQ);
        } else {
            usleep(10000);   // avoid pegging the CPU at 100%
        }
    }

    //- Close SSM -//
    OBJECT.release();
    SCAN_DATA.close();
    AREA.release();
    endSSM();
    return 0;
}
void idle()
{
    bool update = false;
    if (setgl) {
        qout << "r";
    }

#ifdef MERGEDEBUG
    if (edata.isOpen())
        if (edata.readNew()) {
            double minX = 1e10, maxX = -1e10;
            double minY = 1e10, maxY = -1e10;
            for (int i = 0; i < edata.data.numpoints; i++) {
                minX = min(minX, edata.data.start_x[i]);
                minX = min(minX, edata.data.finish_x[i]);
                minY = min(minY, edata.data.start_y[i]);
                minY = min(minY, edata.data.finish_y[i]);
                maxX = max(maxX, edata.data.start_x[i]);
                maxX = max(maxX, edata.data.finish_x[i]);
                maxY = max(maxY, edata.data.start_y[i]);
                maxY = max(maxY, edata.data.finish_y[i]);
                point s(edata.data.start_x[i],  edata.data.start_y[i]);
                point t(edata.data.finish_x[i], edata.data.finish_y[i]);
                mergedline.push(line(s, t));
            }
            mergecenter = point((minX + maxX) / 2.0, (minY + maxY) / 2.0);
            if (true) { // mergecnt==0)
                mergedline.shift(-mergecenter);
                mergedline.prepare();
                mergedline.clustering_line();
                mergedline.clustering_segment();
                mergedline.merge();
                mergedline.shift(+mergecenter);
            }
            update = true;
        }
#endif

    if (ldata.isOpen()) if (ldata.readNew()) { update = true; }
    if (edata.isOpen()) if (edata.readNew()) { update = true; }
    if (lajst.isOpen()) if (lajst.readNew()) { update = true; }
    if (eajst.isOpen()) if (eajst.readNew()) { update = true; }
    if (pinfo.isOpen()) if (pinfo.readNew()) { update = true; }
    if (glpos.isOpen()) if (glpos.readNew()) {
        robot = triple(glpos.data.x, glpos.data.y, 0);
        update = true;
    }

    triple t;
    if (linfo.isOpen()) {
        if (linfo.readNew()) {
            linemap.clear();
            ifstream fin(linfo.data.mapname);
            if (fin)
                while (fin >> t.x >> t.y) {
                    linemap.push_back(t);
                }
        }
    }

    // (The original listing is truncated here; the probable ending,
    //  reconstructed from the unused `update` flag and GLUT convention,
    //  is a redraw request when new data arrived:)
    if (update) glutPostRedisplay();
}
void display()
{
    if (camera_mode >= 1) camCent = robot;
    if (camera_mode >= 2) camRotH = glpos.data.theta + M_PI;

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_BLEND);
    glEnable(GL_CULL_FACE);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

#ifdef MERGEDEBUG
    if (testscreen) {
        const double md = 100.0;
        glViewport(0, 0, wid1 + wid2, hei);
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        glOrtho(0, 2 * M_PI, 0, md, -1.0, 1.0);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        gluLookAt(0, 0, 1, 0, 0, 0, 0, 1, 0);
        displayT();
        myString(0, 0, 0, strprintf("%d", mergedline.data.size()).c_str());
        glutSwapBuffers();
        return;
    }
#endif

    // Left viewport: 3D perspective view
    glViewport(0, 0, wid1, hei);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(45.0, (double)wid1 / hei, 0.1, farDist);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    myLookAt();
    display1();

    // Right viewport: 2D orthographic view
    glViewport(wid1, 0, wid2, hei);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(-1.0, 1.0, -1.0, 1.0, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    gluLookAt(0, 0, 1, 0, 0, 0, 0, 1, 0);
    display2();

    // Full-window overlay for status text
    glViewport(0, 0, wid1 + wid2, hei);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(-1.0, 1.0, -1.0, 1.0, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    gluLookAt(0, 0, 1, 0, 0, 0, 0, 1, 0);
    if (linfo.isOpen())
        myString(-0.98, -0.98, 0, strprintf("%s [%d]", linfo.data.mapname, linfo.data.mapnum).c_str());
    if (einfo.isOpen())
        myString(-0.98, -0.90, 0, strprintf("%s [%d]", einfo.data.mapname, einfo.data.mapnum).c_str());
    if (setgl)
        myString(-0.98, 0.90, 0, "Restart Mode");

    glutSwapBuffers();
}
void display1()
{
    vector<double> color;
    double low  = (vh[0] + 0.9) / map_a / 1.8 - map_b;
    double high = (vh[1] + 0.9) / map_a / 1.8 - map_b;

    // style
    glLineWidth(1);
    glPointSize(3);

    // ground
    const double gsize = 50.0;
    double gr_x = floor(robot.x);
    double gr_y = floor(robot.y);
    setColor(config["ground-panel"]);
    glBegin(GL_POLYGON);
    glVertex3d(gr_x + gsize, gr_y + gsize, 0.0);
    glVertex3d(gr_x - gsize, gr_y + gsize, 0.0);
    glVertex3d(gr_x - gsize, gr_y - gsize, 0.0);
    glVertex3d(gr_x + gsize, gr_y - gsize, 0.0);
    glEnd();
    setColor(config["ground-line"]);
    glBegin(GL_LINES);
    for (int i = -gsize; i <= gsize; i++) {
        glVertex3d(gr_x + i * 1.0, gr_y - gsize, 0.0);
        glVertex3d(gr_x + i * 1.0, gr_y + gsize, 0.0);
        glVertex3d(gr_x - gsize, gr_y + i * 1.0, 0.0);
        glVertex3d(gr_x + gsize, gr_y + i * 1.0, 0.0);
    }
    glEnd();

    // style
    glLineWidth(3);

    // robot
    glColor3d(1.0, 1.0, 1.0);
    drowCross(robot, glpos.data.theta, 1.0);

    // adjust
    if (disp_lajst) {
        glColor3d(0.0, 1.0, 1.0);
        drowCross(triple(lajst.data.x, lajst.data.y, 0.0), lajst.data.theta, 0.5);
    }
    if (disp_eajst) {
        glColor3d(0.0, 1.0, 0.0);
        drowCross(triple(eajst.data.x, eajst.data.y, 0.0), eajst.data.theta, 0.5);
    }
#ifdef MERGEDEBUG
    glColor3d(1.0, 0.0, 1.0);
    drowCross(triple(mergecenter.real(), mergecenter.imag(), 0.0), eajst.data.theta, 0.5);
#endif

    // gridmap
    if (disp_map) {
        color = config["map-points"];
        glBegin(GL_POINTS);
        for (int i = 0; i < gridmap.size(); i++) {
            if (low <= gridmap[i].z && gridmap[i].z <= high) {
                setColor(color, map_a * (map_b + gridmap[i].z));
                if (view2d) glVertex2dv(gridmap[i].vec);
                else        glVertex3dv(gridmap[i].vec);
            }
        }
        glEnd();
    }

    // scan data
    for (int i = 0; i < stream_num; i++) {
        // scan point
        if (!disp_sdata[i]) continue;
        glBegin(GL_POINTS);
        color = config[strprintf("urg-points-%d", i)];
        for (int j = 0; j < sdata[i].property.numPoints; j++) {
            if (sdata[i].data[j].isError()) continue;
            triple ref(sdata[i].data[j].reflect.vec);
            if (low <= ref.z && ref.z <= high) {
                ref.rotZ(glpos.data.theta);
                setColor(color, map_a * (map_b + ref.z));
                glVertex3dv((robot + ref).vec);
            }
        }
        glEnd();

        // scan laser
        // (the original re-looped over every stream here with a loop variable
        //  that shadowed the outer `i`, drawing each stream's beams
        //  stream_num times; the redundant inner loop has been removed)
        if (!disp_laser) continue;
        glBegin(GL_LINES);
        color = config[strprintf("urg-beams-%d", i)];
        for (int j = 0; j < sdata[i].property.numPoints; j++) {
            if (sdata[i].data[j].isError()) continue;
            triple ref(sdata[i].data[j].reflect.vec);
            triple ori(sdata[i].data[j].origin.vec);
            if (low <= ref.z && ref.z <= high) {
                ref.rotZ(glpos.data.theta);
                ori.rotZ(glpos.data.theta);
                setColor(color, map_a * (map_b + ref.z));
                glVertex3dv((robot + ref).vec);
                glVertex3dv((robot + ori).vec);
            }
        }
        glEnd();
    }

    // linemap
    if (disp_lmap) {
        glColor3d(0.0, 0.4, 0.4);
        glBegin(GL_LINES);
        for (int i = 0; i < linemap.size(); i++) {
            glVertex3dv(linemap[i].vec);
        }
        glEnd();
    }

    // edgemap
    if (disp_emap) {
        glColor3d(0.0, 0.4, 0.0);
        glBegin(GL_LINES);
        for (int i = 0; i < edgemap.size(); i++) {
            glVertex3dv(edgemap[i].vec);
        }
        glEnd();
    }

    // linedata
    if (ldata.isOpen() && disp_ldata) {
        glColor3d(0.0, 1.0, 1.0);
        glBegin(GL_LINES);
        for (int i = 0; i < ldata.data.numpoints; i++) {
            glVertex3d(ldata.data.start_x[i],  ldata.data.start_y[i],  0.5);
            glVertex3d(ldata.data.finish_x[i], ldata.data.finish_y[i], 0.5);
        }
        glEnd();
    }

    // edgedata
    if (edata.isOpen() && disp_edata) {
        glColor3d(0.0, 1.0, 0.0);
        glBegin(GL_LINES);
        for (int i = 0; i < edata.data.numpoints; i++) {
            glVertex3d(edata.data.start_x[i],  edata.data.start_y[i],  0.5);
            glVertex3d(edata.data.finish_x[i], edata.data.finish_y[i], 0.5);
        }
        glEnd();
    }

#ifdef MERGEDEBUG
    // world axes
    glColor3d(1.0, 1.0, 1.0);
    glBegin(GL_LINES);
    glVertex3d(0, 0, 0); glVertex3d(10, 0, 0);
    glVertex3d(0, 0, 0); glVertex3d(0, 10, 0);
    glVertex3d(0, 0, 0); glVertex3d(0, 0, 10);
    glEnd();

    //glColor3d(1.0, 1.0, 1.0);
    glBegin(GL_LINES);
    /*
    for (int i = 0; i < mergedline.data.size(); i++) {
        point p1(0, 100), p2(0, -100);
        p1 *= polar(1.0, mergedline.data[i].rad);
        p1 += polar(mergedline.data[i].dist, mergedline.data[i].rad);
        p2 *= polar(1.0, mergedline.data[i].rad);
        p2 += polar(mergedline.data[i].dist, mergedline.data[i].rad);
        glColor3dv(testcolor[mergedline.data[i].group % 7]);
        glVertex3d(p1.real(), p1.imag(), 1.5);
        glVertex3d(p2.real(), p2.imag(), 1.5);
    }
    */
    for (int i = 0; i < mergedline.data.size(); i++) {
        //glColor3dv(testcolor[mergedline.data[i].group % 7]);
        glColor4d(1, 0, 0, 0.5);
        glVertex3d(mergedline.data[i][0].real(), mergedline.data[i][0].imag(), 0.5);
        glVertex3d(mergedline.data[i][1].real(), mergedline.data[i][1].imag(), 0.5);
    }
    glEnd();
#endif

    // style
    glPointSize(7);

    // route
    if (disp_way) {
        // route line
        glBegin(GL_LINES);
        for (int i = 1; i < route.size(); i++) {
            //glColor3d(1.0, 1.0, 1.0);
            //if(pinfo.isOpen()) if(i==pinfo.data.waypoint) glColor3d(0.0, 1.0, 0.0);
            glColor3dv(amodecolor[routeinfo[i - 1]]);
            glVertex3dv(route[i - 1].vec);
            glVertex3dv(route[i].vec);
        }
        glEnd();

        // route point
        glColor3d(1.0, 1.0, 0.0);
        glBegin(GL_POINTS);
        for (int i = 0; i < route.size(); i++) {
            glVertex3dv(route[i].vec);
        }
        glEnd();

        // string
        glColor3d(1.0, 1.0, 1.0);
        for (int i = 0; i < route.size(); i++) {
            //glColor3dv(amodecolor[routeinfo[i]]);
            myString(route[i] + triple(0, 0, 0.2), strprintf("%d", i).c_str());
        }
    }
}
int main(int argc, char **argv)
{
    uint64_t leftGUID, rightGUID;

    // Config-file reader class, by KAW
    TxtConfigurator tc(CONFIG_FILE);

    // Initialize SSM
    if (!initSSM()) {
        cout << "SSM Initialize error." << endl;
        return -1;
    }

    // Read the configuration file.
#ifdef IS32BIT
    cout << "This is 32 bit environment" << endl;
    tc.set("LEFT_GUID",  (long long *)&leftGUID);
    tc.set("RIGHT_GUID", (long long *)&rightGUID);
#else
    cout << "This is 64 bit environment" << endl;
    tc.set("LEFT_GUID",  (long *)&leftGUID);
    tc.set("RIGHT_GUID", (long *)&rightGUID);
#endif
    tc.set("FRAME_RATE", &frame_rate);
    tc.set("COLOR_MOED", &color_mode);   // note: misspelled key kept as-is; it must match the config file
    tc.set("SHOW_IMAGE", &isShowImage);
    tc.set("SHOW_FRAME_RATE", &isShowFrameRate);
    tc.set("SHOW_WHITE_CALIBRATION", &isShowWhiteCalibration);
    tc.set("SHOW_CONTROL", &isShowControl);
    tc.set("WHAT_IS_GRAY", &WhatIsGray);
    tc.set("SHUTTER_SPEED", &ShutterSpeed);
    tc.substitute();  // required
    tc.close();       // optional

    cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
    switch (color_mode) {
    case 0:
        cout << "Color Mode = OFF" << endl;
        color_mode = DC1394_VIDEO_MODE_640x480_MONO8;
        isColor = false;
        break;
    case 1:
        cout << "Color Mode = ON" << endl;
        color_mode = DC1394_VIDEO_MODE_640x480_RGB8;
        isColor = true;
        break;
    default:
        cout << "Color Mode Not Set : 0(Mono) 1(Color) Only" << endl;
        cout << "Color Mode = ON" << endl;
        color_mode = DC1394_VIDEO_MODE_640x480_RGB8;
        isColor = true;
        break;
    }
    switch (frame_rate) {
    case 7:
        frame_rate = DC1394_FRAMERATE_7_5;
        cout << "Frame Rate = 7.5" << endl;
        break;
    case 15:
        frame_rate = DC1394_FRAMERATE_15;
        cout << "Frame Rate = 15" << endl;
        break;
    case 30:
        cout << "Frame Rate = 30" << endl;
        frame_rate = DC1394_FRAMERATE_30;
        break;
    case 60:
        frame_rate = DC1394_FRAMERATE_60;
        cout << "Frame Rate = 60" << endl;
        break;
    default:
        cout << "Frame Rate Not Set : 7(7.5), 15, 30, 60 Only" << endl;
        cout << "Frame Rate = 15" << endl;
        frame_rate = DC1394_FRAMERATE_15;
        break;
    }

    if (!stereo_images.create(1.5, 1 / 15.0)) {
        return 1;
    }
    setSigInt();

    bool isEnd = false;
    while (!isEnd) {
        cout << "Start Main Loop " << endl;
        isEnd = MainLoop(leftGUID, rightGUID);
    }

    stereo_images.release();
    endSSM();
    cout << "End Successfully." << endl;
    return 0;
}
// Kept separate from main() so that capture can restart after a sync slip.
bool MainLoop(uint64_t leftGUID, uint64_t rightGUID)
{
    unsigned long capture_time[CAMERA_NUM_MAX];
    Cam_Check left_cam, right_cam;
    left_cam.GUID  = leftGUID;
    right_cam.GUID = rightGUID;
    // KAW: optimizer workaround; without these the camera-detection logic misbehaves
    left_cam.found  = false;
    right_cam.found = false;

    dc1394_t *dc;
    dc1394error_t err;
    dc1394camera_list_t *list;
    dc1394video_frame_t *frames[CAMERA_NUM_MAX];

    // Required before any other dc1394 call
    dc = dc1394_new();
    if (!dc) {
        cout << "dc1394_new error" << endl;
        return true;   // was `return -1`, which converts to true anyway; end the program
    }
    err = dc1394_camera_enumerate(dc, &list);
    DC1394_ERR_RTN(err, "Failed to enumerate cameras");

    if (list->num >= 2) {
        cout << "# of cameras : " << list->num << endl;
        cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
        for (unsigned i = 0; i < list->num; i++) {
            if (list->ids[i].guid == left_cam.GUID) {
                cout << "Found Left Camera" << endl;
                cameras[_LEFT] = dc1394_camera_new(dc, list->ids[i].guid);
                left_cam.found = true;
            } else if (list->ids[i].guid == right_cam.GUID) {
                cout << "Found Right Camera" << endl;
                cameras[_RIGHT] = dc1394_camera_new(dc, list->ids[i].guid);
                right_cam.found = true;
            }
        }
        dc1394_camera_free_list(list);
        if (right_cam.found && left_cam.found) {
            cout << "Found Stereo Camera!!(`・ω・´)" << endl;
            cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
        } else {
            cout << "Stereo Camera Not Found...(´・ω・`)" << endl;
            cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
            return true;
        }
    } else {
        cout << "Need More Cameras...(´・ω・`)" << endl;
        cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
        dc1394_camera_free_list(list);
        return true;
    }

    for (unsigned i = 0; i < CAMERA_NUM_MAX; i++) {
        if (!cameras[i]) {
            cout << "camera" << i << ": not found" << endl;
            return true;
        }
    }

    // Set up each camera for capture
    for (int i = 0; i < CAMERA_NUM_MAX; i++) {
        // Operation mode
        err = dc1394_video_set_operation_mode(cameras[i], DC1394_OPERATION_MODE_1394B);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Operation Mode");
        //cout << "Operation Mode = 1394B" << endl;

        // ISO speed (isochronous transfer rate on the bus)
        err = dc1394_video_set_iso_speed(cameras[i], DC1394_ISO_SPEED_800);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set ISO Speed");
        //cout << "ISO Speed = 800" << endl;

        // Video mode: image size and color format
        err = dc1394_video_set_mode(cameras[i], (dc1394video_mode_t)color_mode);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Video Mode");
        //cout << "Image Size = VGA (640 x 480)" << endl;

        // Frame rate
        err = dc1394_video_set_framerate(cameras[i], (dc1394framerate_t)frame_rate);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Frame Rate");

        // Setup: should succeed if the frame rate and mode are valid
        err = dc1394_capture_setup(cameras[i], NUM_DMA_BUFFERS, DC1394_CAPTURE_FLAGS_DEFAULT);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Setup Cameras");

        // Transmission: start/stop the video signal
        err = dc1394_video_set_transmission(cameras[i], DC1394_ON);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Start Transmission");

        // -- Feature mode settings -- //
        // Gain (camera sensitivity)
        // err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_MANUAL);
        err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_GAIN, DC1394_FEATURE_MODE_AUTO);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Feature : Gain");

        // Shutter speed
        err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_MANUAL);
        // err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_SHUTTER, DC1394_FEATURE_MODE_AUTO);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Feature : Shutter");

        // White balance
        // err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_WHITE_BALANCE, DC1394_FEATURE_MODE_MANUAL);
        err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_WHITE_BALANCE, DC1394_FEATURE_MODE_AUTO);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Feature : White Balance");

        // Saturation
        // err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_SATURATION, DC1394_FEATURE_MODE_MANUAL);
        err = dc1394_feature_set_mode(cameras[i], DC1394_FEATURE_SATURATION, DC1394_FEATURE_MODE_AUTO);
        DC1394_ERR_CLN_RTN(err, clean_up(), "Cannot Set Feature : Saturation");
    }

    // cout << "Set Up Done" << endl;
    // cout << "Transmission Starts!!" << endl;
    // cout << "Gain, Shutter, White Balance and Saturation Are Set Manually" << endl;
    // cout << "-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=" << endl;
    // cout << "Calibrating White Balance..." << endl;

    bool isSync = false;
    int sync_succeed = 0, sync_fail = 0;

    // Main loop
    while (!gShutOff) {
        // Left/right capture times (UNIX timestamps in milliseconds)
        double timestamps[2];
        double dif_timestamp;
        unsigned char show_frame[640 * 480 * 3];

        for (int i = 0; i < CAMERA_NUM_MAX; i++) {
            if (dc1394_capture_dequeue(cameras[i], DC1394_CAPTURE_POLICY_WAIT, &frames[i]) != DC1394_SUCCESS) {
                dc1394_log_error("Failed To Capture From CAM");
            }
            capture_time[i] = (unsigned long)frames[i]->timestamp;
            timestamps[i]   = (double)floor(frames[i]->timestamp / 1000.0);
        }
        //cout << "timestamps[0] : " << timestamps[_LEFT] << endl;
        //cout << "timestamps[1] : " << timestamps[_RIGHT] << endl;

        dif_timestamp = timestamps[_RIGHT] - timestamps[_LEFT];
        //cout << "dif_timestamp : " << dif_timestamp << endl;

        if (!isSync) {
            if (dif_timestamp != 0) {
                sync_fail++;
                cout << "Error : Seem to have a problem in Synchronization" << endl;
                if (sync_fail > 60) {
                    cout << "Restart!!" << endl;
                    clean_up();
                    return false;
                } else {   // sync_fail was just incremented, so the original `else if (sync_fail > 0)` was always true
                    cout << "This Capture will restart in " << (60 - sync_fail) / 15 << " seconds..." << endl;
                }
            } else {
                sync_succeed++;
                if (sync_succeed > 45) {
                    isSync = true;
                    cout << "Synchronization Succeeded!!" << endl;
                }
            }
        }

        camera_control(frames[_LEFT]->image);
        // if(!isWhiteCalibrated){
        //     isWhiteCalibrated = white_balance_calibration(frames[_LEFT]->image);
        // }

        for (int i = 0; i < CAMERA_NUM_MAX; i++) {
            convert_RGB_to_BGR(frames[i]->image, show_frame);
            memcpy(stereo_images.data.image[i], show_frame, sizeof(char) * WIDTH * HEIGHT * 3);
            if (isShowImage) {
                if (i == _LEFT) {
                    Mat showImage(Size(WIDTH, HEIGHT), CV_8UC3, show_frame);
                    rectangle(showImage, Point(320 - 40, 240 - 40), Point(320 + 40, 240 + 40),
                              Scalar(255, 0, 0), 1, 8, 0);
                    imshow("Left Camera Image", showImage);
                }
            }
        }
        stereo_images.write();

        if (isShowImage && waitKey(1) == 27) {
            gShutOff = true;
        }

        if (isShowFrameRate) {
            static unsigned long time_a = 0, time_b = -1;
            time_a = capture_time[0];
            // time_b starts at (unsigned)-1, so nothing is printed on the first pass
            if (time_a > time_b) {
                // timestamps are in microseconds of UNIX time, hence the division by 1000000
                cout << "Frame Rate : " << (1000000.0 / (double)(time_a - time_b)) << endl;
            }
            time_b = time_a;
        }

        for (int i = 0; i < CAMERA_NUM_MAX; i++) {
            if (frames[i]) {
                // frames must be enqueued back to the DMA buffer or capture stalls
                dc1394_capture_enqueue(cameras[i], frames[i]);
            }
        }
    }

    clean_up();
    return true;
}
int main(int argc, char *argv[])
{
    //- SSM -//
    initSSM();
    OBJECT.create(5.0, 1.0);
    SCAN_DATA.open(SSM_READ);

    get_background(&background_U, &background_D);
    // ofstream ofs;
    // ofs.open("background");
    // for (int i = 0; i < beam_num; i++)
    // {
    //     ofs << background_U.dist[i] << endl;
    // }
    // for (int i = 0; i < beam_num; i++)
    // {
    //     ofs << background_D.dist[i] << endl;
    // }
    // ofs.close();
    cout << "get background" << endl;

    while (1) {   // data acquisition loop
        if (SCAN_DATA.readNew()) {
            time_1 = get_time();

            if (SCAN_DATA.data.det == 'U') {
                OBJECT.data.det = 'U';
                for (int i = 0; i < beam_num; i++) {
                    dist_diff[i] = fabs(SCAN_DATA.data.dist[i] - background_U.dist[i]);
                    if (dist_diff[i] <= diff_min) {
                        OBJECT.data.dist[i] = 0.0;
                        OBJECT.data.x[i] = 0.0;
                        OBJECT.data.y[i] = 0.0;
                        OBJECT.data.z[i] = 0.0;
                    } else { // dist_diff[i] > diff_min /*&& dist_diff[i] < diff_max*/
                        OBJECT.data.dist[i] = SCAN_DATA.data.dist[i];
                        OBJECT.data.x[i] = SCAN_DATA.data.x[i];
                        OBJECT.data.y[i] = SCAN_DATA.data.y[i];
                        OBJECT.data.z[i] = SCAN_DATA.data.z[i];
                    }
                }
            }
            if (SCAN_DATA.data.det == 'D') {
                OBJECT.data.det = 'D';
                for (int i = 0; i < beam_num; i++) {
                    dist_diff[i] = fabs(SCAN_DATA.data.dist[i] - background_D.dist[i]);
                    if (dist_diff[i] <= diff_min) {
                        OBJECT.data.dist[i] = 0.0;
                        OBJECT.data.x[i] = 0.0;
                        OBJECT.data.y[i] = 0.0;
                        OBJECT.data.z[i] = 0.0;
                    } else { // dist_diff[i] > diff_min /*&& dist_diff[i] < diff_max*/
                        OBJECT.data.dist[i] = SCAN_DATA.data.dist[i];
                        OBJECT.data.x[i] = SCAN_DATA.data.x[i];
                        OBJECT.data.y[i] = SCAN_DATA.data.y[i];
                        OBJECT.data.z[i] = SCAN_DATA.data.z[i];
                    }
                }
            }
            OBJECT.write();

            time_2 = get_time();
            // Sleep off the rest of the period; guard against a negative
            // remainder (usleep takes an unsigned argument, so the original
            // unguarded expression could wrap to a huge sleep).
            double remain = freq * 1000000 - (time_2 - time_1) * 1000000;
            if (remain > 0)
                usleep(remain);
            time_3 = get_time();
            cout << "time = " << time_3 - time_1 << endl;
        } else {
            usleep(1000);   // avoid pegging the CPU at 100%
        }
    }   // data acquisition loop

    //- Close SSM -//
    OBJECT.release();
    SCAN_DATA.close();
    endSSM();
    return 0;
}
void get_background(LS3D *background_U, LS3D *background_D)
{
    int cnt_U = 0;
    int cnt_D = 0;

    // Collect 10 scans for each detector.
    // (The original condition used &&, which stops as soon as either counter
    //  reaches 10 and leaves part of the other buffer uninitialized.)
    while (cnt_U < 10 || cnt_D < 10) {
        if (SCAN_DATA.readNew()) {
            if (SCAN_DATA.data.det == 'U' && cnt_U < 10) {
                for (int i = 0; i < beam_num; i++) {
                    buf_U[cnt_U].dist[i] = SCAN_DATA.data.dist[i];
                    buf_U[cnt_U].det  = SCAN_DATA.data.det;
                    buf_U[cnt_U].x[i] = SCAN_DATA.data.x[i];
                    buf_U[cnt_U].y[i] = SCAN_DATA.data.y[i];
                    buf_U[cnt_U].z[i] = SCAN_DATA.data.z[i];
                }
                cnt_U++;
            }
            if (SCAN_DATA.data.det == 'D' && cnt_D < 10) {
                for (int i = 0; i < beam_num; i++) {
                    buf_D[cnt_D].dist[i] = SCAN_DATA.data.dist[i];
                    buf_D[cnt_D].det  = SCAN_DATA.data.det;
                    buf_D[cnt_D].x[i] = SCAN_DATA.data.x[i];
                    buf_D[cnt_D].y[i] = SCAN_DATA.data.y[i];
                    buf_D[cnt_D].z[i] = SCAN_DATA.data.z[i];
                }
                cnt_D++;
            }
        }
    }

    //- Bubble sort (descending by dist), once per beam -//
    // The x/y/z coordinates are swapped together with dist so the median
    // element stays a consistent point. (The original swapped only dist,
    // which decoupled the coordinates from the selected distance.)
    for (int k = 0; k < beam_num; k++) {
        for (int i = 0; i < (10 - 1); i++) {          // smallest values settle at the right end
            for (int j = 0; j < (10 - 1) - i; j++) {  // the settled tail shrinks each pass
                if (buf_U[j].dist[k] < buf_U[j + 1].dist[k]) {
                    swap(buf_U[j].dist[k], buf_U[j + 1].dist[k]);
                    swap(buf_U[j].x[k],    buf_U[j + 1].x[k]);
                    swap(buf_U[j].y[k],    buf_U[j + 1].y[k]);
                    swap(buf_U[j].z[k],    buf_U[j + 1].z[k]);
                }
            }
        }
    }
    for (int k = 0; k < beam_num; k++) {
        for (int i = 0; i < (10 - 1); i++) {
            for (int j = 0; j < (10 - 1) - i; j++) {
                if (buf_D[j].dist[k] < buf_D[j + 1].dist[k]) {
                    swap(buf_D[j].dist[k], buf_D[j + 1].dist[k]);
                    swap(buf_D[j].x[k],    buf_D[j + 1].x[k]);
                    swap(buf_D[j].y[k],    buf_D[j + 1].y[k]);
                    swap(buf_D[j].z[k],    buf_D[j + 1].z[k]);
                }
            }
        }
    }

    // Take the per-beam median (element 5 of the 10 sorted scans)
    for (int i = 0; i < beam_num; i++) {
        background_U->dist[i] = buf_U[5].dist[i];
        background_U->x[i] = buf_U[5].x[i];
        background_U->y[i] = buf_U[5].y[i];
        background_U->z[i] = buf_U[5].z[i];
        background_D->dist[i] = buf_D[5].dist[i];
        background_D->x[i] = buf_D[5].x[i];
        background_D->y[i] = buf_D[5].y[i];
        background_D->z[i] = buf_D[5].z[i];
    }
    background_U->det = 'U';
    background_D->det = 'D';
}
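// ---------------------------------------------------------------------------
// Editor's sketch (not from the original source): the bubble sort above only
// exists to select the per-beam median of 10 distance samples. If the x/y/z
// coordinates did not have to travel with each distance, std::nth_element
// from <algorithm> would do the same selection without a full sort. A minimal
// illustration, assuming a plain array of 10 doubles:
// ---------------------------------------------------------------------------
#include <algorithm>

double median_of_10(double v[10])
{
    // Partially reorders v so that v[5] holds the element that would sit at
    // index 5 in ascending order. With 10 samples there is no single middle
    // element; index 4 or 5 of the sorted order both serve as a median,
    // which is all the background estimate needs.
    std::nth_element(v, v + 5, v + 10);
    return v[5];
}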
void display(void)
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Modelview transform
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glTranslatef(0.0, 0.0, -camera_distance);
    glRotatef(-camera_pitch, 1.0, 0.0, 0.0);
    glRotatef(-camera_yaw,   0.0, 1.0, 0.0);
    glRotatef(-camera_roll,  0.0, 0.0, 1.0);

    // Model-to-camera transform
    // (the object is assumed stationary at (0.0, 1.0, 0.0))
    glTranslatef(-1000.0, 0.0, SENSOR_HEIGHT);

    if (SCAN_DATA.readNew()) {
        // Store the scan data
        if (SCAN_DATA.data.det == 'U') {
            for (int i = 0; i < STEP_NUM_MAX; i++) {
                vertex_U[i][0] = SCAN_DATA.data.x[i];
                vertex_U[i][1] = SCAN_DATA.data.y[i];
                vertex_U[i][2] = SCAN_DATA.data.z[i];
            }
        }
        if (SCAN_DATA.data.det == 'D') {
            for (int i = 0; i < STEP_NUM_MAX; i++) {
                vertex_D[i][0] = SCAN_DATA.data.x[i];
                vertex_D[i][1] = SCAN_DATA.data.y[i];
                vertex_D[i][2] = SCAN_DATA.data.z[i];
            }
        }

        // Draw the scan points
        // (the loop bound was a magic number 2720; STEP_NUM_MAX matches the
        //  fill loops above and avoids reading past the vertex arrays)
        glLineWidth(1.0);
        glClear(GL_COLOR_BUFFER_BIT);   // note: redundant; the full clear already ran above
        glPointSize(pointsize);
        glBegin(GL_POINTS);
        for (int j = 0; j < STEP_NUM_MAX; j++) {
            glColor3d(1.0, 0.0, 0.0);
            glVertex3d(vertex_U[j][0], vertex_U[j][1], vertex_U[j][2]);
        }
        for (int j = 0; j < STEP_NUM_MAX; j++) {
            glColor3d(0.0, 0.0, 1.0);
            glVertex3d(vertex_D[j][0], vertex_D[j][1], vertex_D[j][2]);
        }
        glEnd();

        // 1-meter area
        glColor3d(0.0, 0.0, 0.0);
        glBegin(GL_LINE_LOOP);
        glVertex3d(0.0,    -1000.0, -SENSOR_HEIGHT);
        glVertex3d(1000.0, -1000.0, -SENSOR_HEIGHT);
        glVertex3d(1000.0,  1000.0, -SENSOR_HEIGHT);
        glVertex3d(0.0,     1000.0, -SENSOR_HEIGHT);
        glEnd();

        // Monitored region (spot)
        glColor3d(0.0, 0.0, 0.0);
        glBegin(GL_LINE_LOOP);
        glVertex3d(0.0,    -2200.0, -SENSOR_HEIGHT);
        glVertex3d(3000.0, -2200.0, -SENSOR_HEIGHT);
        glVertex3d(3000.0,  2200.0, -SENSOR_HEIGHT);
        glVertex3d(0.0,     2200.0, -SENSOR_HEIGHT);
        glEnd();

        // Axis guides (two-point loops are effectively line segments)
        glColor3d(0.0, 0.0, 0.0);
        glBegin(GL_LINE_LOOP);
        glVertex3d(0.0,    0.0, -SENSOR_HEIGHT);
        glVertex3d(6000.0, 0.0, -SENSOR_HEIGHT);
        glEnd();

        glColor3d(0.0, 0.0, 0.0);
        glBegin(GL_LINE_LOOP);
        glVertex3d(0.0,  4000.0, -SENSOR_HEIGHT);
        glVertex3d(0.0, -4000.0, -SENSOR_HEIGHT);
        glEnd();

        glBegin(GL_LINE_LOOP);
        glVertex3d(0.0, 0.0, 1000.0);
        glVertex3d(0.0, 0.0, -SENSOR_HEIGHT);
        glEnd();

        // Sensor
        glPushMatrix();
        glColor3d(0.0, 0.0, 0.0);        // color
        glTranslated(0.0, 0.0, 0.0);     // translation
        glutSolidSphere(100.0, 20, 20);  // args: (radius, slices around Z, stacks along Z)
        glPopMatrix();

        glutSwapBuffers();
    } // if (SCAN_DATA.readNew())

    sleepSSM(0.005);
}