int TrackerMultiMarker::calc(const unsigned char* nImage) { numDetected = 0; int tmpNumDetected; ARMarkerInfo *tmp_markers; if (useDetectLite) { if (arDetectMarkerLite(const_cast<unsigned char*> (nImage), this->thresh, &tmp_markers, &tmpNumDetected) < 0) return 0; } else { if (arDetectMarker(const_cast<unsigned char*> (nImage), this->thresh, &tmp_markers, &tmpNumDetected) < 0) return 0; } for (int i = 0; i < tmpNumDetected; i++) if (tmp_markers[i].id != -1) { detectedMarkers[numDetected] = tmp_markers[i]; detectedMarkerIDs[numDetected++] = tmp_markers[i].id; if (numDetected >= MAX_IMAGE_PATTERNS) // increase this value if more markers should be possible to be detected in one image... break; } if (executeMultiMarkerPoseEstimator(tmp_markers, tmpNumDetected, config) < 0) return 0; convertTransformationMatrixToOpenGLStyle(config->trans, this->gl_para); return numDetected; }
/*
 * Class:     com_clab_artoolkit_port_JARToolkit
 * Method:    JARDetectMarkerLite
 * Signature: (JI)[I
 *
 * Runs the "lite" marker detector on the image buffer at the given native
 * address and returns the detected marker ids as a new Java int array.
 * Returns null (0) if detection fails.
 */
JNIEXPORT jintArray JNICALL Java_net_sourceforge_jartoolkit_core_JARToolKit_detectMarkerLite__JI(JNIEnv *env, jobject, jlong image, jint thresh)
{
    marker_num = 0;
    if (arDetectMarkerLite((ARUint8 *)image, thresh, &marker_info, &marker_num) < 0)
        return 0; // detection failed -> null array

    jintArray result = env->NewIntArray(marker_num);
    if (marker_num == 0)
        return result; // nothing detected: empty array, no elements to fill

    // Copy the detected ids into the Java array.
    jint *elems = env->GetIntArrayElements(result, 0);
    for (int idx = 0; idx < marker_num; idx++)
        elems[idx] = marker_info[idx].id;
    env->ReleaseIntArrayElements(result, elems, 0);

    return result;
}
/* * One and only one callback, now takes cloud, does everything else needed. */ void ARPublisher::getTransformationCallback (const sensor_msgs::PointCloud2ConstPtr & msg) { sensor_msgs::ImagePtr image_msg(new sensor_msgs::Image); ARUint8 *dataPtr; ARMarkerInfo *marker_info; int marker_num; int i, k, j; /* do we need to initialize? */ if(!configured_) { if(msg->width == 0 || msg->height == 0) { ROS_ERROR ("Deformed cloud! Size = %d, %d.", msg->width, msg->height); return; } cam_param_.xsize = msg->width; cam_param_.ysize = msg->height; cam_param_.dist_factor[0] = msg->width/2; // x0 = cX from openCV calibration cam_param_.dist_factor[1] = msg->height/2; // y0 = cY from openCV calibration cam_param_.dist_factor[2] = 0; // f = -100*k1 from CV. Note, we had to do mm^2 to m^2, hence 10^8->10^2 cam_param_.dist_factor[3] = 1.0; // scale factor, should probably be >1, but who cares... arInit (); } /* convert cloud to PCL */ PointCloud cloud; pcl::fromROSMsg(*msg, cloud); /* get an OpenCV image from the cloud */ pcl::toROSMsg (cloud, *image_msg); cv_bridge::CvImagePtr cv_ptr; try { cv_ptr = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8); } catch (cv_bridge::Exception& e) { ROS_ERROR ("Could not convert from '%s' to 'bgr8'.", image_msg->encoding.c_str ()); } dataPtr = (ARUint8 *) cv_ptr->image.ptr(); /* detect the markers in the video frame */ if (arDetectMarkerLite (dataPtr, threshold_, &marker_info, &marker_num) < 0) { argCleanup (); return; } arPoseMarkers_.markers.clear (); /* check for known patterns */ for (i = 0; i < objectnum; i++) { k = -1; for (j = 0; j < marker_num; j++) { if (object[i].id == marker_info[j].id) { if (k == -1) k = j; else // make sure you have the best pattern (highest confidence factor) if (marker_info[k].cf < marker_info[j].cf) k = j; } } if (k == -1) { object[i].visible = 0; continue; } /* create a cloud for marker corners */ int d = marker_info[k].dir; PointCloud marker; marker.push_back( cloud.at( 
(int)marker_info[k].vertex[(4-d)%4][0], (int)marker_info[k].vertex[(4-d)%4][1] ) ); // upper left marker.push_back( cloud.at( (int)marker_info[k].vertex[(5-d)%4][0], (int)marker_info[k].vertex[(5-d)%4][1] ) ); // upper right marker.push_back( cloud.at( (int)marker_info[k].vertex[(6-d)%4][0], (int)marker_info[k].vertex[(6-d)%4][1] ) ); // lower right marker.push_back( cloud.at( (int)marker_info[k].vertex[(7-d)%4][0], (int)marker_info[k].vertex[(7-d)%4][1] ) ); /* create an ideal cloud */ double w = object[i].marker_width; PointCloud ideal; ideal.push_back( makeRGBPoint(-w/2,w/2,0) ); ideal.push_back( makeRGBPoint(w/2,w/2,0) ); ideal.push_back( makeRGBPoint(w/2,-w/2,0) ); ideal.push_back( makeRGBPoint(-w/2,-w/2,0) ); /* get transformation */ Eigen::Matrix4f t; TransformationEstimationSVD obj; obj.estimateRigidTransformation( marker, ideal, t ); /* get final transformation */ tf::Transform transform = tfFromEigen(t.inverse()); // any(transform == nan) tf::Matrix3x3 m = transform.getBasis(); tf::Vector3 p = transform.getOrigin(); bool invalid = false; for(int i=0; i < 3; i++) for(int j=0; j < 3; j++) invalid = (invalid || isnan(m[i][j]) || fabs(m[i][j]) > 1.0); for(int i=0; i < 3; i++) invalid = (invalid || isnan(p[i])); if(invalid) continue; /* publish the marker */ ar_pose::ARMarker ar_pose_marker; ar_pose_marker.header.frame_id = msg->header.frame_id; ar_pose_marker.header.stamp = msg->header.stamp; ar_pose_marker.id = object[i].id; ar_pose_marker.pose.pose.position.x = transform.getOrigin().getX(); ar_pose_marker.pose.pose.position.y = transform.getOrigin().getY(); ar_pose_marker.pose.pose.position.z = transform.getOrigin().getZ(); ar_pose_marker.pose.pose.orientation.x = transform.getRotation().getAxis().getX(); ar_pose_marker.pose.pose.orientation.y = transform.getRotation().getAxis().getY(); ar_pose_marker.pose.pose.orientation.z = transform.getRotation().getAxis().getZ(); ar_pose_marker.pose.pose.orientation.w = transform.getRotation().getW(); 
ar_pose_marker.confidence = marker_info->cf; arPoseMarkers_.markers.push_back (ar_pose_marker); /* publish transform */ if (publishTf_) { broadcaster_.sendTransform(tf::StampedTransform(transform, msg->header.stamp, msg->header.frame_id, object[i].name)); } /* publish visual marker */ if (publishVisualMarkers_) { tf::Vector3 markerOrigin (0, 0, 0.25 * object[i].marker_width * AR_TO_ROS); tf::Transform m (tf::Quaternion::getIdentity (), markerOrigin); tf::Transform markerPose = transform * m; // marker pose in the camera frame tf::poseTFToMsg (markerPose, rvizMarker_.pose); rvizMarker_.header.frame_id = msg->header.frame_id; rvizMarker_.header.stamp = msg->header.stamp; rvizMarker_.id = object[i].id; rvizMarker_.scale.x = 1.0 * object[i].marker_width * AR_TO_ROS; rvizMarker_.scale.y = 1.0 * object[i].marker_width * AR_TO_ROS; rvizMarker_.scale.z = 0.5 * object[i].marker_width * AR_TO_ROS; rvizMarker_.ns = "basic_shapes"; rvizMarker_.type = visualization_msgs::Marker::CUBE; rvizMarker_.action = visualization_msgs::Marker::ADD; switch (i) { case 0: rvizMarker_.color.r = 0.0f; rvizMarker_.color.g = 0.0f; rvizMarker_.color.b = 1.0f; rvizMarker_.color.a = 1.0; break; case 1: rvizMarker_.color.r = 1.0f; rvizMarker_.color.g = 0.0f; rvizMarker_.color.b = 0.0f; rvizMarker_.color.a = 1.0; break; default: rvizMarker_.color.r = 0.0f; rvizMarker_.color.g = 1.0f; rvizMarker_.color.b = 0.0f; rvizMarker_.color.a = 1.0; } rvizMarker_.lifetime = ros::Duration (); rvizMarkerPub_.publish (rvizMarker_); ROS_DEBUG ("Published visual marker"); } } arMarkerPub_.publish (arPoseMarkers_); ROS_DEBUG ("Published ar_multi markers"); }
/* main loop */
/*
 * Per-frame loop for the paddle/target demo: grabs a video frame, detects
 * markers, draws the camera image (plus debug overlays when arDebug is set),
 * tracks the paddle and the multi-marker base, and checks the paddle
 * position against each target for collisions.
 * NOTE(review): relies on file-scope globals (count, thresh, config,
 * paddleInfo, marker_flag, myTarget, cparam) initialized elsewhere.
 */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    float           curPaddlePos[3];
    int             i;
    double          err;

    /* grab a video frame; if none is ready yet, sleep briefly and retry
       on the next callback */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();  /* start timing on the first frame */
    count++;

    /* detect the markers in the video frame; a negative return is fatal */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0,0 );
    }
    else {
        /* debug view: raw frame plus the (possibly half-size) processed
           image, with each detected square outlined in red */
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0);
        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }
    arVideoCapNext();

    /* reset per-marker usage flags before paddle tracking claims markers */
    for( i = 0; i < marker_num; i++ ) marker_flag[i] = 0;

    /* get the paddle position */
    paddleGetTrans(paddleInfo, marker_info, marker_flag,
                   marker_num, &cparam);

    /* draw the 3D models */
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    /* draw the paddle, base and menu */
    if( paddleInfo->active ){
        draw_paddle( paddleInfo);
    }

    /* get the translation from the multimarker pattern; skip the 3D scene
       if the base was not found or the fit error is too large */
    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    //printf("err = %f\n", err);
    if(err > 100.0 ) {
        argSwapBuffers();
        return;
    }

    //draw a red ground grid
    drawGroundGrid( config->trans, 20, 150.0f, 105.0f, 0.0f);

    /* find the paddle position relative to the base */
    findPaddlePosition(curPaddlePos, paddleInfo->trans, config->trans);
    /* NOTE(review): curPaddlePos is used below even when paddleInfo->active
       is false — presumably paddleInfo->trans still holds the last pose;
       confirm against paddleGetTrans behavior. */

    /* check for collisions with targets (20.0f = collision radius in the
       base coordinate units) */
    for(i=0;i<TARGET_NUM;i++){
        myTarget[i].state = NOT_TOUCHED;
        if(checkCollision(curPaddlePos, myTarget[i].pos, 20.0f)) {
            myTarget[i].state = TOUCHED;
            fprintf(stderr,"touched !!\n");
        }
    }

    /* draw the targets */
    for(i=0;i<TARGET_NUM;i++){
        draw(myTarget[i],config->trans);
    }

    argSwapBuffers();
}
/* main loop */
/*
 * Per-frame loop for the paddle pick-up/drop demo: grabs a video frame,
 * detects markers, tracks the paddle and the multi-marker base, then runs a
 * small gesture state machine — an item on the paddle slides when the paddle
 * is inclined and is dropped once it slides off the edge; an empty paddle
 * can pick an item up.
 * NOTE(review): relies on file-scope globals (count, thresh, config,
 * paddleInfo, marker_flag, myPaddleItem, myListItem, cparam).
 */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    float           curPaddlePos[3];
    int             i;
    double          err;
    double          angle;

    err=0.;

    /* grab a video frame; retry on the next callback if none is ready */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();  /* start timing on the first frame */
    count++;

    /* detect the markers in the video frame; a negative return is fatal */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0,0 );
    }
    else {
        /* debug view: raw frame plus the (possibly half-size) processed
           image, with each detected square outlined in red */
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0);
        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }
    arVideoCapNext();

    /* reset per-marker usage flags before paddle tracking claims markers */
    for( i = 0; i < marker_num; i++ ) marker_flag[i] = 0;

    /* get the paddle position */
    paddleGetTrans(paddleInfo, marker_info, marker_flag,
                   marker_num, &cparam);

    /* draw the 3D models */
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);

    /* get the translation from the multimarker pattern; skip the 3D scene
       if the base was not found or the fit error is too large */
    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    // printf("err = %f\n", err);
    if(err > 100.0 ) {
        argSwapBuffers();
        return;
    }

    //draw a red ground grid
    drawGroundGrid( config->trans, 15, 150.0, 110.0, 0.0);

    /* find the paddle position relative to the base */
    if (paddleInfo->active)
        findPaddlePosition(curPaddlePos,paddleInfo->trans,config->trans);

    /* checking for paddle gesture */
    if( paddleInfo->active) {
        int findItem=-1;
        if (myPaddleItem.item!=-1) {
            /* an item is on the paddle: if the paddle is inclined, slide the
               item 2.0 units per frame along the incline direction */
            if( check_incline(paddleInfo->trans, config->trans, &angle) ) {
                myPaddleItem.x += 2.0 * cos(angle);
                myPaddleItem.y += 2.0 * sin(angle);
                /* 900.0 = 30^2: the item slid past the paddle radius —
                   undo the last step and drop it at the paddle position */
                if( myPaddleItem.x*myPaddleItem.x +
                    myPaddleItem.y*myPaddleItem.y > 900.0 ) {
                    myPaddleItem.x -= 2.0 * cos(angle);
                    myPaddleItem.y -= 2.0 * sin(angle);
                    myListItem.item[myPaddleItem.item].onpaddle=0;
                    myListItem.item[myPaddleItem.item].pos[0]=curPaddlePos[0];
                    myListItem.item[myPaddleItem.item].pos[1]=curPaddlePos[1];
                    myPaddleItem.item = -1;
                }
            }
        }
        else {
            /* paddle is empty: try to pick an item up from the list */
            if ((findItem=check_pickup(paddleInfo->trans, config->trans,
                                       &myListItem, &angle))!=-1) {
                myPaddleItem.item=findItem;
                myPaddleItem.x =0.0;
                myPaddleItem.y =0.0;
                myPaddleItem.angle = 0.0;
                myListItem.item[myPaddleItem.item].onpaddle=1;
            }
        }
    }

    /* draw the item */
    drawItems(config->trans,&myListItem);

    /* draw the paddle */
    if( paddleInfo->active ){
        draw_paddle(paddleInfo,&myPaddleItem);
    }

    argSwapBuffers();
}
/* main loop */ static void mainLoop(void) { ARUint8 *dataPtr; ARMarkerInfo *marker_info; int marker_num; double err; int i; /* grab a vide frame */ if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) { arUtilSleep(2); return; } if( count == 0 ) arUtilTimerReset(); count++; /* detect the markers in the video frame */ if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) { cleanup(); exit(0); } argDrawMode2D(); if( !arDebug ) { argDispImage( dataPtr, 0,0 ); } else { argDispImage( dataPtr, 1, 1 ); if( arImageProcMode == AR_IMAGE_PROC_IN_HALF ) argDispHalfImage( arImage, 0, 0 ); else argDispImage( arImage, 0, 0); glColor3f( 1.0, 0.0, 0.0 ); glLineWidth( 1.0 ); for( i = 0; i < marker_num; i++ ) { argDrawSquare( marker_info[i].vertex, 0, 0 ); } glLineWidth( 1.0 ); } arVideoCapNext(); if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) { argSwapBuffers(); return; } printf("err = %f\n", err); if(err > 100.0 ) { argSwapBuffers(); return; } /* for(i=0;i<3;i++) { for(j=0;j<4;j++) printf("%10.5f ", config->trans[i][j]); printf("\n"); } printf("\n"); */ argDrawMode3D(); argDraw3dCamera( 0, 0 ); glClearDepth( 1.0 ); glClear(GL_DEPTH_BUFFER_BIT); for( i = 0; i < config->marker_num; i++ ) { if( config->marker[i].visible >= 0 ) draw( config->trans, config->marker[i].trans, 0 ); else draw( config->trans, config->marker[i].trans, 1 ); } argSwapBuffers(); }
/* main loop */
/*
 * Per-frame loop for the city demo: grabs a frame, detects markers,
 * estimates the multi-marker transform, then draws "ghost" buildings
 * (depth-only, color writes disabled), the cars, and the regular buildings.
 * NOTE(review): the code splits config->marker_num into the last 3 markers
 * (ghost buildings) and the rest (regular buildings) — this assumes
 * marker_num >= 3; confirm the multi-marker config guarantees it.
 */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    double          err;
    int             i;

    /* grab a video frame; retry on the next callback if none is ready */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();  /* start timing on the first frame */
    count++;

    /* detect the markers in the video frame; a negative return is fatal */
    if( arDetectMarkerLite(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    argDrawMode2D();
    if( !arDebug ) {
        argDispImage( dataPtr, 0,0 );
    }
    else {
        /* debug view: raw frame plus the (possibly half-size) processed
           image, with each detected square outlined in red */
        argDispImage( dataPtr, 1, 1 );
        if( arImageProcMode == AR_IMAGE_PROC_IN_HALF )
            argDispHalfImage( arImage, 0, 0 );
        else
            argDispImage( arImage, 0, 0);
        glColor3f( 1.0, 0.0, 0.0 );
        glLineWidth( 1.0 );
        for( i = 0; i < marker_num; i++ ) {
            argDrawSquare( marker_info[i].vertex, 0, 0 );
        }
        glLineWidth( 1.0 );
    }
    arVideoCapNext();

    /* estimate the multimarker transform; skip the 3D scene if the base
       was not found or the fit error is too large */
    if( (err=arMultiGetTransMat(marker_info, marker_num, config)) < 0 ) {
        argSwapBuffers();
        return;
    }
    printf("err = %f\n", err);
    if(err > 100.0 ) {
        argSwapBuffers();
        return;
    }
    /*
    for(i=0;i<3;i++) {
        for(j=0;j<4;j++) printf("%10.5f ", config->trans[i][j]);
        printf("\n");
    }
    printf("\n");
    */
    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glClearDepth( 1.0 );
    glScalef(1.0,1.0,5.0);  /* stretch the scene along Z for taller buildings */
    glClear(GL_DEPTH_BUFFER_BIT);

    /* Draw the ghost buildings (with and without markers).
       (original comment: "PRINT DOS PREDIOS FANTASMAS (COM E SEM MARCADORES)") */
    if (mostraFantasmas == 1) {
        desenhaFantasmasSemTag();
        /* Draw ghost buildings whose markers were identified: color writes
           are disabled so they only fill the depth buffer (occluders).
           (original comment: "Desenha predios fantasmas com marcadores
           identificados") */
        glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_FALSE);
        for(i = (config->marker_num) - 3; i < config->marker_num; i++ ) {
            if( config->marker[i].visible >= 0 ) {
                /* double the Z scale for this building, then undo it */
                glScalef(1.0,1.0,2.0);
                draw( config->trans, config->marker[i].trans, 0 );
                glScalef(1.0,1.0,0.5);
            }else{
                glScalef(1.0,1.0,2.0);
                draw( config->trans, config->marker[i].trans, 1 );
                glScalef(1.0,1.0,0.5);
            }
        }
        glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
    }

    desenhaCarros();  /* draw the cars */

    /* draw the regular buildings (all markers except the last 3);
       last argument flags non-visible markers */
    for (i = 0; i < (config->marker_num) - 3; i++ ) {
        if( config->marker[i].visible >= 0 )
            draw( config->trans, config->marker[i].trans, 0 );
        else
            draw( config->trans, config->marker[i].trans, 1 );
    }

    argSwapBuffers();
}