예제 #1
0
/*
* Class:     com_clab_artoolkit_port_JARToolkit
* Method:    getTransMatrixContJava3D
* Signature: ([DIIFF[D)Z
*/
// Continuous-tracking pose query for Java3D.  Refines the pose of the marker
// matching `patternID`, seeding the estimator with the previous transform
// passed in `conv` (column-major 4x4), and writes the new pose into
// `inMatrix` in Java3D conventions (column-major, selected entries negated).
// Returns false when the pattern is not detected or pose estimation fails.
JNIEXPORT jboolean JNICALL Java_net_sourceforge_jartoolkit_core_JARToolKit_getTransMatrixContJava3D___3DIIFF_3D(JNIEnv *env, jobject, jdoubleArray inMatrix, jint patternID, jint patt_width, jfloat patt_centerX, jfloat patt_centerY, jdoubleArray conv)
{
	double patt_trans[3][4];
	int found, j, i;
	double patt_center[2] = {patt_centerX, patt_centerY};
	double prev_conv[3][4];

	jdouble *matrix = env->GetDoubleArrayElements(inMatrix, 0);

	// Find the first detected marker carrying the requested pattern id.
	for( found = 0; found < marker_num; found++ )
	{
		if( patternID == marker_info[found].id )
			break;
	}

	if( found >= marker_num )
	{
		// Pattern not visible in the current frame.
		env->ReleaseDoubleArrayElements(inMatrix, matrix, 0);
		return false;
	}

	// Unpack the caller's previous column-major 4x4 matrix into a 3x4 array.
	jdouble *pre_conv = env->GetDoubleArrayElements(conv, 0);
	for(j=0 ; j<3 ; j++)
	{
		for(i=0 ; i<4 ; i++)
		{
			prev_conv[j][i] = pre_conv[i*4+j];
		}
	}
	env->ReleaseDoubleArrayElements(conv, pre_conv, 0);

	// BUG FIX: the original reused `j` as both the found-marker index and the
	// unpack-loop counter above, so marker_info[3] was always passed here
	// regardless of which marker matched.  `found` preserves the match.
	if( arGetTransMatCont(&marker_info[found], prev_conv, patt_center, patt_width, patt_trans) < 0 )
	{
		env->ReleaseDoubleArrayElements(inMatrix, matrix, 0);
		return false;
	}

	// Transpose the 3x4 result into the column-major output buffer.
	for( j = 0; j < 3; j++ )
	{
		for( i = 0; i < 4; i++ )
		{
			matrix[i*4+j] = patt_trans[j][i];
		}
	}

	// Negate selected entries to convert to the Java3D coordinate convention.
	matrix[1] = -matrix[1];
	matrix[4] = -matrix[4];
	matrix[6] = -matrix[6];
	matrix[9] = -matrix[9];
	matrix[13] = -matrix[13];

	// Force an affine bottom row of (0, 0, 0, 1).
	matrix[0*4+3] = matrix[1*4+3] = matrix[2*4+3] = 0.0;
	matrix[3*4+3] = 1.0;

	env->ReleaseDoubleArrayElements(inMatrix, matrix, 0);
	return true;
}
예제 #2
0
/*
* Class:     com_clab_artoolkit_port_JARToolkit
* Method:    getTransMatrixCont
* Signature: ([DIIFF[D)Z
*/
// Continuous-tracking pose query (OpenGL convention).  Picks the
// highest-confidence detection of `patternID`, refines its pose using the
// previous transform in `conv`, and writes the resulting OpenGL matrix into
// `inMatrix` via argConvGlpara.  Returns false when the pattern is not
// detected or pose estimation fails.
JNIEXPORT jboolean JNICALL Java_net_sourceforge_jartoolkit_core_JARToolKit_getTransMatrixCont___3DIIFF_3D(JNIEnv *env, jobject, jdoubleArray inMatrix, jint patternID, jint patt_width, jfloat patt_centerX, jfloat patt_centerY, jdoubleArray conv)
{
	double patt_trans[3][4];
	int k, j, i;
	double patt_center[2] = {patt_centerX, patt_centerY};
	double prev_conv[3][4];

	jdouble *matrix = env->GetDoubleArrayElements(inMatrix, 0);

	// Select the best (highest confidence factor) detection of the pattern.
	k = -1;
	for (j = 0; j < marker_num; j++)
	{
		if (patternID == marker_info[j].id)
		{
			// BUG FIX: the original used `if (k = -1)`, an assignment that is
			// always truthy, so the confidence comparison branch never ran.
			if (k == -1) k = j;
			else if (marker_info[k].cf < marker_info[j].cf) k = j;
		}
	}

	if (k == -1)
	{
		// Pattern not visible in the current frame.
		env->ReleaseDoubleArrayElements(inMatrix, matrix, 0);
		return false;
	}

	// Unpack the caller's previous column-major 4x4 matrix into a 3x4 array.
	jdouble *pre_conv = env->GetDoubleArrayElements(conv, 0);
	for (j=0; j<3; j++)
	{
		for( i=0; i<4; i++)
		{
			prev_conv[j][i] = pre_conv[i*4+j];
		}
	}
	env->ReleaseDoubleArrayElements(conv, pre_conv, 0);

	if (arGetTransMatCont(&marker_info[k], prev_conv, patt_center, patt_width, patt_trans) < 0)
	{
		env->ReleaseDoubleArrayElements(inMatrix, matrix, 0);
		return false;
	}

	// Convert the 3x4 camera transform to an OpenGL modelview matrix.
	argConvGlpara(patt_trans, matrix);
	env->ReleaseDoubleArrayElements(inMatrix, matrix, 0);
	return true;
}
예제 #3
0
/* main loop */
static void mainLoop(void)
{
    static int      contF = 0;
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;

    /* grab a vide frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    arVideoCapNext();

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }
    if( k == -1 ) {
        contF = 0;
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera */
    if( mode == 0 || contF == 0 ) {
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
		
    }
    else {
        arGetTransMatCont(&marker_info[k], patt_trans, patt_center, patt_width, patt_trans);
    }
    contF = 1;

    draw( patt_trans );

    argSwapBuffers();
}
예제 #4
0
파일: ARTK.cpp 프로젝트: binzhang/ARTK_MMD
//==================
// マーカー検出処理
//==================
bool cARTK::update( void )
{
	ARUint8			*pImage;
	ARMarkerInfo	*pMarkerInfo;
    int             iNumDetectedMarker;

	// カメラ画像の取得
	if( (pImage = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return false;
    }
	memcpy( m_pARTImage, pImage, m_uiARTImageSize );

	m_bMarkerFound = false;

	// カメラ画像からマーカーを検出
	if( arDetectMarker( m_pARTImage, 130, &pMarkerInfo, &iNumDetectedMarker ) < 0 )
	{
		exit( -1 );
	}

	// 検出されたマーカー情報の中から一番信頼性の高いものを探す
	int		k = -1;
	for( int j = 0 ; j < iNumDetectedMarker ; j++ )
	{
		if( pMarkerInfo[j].id == m_iPattID )
		{
			if( k == -1 || pMarkerInfo[j].cf > pMarkerInfo[k].cf )	k = j;
		}
	}

	if( k != -1 )
	{
		// カメラのトランスフォーム行列を取得
		if( m_bFirstTime )
			arGetTransMat( &(pMarkerInfo[k]), m_dPattCenter, m_dPattWidth, m_dPattTransMat );
		else
			arGetTransMatCont( &(pMarkerInfo[k]), m_dPattTransMat, m_dPattCenter, m_dPattWidth, m_dPattTransMat );

		m_bFirstTime = false;

		m_bMarkerFound = true;
	}

	// 次のカメラ画像のキャプチャを開始
	arVideoCapNext();

	return true;
}
예제 #5
0
  // Update the pose of one recognized marker: plain estimation the first
  // frame it appears, history-seeded refinement while it stays visible.
  void ARBundlePublisher::getMarkerTransform(int knownPatternCount, ARMarkerInfo *marker_info, int seenPatternCount)	{
      if (object[knownPatternCount].visible == 0)
      {
        // Marker was not visible last frame: estimate the pose from scratch.
        arGetTransMat (&marker_info[seenPatternCount],
                       object[knownPatternCount].marker_center,
                       object[knownPatternCount].marker_width,
                       object[knownPatternCount].trans);
      }
      else
      {
        // Marker was visible last frame: refine using the stored transform,
        // writing the result back into the same buffer.
        arGetTransMatCont (&marker_info[seenPatternCount],
                           object[knownPatternCount].trans,
                           object[knownPatternCount].marker_center,
                           object[knownPatternCount].marker_width,
                           object[knownPatternCount].trans);
      }
  }
예제 #6
0
	// Refresh this target's pose from the latest tracker detection.
	// Falls back to plain pose estimation until initial data exists.
	void SingleTarget::update(ARMarkerInfo* targetInfo, bool useHistory)
	{
		// Inactive targets are never valid and are left untouched.
		if (!_active)
		{
			_valid = false;
			return;
		}

		// No detection info this frame: mark invalid and bail out.
		if (targetInfo == 0L)
		{
			_valid = false;
			return;
		}

		// The tracker detected and tracked the target this frame.
		_valid = true;

		if (useHistory && mInitialData)
		{
			// Seed the estimator with the transform from the previous frame.
			arGetTransMatCont(targetInfo, patt_trans, patt_center, patt_width, patt_trans);
		}
		else
		{
			// First sighting (or history disabled): estimate from scratch.
			// Having done so once, history-based tracking becomes possible.
			arGetTransMat(targetInfo, patt_center, patt_width, patt_trans);
			mInitialData = true;
		}

		_confidence = targetInfo->cf;

		// Convert the 3x4 AR transform to an OpenGL-style modelview matrix
		// and push it into the scene graph.
		double modelView[16];
		arglCameraViewRH(patt_trans, modelView, 1.0f);
		updateTransform(osg::Matrix(modelView));
	}
예제 #7
0
/* Per-frame processing for PTT_NUM patterns: grab a video frame, detect
 * markers, and update each object's pose and visibility.  The first frame a
 * pattern is seen uses arGetTransMat; later frames refine the previous pose
 * with arGetTransMatCont. */
static void mainLoop(void)
{
    ARUint8         *dataPtr;       /* raw camera frame */
    ARMarkerInfo    *marker_info;   /* detections for this frame */
    int             marker_num;     /* number of detections */
    int             j, k;
	int				i;

    /* grab a video frame; retry on the next call if none is ready yet */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }
    if( count == 0 ) arUtilTimerReset();
    count++;

    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

    arVideoCapNext();

    /* check each known object for visibility */
   	for( i = 0; i < PTT_NUM; i++){
		/* pick the highest-confidence detection matching this pattern */
		k = -1;
	    for( j = 0; j < marker_num; j++ ) {
	        if( object[i].patt_id == marker_info[j].id ) {
	            if( k == -1 ) k = j;
	            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
	        }	
	    }
		if( k == -1 ) { /* marker not found: reset so the next sighting starts fresh */
			object[i].visible = 0;
			isFirst[i] = 1;
		}
		else{
			/* get the transformation between the marker and the real camera */
			if( isFirst[i]){
				arGetTransMat(&marker_info[k], object[i].patt_center, object[i].patt_width, object[i].patt_trans);
			}else{
				arGetTransMatCont(&marker_info[k], object[i].patt_trans, object[i].patt_center, object[i].patt_width, object[i].patt_trans);
			}
			object[i].visible = 1;
			isFirst[i] = 0;
			
			/* added: cache the inverse of pattern 2's transform */
			if(i == PTT2_MARK_ID){
				arUtilMatInv( object[PTT2_MARK_ID].patt_trans, itrans2); // compute the inverse matrix
			}
		}
	}

	//Initialize(); // fix me
	draw();

	argSwapBuffers();
}
예제 #8
0
// Scan the frame's detections for this actuator's pattern and update its
// transform and visibility state.  Returns TRUE when the marker was found
// and the actuator updated, 2 when the marker was not visible this frame.
int ActuatorARTKSM::searchActuatorOnFrame(ARMarkerInfo **marker_info, int marker_num){

	bool	cdChanged = false;	// NOTE(review): only read by the commented-out experiment below
	int		cd=100;				// NOTE(review): unused
	int		toggleBase = 0;		// NOTE(review): unused
	int		j, k;

		// HACK (original comment: "SACRA ACOCHAMBRO"): leftover axis-flip experiment, kept commented out
//	double	m1Trans[3][4];
//	double	m2Trans[3][4];
//	double	mResult[3][4]; 
//	double	matInv[3][4];

	static	double diff[3][4];	// NOTE(review): only used by the commented-out experiment
	static	int matInit=1;

	ARMarkerInfo           *auxMarker_info;

	auxMarker_info = *marker_info;


				// Pick the highest-confidence detection of this pattern.
				k = -1;
				for (j = 0; j < marker_num; j++) {
					if (auxMarker_info[j].id == this->patternNumber) {
						if( k == -1 ) k = j; // First marker detected.
						else if (auxMarker_info[k].cf < auxMarker_info[j].cf) k = j; }
				}

				if (k != -1) {
					if (this->visible == 0) {
						// First sighting: estimate the pose from scratch.
						arGetTransMat(&auxMarker_info[k], this->markerCenter, this->markerWidth, this->markerTrans);

						//if(matInit || cdChanged){
						//	for (int u=0;u<3;u++)
						//		for (int v=0;v<4;v++){ m1Trans[u][v]=0.0; m2Trans[u][v]=0.0; }

						//		m1Trans[0][0] = m2Trans[0][0] = 1.0;
						//		m1Trans[1][1] = m1Trans[2][2] = -1.0;
						//		m2Trans[1][1] = m2Trans[2][2] = -1.0;
						//		arUtilMatInv(m1Trans, matInv);
						//		arUtilMatMul(matInv,m2Trans,diff);
						//		matInit = 0;
						//		cdChanged=false;
						//}

					} else { 
						// Marker visible last frame: refine the previous pose in place.
						arGetTransMatCont(&auxMarker_info[k], this->markerTrans, this->markerCenter, this->markerWidth, this->markerTrans);
					}
					this->visible = 1;
				/*	arUtilMatMul(this->markerTrans,diff,mResult);
					for(int u=0;u<3;u++)
						for(int v=0;v<4;v++)
							this->markerTrans[u][v] = mResult[u][v];*/

					// Push the new transform and visibility to listeners.
					this->updateTrans( this->markerTrans );
					this->updateButton0( this->visible );
					//printf("\n Found Actuator %d %s", (*a).id, (*a).name);
					return TRUE;
				} else { 
					this->visible = 0; 
					this->updateButton0( this->visible);
					return 2;
				}
}
예제 #9
0
  // Image callback: detects the configured marker pattern in the incoming
  // frame, estimates its pose relative to the camera, and publishes it as an
  // ar_pose marker message plus (optionally) a TF transform and an RViz
  // visualization marker.
  void ARSinglePublisher::getTransformationCallback (const sensor_msgs::ImageConstPtr & image_msg)
  {
    ARUint8 *dataPtr;
    ARMarkerInfo *marker_info;
    int marker_num;
    int i, k;

    /* Get the image from ROSTOPIC
     * NOTE: the dataPtr format is BGR because the ARToolKit library was
     * build with V4L, dataPtr format change according to the 
     * ARToolKit configure option (see config.h).*/
    try
    {
      capture_ = bridge_.imgMsgToCv (image_msg, "bgr8");
    }
    catch (sensor_msgs::CvBridgeException & e)
    {
      // NOTE(review): on conversion failure capture_ keeps its previous
      // value and processing continues below — confirm this is intended.
      ROS_ERROR ("Could not convert from '%s' to 'bgr8'.", image_msg->encoding.c_str ());
    }
    //cvConvertImage(capture_,capture_,CV_CVTIMG_FLIP); //flip image
    dataPtr = (ARUint8 *) capture_->imageData;

    // detect the markers in the video frame 
    if (arDetectMarker (dataPtr, threshold_, &marker_info, &marker_num) < 0)
    {
      ROS_FATAL ("arDetectMarker failed");
      ROS_BREAK ();             // FIXME: I don't think this should be fatal... -Bill
    }

    // check for known patterns
    k = -1;
    for (i = 0; i < marker_num; i++)
    {
      if (marker_info[i].id == patt_id_)
      {
        ROS_DEBUG ("Found pattern: %d ", patt_id_);

        // make sure you have the best pattern (highest confidence factor)
        if (k == -1)
          k = i;
        else if (marker_info[k].cf < marker_info[i].cf)
          k = i;
      }
    }

    if (k != -1)
    {
      // **** get the transformation between the marker and the real camera
      double arQuat[4], arPos[3];

      // use history-seeded refinement once a previous pose exists (contF)
      if (!useHistory_ || contF == 0)
        arGetTransMat (&marker_info[k], marker_center_, markerWidth_, marker_trans_);
      else
        arGetTransMatCont (&marker_info[k], marker_trans_, marker_center_, markerWidth_, marker_trans_);

      contF = 1;

      //arUtilMatInv (marker_trans_, cam_trans);
      arUtilMat2QuatPos (marker_trans_, arQuat, arPos);

      // **** convert to ROS frame

      double quat[4], pos[3];
    
      // scale from ARToolKit units (mm) to ROS units (m)
      pos[0] = arPos[0] * AR_TO_ROS;
      pos[1] = arPos[1] * AR_TO_ROS;
      pos[2] = arPos[2] * AR_TO_ROS;

      // negate the vector part to flip the quaternion convention
      quat[0] = -arQuat[0];
      quat[1] = -arQuat[1];
      quat[2] = -arQuat[2];
      quat[3] = arQuat[3];

      ROS_DEBUG (" QUAT: Pos x: %3.5f  y: %3.5f  z: %3.5f", pos[0], pos[1], pos[2]);
      ROS_DEBUG ("     Quat qx: %3.5f qy: %3.5f qz: %3.5f qw: %3.5f", quat[0], quat[1], quat[2], quat[3]);

      // **** publish the marker

		  ar_pose_marker_.header.frame_id = image_msg->header.frame_id;
		  ar_pose_marker_.header.stamp    = image_msg->header.stamp;
		  ar_pose_marker_.id              = marker_info->id;

		  ar_pose_marker_.pose.pose.position.x = pos[0];
		  ar_pose_marker_.pose.pose.position.y = pos[1];
		  ar_pose_marker_.pose.pose.position.z = pos[2];

		  ar_pose_marker_.pose.pose.orientation.x = quat[0];
		  ar_pose_marker_.pose.pose.orientation.y = quat[1];
		  ar_pose_marker_.pose.pose.orientation.z = quat[2];
		  ar_pose_marker_.pose.pose.orientation.w = quat[3];
		
		  ar_pose_marker_.confidence = marker_info->cf;

		  arMarkerPub_.publish(ar_pose_marker_);
		  ROS_DEBUG ("Published ar_single marker");
		
      // **** publish transform between camera and marker

      btQuaternion rotation (quat[0], quat[1], quat[2], quat[3]);
      btVector3 origin(pos[0], pos[1], pos[2]);
      btTransform t(rotation, origin);

      if(publishTf_)
      {
        if(reverse_transform_)
        {
          // marker is the parent frame, camera the child
          tf::StampedTransform markerToCam (t.inverse(), image_msg->header.stamp, markerFrame_.c_str(), image_msg->header.frame_id);
          broadcaster_.sendTransform(markerToCam);
        } else {
          tf::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, markerFrame_.c_str());
          broadcaster_.sendTransform(camToMarker);
        }
      }

      // **** publish visual marker

      if(publishVisualMarkers_)
      {
        // offset the cube so it sits on the physical marker plane
        btVector3 markerOrigin(0, 0, 0.25 * markerWidth_ * AR_TO_ROS);
        btTransform m(btQuaternion::getIdentity(), markerOrigin);
        btTransform markerPose = t * m; // marker pose in the camera frame
      
        tf::poseTFToMsg(markerPose, rvizMarker_.pose);

			  rvizMarker_.header.frame_id = image_msg->header.frame_id;
			  rvizMarker_.header.stamp = image_msg->header.stamp;
			  rvizMarker_.id = 1;

			  rvizMarker_.scale.x = 1.0 * markerWidth_ * AR_TO_ROS;
			  rvizMarker_.scale.y = 1.0 * markerWidth_ * AR_TO_ROS;
			  rvizMarker_.scale.z = 0.5 * markerWidth_ * AR_TO_ROS;
			  rvizMarker_.ns = "basic_shapes";
			  rvizMarker_.type = visualization_msgs::Marker::CUBE;
			  rvizMarker_.action = visualization_msgs::Marker::ADD;
			  rvizMarker_.color.r = 0.0f;
			  rvizMarker_.color.g = 1.0f;
			  rvizMarker_.color.b = 0.0f;
			  rvizMarker_.color.a = 1.0;
			  rvizMarker_.lifetime = ros::Duration();
			
			  rvizMarkerPub_.publish(rvizMarker_);
			  ROS_DEBUG ("Published visual marker");
      }
    }
    else
    {
      // pattern not seen: drop tracking history
      contF = 0;
      ROS_DEBUG ("Failed to locate marker");
    }
  }
예제 #10
0
/*
 * Class:     net_towerdefender_image_MarkerInfo
 * Method:    artoolkit_detectmarkers
 * Signature: ([B[D)I
 */
/* Detect markers in the supplied camera image and update every registered
 * Object's Java-side mirror (visibility flag, transform, GL matrix and
 * observed corner vertices).  Returns the number of markers detected.
 * Throws net.towerdefender.exceptions.ARException when detection fails. */
JNIEXPORT jint JNICALL Java_net_towerdefender_image_ARToolkit_artoolkit_1detectmarkers(
		JNIEnv *env, jobject object, jbyteArray image, jobject transMatMonitor) {
	ARUint8 *dataPtr;
	ARMarkerInfo *marker_info;
	int marker_num;
	int j, k = -1;
	Object* curObject;

	/* grab the video frame handed over from Java */
	dataPtr = (*env)->GetByteArrayElements(env, image, JNI_FALSE);
	if (count == 0)
		arUtilTimerReset();
	count++;

	/* detect the markers in the video frame */
	if (arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0) {
		__android_log_write(ANDROID_LOG_ERROR, "AR native",
				"arDetectMarker failed!!");
		jclass exc = (*env)->FindClass(env,
				"net/towerdefender/exceptions/ARException");
		if (exc != NULL)
			(*env)->ThrowNew(env, exc, "failed to detect marker");
		/* BUG FIX: marker_num is undefined after a failed detection, and
		 * most JNI calls are illegal with a pending exception — release the
		 * image buffer and bail out instead of continuing. */
		(*env)->ReleaseByteArrayElements(env, image, dataPtr, 0);
		return 0;
	}
#ifdef DEBUG_LOGGING
	__android_log_print(ANDROID_LOG_INFO,"AR native","detected %d markers",marker_num);
#endif

	/* Field IDs of the Java-side object mirror; resolved once and cached. */
	static jfieldID visibleField = NULL;
	static jfieldID glMatrixField = NULL;
	static jfieldID transMatField = NULL;
	static jfieldID vertexField = NULL;
	jclass arObjectClass = NULL;
	jfloatArray glMatrixArrayObj = NULL;
	jdoubleArray transMatArrayObj = NULL;
	jdoubleArray vertexArrayObj = NULL;

#ifdef DEBUG_LOGGING
	__android_log_write(ANDROID_LOG_INFO,"AR native","done detecting markers, going to iterate over markers now");
#endif
	/* iterate over all registered objects */
	list_iterator_start(&objects);
	int itCount = 0;
	while (list_iterator_hasnext(&objects)) {
		curObject = (Object *) list_iterator_next(&objects);
#ifdef DEBUG_LOGGING
		__android_log_print(ANDROID_LOG_INFO,"AR native","now handling object with id %d, in %d iteration",curObject->name, itCount);
#endif
		itCount++;

		/* lazily resolve the field IDs from the first valid object ref */
		if (arObjectClass == NULL && curObject->objref != NULL)
			arObjectClass = (*env)->GetObjectClass(env, curObject->objref);
		if (arObjectClass != NULL) {
			if (visibleField == NULL)
				visibleField = (*env)->GetFieldID(env, arObjectClass, "visible",
						"Z");				/* Z means boolean */
			if (glMatrixField == NULL)
				glMatrixField = (*env)->GetFieldID(env, arObjectClass,
						"glMatrix", "[F");	/* [F means array of floats */
			if (transMatField == NULL)
				transMatField = (*env)->GetFieldID(env, arObjectClass,
						"transMat", "[D");	/* [D means array of doubles */
			if (vertexField == NULL)
				vertexField = (*env)->GetFieldID(env, arObjectClass, "vertexMat",
						"[D");
		}

		if (visibleField == NULL || glMatrixField == NULL
				|| transMatField == NULL) {
			/* field lookup failed — cannot mirror state for this object */
#ifdef DEBUG_LOGGING
			__android_log_write(ANDROID_LOG_INFO,"AR native","error: either visibleField or glMatrixField or transMatField null");
#endif
			continue;
		}

		/* pick the highest-confidence detection of this object's marker */
		k = -1;
		for (j = 0; j < marker_num; j++) {
#ifdef DEBUG_LOGGING
			__android_log_print(ANDROID_LOG_INFO,"AR native","marker with id: %d", marker_info[j].id);
#endif
			if (curObject->id == marker_info[j].id) {
				if (k == -1) {
					k = j;
#ifdef DEBUG_LOGGING
					__android_log_print(ANDROID_LOG_INFO,"AR native","detected object %d with marker %d and object marker %d",curObject->name,k,curObject->id);
#endif
				} else if (marker_info[k].cf < marker_info[j].cf) {
#ifdef DEBUG_LOGGING
					__android_log_print(ANDROID_LOG_INFO,"AR native","detected better object %d with marker %d and object marker %d",curObject->name,k,curObject->id);
#endif
					k = j;
				}
			}
		}
		if (k == -1) {
			/* object not visible this frame: drop history, tell Java */
			curObject->contF = 0;
			(*env)->SetBooleanField(env, curObject->objref, visibleField,
					JNI_FALSE);
#ifdef DEBUG_LOGGING
			__android_log_print(ANDROID_LOG_INFO,"AR native","object %d  not visible, with marker ID %d",curObject->name,curObject->id);
#endif
			continue;
		}

		/* object visible: lock it while we rewrite its matrices */
#ifdef DEBUG_LOGGING
		__android_log_write(ANDROID_LOG_INFO,"AR native","locking object");
#endif
		(*env)->MonitorEnter(env, curObject->objref);
#ifdef DEBUG_LOGGING
		__android_log_write(ANDROID_LOG_INFO,"AR native","done locking object...obtaining arrays");
#endif
		/* fetch the array objects of the current object */
		glMatrixArrayObj = (*env)->GetObjectField(env, curObject->objref,
				glMatrixField);
		transMatArrayObj = (*env)->GetObjectField(env, curObject->objref,
				transMatField);
		vertexArrayObj = (*env)->GetObjectField(env, curObject->objref,
				vertexField);
		if (transMatArrayObj == NULL || glMatrixArrayObj == NULL || vertexArrayObj == NULL) {
#ifdef DEBUG_LOGGING
			__android_log_write(ANDROID_LOG_INFO,"AR native","failed to fetch the matrix arrays objects");
#endif
			/* BUG FIX: exit the monitor before skipping, otherwise the lock
			 * taken above is leaked and Java threads deadlock. */
			(*env)->MonitorExit(env, curObject->objref);
			continue;
		}
		float *glMatrix = (*env)->GetFloatArrayElements(env, glMatrixArrayObj,
				JNI_FALSE);
		if (glMatrix == NULL) {
#ifdef DEBUG_LOGGING
			__android_log_write(ANDROID_LOG_INFO,"AR native","failed to fetch the matrix arrays");
#endif
			(*env)->MonitorExit(env, curObject->objref);	/* BUG FIX: avoid lock leak */
			continue;
		}
		double *vertexMatrix = (*env)->GetDoubleArrayElements(env, vertexArrayObj,
				JNI_FALSE);
		if (vertexMatrix == NULL) {
#ifdef DEBUG_LOGGING
			__android_log_write(ANDROID_LOG_INFO,"AR native","failed to fetch the vertex arrays");
#endif
			/* BUG FIX: release what was already pinned, then drop the lock */
			(*env)->ReleaseFloatArrayElements(env, glMatrixArrayObj, glMatrix, JNI_ABORT);
			(*env)->MonitorExit(env, curObject->objref);
			continue;
		}
		double* transMat = (*env)->GetDoubleArrayElements(env, transMatArrayObj,
				JNI_FALSE);
		if (transMat == NULL) {
#ifdef DEBUG_LOGGING
			__android_log_write(ANDROID_LOG_INFO,"AR native","failed to fetch the matrix arrays");
#endif
			/* BUG FIX: release pinned arrays and drop the lock */
			(*env)->ReleaseFloatArrayElements(env, glMatrixArrayObj, glMatrix, JNI_ABORT);
			(*env)->ReleaseDoubleArrayElements(env, vertexArrayObj, vertexMatrix, JNI_ABORT);
			(*env)->MonitorExit(env, curObject->objref);
			continue;
		}
#ifdef DEBUG_LOGGING
		__android_log_write(ANDROID_LOG_INFO,"AR native","calculating trans mat now");
#endif

		/* get the transformation between the marker and the real camera:
		 * plain estimation on first sighting, history-seeded afterwards */
		if (curObject->contF == 0) {
			arGetTransMat(&marker_info[k], curObject->marker_center,
					curObject->marker_width, transMat);
		} else {
			arGetTransMatCont(&marker_info[k], transMat,
					curObject->marker_center, curObject->marker_width,
					transMat);
		}
		curObject->contF = 1;
#ifdef DEBUG_LOGGING
		__android_log_write(ANDROID_LOG_INFO,"AR native","calculating OpenGL trans mat now");
#endif
		argConvGlpara(transMat, glMatrix);

		/* map the marker's ideal corner coordinates to observed (distorted)
		 * image coordinates for the Java side */
		int vertexId = 0;
		double inx,iny,outx,outy;
		for (vertexId = 0 ; vertexId < 8 ; vertexId+=2)
		{
			inx = ((double *)marker_info[k].vertex)[vertexId];
			iny = ((double *)marker_info[k].vertex)[vertexId+1];
			arParamIdeal2Observ	( arParam.dist_factor, inx,iny,&outx,&outy);
			vertexMatrix[vertexId] = outx;
			vertexMatrix[vertexId+1] = outy;
		}

#ifdef DEBUG_LOGGING
		__android_log_write(ANDROID_LOG_INFO,"AR native","releasing arrays");
#endif
		(*env)->ReleaseFloatArrayElements(env, glMatrixArrayObj, glMatrix, 0);
		(*env)->ReleaseDoubleArrayElements(env, transMatArrayObj, transMat, 0);
		(*env)->ReleaseDoubleArrayElements(env, vertexArrayObj, vertexMatrix, 0);

		(*env)->SetBooleanField(env, curObject->objref, visibleField, JNI_TRUE);
#ifdef DEBUG_LOGGING
		__android_log_write(ANDROID_LOG_INFO,"AR native","releasing lock");
#endif		
		/* release the lock on the object */
		(*env)->MonitorExit(env, curObject->objref);
#ifdef DEBUG_LOGGING
		__android_log_write(ANDROID_LOG_INFO,"AR native","done releasing lock");
#endif
	}
	list_iterator_stop(&objects);
#ifdef DEBUG_LOGGING
	__android_log_write(ANDROID_LOG_INFO,"AR native","releasing image array");
#endif
	(*env)->ReleaseByteArrayElements(env, image, dataPtr, 0);
	return marker_num;
}
예제 #11
0
/*
* Class:     com_clab_artoolkit_port_JARToolkit
* Method:    JARGetTransMatrixCont
* Signature: (IIFF[D)[D
*/
// Continuous-tracking pose query returning a newly allocated 16-element
// OpenGL matrix.  Picks the highest-confidence detection of `patternID` and
// refines its pose with the previous transform in `conv`.  On failure
// (pattern not visible, or refinement fails) the identity matrix is
// returned instead.
JNIEXPORT jdoubleArray JNICALL Java_net_sourceforge_jartoolkit_core_JARToolKit_getTransMatrixCont__IIFF_3D(JNIEnv *env, jobject, jint patternID, jint patt_width, jfloat patt_centerX, jfloat patt_centerY, jdoubleArray conv)
{
	double patt_trans[3][4];
	int k, j, i;
	double patt_center[2] = {patt_centerX, patt_centerY};
	double prev_conv[3][4];

	jdoubleArray matrix = env->NewDoubleArray(16);

	jdouble *buffer = env->GetDoubleArrayElements(matrix, 0);

	// Initialize the output to the identity matrix (default on failure).
	for (i=1 ;i<15; i++)
		buffer[i] = 0.0;

	buffer[0] = 1.0;
	buffer[5] = 1.0;
	buffer[10] = 1.0;
	buffer[15] = 1.0;

	// Select the best (highest confidence factor) detection of the pattern.
	k = -1;
	for (j = 0; j < marker_num; j++)
	{
		if (patternID == marker_info[j].id)
		{
			// BUG FIX: the original used `if (k = -1)`, an assignment that is
			// always truthy, so the best-confidence selection never happened.
			if (k == -1) k = j;
			else if (marker_info[k].cf < marker_info[j].cf) k = j;
		}
	}

	if (k == -1) 
	{
		env->ReleaseDoubleArrayElements(matrix, buffer, 0);
		return matrix;
	}

	// Unpack the caller's previous column-major 4x4 matrix into a 3x4 array.
	jdouble *pre_conv = env->GetDoubleArrayElements(conv, 0);
	for (j=0; j < 3; j++)
	{
		for (i=0; i < 4; i++)
		{
			prev_conv[j][i] = pre_conv[i*4+j];
		}
	}
	env->ReleaseDoubleArrayElements(conv, pre_conv, 0);
	if (arGetTransMatCont(&marker_info[k], prev_conv, patt_center, patt_width, patt_trans) < 0)
	{
		env->ReleaseDoubleArrayElements(matrix, buffer, 0);
		return matrix;
	}

	// Convert the 3x4 camera transform to an OpenGL modelview matrix.
	argConvGlpara(patt_trans, buffer);
	env->ReleaseDoubleArrayElements(matrix, buffer, 0);
	return matrix;
}
예제 #12
0
AR_TEMPL_FUNC ARFloat
AR_TEMPL_TRACKER::arGetTransMatCont2(ARMarkerInfo *marker_info, ARFloat center[2], ARFloat width, ARFloat conv[3][4])
{
	// Convenience wrapper: refine the pose in `conv` in place by passing it
	// both as the previous-frame estimate and as the output buffer.
	return arGetTransMatCont(marker_info, conv, center, width, conv);
}
예제 #13
0
/*
* Class:     com_clab_artoolkit_port_JARToolkit
* Method:    getTransMatrixContJava3D
* Signature: (IIFF[D)[D
*/
// Continuous-tracking pose query for Java3D, returning a newly allocated
// 16-element column-major matrix.  Refines the pose of the marker matching
// `patternID` using the previous transform in `conv`; on failure the
// identity matrix is returned.
JNIEXPORT jdoubleArray JNICALL Java_net_sourceforge_jartoolkit_core_JARToolKit_getTransMatrixContJava3D__IIFF_3D(JNIEnv *env, jobject, jint patternID, jint patt_width, jfloat patt_centerX, jfloat patt_centerY, jdoubleArray conv)
{
	double patt_trans[3][4];
	int found, j, i;
	double patt_center[2] = {patt_centerX, patt_centerY};
	double prev_conv[3][4];

	jdoubleArray matrix = env->NewDoubleArray(16);

	jdouble *buffer = env->GetDoubleArrayElements(matrix, 0);

	// Initialize the output to the identity matrix (default on failure).
	for(i=1 ; i<15 ; i++)
		buffer[i] = 0.0;

	buffer[0] = 1.0;
	buffer[5] = 1.0;
	buffer[10] = 1.0;
	buffer[15] = 1.0;

	// Find the first detected marker carrying the requested pattern id.
	for( found = 0; found < marker_num; found++ )
	{
		if( patternID == marker_info[found].id )
			break;
	}

	if( found >= marker_num )
	{
		env->ReleaseDoubleArrayElements(matrix, buffer, 0);
		return matrix;
	}

	// Unpack the caller's previous column-major 4x4 matrix into a 3x4 array.
	jdouble *pre_conv = env->GetDoubleArrayElements(conv, 0);
	for(j=0 ; j<3 ; j++)
	{
		for(i=0 ; i<4 ; i++)
		{
			prev_conv[j][i] = pre_conv[i*4+j];
		}
	}
	env->ReleaseDoubleArrayElements(conv, pre_conv, 0);

	// BUG FIX: the original reused `j` as both the found-marker index and the
	// unpack-loop counter above, so marker_info[3] was always passed here
	// regardless of which marker matched.  `found` preserves the match.
	if( arGetTransMatCont(&marker_info[found], prev_conv, patt_center, patt_width, patt_trans) < 0 )
	{
		env->ReleaseDoubleArrayElements(matrix, buffer, 0);
		return matrix;
	}

	// Transpose the 3x4 result into the column-major output buffer.
	for( j = 0; j < 3; j++ )
	{
		for( i = 0; i < 4; i++ )
		{
			buffer[i*4+j] = patt_trans[j][i];
		}
	}

	// Negate selected entries to convert to the Java3D coordinate convention.
	buffer[1] = -buffer[1];
	buffer[4] = -buffer[4];
	buffer[6] = -buffer[6];
	buffer[9] = -buffer[9];
	buffer[13] = -buffer[13];

	// Force an affine bottom row of (0, 0, 0, 1).
	buffer[0*4+3] = buffer[1*4+3] = buffer[2*4+3] = 0.0;
	buffer[3*4+3] = 1.0;

	env->ReleaseDoubleArrayElements(matrix, buffer, 0);
	return matrix;
}
예제 #14
0
  void ARMultiPublisher::getTransformationCallback (const sensor_msgs::ImageConstPtr & image_msg)
  {
    ARUint8 *dataPtr;
    ARMarkerInfo *marker_info;
    int marker_num;
    int i, k, j;

    /* Get the image from ROSTOPIC
     * NOTE: the dataPtr format is BGR because the ARToolKit library was
     * build with V4L, dataPtr format change according to the
     * ARToolKit configure option (see config.h).*/
#if ROS_VERSION_MINIMUM(1, 9, 0)
    try
    {
      capture_ = cv_bridge::toCvCopy (image_msg, sensor_msgs::image_encodings::BGR8);
    }
    catch (cv_bridge::Exception& e)
    {
      ROS_ERROR("cv_bridge exception: %s", e.what());
    }
    dataPtr = (ARUint8 *) ((IplImage) capture_->image).imageData;
#else
    try
    {
      capture_ = bridge_.imgMsgToCv (image_msg, "bgr8");
    }
    catch (sensor_msgs::CvBridgeException & e)
    {
      ROS_ERROR("cv_bridge exception: %s", e.what());
    }
    dataPtr = (ARUint8 *) capture_->imageData;
#endif

    // detect the markers in the video frame
    if (arDetectMarker (dataPtr, threshold_, &marker_info, &marker_num) < 0)
    {
      argCleanup ();
      ROS_BREAK ();
    }

    arPoseMarkers_.markers.clear ();
    // check for known patterns
    for (i = 0; i < objectnum; i++)
    {
      k = -1;
      for (j = 0; j < marker_num; j++)
      {
        if (object[i].id == marker_info[j].id)
        {
          if (k == -1)
            k = j;
          else                  // make sure you have the best pattern (highest confidence factor)
          if (marker_info[k].cf < marker_info[j].cf)
            k = j;
        }
      }
      if (k == -1)
      {
        object[i].visible = 0;
        continue;
      }

      // calculate the transform for each marker
      if (object[i].visible == 0)
      {
        arGetTransMat (&marker_info[k], object[i].marker_center, object[i].marker_width, object[i].trans);
      }
      else
      {
        arGetTransMatCont (&marker_info[k], object[i].trans,
                           object[i].marker_center, object[i].marker_width, object[i].trans);
      }
      object[i].visible = 1;

      double arQuat[4], arPos[3];

      //arUtilMatInv (object[i].trans, cam_trans);
      arUtilMat2QuatPos (object[i].trans, arQuat, arPos);

      // **** convert to ROS frame

      double quat[4], pos[3];

      pos[0] = arPos[0] * AR_TO_ROS;
      pos[1] = arPos[1] * AR_TO_ROS;
      pos[2] = arPos[2] * AR_TO_ROS;

      quat[0] = -arQuat[0];
      quat[1] = -arQuat[1];
      quat[2] = -arQuat[2];
      quat[3] = arQuat[3];

      ROS_DEBUG (" Object num %i ------------------------------------------------", i);
      ROS_DEBUG (" QUAT: Pos x: %3.5f  y: %3.5f  z: %3.5f", pos[0], pos[1], pos[2]);
      ROS_DEBUG ("     Quat qx: %3.5f qy: %3.5f qz: %3.5f qw: %3.5f", quat[0], quat[1], quat[2], quat[3]);

      // **** publish the marker

      ar_pose::ARMarker ar_pose_marker;
      ar_pose_marker.header.frame_id = image_msg->header.frame_id;
      ar_pose_marker.header.stamp = image_msg->header.stamp;
      ar_pose_marker.id = object[i].id;

      ar_pose_marker.pose.pose.position.x = pos[0];
      ar_pose_marker.pose.pose.position.y = pos[1];
      ar_pose_marker.pose.pose.position.z = pos[2];

      ar_pose_marker.pose.pose.orientation.x = quat[0];
      ar_pose_marker.pose.pose.orientation.y = quat[1];
      ar_pose_marker.pose.pose.orientation.z = quat[2];
      ar_pose_marker.pose.pose.orientation.w = quat[3];

      ar_pose_marker.confidence = round(marker_info->cf * 100);
      arPoseMarkers_.markers.push_back (ar_pose_marker);

      // **** publish transform between camera and marker

#if ROS_VERSION_MINIMUM(1, 9, 0)
      tf::Quaternion rotation (quat[0], quat[1], quat[2], quat[3]);
      tf::Vector3 origin (pos[0], pos[1], pos[2]);
      tf::Transform t (rotation, origin);
#else
// DEPRECATED: Fuerte support ends when Hydro is released
      btQuaternion rotation (quat[0], quat[1], quat[2], quat[3]);
      btVector3 origin (pos[0], pos[1], pos[2]);
      btTransform t (rotation, origin);
#endif

      if (publishTf_)
      {
        tf::StampedTransform camToMarker (t, image_msg->header.stamp, image_msg->header.frame_id, object[i].name);
        broadcaster_.sendTransform(camToMarker);
      }

      // **** publish visual marker

      if (publishVisualMarkers_)
      {
#if ROS_VERSION_MINIMUM(1, 9, 0)
        tf::Vector3 markerOrigin (0, 0, 0.25 * object[i].marker_width * AR_TO_ROS);
        tf::Transform m (tf::Quaternion::getIdentity (), markerOrigin);
        tf::Transform markerPose = t * m; // marker pose in the camera frame
#else
// DEPRECATED: Fuerte support ends when Hydro is released
        btVector3 markerOrigin (0, 0, 0.25 * object[i].marker_width * AR_TO_ROS);
        btTransform m (btQuaternion::getIdentity (), markerOrigin);
        btTransform markerPose = t * m; // marker pose in the camera frame
#endif

        tf::poseTFToMsg (markerPose, rvizMarker_.pose);

        rvizMarker_.header.frame_id = image_msg->header.frame_id;
        rvizMarker_.header.stamp = image_msg->header.stamp;
        rvizMarker_.id = object[i].id;

        rvizMarker_.scale.x = 1.0 * object[i].marker_width * AR_TO_ROS;
        rvizMarker_.scale.y = 1.0 * object[i].marker_width * AR_TO_ROS;
        rvizMarker_.scale.z = 0.5 * object[i].marker_width * AR_TO_ROS;
        rvizMarker_.ns = "basic_shapes";
        rvizMarker_.type = visualization_msgs::Marker::CUBE;
        rvizMarker_.action = visualization_msgs::Marker::ADD;
        switch (i)
        {
          case 0:
            rvizMarker_.color.r = 0.0f;
            rvizMarker_.color.g = 0.0f;
            rvizMarker_.color.b = 1.0f;
            rvizMarker_.color.a = 1.0;
            break;
          case 1:
            rvizMarker_.color.r = 1.0f;
            rvizMarker_.color.g = 0.0f;
            rvizMarker_.color.b = 0.0f;
            rvizMarker_.color.a = 1.0;
            break;
          default:
            rvizMarker_.color.r = 0.0f;
            rvizMarker_.color.g = 1.0f;
            rvizMarker_.color.b = 0.0f;
            rvizMarker_.color.a = 1.0;
        }
        rvizMarker_.lifetime = ros::Duration (1.0);

        rvizMarkerPub_.publish(rvizMarker_);
        ROS_DEBUG ("Published visual marker");
      }
    }
    arMarkerPub_.publish(arPoseMarkers_);
    ROS_DEBUG ("Published ar_multi markers");
  }
예제 #15
0
void ARSinglePublisher::getTransformationCallback(const sensor_msgs::ImageConstPtr& image_msg)
{
  // Detect the configured pattern (patt_id_) in the incoming camera image,
  // compute its pose relative to the camera, and publish it as an AR marker
  // message, an optional tf transform, and an optional visualization marker.

  ARMarkerInfo *marker_info;  // array of markers detected in this frame
  int marker_num;             // number of entries in marker_info
  int i, k;

  // Get the image from ROSTOPIC
  // NOTE: the data_ptr format is BGR because the ARToolKit library was
  // built with V4L; the data_ptr format changes according to the
  // ARToolKit configure option (see config.h).
  try
  {
    capture_ = bridge_.imgMsgToCv(image_msg, "bgr8");
  }
  catch (sensor_msgs::CvBridgeException & e)
  {
    ROS_ERROR("Could not convert from >%s< to 'bgr8'.", image_msg->encoding.c_str ());
    return;
  }

  ARUint8 *data_ptr = (ARUint8 *)capture_->imageData;

  // detect the markers in the video frame
  if (arDetectMarker(data_ptr, threshold_, &marker_info, &marker_num) < 0)
  {
    ROS_FATAL ("arDetectMarker failed");
    ROS_BREAK (); // FIXME: I don't think this should be fatal... -Bill
  }

  // check for known patterns: keep the detection of patt_id_ with the
  // highest confidence factor (k == -1 means the pattern was not seen)
  k = -1;
  for (i = 0; i < marker_num; i++)
  {
    if (marker_info[i].id == patt_id_)
    {
      ROS_DEBUG("Found pattern: %d ", patt_id_);

      if (k == -1 || marker_info[k].cf < marker_info[i].cf)
        k = i;
    }
  }

  if (k == -1)
  {
    cont_f_ = 0;
    ROS_WARN("Failed to locate marker.");
    return;
  }

  // get the marker pose; use the previous frame's transform as a starting
  // point only when history is enabled and tracking was continuous
  if (!use_history_ || cont_f_ == 0)
  {
    arGetTransMat(&marker_info[k], marker_center_, marker_width_, marker_trans_);
  }
  else
  {
    arGetTransMatCont(&marker_info[k], marker_trans_, marker_center_, marker_width_, marker_trans_);
  }

  cont_f_ = 1;

  // get the transformation between the marker and the real camera
  double arQuat[4], arPos[3];

  // arUtilMatInv (marker_trans_, cam_trans);
  arUtilMat2QuatPos(marker_trans_, arQuat, arPos);

  // error checking: reject frames whose computed orientation is unusable.
  // NOTE: a NaN component would NOT trip the norm check below (NaN
  // comparisons are false), so the explicit isnan check is still required.
  if(fabs(sqrt(arQuat[0]*arQuat[0] + arQuat[1]*arQuat[1] + arQuat[2]*arQuat[2] + arQuat[3]*arQuat[3]) - 1.0) > 0.0001)
  {
    ROS_WARN("Skipping invalid frame. Computed quaternion is invalid.");
    return; // BUGFIX: the message said "skipping" but the frame was not skipped
  }
  if(std::isnan(arQuat[0]) || std::isnan(arQuat[1]) || std::isnan(arQuat[2]) || std::isnan(arQuat[3]))
  {
    ROS_WARN("Skipping invalid frame because computed orientation is not a number.");
    return;
  }
  if(std::isinf(arQuat[0]) || std::isinf(arQuat[1]) || std::isinf(arQuat[2]) || std::isinf(arQuat[3]))
  {
    ROS_WARN("Skipping invalid frame because computed orientation is infinite.");
    return;
  }

  // convert to ROS frame

  double quat[4], pos[3];

  pos[0] = arPos[0] * AR_TO_ROS;
  pos[1] = arPos[1] * AR_TO_ROS;
  pos[2] = arPos[2] * AR_TO_ROS;

  quat[0] = -arQuat[0];
  quat[1] = -arQuat[1];
  quat[2] = -arQuat[2];
  quat[3] = arQuat[3];

  ROS_DEBUG("  Pos x: %3.5f  y: %3.5f  z: %3.5f", pos[0], pos[1], pos[2]);
  ROS_DEBUG("  Quat qx: %3.5f qy: %3.5f qz: %3.5f qw: %3.5f", quat[0], quat[1], quat[2], quat[3]);

  // publish the marker
  // BUGFIX: report the confidence of the selected best marker
  // (marker_info[k].cf), not of the first detection (marker_info->cf).
  ar_target_marker_.confidence = marker_info[k].cf;

  ar_marker_publisher_.publish(ar_target_marker_);
  ROS_DEBUG ("Published ar_single marker");

  // publish transform between camera and marker

  tf::Quaternion rotation(quat[0], quat[1], quat[2], quat[3]);
  tf::Vector3 origin(pos[0], pos[1], pos[2]);
  tf::Transform transform(rotation, origin);

  // TODO: figure out why this happens once in a while...
  if(transform.getOrigin().getZ() < 0.0)
  {
    transform.setOrigin(-transform.getOrigin());
  }

  if (publish_tf_)
  {
    if (reverse_transform_)
    {
      ROS_ASSERT_MSG(false, "Reverse transform is not debugged yet.");
      tf::StampedTransform marker_to_cam(transform.inverse(), image_msg->header.stamp, marker_frame_, camera_frame_);
      tf_broadcaster_.sendTransform(marker_to_cam);
    }
    else
    {
      tf::Transform offseted_transform = transform * marker_offset_;
      tf::StampedTransform cam_to_marker(offseted_transform, image_msg->header.stamp, camera_frame_, marker_frame_);
      tf_broadcaster_.sendTransform(cam_to_marker);
    }
  }

  // publish visual marker
  publishMarker(transform, image_msg->header.stamp);
}
예제 #16
0
static void Idle(void)
{
	static int ms_prev;
	int ms;
	float s_elapsed;
	ARUint8 *image;

	ARMarkerInfo    *marker_info;					// Pointer to array holding the details of detected markers.
    int             marker_num;						// Count of number of markers detected.
    int             i, j, k;
	
	// Find out how long since Idle() last ran.
	ms = glutGet(GLUT_ELAPSED_TIME);
	s_elapsed = (float)(ms - ms_prev) * 0.001;
	if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
	ms_prev = ms;
	
	// Update drawing.
	arVrmlTimerUpdate();
	
	// Grab a video frame.
	if ((image = arVideoGetImage()) != NULL) {
		gARTImage = image;	// Save the fetched image.
		gPatt_found = FALSE;	// Invalidate any previous detected markers.
		
		gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.
		
		// Detect the markers in the video frame.
		if (arDetectMarker(gARTImage, gARTThreshhold, &marker_info, &marker_num) < 0) {
			exit(-1);
		}
				
		// Check for object visibility.
		
		for (i = 0; i < gObjectDataCount; i++) {
		
			// Check through the marker_info array for highest confidence
			// visible marker matching our object's pattern.
			k = -1;
			for (j = 0; j < marker_num; j++) {
				if (marker_info[j].id == gObjectData[i].id) {
					if( k == -1 ) k = j; // First marker detected.
					else if (marker_info[k].cf < marker_info[j].cf) k = j; // Higher confidence marker detected.
				}
			}
			
			if (k != -1) {
				// Get the transformation between the marker and the real camera.
				//fprintf(stderr, "Saw object %d.\n", i);
				if (gObjectData[i].visible == 0) {
					arGetTransMat(&marker_info[k],
								  gObjectData[i].marker_center, gObjectData[i].marker_width,
								  gObjectData[i].trans);
				} else {
					arGetTransMatCont(&marker_info[k], gObjectData[i].trans,
									  gObjectData[i].marker_center, gObjectData[i].marker_width,
									  gObjectData[i].trans);
				}
				gObjectData[i].visible = 1;
				gPatt_found = TRUE;
			} else {
				gObjectData[i].visible = 0;
			}
		}
		
		// Tell GLUT to update the display.
		glutPostRedisplay();
	}
}
예제 #17
0
/* main loop */
static void mainLoop(void)
{
    static int      contF = 0;
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             j, k;

	//update new data
	g_MyKinect.Update();

#ifdef USE_USERDETECTOR
	if(g_MyKinect.userStatus.isPlayerVisible())
	{
		XV3 tmp = g_MyKinect.userDetector->getSkeletonJointPosition(XN_SKEL_RIGHT_HAND);
		printf("Right hand position: %.2f %.2f %.2f\n", tmp.X, tmp.Y, tmp.Z);
	}
#endif

	if(drawFromKinect)
	{
		//get image data to detect marker
		if( (dataPtr = (ARUint8 *)g_MyKinect.GetBGRA32Image()) == NULL ) {
			arUtilSleep(2);
			return;
		}
	}
	else
	{
		 /* grab a vide frame */
		if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
		  arUtilSleep(2);
		 return;
		}
	}

    if( count == 0 ) arUtilTimerReset();
    count++;

    /* detect the markers in the video frame */
    if( arDetectMarker(dataPtr, thresh, &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }

	if(drawFromKinect)
	{
		//option . You can choose many display mode. image, Depth by Color, depth mixed image
		if(displayMode == 2)
			dataPtr = (ARUint8 *)g_MyKinect.GetDepthDrewByColor();
		else
			if(displayMode == 3)
				dataPtr = (ARUint8 *)g_MyKinect.GetDepthMixedImage();
	}

	argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

	arVideoCapNext();

    /* check for object visibility */
    k = -1;
    for( j = 0; j < marker_num; j++ ) {
        if( patt_id == marker_info[j].id ) {
            if( k == -1 ) k = j;
            else if( marker_info[k].cf < marker_info[j].cf ) k = j;
        }
    }
    if( k == -1 ) {
        contF = 0;
        argSwapBuffers();
        return;
    }

    /* get the transformation between the marker and the real camera */
    if( mode == 0 || contF == 0 ) {
        arGetTransMat(&marker_info[k], patt_center, patt_width, patt_trans);
    }
    else {
        arGetTransMatCont(&marker_info[k], patt_trans, patt_center, patt_width, patt_trans);
    }
    contF = 1;

    draw( patt_trans );

    argSwapBuffers();
}
예제 #18
0
/* main loop */
static void mainLoop(void)
{
    ARUint8         *dataPtr;
    ARMarkerInfo    *marker_info;
    int             marker_num;
    int             i,j,k;

    /* grab a video frame */
    if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
        arUtilSleep(2);
        return;
    }

    if( count == 0 ) arUtilTimerReset();
    count++;

    /*draw the video*/
    argDrawMode2D();
    argDispImage( dataPtr, 0,0 );

    /* capture the next video frame */
    arVideoCapNext();

    glColor3f( 1.0, 0.0, 0.0 );
    glLineWidth(6.0);

    /* detect the markers in the video frame */
    if(arDetectMarker(dataPtr, thresh,
                      &marker_info, &marker_num) < 0 ) {
        cleanup();
        exit(0);
    }
    for( i = 0; i < marker_num; i++ ) {
        argDrawSquare(marker_info[i].vertex,0,0);
    }

    /* check for known patterns */
    for( i = 0; i < objectnum; i++ ) {
        k = -1;
        for( j = 0; j < marker_num; j++ ) {
            if( object[i].id == marker_info[j].id) {

                /* you've found a pattern */
                //printf("Found pattern: %d ",patt_id);
                glColor3f( 0.0, 1.0, 0.0 );
                argDrawSquare(marker_info[j].vertex,0,0);

                if( k == -1 ) k = j;
                else /* make sure you have the best pattern (highest confidence factor) */
                    if( marker_info[k].cf < marker_info[j].cf ) k = j;
            }
        }
        if( k == -1 ) {
            object[i].visible = 0;
            continue;
        }

        /* calculate the transform for each marker */
        if( object[i].visible == 0 ) {
            arGetTransMat(&marker_info[k],
                          object[i].marker_center, object[i].marker_width,
                          object[i].trans);
        }
        else {
            arGetTransMatCont(&marker_info[k], object[i].trans,
                              object[i].marker_center, object[i].marker_width,
                              object[i].trans);
        }
        object[i].visible = 1;
    }

    /*check for object collisions between marker 0 and 1 */
    if(object[0].visible && object[1].visible) {
        if(checkCollisions(object[0],object[1],COLLIDE_DIST)) {
            object[0].collide = 1;
            object[1].collide = 1;
        }
        else {
            object[0].collide = 0;
            object[1].collide = 0;
        }
    }

    /* draw the AR graphics */
    draw( object, objectnum );

    /*swap the graphics buffers*/
    argSwapBuffers();
}
예제 #19
0
//=======================================================
// Main loop function: grabs a camera frame, detects markers, updates each
// marker's pose and visibility state, and drives per-marker animations and
// MIDI events when markers appear/disappear.
//=======================================================
void MainLoop(void)
{
	ARUint8          *image;			// captured camera image
	ARMarkerInfo     *marker_info;		// detected marker information
	int              marker_num;		// number of detected markers
	int              i, j, k;

	// Grab a camera image; if none is available yet, sleep briefly and retry
	// on the next loop iteration.
	if( (image = (ARUint8 *)arVideoGetImage()) == NULL ){
		arUtilSleep( 2 );
		return;
	}
	if( count == 0 ) arUtilTimerReset();
	count++;

	// Draw the camera image.
	argDrawMode2D();
	argDispImage( image, 0, 0 );


	// Detect and identify the markers in the frame.
	if( arDetectMarker( image, thresh, &marker_info, &marker_num ) < 0 ){
		Cleanup();
		exit(0);
	}

	// Request capture of the next frame.
	arVideoCapNext();

	// Prepare for rendering the 3D objects.
	argDrawMode3D();
	argDraw3dCamera( 0, 0 );
	glClearDepth(1.0);					// depth-buffer clear value
	glClear( GL_DEPTH_BUFFER_BIT );		// initialize the depth buffer

	// NOTE(review): movex[] appears to be a per-marker animation counter and
	// rmove a frame counter for a special animation — semantics inferred from
	// usage here; confirm against their declarations. When markers 0, 3 and 7
	// are all animating, play the special "nashi" animation instead of the
	// normal per-marker processing, and reset everything after ~40 frames.
	if(movex[0]!=0 && movex[3]!=0 && movex[7]!=0){
		rmove++;
		
		if(rmove!=0){
			Drawnashi( marker[3].mark_id, marker[3].patt_trans);
		}
		if(rmove>40.0){
			rmove=0.0;
			for(int i=0;i<MARK_NUM;i++){
				movex[i]=0;
			}
		}
	}else{
		// Compare marker confidences: for each pattern, pick the detection
		// with the highest confidence factor (k == -1 means not found).
		for( i=0; i<MARK_NUM; i++ ){
			k = -1;
			for( j=0; j<marker_num; j++ ){
				if( marker[i].patt_id == marker_info[j].id ){
					if( k == -1 ) k = j;
					else if( marker_info[k].cf < marker_info[j].cf ) k = j;
				}
			}

			// Marker i was not found this frame.
			if( k == -1 ){
				if(marker[i].visible != 0){
					// Marker just disappeared: fire its MIDI event and start
					// its animation counter.
					midi_out(i+1);
					midi_stop(i+1);
					movex[i]=1;
					marker[i].visible = 0;
				}else if(movex[i]!=0){
					// Animation still running: keep drawing at the last pose.
					DrawObject( marker[i].mark_id, marker[i].patt_trans,i );
				}
			}else{
				// Obtain the transformation matrix.
				if( marker[i].visible == 0 ) {
					// First sighting: compute the pose from this frame alone.
					// (Using arGetTransMat on the first detection and
					// arGetTransMatCont afterwards reportedly gives more
					// stable tracking.)
					arGetTransMat( &marker_info[k], marker[i].patt_center, marker[i].patt_width, marker[i].patt_trans );
					marker[i].visible = 1;

				} else {
					// Subsequent sightings: refine the pose using the
					// previous frame's transform.
					arGetTransMatCont( &marker_info[k], marker[i].patt_trans, marker[i].patt_center, marker[i].patt_width, marker[i].patt_trans );
				}

				// Draw the 3D object while its animation counter is active.
				if(movex[i]!=0){
					DrawObject( marker[i].mark_id, marker[i].patt_trans,i );
				}
			}
			// Advance the animation counter; stop after 40 ticks.
			if(movex[i]>=40.0) movex[i]=0;
			if(movex[i]!=0) movex[i]++;
		}
	}
	// Display the buffer contents on screen.
	argSwapBuffers();
}