static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;
    ARMarkerInfo* markerInfo;
    int markerNum;
    ARdouble err;
    int i, j, k;

    // Calculate time delta.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    ms_prev = ms;

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image;        // Save the fetched image.
        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0) {
            exit(-1);
        }

        // Get detected markers.
        markerInfo = arGetMarker(gARHandle);
        markerNum = arGetMarkerNum(gARHandle);

        // Update markers.
        for (i = 0; i < markersSquareCount; i++) {
            markersSquare[i].validPrev = markersSquare[i].valid;

            // Check through the marker_info array for the highest-confidence
            // visible marker matching our preferred pattern.
            k = -1;
            if (markersSquare[i].patt_type == AR_PATTERN_TYPE_TEMPLATE) {
                for (j = 0; j < markerNum; j++) {
                    if (markersSquare[i].patt_id == markerInfo[j].idPatt) {
                        if (k == -1) {
                            if (markerInfo[j].cfPatt >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                        } else if (markerInfo[j].cfPatt > markerInfo[k].cfPatt) k = j; // Higher-confidence marker detected.
                    }
                }
                if (k != -1) {
                    markerInfo[k].id = markerInfo[k].idPatt;
                    markerInfo[k].cf = markerInfo[k].cfPatt;
                    markerInfo[k].dir = markerInfo[k].dirPatt;
                }
            } else {
                for (j = 0; j < markerNum; j++) {
                    if (markersSquare[i].patt_id == markerInfo[j].idMatrix) {
                        if (k == -1) {
                            if (markerInfo[j].cfMatrix >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                        } else if (markerInfo[j].cfMatrix > markerInfo[k].cfMatrix) k = j; // Higher-confidence marker detected.
                    }
                }
                if (k != -1) {
                    markerInfo[k].id = markerInfo[k].idMatrix;
                    markerInfo[k].cf = markerInfo[k].cfMatrix;
                    markerInfo[k].dir = markerInfo[k].dirMatrix;
                }
            }

            if (k != -1) {
                markersSquare[i].valid = TRUE;
                ARLOGd("Marker %d matched pattern %d.\n", i, markerInfo[k].id);
                // Get the transformation between the marker and the real camera into trans.
                if (markersSquare[i].validPrev && useContPoseEstimation) {
                    err = arGetTransMatSquareCont(gAR3DHandle, &(markerInfo[k]), markersSquare[i].trans, markersSquare[i].marker_width, markersSquare[i].trans);
                } else {
                    err = arGetTransMatSquare(gAR3DHandle, &(markerInfo[k]), markersSquare[i].marker_width, markersSquare[i].trans);
                }
            } else {
                markersSquare[i].valid = FALSE;
            }

            if (markersSquare[i].valid) {
                // Filter the pose estimate.
                if (markersSquare[i].ftmi) {
                    if (arFilterTransMat(markersSquare[i].ftmi, markersSquare[i].trans, !markersSquare[i].validPrev) < 0) {
                        ARLOGe("arFilterTransMat error with marker %d.\n", i);
                    }
                }
                if (!markersSquare[i].validPrev) {
                    // Marker has become visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerAppeared(i);
                }
                // We have a new pose, so set that.
                arglCameraViewRH((const ARdouble (*)[4])markersSquare[i].trans, markersSquare[i].pose.T, 1.0f /*VIEW_SCALEFACTOR*/);
                // Tell any dependent objects about the update.
                VirtualEnvironmentHandleARMarkerWasUpdated(i, markersSquare[i].pose);
            } else {
                if (markersSquare[i].validPrev) {
                    // Marker has ceased to be visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerDisappeared(i);
                }
            }
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    } else {
        arUtilSleep(2);
    }
}
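The template and matrix branches above run the same highest-confidence scan over different fields of ARMarkerInfo. A minimal sketch of how that scan could be factored out, assuming accessor callbacks; findBestMarker and the accessor names are hypothetical helpers, not part of the ARToolKit API:

// Hypothetical helper: return the index into markerInfo of the best candidate
// for patt_id, or -1 if no candidate passes the confidence threshold.
// cfOf/idOf select either the template (cfPatt/idPatt) or matrix
// (cfMatrix/idMatrix) fields, so both branches of the loop can share it.
static int findBestMarker(const ARMarkerInfo *markerInfo, int markerNum,
                          int patt_id, ARdouble threshold,
                          ARdouble (*cfOf)(const ARMarkerInfo *),
                          int (*idOf)(const ARMarkerInfo *))
{
    int j, k = -1;
    for (j = 0; j < markerNum; j++) {
        if (idOf(&markerInfo[j]) != patt_id) continue;
        if (k == -1) {
            if (cfOf(&markerInfo[j]) >= threshold) k = j; // First candidate above threshold.
        } else if (cfOf(&markerInfo[j]) > cfOf(&markerInfo[k])) {
            k = j; // Higher-confidence candidate found.
        }
    }
    return k;
}

// Accessors for the template branch; matrix accessors would be analogous.
static ARdouble cfPattOf(const ARMarkerInfo *m) { return m->cfPatt; }
static int      idPattOf(const ARMarkerInfo *m) { return m->idPatt; }

// Usage in the template branch of the loop above:
// k = findBestMarker(markerInfo, markerNum, markersSquare[i].patt_id,
//                    markersSquare[i].matchingThreshold, cfPattOf, idPattOf);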
JNIEXPORT void JNICALL JNIFUNCTION_NATIVE(nativeVideoFrame(JNIEnv* env, jobject obj, jbyteArray pinArray))
{
    int i, j, k;
    jbyte* inArray;
    ARdouble err;

    if (!videoInited) {
#ifdef DEBUG
        LOGI("nativeVideoFrame !VIDEO\n");
#endif
        return; // No point in trying to track until video is inited.
    }
    if (!gARViewInited) {
#ifdef DEBUG
        LOGI("nativeVideoFrame !ARVIEW\n");
#endif
        return; // Also, we won't track until the ARView has been inited.
    }
#ifdef DEBUG
    LOGI("nativeVideoFrame\n");
#endif

    // Copy the incoming YUV420 image from pinArray.
    env->GetByteArrayRegion(pinArray, 0, gVideoFrameSize, (jbyte *)gVideoFrame);

    // As of ARToolKit v5.0, NV21 format video frames are handled natively,
    // and no longer require colour conversion to RGBA.
    // If you still require RGBA format information from the video,
    // here is where you'd do the conversion:
    // color_convert_common(gVideoFrame, gVideoFrame + videoWidth*videoHeight, videoWidth, videoHeight, myRGBABuffer);

    videoFrameNeedsPixelBufferDataUpload = true; // Note that buffer needs uploading. (Upload must be done on OpenGL context's thread.)

    // Run marker detection on frame.
    arDetectMarker(arHandle, gVideoFrame);

    // Get detected markers.
    ARMarkerInfo* markerInfo = arGetMarker(arHandle);
    int markerNum = arGetMarkerNum(arHandle);

    // Update markers.
    for (i = 0; i < markersSquareCount; i++) {
        markersSquare[i].validPrev = markersSquare[i].valid;

        // Check through the marker_info array for the highest-confidence
        // visible marker matching our preferred pattern.
        k = -1;
        if (markersSquare[i].patt_type == AR_PATTERN_TYPE_TEMPLATE) {
            for (j = 0; j < markerNum; j++) {
                if (markersSquare[i].patt_id == markerInfo[j].idPatt) {
                    if (k == -1) {
                        if (markerInfo[j].cfPatt >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                    } else if (markerInfo[j].cfPatt > markerInfo[k].cfPatt) k = j; // Higher-confidence marker detected.
                }
            }
            if (k != -1) {
                markerInfo[k].id = markerInfo[k].idPatt;
                markerInfo[k].cf = markerInfo[k].cfPatt;
                markerInfo[k].dir = markerInfo[k].dirPatt;
            }
        } else {
            for (j = 0; j < markerNum; j++) {
                if (markersSquare[i].patt_id == markerInfo[j].idMatrix) {
                    if (k == -1) {
                        if (markerInfo[j].cfMatrix >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                    } else if (markerInfo[j].cfMatrix > markerInfo[k].cfMatrix) k = j; // Higher-confidence marker detected.
                }
            }
            if (k != -1) {
                markerInfo[k].id = markerInfo[k].idMatrix;
                markerInfo[k].cf = markerInfo[k].cfMatrix;
                markerInfo[k].dir = markerInfo[k].dirMatrix;
            }
        }

        if (k != -1) {
            markersSquare[i].valid = TRUE;
#ifdef DEBUG
            LOGI("Marker %d matched pattern %d.\n", k, markerInfo[k].id);
#endif
            // Get the transformation between the marker and the real camera into trans.
            if (markersSquare[i].validPrev) {
                err = arGetTransMatSquareCont(ar3DHandle, &(markerInfo[k]), markersSquare[i].trans, markersSquare[i].marker_width, markersSquare[i].trans);
            } else {
                err = arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), markersSquare[i].marker_width, markersSquare[i].trans);
            }
        } else {
            markersSquare[i].valid = FALSE;
        }

        if (markersSquare[i].valid) {
            // Filter the pose estimate.
            if (markersSquare[i].ftmi) {
                if (arFilterTransMat(markersSquare[i].ftmi, markersSquare[i].trans, !markersSquare[i].validPrev) < 0) {
                    LOGE("arFilterTransMat error with marker %d.\n", i);
                }
            }
            if (!markersSquare[i].validPrev) {
                // Marker has become visible, tell any dependent objects.
                //ARMarkerAppearedNotification
            }
            // We have a new pose, so set that.
            arglCameraViewRHf(markersSquare[i].trans, markersSquare[i].pose.T, 1.0f /*VIEW_SCALEFACTOR*/);
            // Tell any dependent objects about the update.
            //ARMarkerUpdatedPoseNotification
        } else {
            if (markersSquare[i].validPrev) {
                // Marker has ceased to be visible, tell any dependent objects.
                //ARMarkerDisappearedNotification
            }
        }
    }
}
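The commented-out //ARMarkerAppearedNotification placeholders above mark where dependent objects would be told about marker events. One hedged sketch of forwarding such an event up to Java over JNI; the Java-side method onMarkerAppeared(int) and the helper name are assumptions, not part of the example:

// Hypothetical JNI bridge: look up and cache the callback method once, then
// invoke it from nativeVideoFrame() where the notification placeholders sit.
// Assumes the Java object passed as 'obj' declares: void onMarkerAppeared(int markerIndex).
static jmethodID gOnMarkerAppearedMID = NULL;

static void notifyMarkerAppeared(JNIEnv *env, jobject obj, int markerIndex)
{
    if (!gOnMarkerAppearedMID) {
        jclass cls = env->GetObjectClass(obj);
        gOnMarkerAppearedMID = env->GetMethodID(cls, "onMarkerAppeared", "(I)V");
        env->DeleteLocalRef(cls);
        if (!gOnMarkerAppearedMID) return; // Method not found on the Java side; nothing to call.
    }
    env->CallVoidMethod(obj, gOnMarkerAppearedMID, (jint)markerIndex);
}

The disappeared and pose-updated notifications would follow the same pattern with their own cached method IDs.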
JNIEXPORT void JNICALL JNIFUNCTION_NATIVE(nativeVideoFrame(JNIEnv* env, jobject obj, jbyteArray pinArray))
{
    int i, j, k;
    jbyte* inArray;

    if (!videoInited) {
#ifdef DEBUG
        LOGD("nativeVideoFrame !VIDEO\n");
#endif
        return; // No point in trying to track until video is inited.
    }
    if (!nftDataLoaded) {
        if (!nftDataLoadingThreadHandle || threadGetStatus(nftDataLoadingThreadHandle) < 1) {
#ifdef DEBUG
            LOGD("nativeVideoFrame !NFTDATA\n");
#endif
            return;
        } else {
            nftDataLoaded = true;
            threadWaitQuit(nftDataLoadingThreadHandle);
            threadFree(&nftDataLoadingThreadHandle); // Clean up.
        }
    }
    if (!gARViewInited) {
#ifdef DEBUG
        LOGD("nativeVideoFrame !ARVIEW\n");
#endif
        return; // Also, we won't track until the ARView has been inited.
    }
#ifdef DEBUG
    LOGD("nativeVideoFrame\n");
#endif

    // Copy the incoming YUV420 image from pinArray.
    env->GetByteArrayRegion(pinArray, 0, gVideoFrameSize, (jbyte *)gVideoFrame);

    // As of ARToolKit v5.0, NV21 format video frames are handled natively,
    // and no longer require colour conversion to RGBA.
    // If you still require RGBA format information from the video,
    // here is where you'd do the conversion:
    // color_convert_common(gVideoFrame, gVideoFrame + videoWidth*videoHeight, videoWidth, videoHeight, myRGBABuffer);

    videoFrameNeedsPixelBufferDataUpload = true; // Note that buffer needs uploading. (Upload must be done on OpenGL context's thread.)

    // Run NFT tracking on frame.
    if (trackingThreadHandle) {
        // Perform NFT tracking.
        float err;
        int ret;
        int pageNo;

        if (detectedPage == -2) {
            trackingInitStart(trackingThreadHandle, gVideoFrame);
            detectedPage = -1;
        }
        if (detectedPage == -1) {
            ret = trackingInitGetResult(trackingThreadHandle, trackingTrans, &pageNo);
            if (ret == 1) {
                if (pageNo >= 0 && pageNo < surfaceSetCount) {
#ifdef DEBUG
                    LOGE("Detected page %d.\n", pageNo);
#endif
                    detectedPage = pageNo;
                    ar2SetInitTrans(surfaceSet[detectedPage], trackingTrans);
                } else {
                    LOGE("Detected bad page %d.\n", pageNo);
                    detectedPage = -2;
                }
            } else if (ret < 0) {
#ifdef DEBUG
                LOGE("No page detected.\n");
#endif
                detectedPage = -2;
            }
        }
        if (detectedPage >= 0 && detectedPage < surfaceSetCount) {
            if (ar2Tracking(ar2Handle, surfaceSet[detectedPage], gVideoFrame, trackingTrans, &err) < 0) {
#ifdef DEBUG
                LOGE("Tracking lost.\n");
#endif
                detectedPage = -2;
            } else {
#ifdef DEBUG
                LOGE("Tracked page %d (max %d).\n", detectedPage, surfaceSetCount - 1);
#endif
            }
        }
    } else {
        LOGE("Error: trackingThreadHandle\n");
        detectedPage = -2;
    }

    // Update markers.
    for (i = 0; i < markersNFTCount; i++) {
        markersNFT[i].validPrev = markersNFT[i].valid;
        if (markersNFT[i].pageNo >= 0 && markersNFT[i].pageNo == detectedPage) {
            markersNFT[i].valid = TRUE;
            for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) markersNFT[i].trans[j][k] = trackingTrans[j][k];
        } else {
            markersNFT[i].valid = FALSE;
        }
        if (markersNFT[i].valid) {
            // Filter the pose estimate.
            if (markersNFT[i].ftmi) {
                if (arFilterTransMat(markersNFT[i].ftmi, markersNFT[i].trans, !markersNFT[i].validPrev) < 0) {
                    LOGE("arFilterTransMat error with marker %d.\n", i);
                }
            }
            if (!markersNFT[i].validPrev) {
                // Marker has become visible, tell any dependent objects.
                VirtualEnvironmentHandleARMarkerAppeared(i);
            }
            // We have a new pose, so set that.
            arglCameraViewRHf(markersNFT[i].trans, markersNFT[i].pose.T, 1.0f /*VIEW_SCALEFACTOR*/);
            // Tell any dependent objects about the update.
            VirtualEnvironmentHandleARMarkerWasUpdated(i, markersNFT[i].pose);
        } else {
            if (markersNFT[i].validPrev) {
                // Marker has ceased to be visible, tell any dependent objects.
                VirtualEnvironmentHandleARMarkerDisappeared(i);
            }
        }
    }
}
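In all of the NFT listings, detectedPage doubles as a small state machine. A sketch of the states it moves through, with illustrative names only (the examples themselves use the raw values):

// detectedPage states, as used by the NFT examples:
//   -2: tracking not initialised -> call trackingInitStart() with the next frame.
//   -1: initialisation in flight  -> poll trackingInitGetResult() each frame.
//  >=0: locked onto that page     -> call ar2Tracking() each frame; on failure,
//       fall back to -2 and re-initialise.
enum {
    PAGE_STATE_UNINITED = -2,
    PAGE_STATE_INITING  = -1
    // Values >= 0 are page numbers indexing surfaceSet[].
};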
static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;
    int i, j, k;

    // Calculate time delta.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    ms_prev = ms;

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image; // Save the fetched image.

        // Calculate FPS every 30 frames.
        if (gCallCountMarkerDetect % 30 == 0) {
            gFPS = 30.0/arUtilTimer();
            arUtilTimerReset();
            gCallCountMarkerDetect = 0;
        }
        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Run marker detection on frame.
        if (threadHandle) {
            // Perform NFT tracking.
            float err;
            int ret;
            int pageNo;

            if (detectedPage == -2) {
                trackingInitStart(threadHandle, gARTImage);
                detectedPage = -1;
            }
            if (detectedPage == -1) {
                ret = trackingInitGetResult(threadHandle, trackingTrans, &pageNo);
                if (ret == 1) {
                    if (pageNo >= 0 && pageNo < surfaceSetCount) {
                        ARLOGd("Detected page %d.\n", pageNo);
                        detectedPage = pageNo;
                        ar2SetInitTrans(surfaceSet[detectedPage], trackingTrans);
                    } else {
                        ARLOGe("Detected bad page %d.\n", pageNo);
                        detectedPage = -2;
                    }
                } else if (ret < 0) {
                    ARLOGd("No page detected.\n");
                    detectedPage = -2;
                }
            }
            if (detectedPage >= 0 && detectedPage < surfaceSetCount) {
                if (ar2Tracking(ar2Handle, surfaceSet[detectedPage], gARTImage, trackingTrans, &err) < 0) {
                    ARLOGd("Tracking lost.\n");
                    detectedPage = -2;
                } else {
                    ARLOGd("Tracked page %d (max %d).\n", detectedPage, surfaceSetCount - 1);
                }
            }
        } else {
            ARLOGe("Error: threadHandle\n");
            detectedPage = -2;
        }

        // Update markers.
        for (i = 0; i < markersNFTCount; i++) {
            markersNFT[i].validPrev = markersNFT[i].valid;
            if (markersNFT[i].pageNo >= 0 && markersNFT[i].pageNo == detectedPage) {
                markersNFT[i].valid = TRUE;
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) markersNFT[i].trans[j][k] = trackingTrans[j][k];
            } else {
                markersNFT[i].valid = FALSE;
            }
            if (markersNFT[i].valid) {
                // Filter the pose estimate.
                if (markersNFT[i].ftmi) {
                    if (arFilterTransMat(markersNFT[i].ftmi, markersNFT[i].trans, !markersNFT[i].validPrev) < 0) {
                        ARLOGe("arFilterTransMat error with marker %d.\n", i);
                    }
                }
                if (!markersNFT[i].validPrev) {
                    // Marker has become visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerAppeared(i);
                }
                // We have a new pose, so set that.
                arglCameraViewRH(markersNFT[i].trans, markersNFT[i].pose.T, VIEW_SCALEFACTOR);
                // Tell any dependent objects about the update.
                VirtualEnvironmentHandleARMarkerWasUpdated(i, markersNFT[i].pose);
            } else {
                if (markersNFT[i].validPrev) {
                    // Marker has ceased to be visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerDisappeared(i);
                }
            }
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    } else {
        arUtilSleep(2);
    }
}
static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;

    // NFT results.
    static int detectedPage = -2; // -2 Tracking not inited, -1 tracking inited OK, >= 0 tracking online on page.
    static float trackingTrans[3][4];

    int i, j, k;

    // Find out how long since mainLoop() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    // Update drawing.
    DrawCubeUpdate(s_elapsed);

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image;        // Save the fetched image.
        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Run marker detection on frame.
        if (threadHandle) {
            // Perform NFT tracking.
            float err;
            int ret;
            int pageNo;

            if (detectedPage == -2) {
                trackingInitStart(threadHandle, gARTImage);
                detectedPage = -1;
            }
            if (detectedPage == -1) {
                ret = trackingInitGetResult(threadHandle, trackingTrans, &pageNo);
                if (ret == 1) {
                    if (pageNo >= 0 && pageNo < surfaceSetCount) {
                        ARLOGd("Detected page %d.\n", pageNo);
                        detectedPage = pageNo;
                        ar2SetInitTrans(surfaceSet[detectedPage], trackingTrans);
                    } else {
                        ARLOGe("Detected bad page %d.\n", pageNo);
                        detectedPage = -2;
                    }
                } else if (ret < 0) {
                    ARLOGd("No page detected.\n");
                    detectedPage = -2;
                }
            }
            if (detectedPage >= 0 && detectedPage < surfaceSetCount) {
                if (ar2Tracking(ar2Handle, surfaceSet[detectedPage], gARTImage, trackingTrans, &err) < 0) {
                    ARLOGd("Tracking lost.\n");
                    detectedPage = -2;
                } else {
                    ARLOGd("Tracked page %d (max %d).\n", detectedPage, surfaceSetCount - 1);
                }
            }
        } else {
            ARLOGe("Error: threadHandle\n");
            detectedPage = -2;
        }

        // Update markers.
        for (i = 0; i < markersNFTCount; i++) {
            markersNFT[i].validPrev = markersNFT[i].valid;
            if (markersNFT[i].pageNo >= 0 && markersNFT[i].pageNo == detectedPage) {
                markersNFT[i].valid = TRUE;
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) markersNFT[i].trans[j][k] = trackingTrans[j][k];
            } else {
                markersNFT[i].valid = FALSE;
            }
            if (markersNFT[i].valid) {
                // Filter the pose estimate.
                if (markersNFT[i].ftmi) {
                    if (arFilterTransMat(markersNFT[i].ftmi, markersNFT[i].trans, !markersNFT[i].validPrev) < 0) {
                        ARLOGe("arFilterTransMat error with marker %d.\n", i);
                    }
                }
                if (!markersNFT[i].validPrev) {
                    // Marker has become visible, tell any dependent objects.
                    // --->
                }
                // We have a new pose, so set that.
                arglCameraViewRH(markersNFT[i].trans, markersNFT[i].pose.T, VIEW_SCALEFACTOR);
                // Tell any dependent objects about the update.
                // --->
            } else {
                if (markersNFT[i].validPrev) {
                    // Marker has ceased to be visible, tell any dependent objects.
                    // --->
                }
            }
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}
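The pose written into markersNFT[i].pose.T by arglCameraViewRH() is a column-major 4x4 OpenGL modelview matrix, so the display callback can load it directly. A minimal sketch, assuming the projection matrix has already been set from the camera parameters elsewhere (e.g. via arglCameraFrustumRH()) and that a DrawCube() routine pairs with the DrawCubeUpdate() call above; drawMarkers itself is a hypothetical name:

// Hedged sketch: consume the per-marker poses computed in mainLoop() from
// the GLUT display callback.
static void drawMarkers(void)
{
    int i;
    glMatrixMode(GL_MODELVIEW);
    for (i = 0; i < markersNFTCount; i++) {
        if (!markersNFT[i].valid) continue; // Only draw markers currently tracked.
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(markersNFT[i].pose.T); // Build where ARdouble == float.
#else
        glLoadMatrixd(markersNFT[i].pose.T); // Build where ARdouble == double.
#endif
        DrawCube(); // Assumed: render this example's content at the marker pose.
    }
}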