Example #1: mainLoop() from a GLUT-based square-marker sample. It grabs a video frame, runs marker detection, and updates the pose, filtering, and visibility of each registered marker in markersSquare[].
static void mainLoop(void)
{
	static int ms_prev;
	int ms;
	float s_elapsed;
	ARUint8 *image;
    ARMarkerInfo* markerInfo;
    int markerNum;
	ARdouble err;
    int             i, j, k;
	
	// Calculate time delta.
	ms = glutGet(GLUT_ELAPSED_TIME);
	s_elapsed = (float)(ms - ms_prev) * 0.001f;
	ms_prev = ms;
	
	// Grab a video frame.
	if ((image = arVideoGetImage()) != NULL) {
		gARTImage = image;	// Save the fetched image.
		
		gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.
		
		// Detect the markers in the video frame.
		if (arDetectMarker(gARHandle, gARTImage) < 0) {
			exit(-1);
		}
		
		// Get detected markers
		markerInfo = arGetMarker(gARHandle);
		markerNum = arGetMarkerNum(gARHandle);
	
		// Update markers.
		for (i = 0; i < markersSquareCount; i++) {
			markersSquare[i].validPrev = markersSquare[i].valid;
            
            
			// Check through the marker_info array for highest confidence
			// visible marker matching our preferred pattern.
			k = -1;
			if (markersSquare[i].patt_type == AR_PATTERN_TYPE_TEMPLATE) {
				for (j = 0; j < markerNum; j++) {
					if (markersSquare[i].patt_id == markerInfo[j].idPatt) {
						if (k == -1) {
							if (markerInfo[j].cfPatt >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
						} else if (markerInfo[j].cfPatt > markerInfo[k].cfPatt) k = j; // Higher confidence marker detected.
					}
				}
				if (k != -1) {
					markerInfo[k].id = markerInfo[k].idPatt;
					markerInfo[k].cf = markerInfo[k].cfPatt;
					markerInfo[k].dir = markerInfo[k].dirPatt;
				}
			} else {
				for (j = 0; j < markerNum; j++) {
					if (markersSquare[i].patt_id == markerInfo[j].idMatrix) {
						if (k == -1) {
							if (markerInfo[j].cfMatrix >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
						} else if (markerInfo[j].cfMatrix > markerInfo[k].cfMatrix) k = j; // Higher confidence marker detected.
					}
				}
				if (k != -1) {
					markerInfo[k].id = markerInfo[k].idMatrix;
					markerInfo[k].cf = markerInfo[k].cfMatrix;
					markerInfo[k].dir = markerInfo[k].dirMatrix;
				}
			}

			if (k != -1) {
				markersSquare[i].valid = TRUE;
				ARLOGd("Marker %d matched pattern %d.\n", i, markerInfo[k].id);
				// Get the transformation between the marker and the real camera into trans.
				if (markersSquare[i].validPrev && useContPoseEstimation) {
					err = arGetTransMatSquareCont(gAR3DHandle, &(markerInfo[k]), markersSquare[i].trans, markersSquare[i].marker_width, markersSquare[i].trans);
				} else {
					err = arGetTransMatSquare(gAR3DHandle, &(markerInfo[k]), markersSquare[i].marker_width, markersSquare[i].trans);
				}
			} else {
				markersSquare[i].valid = FALSE;
			}
	   
			if (markersSquare[i].valid) {
			
				// Filter the pose estimate.
				if (markersSquare[i].ftmi) {
					if (arFilterTransMat(markersSquare[i].ftmi, markersSquare[i].trans, !markersSquare[i].validPrev) < 0) {
						ARLOGe("arFilterTransMat error with marker %d.\n", i);
					}
				}
			
				if (!markersSquare[i].validPrev) {
					// Marker has become visible, tell any dependent objects.
                    VirtualEnvironmentHandleARMarkerAppeared(i);
				}
	
				// We have a new pose, so set that.
				arglCameraViewRH((const ARdouble (*)[4])markersSquare[i].trans, markersSquare[i].pose.T, 1.0f /*VIEW_SCALEFACTOR*/);
				// Tell any dependent objects about the update.
				VirtualEnvironmentHandleARMarkerWasUpdated(i, markersSquare[i].pose);
			
			} else {
			
				if (markersSquare[i].validPrev) {
					// Marker has ceased to be visible, tell any dependent objects.
					VirtualEnvironmentHandleARMarkerDisappeared(i);
				}
			}                    
		}
		
		// Tell GLUT the display has changed.
		glutPostRedisplay();
	} else {
		arUtilSleep(2);
	}
    
}
Example #2: nativeVideoFrame(), an Android JNI video-frame handler. It copies the incoming NV21 frame, runs marker detection, and performs the same per-marker pose update as Example #1.
JNIEXPORT void JNICALL JNIFUNCTION_NATIVE(nativeVideoFrame(JNIEnv* env, jobject obj, jbyteArray pinArray))
{
    int i, j, k;
    jbyte* inArray;
    ARdouble err;
        
    if (!videoInited) {
#ifdef DEBUG
        LOGI("nativeVideoFrame !VIDEO\n");
#endif        
        return; // No point in trying to track until video is inited.
    }
    if (!gARViewInited) {
#ifdef DEBUG
        LOGI("nativeVideoFrame !ARVIEW\n");
#endif
        return; // Also, we won't track until the ARView has been inited.
    }
#ifdef DEBUG
    LOGI("nativeVideoFrame\n");
#endif        
    
    // Copy the incoming YUV420 (NV21) image from pinArray into gVideoFrame.
    env->GetByteArrayRegion(pinArray, 0, gVideoFrameSize, (jbyte *)gVideoFrame);
    
	// As of ARToolKit v5.0, NV21 format video frames are handled natively,
	// and no longer require colour conversion to RGBA.
	// If you still require RGBA format information from the video,
    // here is where you'd do the conversion:
    // color_convert_common(gVideoFrame, gVideoFrame + videoWidth*videoHeight, videoWidth, videoHeight, myRGBABuffer);

    videoFrameNeedsPixelBufferDataUpload = true; // Note that buffer needs uploading. (Upload must be done on OpenGL context's thread.)
    
	// Run marker detection on frame
	arDetectMarker(arHandle, gVideoBuffer);
	
	// Get detected markers
	ARMarkerInfo* markerInfo = arGetMarker(arHandle);
	int markerNum = arGetMarkerNum(arHandle);
    
    // Update markers.
    for (i = 0; i < markersSquareCount; i++) {
        markersSquare[i].validPrev = markersSquare[i].valid;
        
        
        // Check through the marker_info array for highest confidence
        // visible marker matching our preferred pattern.
        k = -1;
        if (markersSquare[i].patt_type == AR_PATTERN_TYPE_TEMPLATE) {
            for (j = 0; j < markerNum; j++) {
                if (markersSquare[i].patt_id == markerInfo[j].idPatt) {
                    if (k == -1) {
                        if (markerInfo[j].cfPatt >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                    } else if (markerInfo[j].cfPatt > markerInfo[k].cfPatt) k = j; // Higher confidence marker detected.
                }
            }
            if (k != -1) {
                markerInfo[k].id = markerInfo[k].idPatt;
                markerInfo[k].cf = markerInfo[k].cfPatt;
                markerInfo[k].dir = markerInfo[k].dirPatt;
            }
        } else {
            for (j = 0; j < markerNum; j++) {
                if (markersSquare[i].patt_id == markerInfo[j].idMatrix) {
                    if (k == -1) {
                        if (markerInfo[j].cfMatrix >= markersSquare[i].matchingThreshold) k = j; // First marker detected.
                    } else if (markerInfo[j].cfMatrix > markerInfo[k].cfMatrix) k = j; // Higher confidence marker detected.
                }
            }
            if (k != -1) {
                markerInfo[k].id = markerInfo[k].idMatrix;
                markerInfo[k].cf = markerInfo[k].cfMatrix;
                markerInfo[k].dir = markerInfo[k].dirMatrix;
            }
        }

        if (k != -1) {
            markersSquare[i].valid = TRUE;
#ifdef DEBUG
            LOGI("Marker %d matched pattern %d.\n", k, markerInfo[k].id);
#endif
            // Get the transformation between the marker and the real camera into trans.
            if (markersSquare[i].validPrev) {
                err = arGetTransMatSquareCont(ar3DHandle, &(markerInfo[k]), markersSquare[i].trans, markersSquare[i].marker_width, markersSquare[i].trans);
            } else {
                err = arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), markersSquare[i].marker_width, markersSquare[i].trans);
            }
        } else {
            markersSquare[i].valid = FALSE;
        }
       
        if (markersSquare[i].valid) {
            
            // Filter the pose estimate.
            if (markersSquare[i].ftmi) {
                if (arFilterTransMat(markersSquare[i].ftmi, markersSquare[i].trans, !markersSquare[i].validPrev) < 0) {
                    LOGE("arFilterTransMat error with marker %d.\n", i);
                }
            }
            
            if (!markersSquare[i].validPrev) {
                // Marker has become visible, tell any dependent objects.
                //ARMarkerAppearedNotification
            }
    
            // We have a new pose, so set that.
            arglCameraViewRHf(markersSquare[i].trans, markersSquare[i].pose.T, 1.0f /*VIEW_SCALEFACTOR*/);
            // Tell any dependent objects about the update.
            //ARMarkerUpdatedPoseNotification
            
        } else {
            
            if (markersSquare[i].validPrev) {
                // Marker has ceased to be visible, tell any dependent objects.
                //ARMarkerDisappearedNotification
            }
        }                    
    }
}
Example #3: mainLoop() from a GLUT/VRML-based sample. It detects markers and updates the visibility and transformation of each object in gObjectData[].
static void mainLoop(void)
{
    static int ms_prev;
    int ms;
    float s_elapsed;
    ARUint8 *image;
    ARdouble err;

    int             i, j, k;

    // Find out how long since mainLoop() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    // Update drawing.
    arVrmlTimerUpdate();

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL) {
        gARTImage = image;	// Save the fetched image.

        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0) {
            exit(-1);
        }

        // Check for object visibility.

        for (i = 0; i < gObjectDataCount; i++) {

            // Check through the marker_info array for highest confidence
            // visible marker matching our object's pattern.
            k = -1;
            for (j = 0; j < gARHandle->marker_num; j++) {
                if (gARHandle->markerInfo[j].id == gObjectData[i].id) {
                    if (k == -1) k = j; // First marker detected.
                    else if (gARHandle->markerInfo[j].cf > gARHandle->markerInfo[k].cf) k = j; // Higher confidence marker detected.
                }
            }

            if (k != -1) {
                // Get the transformation between the marker and the real camera.
                //ARLOGe("Saw object %d.\n", i);
                if (gObjectData[i].visible && useContPoseEstimation) {
                    err = arGetTransMatSquareCont(gAR3DHandle, &(gARHandle->markerInfo[k]),
                                                  gObjectData[i].trans,
                                                  gObjectData[i].marker_width, gObjectData[i].trans);
                } else {
                    err = arGetTransMatSquare(gAR3DHandle, &(gARHandle->markerInfo[k]),
                                              gObjectData[i].marker_width, gObjectData[i].trans);
                }
                gObjectData[i].visible = 1;
            } else {
                gObjectData[i].visible = 0;
            }
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}
Example #4: mainLoop() from a movie-playback sample. It tracks a single marker (gPatt_id) and starts or stops movie capture as the marker appears or disappears.
static void mainLoop(void)
{
	static int ms_prev;
	int ms;
	float s_elapsed;
	ARUint8 *image;
    AR2VideoBufferT *movieBuffer;
	ARdouble err;

    int             j, k;
	
	// Find out how long since mainLoop() last ran.
	ms = glutGet(GLUT_ELAPSED_TIME);
	s_elapsed = (float)(ms - ms_prev) * 0.001f;
	if (s_elapsed < 0.01f) return; // Don't update more often than 100 Hz.
	ms_prev = ms;
	
	// Grab a movie frame (if available).
    if ((movieBuffer = ar2VideoGetImage(gMovieVideo)) != NULL) {
        if (movieBuffer->buff && movieBuffer->fillFlag)
            gMovieImage = movieBuffer->buff;
    }
    
	// Grab a video frame.
	if ((image = arVideoGetImage()) != NULL) {
		gARTImage = image;	// Save the fetched image.
        
		gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.
		
		// Detect the markers in the video frame.
		if (arDetectMarker(gARHandle, gARTImage) < 0) {
			exit(-1);
		}
		
		// Check through the marker_info array for highest confidence
		// visible marker matching our preferred pattern.
		k = -1;
		for (j = 0; j < gARHandle->marker_num; j++) {
			if (gARHandle->markerInfo[j].id == gPatt_id) {
				if (k == -1) k = j; // First marker detected.
				else if (gARHandle->markerInfo[j].cf > gARHandle->markerInfo[k].cf) k = j; // Higher confidence marker detected.
			}
		}
		
		if (k != -1) {
			// Get the transformation between the marker and the real camera into gPatt_trans.
            if (gPatt_found && useContPoseEstimation) {
                err = arGetTransMatSquareCont(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_trans, gPatt_width, gPatt_trans);
            } else {
                err = arGetTransMatSquare(gAR3DHandle, &(gARHandle->markerInfo[k]), gPatt_width, gPatt_trans);
                // Marker has appeared, so un-pause movie.
                ar2VideoCapStart(gMovieVideo);
            }
			gPatt_found = TRUE;
		} else {
            if (gPatt_found) {
                // Marker has disappeared, so pause movie.
                ar2VideoCapStop(gMovieVideo);
            }
			gPatt_found = FALSE;
		}
		
		// Tell GLUT the display has changed.
		glutPostRedisplay();
	}
}
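// The following is the per-marker equivalent of the matching and pose-update logic in the loops
// above: ARMarkerSquare::updateWithDetectedMarkers(), a method of the C++ ARMarkerSquare wrapper
// class (it defers to ARMarker::update() to finish the update).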
bool ARMarkerSquare::updateWithDetectedMarkers(ARMarkerInfo* markerInfo, int markerNum, AR3DHandle *ar3DHandle) {

    //ARController::logv("ARMarkerSquare::update()");
    
	if (patt_id < 0) return false;	// Can't update if no pattern loaded

    visiblePrev = visible;

	if (markerInfo) {

        int k = -1;
        if (patt_type == AR_PATTERN_TYPE_TEMPLATE) { 
            // Iterate over all detected markers.
            for (int j = 0; j < markerNum; j++ ) {
                if (patt_id == markerInfo[j].idPatt) {
                    // The detected trapezoid's pattern matches this marker's pattern.
                    if (k == -1) {
                        if (markerInfo[j].cfPatt > m_cfMin) k = j; // Count as a match if match confidence exceeds cfMin.
                    } else if (markerInfo[j].cfPatt > markerInfo[k].cfPatt) k = j; // Or if it exceeds match confidence of a different already matched trapezoid (i.e. assume only one instance of each marker).
                }
            }
            if (k != -1) {
                markerInfo[k].id = markerInfo[k].idPatt;
                markerInfo[k].cf = markerInfo[k].cfPatt;
                markerInfo[k].dir = markerInfo[k].dirPatt;
            }
        } else {
            for (int j = 0; j < markerNum; j++) {
                if (patt_id == markerInfo[j].idMatrix) {
                    if (k == -1) {
                        if (markerInfo[j].cfMatrix >= m_cfMin) k = j; // Count as a match if match confidence exceeds cfMin.
                    } else if (markerInfo[j].cfMatrix > markerInfo[k].cfMatrix) k = j; // Or if it exceeds match confidence of a different already matched trapezoid (i.e. assume only one instance of each marker).
                }
            }
            if (k != -1) {
                markerInfo[k].id = markerInfo[k].idMatrix;
                markerInfo[k].cf = markerInfo[k].cfMatrix;
                markerInfo[k].dir = markerInfo[k].dirMatrix;
            }
        }
        
		// Consider marker visible if a match was found.
        if (k != -1) {
            visible = true;
            m_cf = markerInfo[k].cf;
            // If the model is visible, update its transformation matrix
			if (visiblePrev && useContPoseEstimation) {
				// If the marker was visible last time, use "cont" version of arGetTransMatSquare
				arGetTransMatSquareCont(ar3DHandle, &(markerInfo[k]), trans, m_width, trans);
			} else {
				// If the marker wasn't visible last time, use normal version of arGetTransMatSquare
				arGetTransMatSquare(ar3DHandle, &(markerInfo[k]), m_width, trans);
			}
        } else {
            visible = false;
            m_cf = 0.0f;
        }

	} else {
        visible = false;
        m_cf = 0.0f;
    }

	return (ARMarker::update()); // Parent class will finish update.
}
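
For orientation, here is a minimal sketch of how a per-frame update could drive updateWithDetectedMarkers(), mirroring the loops in Examples #1 and #2. The updateMarkers() helper, the markers vector, and the frame argument are illustrative assumptions and not part of the original samples; arHandle and ar3DHandle are assumed to be already initialized elsewhere.

#include <vector>
#include <AR/ar.h>  // arDetectMarker(), arGetMarker(), arGetMarkerNum()
// The ARMarkerSquare class shown above is assumed to be declared in a reachable header.

// Hypothetical per-frame driver, for illustration only.
static void updateMarkers(ARHandle *arHandle, AR3DHandle *ar3DHandle,
                          ARUint8 *frame, std::vector<ARMarkerSquare *> &markers)
{
    // Run square-marker detection on the current frame.
    if (arDetectMarker(arHandle, frame) < 0) return;

    // Fetch the detection results once per frame...
    ARMarkerInfo *markerInfo = arGetMarker(arHandle);
    int markerNum = arGetMarkerNum(arHandle);

    // ...then let each marker object pick its own best match and update its pose.
    for (ARMarkerSquare *m : markers) {
        m->updateWithDetectedMarkers(markerInfo, markerNum, ar3DHandle);
    }
}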