Example #1
// References globals: markersNFTCount, kpmHandle
// Modifies globals: threadHandle, surfaceSet[], surfaceSetCount, markersNFT[]
static int loadNFTData(void)
{
    int i;
	KpmRefDataSet *refDataSet;
    
    // If data was already loaded, stop KPM tracking thread and unload previously loaded data.
    if (threadHandle) {
        ARLOGi("Reloading NFT data.\n");
        unloadNFTData();
    } else {
        ARLOGi("Loading NFT data.\n");
    }
    
    refDataSet = NULL;
    
    for (i = 0; i < markersNFTCount; i++) {
        // Load KPM data.
        KpmRefDataSet  *refDataSet2;
        ARLOGi("Reading %s.fset3\n", markersNFT[i].datasetPathname);
        if (kpmLoadRefDataSet(markersNFT[i].datasetPathname, "fset3", &refDataSet2) < 0 ) {
            ARLOGe("Error reading KPM data from %s.fset3\n", markersNFT[i].datasetPathname);
            markersNFT[i].pageNo = -1;
            continue;
        }
        markersNFT[i].pageNo = surfaceSetCount;
        ARLOGi("  Assigned page no. %d.\n", surfaceSetCount);
        if (kpmChangePageNoOfRefDataSet(refDataSet2, KpmChangePageNoAllPages, surfaceSetCount) < 0) {
            ARLOGe("Error: kpmChangePageNoOfRefDataSet\n");
            exit(-1);
        }
        if (kpmMergeRefDataSet(&refDataSet, &refDataSet2) < 0) {
            ARLOGe("Error: kpmMergeRefDataSet\n");
            exit(-1);
        }
        ARLOGi("  Done.\n");
        
        // Load AR2 data.
        ARLOGi("Reading %s.fset\n", markersNFT[i].datasetPathname);
        
        if ((surfaceSet[surfaceSetCount] = ar2ReadSurfaceSet(markersNFT[i].datasetPathname, "fset", NULL)) == NULL ) {
            ARLOGe("Error reading data from %s.fset\n", markersNFT[i].datasetPathname);
        }
        ARLOGi("  Done.\n");
        
        surfaceSetCount++;
        if (surfaceSetCount == PAGES_MAX) break;
    }
    if (kpmSetRefDataSet(kpmHandle, refDataSet) < 0) {
        ARLOGe("Error: kpmSetRefDataSet\n");
        exit(-1);
    }
    kpmDeleteRefDataSet(&refDataSet);
    
    // Start the KPM tracking thread.
    threadHandle = trackingInitInit(kpmHandle);
    if (!threadHandle) exit(-1);

	ARLOGi("Loading of NFT data complete.\n");
    return (TRUE);
}
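// A minimal sketch of driving the loader above. The dataset basename is a
// placeholder, and the sketch assumes the globals used by loadNFTData()
// (markersNFT, markersNFTCount, kpmHandle) have already been set up, e.g.
// by initNFT() and a marker-configuration parser.
static int loadSingleDatasetExample(void)
{
    markersNFTCount = 1;
    markersNFT[0].datasetPathname = strdup("DataNFT/example"); // Hypothetical basename; loads example.fset3 and example.fset.
    return loadNFTData();
}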
virtual bool onFrameBuffer(void *buffer, int bufferSize)
{
    int  frameIndex;
    bool ret;

    if (!isConnected() || !buffer || bufferSize <= 0)
    {
        ARLOGe("Error: onFrameBuffer() called while not connected, or called without frame.\n");
        return false;
    }

    ret = true;
    m_framesReceived++;

    pthread_mutex_lock(&m_nc->frameLock);
    if (m_nc->frameBuffers[0] && m_nc->frameBuffers[1])       // Only do copy if capture has been started.
    {
        if (bufferSize != m_nc->frameBufferLength)
        {
            ARLOGe("Error: onFrameBuffer frame size is %d but receiver expected %d.\n", bufferSize, m_nc->frameBufferLength);
            ret = false;
        }
        else
        {
            // Find a buffer to write to. Any buffer not locked by client is a candidate.
            if (m_nc->frameBuffersStatus[0] != LOCKED)
                frameIndex = 0;
            else if (m_nc->frameBuffersStatus[1] != LOCKED)
                frameIndex = 1;
            else
                frameIndex = -1;

            if (frameIndex == -1)
            {
                ARLOGe("Error: onFrameBuffer receiver was all full up.\n");
                ret = false;
            }
            else
            {
                ARLOGd("FRAME => buffer %d %p\n", frameIndex, m_nc->frameBuffers[frameIndex]);
                memcpy(m_nc->frameBuffers[frameIndex], buffer, bufferSize);
                m_nc->frameBuffersStatus[frameIndex] = READY;
                if (m_nc->frameReadyCallback)
                    pthread_cond_signal(&m_nc->frameReadyNotifierThreadCondGo);
            }
        }
    }
    else
    {
        ARLOGd("FRAME =X\n");
    }

    pthread_mutex_unlock(&m_nc->frameLock);

    return ret;
}
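// Consumer-side counterpart to onFrameBuffer() above (a sketch, not the original
// API): claim a READY buffer under the same lock, process it outside the lock,
// then hand it back. Assumes the EMPTY/READY/LOCKED status values used above.
static bool consumeFrame(VIDEO_ANDROID_NATIVE_CAPTURE *nc, void (*process)(unsigned char *frame, int length))
{
    int i;
    unsigned char *frame = NULL;

    pthread_mutex_lock(&nc->frameLock);
    for (i = 0; i < 2; i++) {
        if (nc->frameBuffersStatus[i] == READY) {
            nc->frameBuffersStatus[i] = LOCKED; // Keep the producer out of this buffer.
            frame = nc->frameBuffers[i];
            break;
        }
    }
    pthread_mutex_unlock(&nc->frameLock);
    if (!frame) return false;

    process(frame, nc->frameBufferLength);  // Do the work outside the lock.

    pthread_mutex_lock(&nc->frameLock);
    nc->frameBuffersStatus[i] = EMPTY;      // Hand the buffer back to the producer.
    pthread_mutex_unlock(&nc->frameLock);
    return true;
}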
Example #3
static void errorWMC(void *userdata)
{
	if (!userdata) {
		ARLOGe("Windows.Media.Capture error but no userdata suppplied.\n");
		return;
	}
	AR2VideoParamWinMCT *vid = (AR2VideoParamWinMCT *)userdata;
	ARLOGe("Windows.Media.Capture error.\n");
	stopWMC(vid);
}
Example #4
// Report state of ARToolKit tracker.
static void debugReportMode(ARGViewportHandle *vp)
{
	if (vp->dispMethod == AR_GL_DISP_METHOD_GL_DRAW_PIXELS) {
		ARLOGe("dispMode (d)   : GL_DRAW_PIXELS\n");
	} else if (vp->dispMethod == AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME) {
		ARLOGe("dispMode (d)   : TEXTURE MAPPING (FULL RESOLUTION)\n");
	} else {
		ARLOGe("dispMode (d)   : TEXTURE MAPPING (HALF RESOLUTION)\n");
	}
}
Example #5
static void *trackingInitMain( THREAD_HANDLE_T *threadHandle )
{
    TrackingInitHandle     *trackingInitHandle;
    KpmHandle              *kpmHandle;
    KpmResult              *kpmResult = NULL;
    int                     kpmResultNum;
    ARUint8                *imagePtr;
    float                  err;
    int                    i, j, k;

    if (!threadHandle) {
        ARLOGe("Error starting tracking thread: empty THREAD_HANDLE_T.\n");
        return (NULL);
    }
    trackingInitHandle = (TrackingInitHandle *)threadGetArg(threadHandle);
    if (!trackingInitHandle) {
        ARLOGe("Error starting tracking thread: empty trackingInitHandle.\n");
        return (NULL);
    }
    kpmHandle          = trackingInitHandle->kpmHandle;
    imagePtr           = trackingInitHandle->imagePtr;
    if (!kpmHandle || !imagePtr) {
        ARLOGe("Error starting tracking thread: empty kpmHandle/imagePtr.\n");
        return (NULL);
    }
    ARLOGi("Start tracking thread.\n");
    
    kpmGetResult( kpmHandle, &kpmResult, &kpmResultNum );

    for(;;) {
        if( threadStartWait(threadHandle) < 0 ) break;

        kpmMatching(kpmHandle, imagePtr);
        trackingInitHandle->flag = 0;
        for( i = 0; i < kpmResultNum; i++ ) {
            if( kpmResult[i].camPoseF != 0 ) continue;
            ARLOGd("kpmGetPose OK.\n");
            if( trackingInitHandle->flag == 0 || err > kpmResult[i].error ) { // Take the first or best result.
                trackingInitHandle->flag = 1;
                trackingInitHandle->page = kpmResult[i].pageNo;
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) trackingInitHandle->trans[j][k] = kpmResult[i].camPose[j][k];
                err = kpmResult[i].error;
            }
        }

        threadEndSignal(threadHandle);
    }

    ARLOGi("End tracking thread.\n");
    return (NULL);
}
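// Caller-side poll matching the threadStartSignal()/threadEndSignal() handshake
// above — a sketch, assuming TrackingInitHandle exposes the flag/page/trans
// fields the worker fills in, and using the thread_sub API's threadGetStatus()
// and threadEndWait().
int trackingInitGetResult( THREAD_HANDLE_T *threadHandle, float trans[3][4], int *page )
{
    TrackingInitHandle *trackingInitHandle;
    int i, j;

    if (!threadHandle || !trans || !page) return (-1);
    if (threadGetStatus(threadHandle) == 0) return (0); // Matching still in progress.
    threadEndWait(threadHandle);                        // Acknowledge the worker's end signal.

    trackingInitHandle = (TrackingInitHandle *)threadGetArg(threadHandle);
    if (!trackingInitHandle) return (-1);
    if (trackingInitHandle->flag) {
        for (j = 0; j < 3; j++) for (i = 0; i < 4; i++) trans[j][i] = trackingInitHandle->trans[j][i];
        *page = trackingInitHandle->page;
        return (1);  // Pose available.
    }
    return (-1);     // Finished, but nothing recognised.
}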
Example #6
int ar2WriteImageSet(char *filename, AR2ImageSetT *imageSet)
{
    FILE          *fp;
    AR2JpegImageT jpegImage;
    int           i;
    size_t        len;
    const char    ext[] = ".iset";
    char          *buf;

    len = strlen(filename) + strlen(ext) + 1; // +1 for nul terminator.
    arMalloc(buf, char, len);
    sprintf(buf, "%s%s", filename, ext);
    if ((fp = fopen(buf, "wb")) == NULL)
    {
        ARLOGe("Error: unable to open file '%s' for writing.\n", buf);
        free(buf);
        return (-1);
    }

    free(buf);

    if (fwrite(&(imageSet->num), sizeof(imageSet->num), 1, fp) != 1)
        goto bailBadWrite;

    jpegImage.xsize = imageSet->scale[0]->xsize;
    jpegImage.ysize = imageSet->scale[0]->ysize;
    jpegImage.dpi   = imageSet->scale[0]->dpi;
    jpegImage.nc    = 1;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    jpegImage.image = imageSet->scale[0]->imgBWBlur[0];
#else
    jpegImage.image = imageSet->scale[0]->imgBW;
#endif

    if (ar2WriteJpegImage2(fp, &jpegImage, AR2_DEFAULT_JPEG_IMAGE_QUALITY) < 0)
        goto bailBadWrite;

    for (i = 1; i < imageSet->num; i++)
    {
        if (fwrite(&(imageSet->scale[i]->dpi), sizeof(imageSet->scale[i]->dpi), 1, fp) != 1)
            goto bailBadWrite;
    }

    fclose(fp);
    return 0;

bailBadWrite:
    ARLOGe("Error saving image set: error writing data.\n");
    fclose(fp);
    return (-1);
}
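// Hypothetical round-trip using the writer above: read an existing image set
// and write it back out under a new basename. "example" is a placeholder, and
// ar2ReadImageSet() is assumed to append the ".iset" extension itself, as
// ar2WriteImageSet() does.
static int copyImageSetExample(void)
{
    char basename[] = "example";
    char outname[]  = "example-copy";
    AR2ImageSetT *imageSet = ar2ReadImageSet(basename);
    if (!imageSet) return (-1);
    if (ar2WriteImageSet(outname, imageSet) < 0) {
        ARLOGe("Unable to write image set '%s.iset'.\n", outname);
        ar2FreeImageSet(&imageSet);
        return (-1);
    }
    ar2FreeImageSet(&imageSet);
    return (0);
}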
Example #7
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p)
{	
    ARParam			cparam;
	int				xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
    	ARLOGe("setupCamera(): Unable to open connection to camera.\n");
    	return (FALSE);
	}
	
    // Find the size of the window.
    if (arVideoGetSize(&xsize, &ysize) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        arVideoClose();
        return (FALSE);
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);
	
	// Get the format in which the camera is returning pixels.
	pixFormat = arVideoGetPixelFormat();
	if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
    	ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        arVideoClose();
		return (FALSE);
	}
	
	// Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &cparam) < 0) {
		ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        arVideoClose();
        return (FALSE);
    }
    if (cparam.xsize != xsize || cparam.ysize != ysize) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    if ((*cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        arVideoClose();
        return (FALSE);
    }
	
	return (TRUE);
}
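// Hypothetical call site for setupCamera() above; the camera parameter path
// and the empty vconf string are placeholders for the application's own
// configuration.
static ARParamLT *gCparamLT = NULL;

static int exampleInitVideo(void)
{
    if (!setupCamera("Data/camera_para.dat", "", &gCparamLT)) {
        ARLOGe("exampleInitVideo(): Unable to set up the camera.\n");
        return (FALSE);
    }
    return (TRUE);
}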
bool videoAndroidNativeCaptureStart(VIDEO_ANDROID_NATIVE_CAPTURE *nc, AR_VIDEO_FRAME_READY_CALLBACK callback, void *userdata)
{
    int  err;
    bool ret = true;

    ARLOGd("videoAndroidNativeCaptureStart().\n");

    if (!nc)
        return false;

    // Don't start if already started.
    if (nc->frameBuffers[0] || nc->frameBuffers[1])
    {
        ARLOGe("videoAndroidNativeCaptureStart called again.\n");
        return false;
    }

    // Create the frame buffers.
    pthread_mutex_lock(&nc->frameLock);
    nc->frameBufferLength     = (nc->frameWidth * nc->frameHeight * 3) / 2; // Assume NV21/NV12 format.
    nc->frameBuffersStatus[0] = nc->frameBuffersStatus[1] = EMPTY;
    nc->frameBuffers[0]       = (unsigned char*)malloc(nc->frameBufferLength);
    nc->frameBuffers[1]       = (unsigned char*)malloc(nc->frameBufferLength);
    if (!nc->frameBuffers[0] || !nc->frameBuffers[1])
    {
        ARLOGe("Out of memory!\n");
        free(nc->frameBuffers[0]); nc->frameBuffers[0] = NULL; // Release any partial allocation so a
        free(nc->frameBuffers[1]); nc->frameBuffers[1] = NULL; // later start isn't mistaken for "already started".
        ret = false;
    }
    else
    {
        nc->frameReadyCallback = callback;
        if (callback)
        {
            // Start the frameReadyNotifierThread.
            nc->frameReadyCallbackUserdata         = userdata;
            nc->frameReadyNotifierThreadShouldQuit = false;
            if ((err = pthread_create(&(nc->frameReadyNotifierThread), NULL, frameReadyNotifier, (void*)nc)) != 0)
            {
                ARLOGe("videoAndroidNativeCaptureOpen(): Error %d detaching thread.\n", err);
                ret = false;
            }
        }
    }

    pthread_mutex_unlock(&nc->frameLock);

    ARLOGd("/videoAndroidNativeCaptureStart nc->frameBufferLength=%d.\n", nc->frameBufferLength);

    return ret;
}
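// A plausible sketch of the matching stop path (not the actual libARvideo
// implementation): wake and join the notifier thread, then release the frame
// buffers so a later videoAndroidNativeCaptureStart() sees a clean state.
bool videoAndroidNativeCaptureStopSketch(VIDEO_ANDROID_NATIVE_CAPTURE *nc)
{
    if (!nc) return false;

    pthread_mutex_lock(&nc->frameLock);
    if (nc->frameReadyCallback)
    {
        nc->frameReadyNotifierThreadShouldQuit = true;
        pthread_cond_signal(&nc->frameReadyNotifierThreadCondGo); // Wake the notifier so it can exit.
        pthread_mutex_unlock(&nc->frameLock);
        pthread_join(nc->frameReadyNotifierThread, NULL);
        pthread_mutex_lock(&nc->frameLock);
        nc->frameReadyCallback = NULL;
    }
    free(nc->frameBuffers[0]); nc->frameBuffers[0] = NULL;
    free(nc->frameBuffers[1]); nc->frameBuffers[1] = NULL;
    pthread_mutex_unlock(&nc->frameLock);

    return true;
}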
Example #9
bool ARTApp::init(const char *cparamName, const char *pattName, const char *objModelFile, float pattWidth, float modelScale)
{
	if (arHandle) // Already initialised.
		return false;

	if (!setupCamera(cparamName, "", &cParam, &arHandle, &ar3DHandle)) {
		return false;
	}

	if (!setupMarker(pattName, &pattID, arHandle, &pattHandle)) {
		return false;
	}

	{
		objModel = glmReadOBJ((char*)objModelFile);
		if (!objModel)
		{
			ARLOGe("Unable to load obj model file.\n");
			return false;
		}
		glmUnitize(objModel);
		glmScale(objModel, pattWidth*modelScale);
	}
	this->pattWidth = pattWidth;

	return true;
}
Example #10
// N.B. This function is duplicated in libARvideo, so that libARvideo doesn't need to
// link to libAR. Therefore, if changes are made here they should be duplicated there.
const char *arUtilGetPixelFormatName(const AR_PIXEL_FORMAT arPixelFormat)
{
    const char *names[] = {
        "AR_PIXEL_FORMAT_RGB",
        "AR_PIXEL_FORMAT_BGR",
        "AR_PIXEL_FORMAT_RGBA",
        "AR_PIXEL_FORMAT_BGRA",
        "AR_PIXEL_FORMAT_ABGR",
        "AR_PIXEL_FORMAT_MONO",
        "AR_PIXEL_FORMAT_ARGB",
        "AR_PIXEL_FORMAT_2vuy",
        "AR_PIXEL_FORMAT_yuvs",
        "AR_PIXEL_FORMAT_RGB_565",
        "AR_PIXEL_FORMAT_RGBA_5551",
        "AR_PIXEL_FORMAT_RGBA_4444",
        "AR_PIXEL_FORMAT_420v",
        "AR_PIXEL_FORMAT_420f",
        "AR_PIXEL_FORMAT_NV21"
    };
    if ((int)arPixelFormat < 0 || (int)arPixelFormat > AR_PIXEL_FORMAT_MAX) {
        ARLOGe("arUtilGetPixelFormatName: Error, unrecognised pixel format (%d).\n", (int)arPixelFormat);
        return (NULL);
    }
    return (names[(int)arPixelFormat]);
}
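// Example use: log the current camera pixel format by name. Assumes a video
// stream has already been opened (e.g. via arVideoOpen()).
static void examplePrintPixelFormat(void)
{
    AR_PIXEL_FORMAT pf = arVideoGetPixelFormat();
    const char *name = arUtilGetPixelFormatName(pf);
    ARLOGi("Camera pixel format: %s.\n", name ? name : "(unknown)");
}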
int ar2VideoGetAbsMaxValue1394(AR2VideoParam1394T *vid, int paramName, ARdouble *value)
{
    dc1394feature_t feature;
    float           min, max;

    switch (paramName)
    {
    case AR_VIDEO_1394_GAMMA:
        feature = DC1394_FEATURE_GAMMA;
        break;

    default:
        return -1;
    }

    if (dc1394_feature_get_absolute_boundaries(vid->camera, feature, &min, &max) != DC1394_SUCCESS)
    {
        ARLOGe("Unable to get absolute max value.\n");
        return -1;
    }

    *value = (ARdouble)max;

    return 0;
}
Example #12
static bool startWMC(AR2VideoParamWinMCT *vid, const int width, const int height)
{
	if (!vid || !vid->wmc) return false;

	if (vid->wmc->Capturing()) {
		ARLOGe("Windows.Media.Capture already started.\n");
		return false;
	}

	if (!vid->wmc->StartCapture(width, height, getWMCVideoMediaSubTypeForARPixelFormat(vid->format), vid->devNum - 1, vid->preferredDeviceLocation, errorWMC, (void *)vid)) {
		ARLOGe("Error starting capture.\n");
		return false;
	}

	return true;
}
Example #13
int main(int argc, char *argv[])
{
    ARParam cparam;
    // ARParamLT          *cparamLT;
    float trans[3][4];
    float pos[2];
    float dpi[2];
    // char                name[1024], ext[1024];
    int   i, j;
    float z;

    init(argc, argv);

    if (!cpara)
        cpara = cparaDefault;

    // ar2UtilDivideExt( cpara, name, ext );

    // Load the camera parameters, resize for the window and init.
    // if( arParamLoad(name, ext, 1, &cparam) < 0 )
    if (arParamLoad(cpara, 1, &cparam) < 0)
    {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cpara);
        exit(-1);
    }

    if (xsize != -1 && ysize != -1 && (cparam.xsize != xsize || cparam.ysize != ysize))
    {
        ARLOG("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }

    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);

    // if ((cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
    //    ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
    //    exit(-1);
    // }

    pos[0] = 0.0;
    pos[1] = 0.0;

    for (j = 0; j < 3; j++)
        for (i = 0; i < 4; i++)
            trans[j][i] = ((i == j) ? 1.0 : 0.0);

    for (i = 10; i <= 1000; i *= 10)
    {
        for (j = 1; j < 10; j++)
        {
            z           = j * i;
            trans[2][3] = z;
            ar2GetResolution2(&cparam, trans, pos, dpi);
            ARLOG("Distance: %f [mm] --> Resolution = %10.5f, %10.5f [DPI]\n", z, dpi[0], dpi[1]);
        }
    }

    return (0);
}
VIDEO_ANDROID_NATIVE_CAPTURE* videoAndroidNativeCaptureOpen(int cameraIndex)
{
    CameraActivity::ErrorCode ca_err;

    ARLOGd("videoAndroidNativeCaptureOpen(%d).\n", cameraIndex);

    VIDEO_ANDROID_NATIVE_CAPTURE *nc = (VIDEO_ANDROID_NATIVE_CAPTURE*)calloc(1, sizeof(VIDEO_ANDROID_NATIVE_CAPTURE));
    if (!nc)
    {
        ARLOGe("Out of memory!\n");
        return (NULL);
    }

    nc->ca = new ARToolKitVideoAndroidCameraActivity(nc);
    if (!nc->ca)
    {
        ARLOGe("Unable to create native connection to camera.\n");
        goto bail;
    }

    // Lock manages contention between user thread, CameraActivity::onFrameBuffer thread (might be same as user thread), and frameReadyNotifierThread.
    pthread_mutex_init(&nc->frameLock, NULL);
    pthread_cond_init(&nc->frameReadyNotifierThreadCondGo, NULL);

    ca_err = nc->ca->connect(cameraIndex);
    if (ca_err != CameraActivity::NO_ERROR)
    {
        ARLOGe("Error %d opening native connection to camera.\n", ca_err);
        goto bail1;
    }

    nc->frameWidth  = (int)nc->ca->getProperty(ANDROID_CAMERA_PROPERTY_FRAMEWIDTH);
    nc->frameHeight = (int)nc->ca->getProperty(ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT);

    ARLOGd("/videoAndroidNativeCaptureOpen %dx%d.\n", nc->frameWidth, nc->frameHeight);

    return (nc);

bail1:
    delete(nc->ca);
    pthread_cond_destroy(&nc->frameReadyNotifierThreadCondGo);
    pthread_mutex_destroy(&nc->frameLock);
bail:
    free(nc);
    return (NULL);
}
Example #15
#ifdef ANDROID
int arUtilChangeToResourcesDirectory(AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR behavior, const char *path, jobject instanceOfAndroidContext)
#else
int arUtilChangeToResourcesDirectory(AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR behavior, const char *path)
#endif
{
    char *wpath;
    AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR behaviorW;
    
    if (behavior == AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_BEST) {
#if defined(__APPLE__)
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_BUNDLE_RESOURCES_DIR;
#elif defined(ANDROID)
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_APP_CACHE_DIR;
#elif defined(_WIN32) || defined(__linux)
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_EXECUTABLE_DIR;
#else
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_CWD;
#endif
    } else {
        behaviorW = behavior;
    }
    
    if (behaviorW != AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_SUPPLIED_PATH) {
#ifdef ANDROID
        wpath = arUtilGetResourcesDirectoryPath(behavior, instanceOfAndroidContext);
#else
        wpath = arUtilGetResourcesDirectoryPath(behavior);
#endif
        if (wpath) {
            if (chdir(wpath) != 0) {
                ARLOGe("Error: Unable to change working directory to '%s'.\n", wpath);
                ARLOGperror(NULL);
                free (wpath);
                return (-1);
            }
            free(wpath);
        }
    }
    if (path) {
        if (chdir(path) != 0) {
            ARLOGe("Error: Unable to change working directory to '%s'.\n", path);
            ARLOGperror(NULL);
            return (-1);
        }
    }
    
    return (0);
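// Typical start-of-main() usage (non-Android form shown; the Android build
// takes the extra context argument per the #ifdef above). Passing NULL for
// path accepts the platform's default resources directory.
static void exampleChdirToResources(void)
{
    if (arUtilChangeToResourcesDirectory(AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_BEST, NULL) < 0) {
        ARLOGe("Unable to change to the resources directory.\n");
        exit(-1);
    }
}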
}
Example #16
void arglCameraFrustumRH(const ARParam *cparam, const ARdouble focalmin, const ARdouble focalmax, ARdouble m_projection[16])
{
	ARdouble    icpara[3][4];
    ARdouble    trans[3][4];
    ARdouble    p[3][3], q[4][4];
	int         width, height;
    int         i, j;
	
    width  = cparam->xsize;
    height = cparam->ysize;
	
    if (arParamDecompMat(cparam->mat, icpara, trans) < 0) {
        ARLOGe("arglCameraFrustum(): arParamDecompMat() indicated parameter error.\n");
        return;
    }
	for (i = 0; i < 4; i++) {
        icpara[1][i] = (height - 1)*(icpara[2][i]) - icpara[1][i];
    }
	
    for(i = 0; i < 3; i++) {
        for(j = 0; j < 3; j++) {
            p[i][j] = icpara[i][j] / icpara[2][2];
        }
    }
    q[0][0] = (2.0 * p[0][0] / (width - 1));
    q[0][1] = (2.0 * p[0][1] / (width - 1));
    q[0][2] = -((2.0 * p[0][2] / (width - 1))  - 1.0);
    q[0][3] = 0.0;
	
    q[1][0] = 0.0;
    q[1][1] = -(2.0 * p[1][1] / (height - 1));
    q[1][2] = -((2.0 * p[1][2] / (height - 1)) - 1.0);
    q[1][3] = 0.0;
	
    q[2][0] = 0.0;
    q[2][1] = 0.0;
    q[2][2] = (focalmax + focalmin)/(focalmin - focalmax);
    q[2][3] = 2.0 * focalmax * focalmin / (focalmin - focalmax);
	
    q[3][0] = 0.0;
    q[3][1] = 0.0;
    q[3][2] = -1.0;
    q[3][3] = 0.0;
	
    for (i = 0; i < 4; i++) { // Row.
		// First 3 columns of the current row.
        for (j = 0; j < 3; j++) { // Column.
            m_projection[i + j*4] = q[i][0] * trans[0][j] +
									q[i][1] * trans[1][j] +
									q[i][2] * trans[2][j];
        }
		// Fourth column of the current row.
        m_projection[i + 3*4] = q[i][0] * trans[0][3] +
								q[i][1] * trans[1][3] +
								q[i][2] * trans[2][3] +
								q[i][3];
    }	
}
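// Typical fixed-function OpenGL use of the frustum above (a sketch: the
// EXAMPLE_VIEW_DISTANCE_* clip planes are placeholders, and cparam is assumed
// to hold the loaded camera parameters).
#define EXAMPLE_VIEW_DISTANCE_MIN 10.0
#define EXAMPLE_VIEW_DISTANCE_MAX 10000.0

static void exampleLoadProjection(const ARParam *cparam)
{
    ARdouble p[16];
    arglCameraFrustumRH(cparam, EXAMPLE_VIEW_DISTANCE_MIN, EXAMPLE_VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);  // ARdouble is float in this build configuration.
#else
    glLoadMatrixd(p);
#endif
}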
Example #17
static int setupMovie(const char *path)
{
    char            *movieVconf;
    int             len;
    int             xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Construct the vconf string.
    arMalloc(movieVconf, char, 2048); // 2Kb for URL.
    sprintf(movieVconf, "-device=QUICKTIME -movie=\""); // Make sure we're using the QuickTime video input.
    len = (int)strlen(movieVconf);
    strncat(movieVconf + len, path, 2048 - len - 1);
    len = (int)strlen(movieVconf);
    strncat(movieVconf + len, "\" -loop -pause", 2048 - len - 1); // Start the movie paused. It will be unpaused in mainLoop().

    // Open the movie.
    gMovieVideo = ar2VideoOpen(movieVconf);
    free(movieVconf);
    if (!gMovieVideo)
    {
        ARLOGe("setupMovie(): Unable to open movie.\n");
        return (FALSE);
    }

    // Find the size of the movie.
    if (ar2VideoGetSize(gMovieVideo, &xsize, &ysize) < 0)
    {
        ARLOGe("setupMovie(): Unable to determine movie frame size.\n");
        ar2VideoClose(gMovieVideo);
        gMovieVideo = NULL;
        return (FALSE);
    }

    // Get the pixel format of the movie.
    pixFormat = ar2VideoGetPixelFormat(gMovieVideo);
    if (pixFormat == AR_PIXEL_FORMAT_INVALID)
    {
        ARLOGe("setupMovie(): Movie is using unsupported pixel format.\n");
        ar2VideoClose(gMovieVideo);
        gMovieVideo = NULL;
        return (FALSE);
    }

    // Set up an ARParam object for the movie input.
    arParamClear(&gMovieCparam, xsize, ysize, AR_DIST_FUNCTION_VERSION_DEFAULT);

    // For convenience, we will use gsub_lite to draw the actual pixels. Set it up now.
    gMovieArglSettings = arglSetupForCurrentContext(&gMovieCparam, pixFormat);
    arglDistortionCompensationSet(gMovieArglSettings, 0);

    return (TRUE);
}
Example #18
static int setupMarker(const char *patt_name, int *patt_id, ARHandle *arhandle, ARPattHandle **pattHandle_p)
{	
    if ((*pattHandle_p = arPattCreateHandle()) == NULL) {
        ARLOGe("setupMarker(): Error: arPattCreateHandle.\n");
        return (FALSE);
    }
    
	// Loading only 1 pattern in this example.
	if ((*patt_id = arPattLoad(*pattHandle_p, patt_name)) < 0) {
		ARLOGe("setupMarker(): Error loading pattern file %s.\n", patt_name);
		arPattDeleteHandle(*pattHandle_p);
		return (FALSE);
	}
    
    arPattAttach(arhandle, *pattHandle_p);
	
	return (TRUE);
}
Example #19
static void mainLoop(void)
{
    int        i;
    static int imageNumber = 0;
    static int ms_prev;
    int        ms;
    float      s_elapsed;
    ARUint8    *image;

    // Find out how long since mainLoop() last ran.
    ms        = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    if (s_elapsed < 0.01f)
        return;                        // Don't update more often than 100 Hz.

    ms_prev = ms;

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL)
    {
        gARTImage = image;              // Save the fetched image.

        if (gARTImageSavePlease)
        {
            char imageNumberText[64]; // Room for "image-%04d.jpg" even if imageNumber exceeds four digits.
            snprintf(imageNumberText, sizeof(imageNumberText), "image-%04d.jpg", imageNumber++);
            if (arVideoSaveImageJPEG(gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, gARTImage, imageNumberText, 75, 0) < 0)
            {
                ARLOGe("Error saving video image.\n");
            }

            gARTImageSavePlease = FALSE;
        }

        gCallCountMarkerDetect++;         // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0)
        {
            exit(-1);
        }

        // If multimarker config files were specified, evaluate detected patterns against them now.
        for (i = 0; i < gMultiConfigCount; i++)
        {
            if (gRobustFlag)
                gMultiErrs[i] = arGetTransMatMultiSquareRobust(gAR3DHandle, arGetMarker(gARHandle), arGetMarkerNum(gARHandle), gMultiConfigs[i]);
            else
                gMultiErrs[i] = arGetTransMatMultiSquare(gAR3DHandle, arGetMarker(gARHandle), arGetMarkerNum(gARHandle), gMultiConfigs[i]);

            // if (gMultiConfigs[i]->prevF != 0) ARLOGe("Found multimarker set %d, err=%0.3f\n", i, gMultiErrs[i]);
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}
Example #20
static int init( int argc, char *argv[] )
{
    ARGViewport		viewport;
    char		*filename = NULL;
    int			xmax, ymax;
    float		xzoom, yzoom;
    float		zoom;
    int			i;

    for( i = 1; i < argc; i++ ) {
        if( filename == NULL ) filename = argv[i];
        else usage(argv[0] );
    }
    if (!filename || !filename[0]) usage(argv[0]);

    ARLOG("Read ImageSet.\n");
    ar2UtilRemoveExt( filename );
    imageSet = ar2ReadImageSet( filename );
    if( imageSet == NULL ) {
        ARLOGe("file open error: %s.iset\n", filename );
        exit(0);
    }
    ARLOG("  end.\n");

    arMalloc(vp, ARGViewportHandle *, imageSet->num);

    xmax = ymax = 0;
    for( i = 0; i < imageSet->num; i++ ) {
        if( imageSet->scale[i]->xsize > xmax ) xmax = imageSet->scale[i]->xsize;
        if( imageSet->scale[i]->ysize > ymax ) ymax = imageSet->scale[i]->ysize;
    }
    xzoom = yzoom = 1.0;
    while( xmax > winXsize*xzoom ) xzoom += 1.0;
    while( ymax > winYsize*yzoom ) yzoom += 1.0;
    if( xzoom > yzoom ) zoom = 1.0/xzoom;
    else                zoom = 1.0/yzoom;
    winXsize = xmax * zoom;
    winYsize = ymax * zoom;
    ARLOG("Size = (%d,%d) Zoom = %f\n", xmax, ymax, zoom);
    argCreateWindow( winXsize, winYsize );

    for( i = 0; i < imageSet->num; i++ ) {
        viewport.sx = viewport.sy = 0;
        viewport.xsize = imageSet->scale[i]->xsize * zoom;
        viewport.ysize = imageSet->scale[i]->ysize * zoom;
        vp[i] = argCreateViewport( &viewport );
        argViewportSetImageSize( vp[i], imageSet->scale[i]->xsize, imageSet->scale[i]->ysize );
        argViewportSetDispMethod( vp[i], AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME );
        //argViewportSetDispMethod( vp[i], AR_GL_DISP_METHOD_GL_DRAW_PIXELS );
        argViewportSetDispMode( vp[i], AR_GL_DISP_MODE_FIT_TO_VIEWPORT );
        argViewportSetDistortionMode( vp[i], AR_GL_DISTORTION_COMPENSATE_DISABLE );
    }

    reportCurrentDPI();
    
    return 0;
}
Example #21
int trackingInitStart( THREAD_HANDLE_T *threadHandle, ARUint8 *imagePtr )
{
    TrackingInitHandle     *trackingInitHandle;

    if (!threadHandle || !imagePtr) {
        ARLOGe("trackingInitStart(): Error: NULL threadHandle or imagePtr.\n");
        return (-1);
    }
    
    trackingInitHandle = (TrackingInitHandle *)threadGetArg(threadHandle);
    if (!trackingInitHandle) {
        ARLOGe("trackingInitStart(): Error: NULL trackingInitHandle.\n");
        return (-1);
    }
    memcpy( trackingInitHandle->imagePtr, imagePtr, trackingInitHandle->imageSize );
    threadStartSignal( threadHandle );

    return 0;
}
Example #22
int arSetLabelingThreshMode(ARHandle *handle, const AR_LABELING_THRESH_MODE mode)
{
    AR_LABELING_THRESH_MODE mode1;

	if (!handle) return (-1);
    if (handle->arLabelingThreshMode != mode) {
        if (handle->arImageProcInfo) {
            arImageProcFinal(handle->arImageProcInfo);
            handle->arImageProcInfo = NULL;
        }

        mode1 = mode;
        switch (mode) {
            case AR_LABELING_THRESH_MODE_AUTO_MEDIAN:
            case AR_LABELING_THRESH_MODE_AUTO_OTSU:
#if !AR_DISABLE_THRESH_MODE_AUTO_ADAPTIVE
            case AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE:
#endif
                handle->arImageProcInfo = arImageProcInit(handle->xsize, handle->ysize);
                break;
            case AR_LABELING_THRESH_MODE_AUTO_BRACKETING:
                handle->arLabelingThreshAutoBracketOver = handle->arLabelingThreshAutoBracketUnder = 1;
                break;
            case AR_LABELING_THRESH_MODE_MANUAL:
                break; // Do nothing.
            default:
                ARLOGe("Unknown or unsupported labeling threshold mode requested. Set to manual.\n");
                mode1 = AR_LABELING_THRESH_MODE_MANUAL;
        }
        handle->arLabelingThreshMode = mode1;
        if (handle->arDebug == AR_DEBUG_ENABLE) {
            const char *modeDescs[] = {
                "MANUAL",
                "AUTO_MEDIAN",
                "AUTO_OTSU",
                "AUTO_ADAPTIVE",
                "AUTO_BRACKETING"
            };
            ARLOGe("Labeling threshold mode set to %s.\n", modeDescs[mode1]);
        }
    }
    return (0);
}
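// Example: switch the tracker to automatic Otsu thresholding; arhandle is a
// placeholder for the application's ARHandle.
static void exampleUseOtsuThreshold(ARHandle *arhandle)
{
    if (arSetLabelingThreshMode(arhandle, AR_LABELING_THRESH_MODE_AUTO_OTSU) < 0) {
        ARLOGe("Unable to set labeling threshold mode.\n");
    }
}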
Example #23
// Report state of ARToolKit tracker.
static void debugReportMode(ARHandle *arhandle)
{
	int mode;
	
	arGetImageProcMode(arhandle, &mode);
	if (mode == AR_IMAGE_PROC_FRAME_IMAGE) {
		ARLOGe("ProcMode (X)   : FRAME IMAGE\n");
	} else if (mode == AR_IMAGE_PROC_FIELD_IMAGE) {
		ARLOGe("ProcMode (X)   : FIELD IMAGE\n");
	}
	
	arGetPatternDetectionMode(arhandle, &mode);
	if (mode == AR_TEMPLATE_MATCHING_COLOR) {
		ARLOGe("TemplateMatchingMode (M)   : Color Template\n");
	} else if (mode == AR_TEMPLATE_MATCHING_MONO) {
		ARLOGe("TemplateMatchingMode (M)   : Mono Template\n");
	} else if (mode == AR_MATRIX_CODE_DETECTION) {
		ARLOGe("TemplateMatchingMode (M)   : Matrix code detection\n");
	}
}
Example #24
static void saveParam(ARParam *param)
{
    char   *name = NULL, *cwd = NULL;
    size_t len;
    int    nameOK;

    arMalloc(name, char, MAXPATHLEN);
    arMalloc(cwd, char, MAXPATHLEN);
    if (!getcwd(cwd, MAXPATHLEN)) {
        ARLOGe("Unable to read current working directory.\n");
        *cwd = '\0'; // Keep cwd a valid (empty) string for the log messages below.
    }

    nameOK = 0;
    ARLOG("Filename[%s]: ", SAVE_FILENAME);
    if (fgets(name, MAXPATHLEN, stdin) != NULL)
    {
        // Trim whitespace from end of name.
        len = strlen(name);

        while (len > 0 && (name[len - 1] == '\r' || name[len - 1] == '\n' || name[len - 1] == '\t' || name[len - 1] == ' '))
        {
            len--;
            name[len] = '\0';
        }

        if (len > 0)
        {
            nameOK = 1;
            if (arParamSave(name, 1, param) < 0)
            {
                ARLOG("Parameter write error!!\n");
            }
            else
            {
                ARLOG("Saved parameter file '%s/%s'.\n", cwd, name);
            }
        }
    }

    // Try and save with a default name.
    if (!nameOK)
    {
        if (arParamSave(SAVE_FILENAME, 1, param) < 0)
        {
            ARLOG("Parameter write error!!\n");
        }
        else
        {
            ARLOG("Saved parameter file '%s/%s'.\n", cwd, SAVE_FILENAME);
        }
    }

    free(name);
    free(cwd);
}
Example #25
// Modifies globals: kpmHandle, ar2Handle.
static int initNFT(ARParamLT *cparamLT, AR_PIXEL_FORMAT pixFormat)
{
    ARLOGd("Initialising NFT.\n");
    //
    // NFT init.
    //
    
    // KPM init.
    kpmHandle = kpmCreateHandle(cparamLT, pixFormat);
    if (!kpmHandle) {
        ARLOGe("Error: kpmCreateHandle.\n");
        return (FALSE);
    }
    //kpmSetProcMode( kpmHandle, KpmProcHalfSize );
    
    // AR2 init.
    if( (ar2Handle = ar2CreateHandle(cparamLT, pixFormat, AR2_TRACKING_DEFAULT_THREAD_NUM)) == NULL ) {
        ARLOGe("Error: ar2CreateHandle.\n");
        kpmDeleteHandle(&kpmHandle);
        return (FALSE);
    }
    if (threadGetCPU() <= 1) {
        ARLOGi("Using NFT tracking settings for a single CPU.\n");
        ar2SetTrackingThresh(ar2Handle, 5.0);
        ar2SetSimThresh(ar2Handle, 0.50);
        ar2SetSearchFeatureNum(ar2Handle, 16);
        ar2SetSearchSize(ar2Handle, 6);
        ar2SetTemplateSize1(ar2Handle, 6);
        ar2SetTemplateSize2(ar2Handle, 6);
    } else {
        ARLOGi("Using NFT tracking settings for more than one CPU.\n");
        ar2SetTrackingThresh(ar2Handle, 5.0);
        ar2SetSimThresh(ar2Handle, 0.50);
        ar2SetSearchFeatureNum(ar2Handle, 16);
        ar2SetSearchSize(ar2Handle, 12);
        ar2SetTemplateSize1(ar2Handle, 6);
        ar2SetTemplateSize2(ar2Handle, 6);
    }
    // NFT dataset loading will happen later.
    return (TRUE);
}
Example #26
int ar2VideoCloseAndroid( AR2VideoParamAndroidT *vid )
{
    if (!vid) return (-1); // Sanity check.
    
    if (cparamSearchFinal() < 0) {
        ARLOGe("Unable to finalise cparamSearch.\n");
    }
    
    free( vid );
    
    return 0;
} 
int ar2VideoSetAutoOn1394(AR2VideoParam1394T *vid, int paramName, int value)
{
    dc1394feature_t      feature;
    dc1394feature_mode_t mode;

    switch (paramName)
    {
    case AR_VIDEO_1394_BRIGHTNESS:
        feature = DC1394_FEATURE_BRIGHTNESS;
        break;

    case AR_VIDEO_1394_EXPOSURE:
        feature = DC1394_FEATURE_EXPOSURE;
        break;

    case AR_VIDEO_1394_WHITE_BALANCE:
        feature = DC1394_FEATURE_WHITE_BALANCE;
        break;

    case AR_VIDEO_1394_SHUTTER_SPEED:
        feature = DC1394_FEATURE_SHUTTER;
        break;

    case AR_VIDEO_1394_GAIN:
        feature = DC1394_FEATURE_GAIN;
        break;

    case AR_VIDEO_1394_FOCUS:
        feature = DC1394_FEATURE_FOCUS;
        break;

    case AR_VIDEO_1394_GAMMA:
        feature = DC1394_FEATURE_GAMMA;
        break;

    default:
        return -1;
    }

    if (value)
        mode = DC1394_FEATURE_MODE_AUTO;
    else
        mode = DC1394_FEATURE_MODE_MANUAL;

    if (dc1394_feature_set_mode(vid->camera, feature, mode) != DC1394_SUCCESS)
    {
        ARLOGe("unable to set mode.\n");
        return -1;
    }

    return 0;
}
Example #28
static void stopWMC(AR2VideoParamWinMCT *vid)
{
    ARLOGd("ARWrap::ARvideo::stopWMC(): called");
	if (!vid || !vid->wmc) return;

    ARLOGd("ARWrap::ARvideo::stopWMC(): calling vid->wmc->Capturing()");
	if (!vid->wmc->Capturing()) {
		ARLOGe("ARWrap::ARvideo::stopWMC(): Windows.Media.Capture already stopped, exiting");
		return;
	}

	vid->wmc->StopCapture();
    ARLOGd("ARWrap::ARvideo::stopWMC(): exiting");
}
Example #29
int icpGetJ_U_S( ARdouble J_U_S[2][6], ARdouble matXc2U[3][4], ARdouble matXw2Xc[3][4], ICP3DCoordT *worldCoord )
{
    ARdouble        J_Xc_S[3][6];
    ARdouble        J_U_Xc[2][3];
    ICP3DCoordT   Xc;
    int           i, j, k;

    if( icpGetJ_Xc_S( J_Xc_S, &Xc, matXw2Xc, worldCoord ) < 0 ) {
        ARLOGe("Error: icpGetJ_Xc_S\n");
        return -1;
    }
#if ICP_DEBUG
    icpDispMat( "J_Xc_S", (ARdouble *)J_Xc_S, 3, 6 );
#endif

    if( icpGetJ_U_Xc( J_U_Xc, matXc2U, &Xc ) < 0 ) {
        ARLOGe("Error: icpGetJ_U_Xc");
        return -1;
    }
#if ICP_DEBUG
    icpDispMat( "J_U_Xc", (ARdouble *)J_U_Xc, 2, 3 );
#endif

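    // Compose the two Jacobians by the chain rule: since U = U(Xc) and Xc = Xc(S),
    // dU/dS = (dU/dXc)(dXc/dS), i.e. J_U_S (2x6) = J_U_Xc (2x3) * J_Xc_S (3x6).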
    for( j = 0; j < 2; j++ ) {
        for( i = 0; i < 6; i++ ) {
            J_U_S[j][i] = 0.0;
            for( k = 0; k < 3; k++ ) {
                J_U_S[j][i] += J_U_Xc[j][k] * J_Xc_S[k][i];
            }
        }
    }
#if ICP_DEBUG
    icpDispMat( "J_U_S", (ARdouble *)J_U_S, 2, 6 );
#endif

    return 0;
}
int ar2VideoGetMinValue1394(AR2VideoParam1394T *vid, int paramName, int *value)
{
    dc1394feature_t feature;
    uint32_t        min, max;

    switch (paramName)
    {
    case AR_VIDEO_1394_BRIGHTNESS:
        feature = DC1394_FEATURE_BRIGHTNESS;
        break;

    case AR_VIDEO_1394_EXPOSURE:
        feature = DC1394_FEATURE_EXPOSURE;
        break;

    case AR_VIDEO_1394_WHITE_BALANCE:
        feature = DC1394_FEATURE_WHITE_BALANCE;
        break;

    case AR_VIDEO_1394_SHUTTER_SPEED:
        feature = DC1394_FEATURE_SHUTTER;
        break;

    case AR_VIDEO_1394_GAIN:
        feature = DC1394_FEATURE_GAIN;
        break;

    case AR_VIDEO_1394_FOCUS:
        feature = DC1394_FEATURE_FOCUS;
        break;

    case AR_VIDEO_1394_GAMMA:
        feature = DC1394_FEATURE_GAMMA;
        break;

    default:
        return -1;
    }

    if (dc1394_feature_get_boundaries(vid->camera, feature, &min, &max) != DC1394_SUCCESS)
    {
        ARLOGe("unable to get max value.\n");
        return -1;
    }

    *value = min;

    return 0;
}