//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
    ARdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);
    gARTImage = NULL; // Invalidate image data.

    // Projection transformation.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif
    glMatrixMode(GL_MODELVIEW);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    if (gPatt_found) {

        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

        // Before drawing any geometry, mask out the area occupied by the cube marker itself.
        // This makes for a nicer visual presentation, but obviously only applies for this particular
        // shape of marker.
        DrawCubeMarkerMask();

        // All lighting and geometry to be drawn relative to the marker goes here.
        DrawCube();

    } // gPatt_found

    // Any 2D overlays go here.
    //none

    glutSwapBuffers();
}
bool ofxArtool5::setupCamera(string pthCamParam, ofVec2f _camSize, ofVec2f _viewportSize, ofPixelFormat pf){
    ARParam cParam;
    AR_PIXEL_FORMAT pixFormat = toAR(pf);

    // Keep the resolved path in a named string so the pointer stays valid;
    // taking c_str() of the temporary returned by ofToDataPath() would leave
    // cPathCamParam dangling by the time arParamLoad() is called.
    string pathCamParam = ofToDataPath(pthCamParam);
    const char * cPathCamParam = pathCamParam.c_str();

    camSize=_camSize;
    viewportSize=_viewportSize;

    if(arParamLoad(cPathCamParam, 1, &cParam)<0){
        ofLogError("ofxArtool5::setupCamera()", "error loading param file");
        return false;
    }

    if(cParam.xsize!=camSize.x||cParam.ysize!=camSize.y){
        ofLogWarning("ofxArtool5::setupCamera()","camera param needs resizing");
        arParamChangeSize(&cParam, camSize.x, camSize.y, &cParam);
    }

    if((gCparamLT = arParamLTCreate(&cParam, AR_PARAM_LT_DEFAULT_OFFSET))==NULL){
        ofLogError("ofxArtool5::setupCamera()","Error: arParamLTCreate");
        return false;
    }

    if(artMode==ART_PATTERN){
        if((gARHandle = arCreateHandle(gCparamLT))==NULL){
            ofLogError("ofxArtool5::setupCamera()","Error: arCreateHandle");
            return false;
        }
        if(arSetPixelFormat(gARHandle, pixFormat)<0){
            ofLogError("ofxArtool5::setupCamera()","Error arSetPixelFormat");
            return false;
        }
        if(arSetDebugMode(gARHandle, AR_DEBUG_DISABLE)<0){
            ofLogError("ofxArtool5::setupCamera()","Error arSetDebugMode");
            return false;
        }
        if((gAR3DHandle = ar3DCreateHandle(&cParam))==NULL){
            ofLogError("ofxArtool5::setupCamera()","Error ar3DCreateHandle");
            return false;
        }
    }else if(artMode==ART_NFT){
        arglCameraFrustumRH(&(gCparamLT->param), getMinDistance(), getMaxDistance(), cameraLens);
    }

    cvColorFrame.allocate(camSize.x, camSize.y);
    cvGrayFrame.allocate(camSize.x, camSize.y);

    return true;
}
void ARTApp::display(ARUint8 *arImage, ARParamLT *arParam, ARGL_CONTEXT_SETTINGS_REF arSettings,
                     GLMmodel *objModel, const ARdouble pattTrans[3][4])
{
    ARdouble p[16];
    ARdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglDispImage(arImage, &(arParam->param), 1.0, arSettings); // zoom = 1.0.

    // Projection transformation.
    arglCameraFrustumRH(&(arParam->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
    glLoadMatrixd(p);
    glMatrixMode(GL_MODELVIEW);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    glEnable(GL_LIGHTING);
    glEnable(GL_LIGHT0);
    glEnable(GL_DEPTH_TEST);

    if (objModel) {

        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH(pattTrans, m, VIEW_SCALEFACTOR);
        glLoadMatrixd(m);

        // All lighting and geometry to be drawn relative to the marker goes here.
        glPushMatrix();                                // Save world coordinate system.
        glTranslatef(0.0f, 0.0f, pattWidth / 2.0);     // Place base of object on marker surface.
        glmDraw(objModel, GLM_SMOOTH | GLM_MATERIAL);
        glPopMatrix();                                 // Restore world coordinate system.
    }

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    glutSwapBuffers();
}
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    int i;
    GLdouble p[16];
    GLdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings); // zoom = 1.0.
    arVideoCapNext();
    gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().

    if (gPatt_found) {

        // Projection transformation.
        arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
        glMatrixMode(GL_PROJECTION);
        glLoadMatrixd(p);
        glMatrixMode(GL_MODELVIEW);

        // Viewing transformation.
        glLoadIdentity();
        // Lighting and geometry that moves with the camera should go here.
        // (I.e. must be specified before viewing transformations.)
        //none

        // All other lighting and geometry goes here.
        // Calculate the camera position for each object and draw it.
        for (i = 0; i < gObjectDataCount; i++) {
            if ((gObjectData[i].visible != 0) && (gObjectData[i].vrml_id >= 0)) {
                //fprintf(stderr, "About to draw object %i\n", i);
                arglCameraViewRH(gObjectData[i].trans, m, VIEW_SCALEFACTOR_4);
                glLoadMatrixd(m);
                arVrmlDraw(gObjectData[i].vrml_id);
            }
        }
    } // gPatt_found

    // Any 2D overlays go here.
    //none

    glutSwapBuffers();
}
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    GLdouble p[16];
    GLdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings); // zoom = 1.0.
    arVideoCapNext();
    gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().

    for (std::list<Piece*>::iterator it = pieces.begin(); it != pieces.end(); it++) {
        if ((*it)->patt_found) // gPatt_found
        {
            // Projection transformation.
            arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
            glMatrixMode(GL_PROJECTION);
            glLoadMatrixd(p);
            glMatrixMode(GL_MODELVIEW);

            // Viewing transformation.
            glLoadIdentity();
            // Lighting and geometry that moves with the camera should go here.
            // (I.e. must be specified before viewing transformations.)
            //none

            // ARToolKit supplied distance in millimetres, but I want OpenGL to work in my units.
            arglCameraViewRH((*it)->patt_trans, m, VIEW_SCALEFACTOR);
            glLoadMatrixd(m);

            // All other lighting and geometry goes here.
            (*it)->Draw();
        }

        // Any 2D overlays go here.
        //none
    }

    glutSwapBuffers();
}
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    GLdouble p[16];
    GLdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    // arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings); // zoom = 1.0.
    arVideoCapNext();
    gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().

    // Projection transformation.
    arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
    glLoadMatrixd(p);
    glMatrixMode(GL_MODELVIEW);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    if (gPatt_found) {

        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
        glLoadMatrixd(m);

        // All lighting and geometry to be drawn relative to the marker goes here.
        DrawCube();

    } // gPatt_found

    // Any 2D overlays go here.
    //none

    glutSwapBuffers();
}
int CWebCam::SetupWebCam(const char *cparam_names, char *vconfs)
{
    int     xsize, ysize;
    ARParam wparam;

    if ((ARTVideo = ar2VideoOpen(vconfs)) == 0) return(0);
    if (ar2VideoInqSize(ARTVideo, &xsize, &ysize) < 0) return(0);
    if (arParamLoad(cparam_names, 1, &wparam) < 0) return(0);

    arParamChangeSize(&wparam, xsize, ysize, &ARTCparam);
    arInitCparam(&ARTCparam);
    arParamDisp(&ARTCparam);
    ARTThreshhold = 100;

    arglCameraFrustumRH(&ARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, projectionMat);

    if (ar2VideoCapStart(ARTVideo) != 0) return(0);
    ar2VideoCapNext(ARTVideo);

    return(1);
}
static int renderScene(AppState* state)
{
    if (gPatt_found) {
        GLdouble projectionMatrix[16];
        GLdouble modelViewMatrix[16];

        // Projection transformation.
        arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, projectionMatrix);
        glMatrixMode(GL_PROJECTION);
        glLoadMatrixd(projectionMatrix);
        glMatrixMode(GL_MODELVIEW);

        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH(gPatt_trans, modelViewMatrix, VIEW_SCALEFACTOR);
        glLoadMatrixd(modelViewMatrix);

        // All lighting and geometry to be drawn relative to the marker goes here.
        // DrawCube();
    } // gPatt_found

    {
        GLuint gle = glGetError();
        if (gle) {
            fprintf(stderr, "GL error 0x%x\n", gle); // Pass the error code so the %x conversion has its argument.
        }
    }

    return 0; // Assume 0 means success for this renderer.
}
//============================
// Get the projection matrix
//============================
void cARTK::getProjectionMat( double dMat[16] )
{
    //                                    near  far
    arglCameraFrustumRH( &m_sCameraParam, 0.1,  400.0, dMat );
}
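For context, a matrix produced this way is typically loaded straight into the fixed-function projection stack, exactly as the other examples in this listing do. A minimal sketch follows; the instance name gARTK, the local array proj, and an already-current GL context are assumptions, not part of the original snippet.

// Minimal usage sketch (assumed instance gARTK of cARTK, GL context already current).
double proj[16];
gARTK.getProjectionMat(proj);   // Fills proj via arglCameraFrustumRH() with near = 0.1, far = 400.0.
glMatrixMode(GL_PROJECTION);
glLoadMatrixd(proj);            // arglCameraFrustumRH() writes the matrix in OpenGL (column-major) order.
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();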
int main(int argc, char** argv)
{
    char glutGamemode[32] = "";
    char *vconf = NULL;
    char cparaDefault[] = "Data2/camera_para.dat";
    char *cpara = NULL;
    int i;
    int gotTwoPartOption;
    const char markerConfigDataFilename[] = "Data2/markers.dat";
    const char objectDataFilename[] = "Data2/objects.dat";

#ifdef DEBUG
    arLogLevel = AR_LOG_LEVEL_DEBUG;
#endif

    //
    // Process command-line options.
    //

    glutInit(&argc, argv);

    i = 1; // argv[0] is name of app, so start at 1.
    while (i < argc) {
        gotTwoPartOption = FALSE;
        // Look for two-part options first.
        if ((i + 1) < argc) {
            if (strcmp(argv[i], "--vconf") == 0) {
                i++;
                vconf = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--cpara") == 0) {
                i++;
                cpara = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--width") == 0) {
                i++;
                // Get width from second field.
                if (sscanf(argv[i], "%d", &prefWidth) != 1) {
                    ARLOGe("Error: --width option must be followed by desired width.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--height") == 0) {
                i++;
                // Get height from second field.
                if (sscanf(argv[i], "%d", &prefHeight) != 1) {
                    ARLOGe("Error: --height option must be followed by desired height.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--refresh") == 0) {
                i++;
                // Get refresh rate from second field.
                if (sscanf(argv[i], "%d", &prefRefresh) != 1) {
                    ARLOGe("Error: --refresh option must be followed by desired refresh rate.\n");
                }
                gotTwoPartOption = TRUE;
            }
        }
        if (!gotTwoPartOption) {
            // Look for single-part options.
            if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0 || strcmp(argv[i], "-h") == 0) {
                usage(argv[0]);
            } else if (strncmp(argv[i], "-cpara=", 7) == 0) {
                cpara = &(argv[i][7]);
            } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) {
                ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING);
                exit(0);
            } else if (strcmp(argv[i], "--windowed") == 0) {
                prefWindowed = TRUE;
            } else if (strcmp(argv[i], "--fullscreen") == 0) {
                prefWindowed = FALSE;
            } else {
                ARLOGe("Error: invalid command line argument '%s'.\n", argv[i]);
                usage(argv[0]);
            }
        }
        i++;
    }

    //
    // Video setup.
    //

    if (!setupCamera((cpara ? cpara : cparaDefault), vconf, &gCparamLT)) {
        ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    //
    // AR init.
    //

    if (!initNFT(gCparamLT, arVideoGetPixelFormat())) {
        ARLOGe("main(): Unable to init NFT.\n");
        exit(-1);
    }

    //
    // Markers setup.
    //

    // Load marker(s).
    newMarkers(markerConfigDataFilename, &markersNFT, &markersNFTCount);
    if (!markersNFTCount) {
        ARLOGe("Error loading markers from config. file '%s'.\n", markerConfigDataFilename);
        cleanup();
        exit(-1);
    }
    ARLOGi("Marker count = %d\n", markersNFTCount);

    // Marker data has been loaded, so now load NFT data.
    if (!loadNFTData()) {
        ARLOGe("Error loading NFT data.\n");
        cleanup();
        exit(-1);
    }

    //
    // Graphics setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    if (prefWindowed) {
        if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
        else glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize);
        glutCreateWindow(argv[0]);
    } else {
        if (glutGameModeGet(GLUT_GAME_MODE_POSSIBLE)) {
            if (prefWidth && prefHeight) {
                if (prefDepth) {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i", prefWidth, prefHeight, prefDepth);
                } else {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i@%i", prefWidth, prefHeight, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
                }
            } else {
                prefWidth = glutGameModeGet(GLUT_GAME_MODE_WIDTH);
                prefHeight = glutGameModeGet(GLUT_GAME_MODE_HEIGHT);
                snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
            }
            glutGameModeString(glutGamemode);
            glutEnterGameMode();
        } else {
            if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
            glutCreateWindow(argv[0]);
            glutFullScreen();
        }
    }

    // Create the OpenGL projection from the calibrated camera parameters.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens);
    cameraPoseValid = FALSE;

    // Setup ARgsub_lite library for current OpenGL context.
    if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
        ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
        cleanup();
        exit(-1);
    }

    // Load objects (i.e. OSG models).
    VirtualEnvironmentInit(objectDataFilename);
    VirtualEnvironmentHandleARViewUpdatedCameraLens(cameraLens);

    //
    // Setup complete. Start tracking.
    //

    // Start the video.
    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }
    arUtilTimerReset();

    // Register GLUT event-handling callbacks.
    // NB: mainLoop() is registered by Visibility.
    glutDisplayFunc(Display);
    glutReshapeFunc(Reshape);
    glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);

    glutMainLoop();

    return (0);
}
int main(int argc, char** argv)
{
    char glutGamemode[32];
    const char *cparam_name = "Data2/camera_para.dat";
    char vconf[] = "";
    const char markerConfigDataFilename[] = "Data2/markers.dat";

#ifdef DEBUG
    arLogLevel = AR_LOG_LEVEL_DEBUG;
#endif

    //
    // Library inits.
    //

    glutInit(&argc, argv);

    //
    // Video setup.
    //

#ifdef _WIN32
    CoInitialize(NULL);
#endif

    if (!setupCamera(cparam_name, vconf, &gCparamLT)) {
        ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    //
    // AR init.
    //

    // Create the OpenGL projection from the calibrated camera parameters.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens);

    if (!initNFT(gCparamLT, arVideoGetPixelFormat())) {
        ARLOGe("main(): Unable to init NFT.\n");
        exit(-1);
    }

    //
    // Graphics setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    if (!prefWindowed) {
        if (prefRefresh) sprintf(glutGamemode, "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh);
        else sprintf(glutGamemode, "%ix%i:%i", prefWidth, prefHeight, prefDepth);
        glutGameModeString(glutGamemode);
        glutEnterGameMode();
    } else {
        glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize);
        glutCreateWindow(argv[0]);
    }

    // Setup ARgsub_lite library for current OpenGL context.
    if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
        ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
        cleanup();
        exit(-1);
    }

    arUtilTimerReset();

    //
    // Markers setup.
    //

    // Load marker(s).
    newMarkers(markerConfigDataFilename, &markersNFT, &markersNFTCount);
    if (!markersNFTCount) {
        ARLOGe("Error loading markers from config. file '%s'.\n", markerConfigDataFilename);
        cleanup();
        exit(-1);
    }
    ARLOGi("Marker count = %d\n", markersNFTCount);

    // Marker data has been loaded, so now load NFT data.
    if (!loadNFTData()) {
        ARLOGe("Error loading NFT data.\n");
        cleanup();
        exit(-1);
    }

    // Start the video.
    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }

    // Register GLUT event-handling callbacks.
    // NB: mainLoop() is registered by Visibility.
    glutDisplayFunc(Display);
    glutReshapeFunc(Reshape);
    glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);

    glutMainLoop();

    return (0);
}
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
    ARdouble m[16];
#ifdef ARDOUBLE_IS_FLOAT
    GLdouble p0[16];
    GLdouble m0[16];
#endif
    int i, j, k;
    GLfloat w, bw, bh, vertices[6][2];
    GLubyte pixels[300];
    char text[256];
    GLdouble winX, winY, winZ;
    int showMErr[CHECK_ID_MULTIMARKERS_MAX];
    GLdouble MX[CHECK_ID_MULTIMARKERS_MAX];
    GLdouble MY[CHECK_ID_MULTIMARKERS_MAX];
    int pattDetectMode;
    AR_MATRIX_CODE_TYPE matrixCodeType;

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);

    if (gMultiConfigCount) {
        arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
        glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(p);
#else
        glLoadMatrixd(p);
#endif
        glMatrixMode(GL_MODELVIEW);
        glEnable(GL_DEPTH_TEST);

        // If we have multi-configs, show their origin onscreen.
        for (k = 0; k < gMultiConfigCount; k++) {
            showMErr[k] = FALSE;
            if (gMultiConfigs[k]->prevF != 0) {
                arglCameraViewRH((const ARdouble (*)[4])gMultiConfigs[k]->trans, m, 1.0);
#ifdef ARDOUBLE_IS_FLOAT
                glLoadMatrixf(m);
#else
                glLoadMatrixd(m);
#endif
                drawAxes();
#ifdef ARDOUBLE_IS_FLOAT
                for (i = 0; i < 16; i++) m0[i] = (GLdouble)m[i];
                for (i = 0; i < 16; i++) p0[i] = (GLdouble)p[i];
                if (gluProject(0, 0, 0, m0, p0, gViewport, &winX, &winY, &winZ) == GL_TRUE)
#else
                if (gluProject(0, 0, 0, m, p, gViewport, &winX, &winY, &winZ) == GL_TRUE)
#endif
                {
                    showMErr[k] = TRUE;
                    MX[k] = winX;
                    MY[k] = winY;
                }
            }
        } // for k
    }

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    arGetPatternDetectionMode(gARHandle, &pattDetectMode);
    arGetMatrixCodeType(gARHandle, &matrixCodeType);

    // For all markers, draw onscreen position.
    // Colour based on cutoffPhase.
    glLoadIdentity();
    glVertexPointer(2, GL_FLOAT, 0, vertices);
    glEnableClientState(GL_VERTEX_ARRAY);
    glLineWidth(2.0f);
    for (j = 0; j < gARHandle->marker_num; j++) {
        glColor3ubv(cutoffPhaseColours[gARHandle->markerInfo[j].cutoffPhase].colour);
        for (i = 0; i < 5; i++) {
            int dir = gARHandle->markerInfo[j].dir;
            vertices[i][0] = (float)gARHandle->markerInfo[j].vertex[(i + 4 - dir) % 4][0] * (float)windowWidth / (float)gARHandle->xsize;
            vertices[i][1] = ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].vertex[(i + 4 - dir) % 4][1]) * (float)windowHeight / (float)gARHandle->ysize;
        }
        vertices[i][0] = (float)gARHandle->markerInfo[j].pos[0] * (float)windowWidth / (float)gARHandle->xsize;
        vertices[i][1] = ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].pos[1]) * (float)windowHeight / (float)gARHandle->ysize;
        glDrawArrays(GL_LINE_STRIP, 0, 6);

        // For markers that have been identified, draw the ID number.
        if (gARHandle->markerInfo[j].id >= 0) {
            glColor3ub(255, 0, 0);
            if (matrixCodeType == AR_MATRIX_CODE_GLOBAL_ID && (pattDetectMode == AR_MATRIX_CODE_DETECTION || pattDetectMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX || pattDetectMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX)) {
                snprintf(text, sizeof(text), "%llu (err=%d)", gARHandle->markerInfo[j].globalID, gARHandle->markerInfo[j].errorCorrected);
            } else {
                snprintf(text, sizeof(text), "%d", gARHandle->markerInfo[j].id);
            }
            print(text, (float)gARHandle->markerInfo[j].pos[0] * (float)windowWidth / (float)gARHandle->xsize,
                  ((float)gARHandle->ysize - (float)gARHandle->markerInfo[j].pos[1]) * (float)windowHeight / (float)gARHandle->ysize, 0, 0);
        }
    }
    glDisableClientState(GL_VERTEX_ARRAY);

    // For matrix mode, draw the pattern image of the largest marker.
    if (pattDetectMode == AR_MATRIX_CODE_DETECTION || pattDetectMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX || pattDetectMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX) {

        int area = 0, biggestMarker = -1;

        for (j = 0; j < gARHandle->marker_num; j++) {
            if (gARHandle->markerInfo[j].area > area) {
                area = gARHandle->markerInfo[j].area;
                biggestMarker = j;
            }
        }
        if (area >= AR_AREA_MIN) {

            int imageProcMode;
            ARdouble pattRatio;
            ARUint8 ext_patt[AR_PATT_SIZE2_MAX * AR_PATT_SIZE2_MAX * 3]; // Holds unwarped pattern extracted from image.
            int size;
            int zoom = 4;
            ARdouble vertexUpright[4][2];

            // Reorder vertices based on dir.
            for (i = 0; i < 4; i++) {
                int dir = gARHandle->markerInfo[biggestMarker].dir;
                vertexUpright[i][0] = gARHandle->markerInfo[biggestMarker].vertex[(i + 4 - dir) % 4][0];
                vertexUpright[i][1] = gARHandle->markerInfo[biggestMarker].vertex[(i + 4 - dir) % 4][1];
            }
            arGetImageProcMode(gARHandle, &imageProcMode);
            arGetPattRatio(gARHandle, &pattRatio);
            if (matrixCodeType == AR_MATRIX_CODE_GLOBAL_ID) {
                size = 14;
                arPattGetImage2(imageProcMode, AR_MATRIX_CODE_DETECTION, size, size * AR_PATT_SAMPLE_FACTOR2,
                                gARTImage, gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, &gCparamLT->paramLTf,
                                vertexUpright, (ARdouble)14 / (ARdouble)(14 + 2), ext_patt);
            } else {
                size = matrixCodeType & AR_MATRIX_CODE_TYPE_SIZE_MASK;
                arPattGetImage2(imageProcMode, AR_MATRIX_CODE_DETECTION, size, size * AR_PATT_SAMPLE_FACTOR2,
                                gARTImage, gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, &gCparamLT->paramLTf,
                                vertexUpright, pattRatio, ext_patt);
            }
            glRasterPos2f((float)(windowWidth - size * zoom) - 4.0f, (float)(size * zoom) + 4.0f);
            glPixelZoom((float)zoom, (float)-zoom);
            glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
            glDrawPixels(size, size, GL_LUMINANCE, GL_UNSIGNED_BYTE, ext_patt);
            glPixelZoom(1.0f, 1.0f);
        }
    }

    // Draw error value for multimarker pose.
    for (k = 0; k < gMultiConfigCount; k++) {
        if (showMErr[k]) {
            snprintf(text, sizeof(text), "err=%0.3f", gMultiErrs[k]);
            print(text, MX[k], MY[k], 0, 0);
        }
    }

    //
    // Draw help text and mode.
    //
    glLoadIdentity();
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        } else if (gShowHelp == 2) {
            bw = 0.0f;
            for (i = 0; i < AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT; i++) {
                w = (float)glutBitmapLength(GLUT_BITMAP_HELVETICA_10, (unsigned char*)arMarkerInfoCutoffPhaseDescriptions[cutoffPhaseColours[i].cutoffPhase]);
                if (w > bw) bw = w;
            }
            bw += 12.0f; // Space for color block.
            bh = AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT * 10.0f /* character height */ + (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1) * 2.0f /* line spacing */;
            drawBackground(bw, bh, 2.0f, 2.0f);

            // Draw the colour block and text, line by line.
            for (i = 0; i < AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT; i++) {
                for (j = 0; j < 300; j += 3) {
                    pixels[j]     = cutoffPhaseColours[i].colour[0];
                    pixels[j + 1] = cutoffPhaseColours[i].colour[1];
                    pixels[j + 2] = cutoffPhaseColours[i].colour[2];
                }
                glRasterPos2f(2.0f, (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1 - i) * 12.0f + 2.0f);
                glPixelZoom(1.0f, 1.0f);
                glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
                glDrawPixels(10, 10, GL_RGB, GL_UNSIGNED_BYTE, pixels);
                print(arMarkerInfoCutoffPhaseDescriptions[cutoffPhaseColours[i].cutoffPhase], 14.0f,
                      (AR_MARKER_INFO_CUTOFF_PHASE_DESCRIPTION_COUNT - 1 - i) * 12.0f + 2.0f, 0, 0);
            }
        }
    }

    glutSwapBuffers();
}
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    int i;
    GLdouble p[16];
    GLdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglDispImage(gARTImage, &(gCparamLT->param), 1.0, gArglSettings); // zoom = 1.0.
    gARTImage = NULL; // Invalidate image data.

    // Projection transformation.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
    glLoadMatrixd(p);
    glMatrixMode(GL_MODELVIEW);
    glEnable(GL_DEPTH_TEST);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    for (i = 0; i < gObjectDataCount; i++) {
        if ((gObjectData[i].visible != 0) && (gObjectData[i].vrml_id >= 0)) {

            // Calculate the camera position for the object and draw it.
            // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
            arglCameraViewRH(gObjectData[i].trans, m, VIEW_SCALEFACTOR);
            glLoadMatrixd(m);

            // All lighting and geometry to be drawn relative to the marker goes here.
            //ARLOGe("About to draw object %i\n", i);
            arVrmlDraw(gObjectData[i].vrml_id);
        }
    }

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }

    glutSwapBuffers();
}
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    GLdouble p[16];
    GLdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglDispImage(gARTImage, &gARTCparam, 1.0, gArglSettings); // zoom = 1.0.
    arVideoCapNext();
    gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().

    // Projection transformation.
    arglCameraFrustumRH(&gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
    glLoadMatrixd(p);
    glMatrixMode(GL_MODELVIEW);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    // Read leap dump and accumulate the position deltas.
    int koo = read_leap_dump();
    float dx = px1 - px0;
    float dy = py1 - py0;
    float dz = pz1 - pz0;
    printf("dx %f\n", dx);
    px0 = px1;
    py0 = py1;
    pz0 = pz1;

    // Scale the deltas by the window size (assumed intent; GLUT_WINDOW_WIDTH/HEIGHT
    // are used here because GLUT_WINDOW_X/Y report the window position, which can be zero).
    float stepX = 100.0f / glutGet(GLUT_WINDOW_WIDTH);
    float stepY = 100.0f / glutGet(GLUT_WINDOW_HEIGHT);
    PX = stepX * dx + PX;
    PY = stepY * dy + PY;
    PZ = stepX * dz + PZ;

    if (snapToMarker && gPatt_found) {

        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
        glLoadMatrixd(m);

        // All lighting and geometry to be drawn relative to the marker goes here.
        DrawItem();

    } // gPatt_found
    else {
        arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
        glLoadMatrixd(m);

        // All lighting and geometry to be drawn relative to the marker goes here.
        DrawItem();
    }

    // Any 2D overlays go here.
    //none

    glutSwapBuffers();
}
void BazARTracker::setProjection(const double n, const double f)
{
    // GSUB_LITE - NEW ART STYLE
    arglCameraFrustumRH(&(m_cparam->cparam), n, f, m_projectionMatrix);
}
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
    ARdouble m[16];
    double zoom;

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglPixelBufferDataUpload(gArglSettings, gARTImage);
    arglDispImage(gArglSettings);
    gARTImage = NULL; // Invalidate image data.

    // Projection transformation.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif
    glMatrixMode(GL_MODELVIEW);
    glEnable(GL_DEPTH_TEST);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    if (gPatt_found) {

        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

        // All lighting and geometry to be drawn relative to the marker goes here.

        // Draw the movie frame.
        if (gMovieImage) {
            glPushMatrix();
            glRotatef(90.0f, 1.0f, 0.0f, 0.0f);          // Place movie in x-z plane instead of x-y plane.
            glTranslated(-gPatt_width*0.5, 0.0f, 0.0f);  // Movie origin is at lower-left of movie frame. Place this at the edge of the marker.
            zoom = 1.0/gMovieCparam.xsize * gPatt_width; // Scale the movie frame so that it is the same width as the marker.
            arglPixelBufferDataUpload(gMovieArglSettings, gMovieImage);
            arglDispImageStateful(gMovieArglSettings);   // Show the movie frame.
            glPopMatrix();
        }

    } // gPatt_found

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }

    glutSwapBuffers();
}
int main(int argc, char** argv)
{
    char glutGamemode[32] = "";
    char *vconf = NULL;
    char cparaDefault[] = "../share/artoolkit-examples/Data/camera_para.dat";
    char *cpara = NULL;
    int i;
    int gotTwoPartOption;
    const char markerConfigDataFilename[] = "../share/artoolkit-examples/Data/markers.dat";
    const char objectDataFilename[] = "../share/artoolkit-examples/Data/objects.dat";

    //
    // Process command-line options.
    //

    glutInit(&argc, argv);

    i = 1; // argv[0] is name of app, so start at 1.
    while (i < argc) {
        gotTwoPartOption = FALSE;
        // Look for two-part options first.
        if ((i + 1) < argc) {
            if (strcmp(argv[i], "--vconf") == 0) {
                i++;
                vconf = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--cpara") == 0) {
                i++;
                cpara = argv[i];
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--width") == 0) {
                i++;
                // Get width from second field.
                if (sscanf(argv[i], "%d", &prefWidth) != 1) {
                    ARLOGe("Error: --width option must be followed by desired width.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--height") == 0) {
                i++;
                // Get height from second field.
                if (sscanf(argv[i], "%d", &prefHeight) != 1) {
                    ARLOGe("Error: --height option must be followed by desired height.\n");
                }
                gotTwoPartOption = TRUE;
            } else if (strcmp(argv[i], "--refresh") == 0) {
                i++;
                // Get refresh rate from second field.
                if (sscanf(argv[i], "%d", &prefRefresh) != 1) {
                    ARLOGe("Error: --refresh option must be followed by desired refresh rate.\n");
                }
                gotTwoPartOption = TRUE;
            }
        }
        if (!gotTwoPartOption) {
            // Look for single-part options.
            if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-help") == 0 || strcmp(argv[i], "-h") == 0) {
                usage(argv[0]);
            } else if (strncmp(argv[i], "-cpara=", 7) == 0) {
                cpara = &(argv[i][7]);
            } else if (strcmp(argv[i], "--version") == 0 || strcmp(argv[i], "-version") == 0 || strcmp(argv[i], "-v") == 0) {
                ARLOG("%s version %s\n", argv[0], AR_HEADER_VERSION_STRING);
                exit(0);
            } else if (strcmp(argv[i], "--windowed") == 0) {
                prefWindowed = TRUE;
            } else if (strcmp(argv[i], "--fullscreen") == 0) {
                prefWindowed = FALSE;
            } else {
                ARLOGe("Error: invalid command line argument '%s'.\n", argv[i]);
                usage(argv[0]);
            }
        }
        i++;
    }

    //
    // Video setup.
    //

    if (!setupCamera((cpara ? cpara : cparaDefault), vconf, &gCparamLT)) {
        ARLOGe("main(): Unable to set up AR camera.\n");
        exit(-1);
    }

    //
    // AR init.
    //

    // Init AR.
    gARPattHandle = arPattCreateHandle();
    if (!gARPattHandle) {
        ARLOGe("Error creating pattern handle.\n");
        exit(-1);
    }
    gARHandle = arCreateHandle(gCparamLT);
    if (!gARHandle) {
        ARLOGe("Error creating AR handle.\n");
        exit(-1);
    }
    arPattAttach(gARHandle, gARPattHandle);

    if (arSetPixelFormat(gARHandle, arVideoGetPixelFormat()) < 0) {
        ARLOGe("Error setting pixel format.\n");
        exit(-1);
    }

    gAR3DHandle = ar3DCreateHandle(&gCparamLT->param);
    if (!gAR3DHandle) {
        ARLOGe("Error creating 3D handle.\n");
        exit(-1);
    }

    //
    // Markers setup.
    //

    // Load marker(s).
    newMarkers(markerConfigDataFilename, gARPattHandle, &markersSquare, &markersSquareCount, &gARPattDetectionMode);
    ARLOGi("Marker count = %d\n", markersSquareCount);

    //
    // Other ARToolKit setup.
    //

    arSetMarkerExtractionMode(gARHandle, AR_USE_TRACKING_HISTORY_V2);
    //arSetMarkerExtractionMode(gARHandle, AR_NOUSE_TRACKING_HISTORY);
    //arSetLabelingThreshMode(gARHandle, AR_LABELING_THRESH_MODE_MANUAL); // Uncomment to force manual thresholding.

    // Set the pattern detection mode (template (pictorial) vs. matrix (barcode)) based on
    // the marker types as defined in the marker config. file.
    arSetPatternDetectionMode(gARHandle, gARPattDetectionMode); // Default = AR_TEMPLATE_MATCHING_COLOR

    // Other application-wide marker options. Once set, these apply to all markers in use in the application.
    // If you are using standard ARToolKit picture (template) markers, leave commented to use the defaults.
    // If you are using a different marker design (see http://www.artoolworks.com/support/app/marker.php )
    // then uncomment and edit as instructed by the marker design application.
    //arSetLabelingMode(gARHandle, AR_LABELING_BLACK_REGION); // Default = AR_LABELING_BLACK_REGION
    //arSetBorderSize(gARHandle, 0.25f); // Default = 0.25f
    //arSetMatrixCodeType(gARHandle, AR_MATRIX_CODE_3x3); // Default = AR_MATRIX_CODE_3x3

    //
    // Graphics setup.
    //

    // Set up GL context(s) for OpenGL to draw into.
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
    if (prefWindowed) {
        if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
        else glutInitWindowSize(gCparamLT->param.xsize, gCparamLT->param.ysize);
        glutCreateWindow(argv[0]);
    } else {
        if (glutGameModeGet(GLUT_GAME_MODE_POSSIBLE)) {
            if (prefWidth && prefHeight) {
                if (prefDepth) {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i@%i", prefWidth, prefHeight, prefDepth, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i:%i", prefWidth, prefHeight, prefDepth);
                } else {
                    if (prefRefresh) snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i@%i", prefWidth, prefHeight, prefRefresh);
                    else snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
                }
            } else {
                prefWidth = glutGameModeGet(GLUT_GAME_MODE_WIDTH);
                prefHeight = glutGameModeGet(GLUT_GAME_MODE_HEIGHT);
                snprintf(glutGamemode, sizeof(glutGamemode), "%ix%i", prefWidth, prefHeight);
            }
            glutGameModeString(glutGamemode);
            glutEnterGameMode();
        } else {
            if (prefWidth > 0 && prefHeight > 0) glutInitWindowSize(prefWidth, prefHeight);
            glutCreateWindow(argv[0]);
            glutFullScreen();
        }
    }

    // Create the OpenGL projection from the calibrated camera parameters.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, cameraLens);
    cameraPoseValid = FALSE;

    // Setup ARgsub_lite library for current OpenGL context.
    if ((gArglSettings = arglSetupForCurrentContext(&(gCparamLT->param), arVideoGetPixelFormat())) == NULL) {
        ARLOGe("main(): arglSetupForCurrentContext() returned error.\n");
        cleanup();
        exit(-1);
    }
    arglSetupDebugMode(gArglSettings, gARHandle);

    // Load objects (i.e. OSG models).
    VirtualEnvironmentInit(objectDataFilename);
    VirtualEnvironmentHandleARViewUpdatedCameraLens(cameraLens);

    //
    // Setup complete. Start tracking.
    //

    // Start the video.
    if (arVideoCapStart() != 0) {
        ARLOGe("setupCamera(): Unable to begin camera data capture.\n");
        return (FALSE);
    }
    arUtilTimerReset();

    // Register GLUT event-handling callbacks.
    // NB: mainLoop() is registered by Visibility.
    glutDisplayFunc(Display);
    glutReshapeFunc(Reshape);
    glutVisibilityFunc(Visibility);
    glutKeyboardFunc(Keyboard);

    glutMainLoop();

    return (0);
}
//
// This function is called when the window needs redrawing.
//
static void Display(void)
{
    ARdouble p[16];
    ARdouble m[16];

    // Select correct buffer for this context.
    glDrawBuffer(GL_BACK);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.

    arglDispImage(gARTImage, &(gCparamLT->param), 1.0, gArglSettings); // zoom = 1.0.
    gARTImage = NULL; // Invalidate image data.

    // Projection transformation.
    arglCameraFrustumRH(&(gCparamLT->param), VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p);
    glMatrixMode(GL_PROJECTION);
#ifdef ARDOUBLE_IS_FLOAT
    glLoadMatrixf(p);
#else
    glLoadMatrixd(p);
#endif
    glMatrixMode(GL_MODELVIEW);
    glEnable(GL_DEPTH_TEST);

    // Viewing transformation.
    glLoadIdentity();
    // Lighting and geometry that moves with the camera should go here.
    // (I.e. must be specified before viewing transformations.)
    //none

    if (gPatt_found) {

        // Calculate the camera position relative to the marker.
        // Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
        arglCameraViewRH(gPatt_trans, m, VIEW_SCALEFACTOR);
#ifdef ARDOUBLE_IS_FLOAT
        glLoadMatrixf(m);
#else
        glLoadMatrixd(m);
#endif

        // All lighting and geometry to be drawn relative to the marker goes here.
        DrawCube();

    } // gPatt_found

    // Any 2D overlays go here.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, (GLdouble)windowWidth, 0, (GLdouble)windowHeight, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    //
    // Draw help text and mode.
    //
    if (gShowMode) {
        printMode();
    }
    if (gShowHelp) {
        if (gShowHelp == 1) {
            printHelpKeys();
        }
    }

    glutSwapBuffers();
}