//----------
bool DeckLink::startCapture() {
	try {
		CHECK_ERRORS(this->input->EnableVideoInput(this->displayMode, bmdFormat8BitYUV, 0), "Failed to enable video input");
		CHECK_ERRORS(this->input->StartStreams(), "Failed to start streams");
		return true;
	}
	catch (const std::exception& e) {
		OFXMV_ERROR << e.what();
		return false;
	}
}
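//----------
// The CHECK_ERRORS macro used by the DeckLink snippets in this section is not shown here.
// Below is a minimal sketch of what it might look like, assuming it simply turns a failing
// HRESULT into a std::exception carrying the supplied message and that the DeckLink SDK
// headers (which define HRESULT and S_OK) are already included; the real macro in the
// ofxMachineVision / ofxBlackmagic code may be named and implemented differently.
#include <stdexcept>
#include <string>

#define CHECK_ERRORS(call, message) \
	do { \
		HRESULT _result = (call); \
		if (_result != S_OK) { \
			throw std::runtime_error(std::string(message)); \
		} \
	} while (0)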
//---------
void Input::startCapture(const DeviceDefinition& device, const BMDDisplayMode& format) {
	try {
		this->stopCapture();
		this->device = device;
		CHECK_ERRORS(device.device->QueryInterface(IID_IDeckLinkInput, (void**)&this->input), "Failed to query interface");
		CHECK_ERRORS(this->input->SetCallback(this), "Failed to set input callback");
		CHECK_ERRORS(this->input->EnableVideoInput(format, bmdFormat8BitYUV, 0), "Failed to enable video input");
		CHECK_ERRORS(this->input->StartStreams(), "Failed to start streams");
		this->state = Running;
	}
	catch (const std::exception& e) {
		OFXBM_ERROR << e.what();
		this->state = Waiting;
	}
}
//----------
Specification DeckLink::open(shared_ptr<Base::InitialisationSettings> initialisationSettings) {
	auto settings = this->getTypedSettings<InitialisationSettings>(initialisationSettings);

	auto devices = ofxBlackmagic::Iterator::getDeviceList();
	if (devices.empty()) {
		throw(ofxMachineVision::Exception("No DeckLink devices available"));
	}
	if (devices.size() <= (unsigned int) settings->deviceID) {
		string str = "deviceID [" + ofToString(settings->deviceID) + "] out of range. [" + ofToString(devices.size()) + "] devices available";
		throw(ofxMachineVision::Exception(str));
	}
	this->device = devices[settings->deviceID];

	int width, height;
	this->displayMode = static_cast<_BMDDisplayMode>(settings->displayMode.get());

	try {
		CHECK_ERRORS(device.device->QueryInterface(IID_IDeckLinkInput, (void**)&this->input), "Failed to query interface");
		CHECK_ERRORS(this->input->SetCallback(this), "Failed to set input callback");

		//find the current display mode
		IDeckLinkDisplayModeIterator * displayModeIterator = 0;
		CHECK_ERRORS(input->GetDisplayModeIterator(&displayModeIterator), "Couldn't get DisplayModeIterator");

		IDeckLinkDisplayMode * displayModeTest = nullptr;
		IDeckLinkDisplayMode * displayModeFound = nullptr;
		while (displayModeIterator->Next(&displayModeTest) == S_OK) {
			if (displayModeTest->GetDisplayMode() == this->displayMode) {
				displayModeFound = displayModeTest;
			}
		}
		if (!displayModeFound) {
			CHECK_ERRORS(S_FALSE, "Cannot find displayMode");
		}
		width = displayModeFound->GetWidth();
		height = displayModeFound->GetHeight();
	}
	catch (const std::exception& e) {
		throw(ofxMachineVision::Exception(e.what()));
	}

	this->openTime = ofGetElapsedTimeMicros();
	this->frameIndex = 0;

	Specification specification(width, height, "BlackMagic", device.modelName);
	specification.addFeature(ofxMachineVision::Feature::Feature_DeviceID);
	specification.addFeature(ofxMachineVision::Feature::Feature_FreeRun);
	return specification;
}
//---------
void Input::stopCapture() {
	if (this->state != Running) {
		return;
	}
	try {
		CHECK_ERRORS(this->input->StopStreams(), "Failed to stop streams");
	}
	catch (const std::exception& e) {
		OFXBM_ERROR << e.what();
	}
	this->state = Waiting;
}
/**
 * @fn initKinect
 * Starts the Kinect.
 * Checks that a Kinect is connected, sets up the state variables and starts a
 * NITE session for the device, then begins motion capture so that gesture
 * movements flow into the application. Initialisation completes once the
 * Kinect registers a first movement.
 */
int initKinect() {
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	rc = g_Context.InitFromXmlFile(SAMPLE_XML_FILE, g_ScriptNode, &errors);
	CHECK_ERRORS(rc, errors, KNT_Msgs[KNT_INIT_FROM_XML_FILE]);
	CHECK_RC(rc, KNT_Msgs[KNT_INIT_FROM_XML]);

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, KNT_Msgs[KNT_FIND_DEPTH_GEN]);

	// Create and initialize point tracker
	g_pSessionManager = new XnVSessionManager;
	rc = g_pSessionManager->Initialize(&g_Context, "Wave", "RaiseHand");
	if (rc != XN_STATUS_OK) {
		printf(KNT_Msgs[KNT_COULD_NOT_INIT_SESSION], xnGetStatusString(rc));
		delete g_pSessionManager;
		return rc;
	}

	g_pSessionManager->RegisterSession(NULL, &SessionStart, &SessionEnd);

	// Start catching signals for quit indications
	CatchSignals(&g_bQuit);

	// Create and initialize the main slider
	g_pMainSlider = new XnVSelectableSlider1D(3);
	g_pMainSlider->RegisterItemSelect(NULL, &MainSlider_OnSelect);
	g_pMainSlider->RegisterActivate(NULL, &MainSlider_OnActivate);
	g_pMainSlider->RegisterDeactivate(NULL, &MainSlider_OnDeactivate);
	g_pMainSlider->RegisterPrimaryPointCreate(NULL, &MainSlider_OnPrimaryCreate);
	g_pMainSlider->RegisterPrimaryPointDestroy(NULL, &MainSlider_OnPrimaryDestroy);
	g_pMainSlider->RegisterValueChange(NULL, &MainSlider_OnValueChange);
	g_pMainSlider->SetValueChangeOnOffAxis(true);

	// Create the flow manager
	g_pMainFlowRouter = new XnVFlowRouter;

	// Connect flow manager to the point tracker
	g_pSessionManager->AddListener(g_pMainFlowRouter);

	g_Context.StartGeneratingAll();
	g_init_kinect = true;

	printf(KNT_Msgs[KNT_WAVE_GESTURE]);
	printf(KNT_Msgs[KNT_HIT_ANY_2_EXIT]);

	return 0;
}
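//----------
// The OpenNI/NITE snippets in this section rely on two status-checking macros that are
// not defined here. The sketch below assumes they follow the pattern used by the stock
// OpenNI sample code (print a message and return from the enclosing function on error);
// the exact definitions in the original projects may differ.
#define CHECK_RC(rc, what) \
	if ((rc) != XN_STATUS_OK) { \
		printf("%s failed: %s\n", what, xnGetStatusString(rc)); \
		return rc; \
	}

#define CHECK_ERRORS(rc, errors, what) \
	if ((rc) == XN_STATUS_NO_NODE_PRESENT) { \
		XnChar strError[1024]; \
		(errors).ToString(strError, 1024); \
		printf("%s: %s\n", what, strError); \
		return rc; \
	}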
//---------
HRESULT DeckLink::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame) {
	if (videoFrame == NULL) {
		return S_OK;
	}
	const auto width = videoFrame->GetWidth();
	const auto height = videoFrame->GetHeight();
	const auto pixelCount = width * height;

	try {
		//try to lock for writing
		int tryCount = 0;
		while (!this->incomingFrame->lockForWriting()) {
			ofSleepMillis(1);
			if (tryCount++ > 100) {
				throw(ofxMachineVision::Exception("Timeout processing incoming frame"));
			}
		}

		//check allocation
		if (this->incomingFrame->getPixels().getWidth() != width || this->incomingFrame->getPixels().getHeight() != height) {
			this->incomingFrame->getPixels().allocate(width, height, OF_IMAGE_GRAYSCALE);
		}

		//copy bytes out from frame
		unsigned char * yuvBytes = nullptr;
		CHECK_ERRORS(videoFrame->GetBytes((void**)&yuvBytes), "Failed to pull bytes from incoming video frame");

		//copy UYVY -> YY
		auto out = this->incomingFrame->getPixels().getPixels();
		for (int i = 0; i < pixelCount; i++) {
			//this method seems to be auto-SIMD optimised
			out[i] = yuvBytes[i * 2 + 1];
		}

		this->incomingFrame->setTimestamp(ofGetElapsedTimeMicros() - this->openTime);
		this->incomingFrame->setFrameIndex(this->frameIndex++);

		this->incomingFrame->unlock();

		//alert the grabber
		this->onNewFrame(this->incomingFrame);
	}
	catch (const ofxMachineVision::Exception& e) {
		OFXMV_ERROR << e.what();
	}
	return S_OK;
}
int main(int argc, char **argv) {
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH);
	CHECK_ERRORS(rc, errors, "InitFromXmlFile");
	CHECK_RC(rc, "InitFromXml");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_SCENE, g_SceneAnalyzer);
	CHECK_RC(rc, "Find scene analyzer");

	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

#ifdef USE_GLUT
	glInit(&argc, argv);
	glutMainLoop();
#elif defined(USE_GLES)
	if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context)) {
		printf("Error initing opengles\n");
		CleanupExit();
	}

	glDisable(GL_DEPTH_TEST);
	// glEnable(GL_TEXTURE_2D);
	glEnableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_COLOR_ARRAY);

	while ((!_kbhit()) && (!g_bQuit)) {
		glutDisplay();
		eglSwapBuffers(display, surface);
	}
	opengles_shutdown(display, surface, context);
	CleanupExit();
#endif
}
int configKinect() {
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	// Initialize OpenNI
	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH, g_ScriptNode, &errors);
	CHECK_ERRORS(rc, errors, "InitFromXmlFile");
	CHECK_RC(rc, "InitFromXmlFile");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_GESTURE, g_GestureGenerator);
	CHECK_RC(rc, "Find gesture generator");

	XnCallbackHandle hGestureIntermediateStageCompleted, hGestureProgress, hGestureReadyForNextIntermediateStage;
	g_GestureGenerator.RegisterToGestureIntermediateStageCompleted(GestureIntermediateStageCompletedHandler, NULL, hGestureIntermediateStageCompleted);
	g_GestureGenerator.RegisterToGestureReadyForNextIntermediateStage(GestureReadyForNextIntermediateStageHandler, NULL, hGestureReadyForNextIntermediateStage);
	g_GestureGenerator.RegisterGestureCallbacks(NULL, GestureProgressHandler, NULL, hGestureProgress);

	// Create NITE objects
	g_pSessionManager = new XnVSessionManager;
	rc = g_pSessionManager->Initialize(&g_Context, "Click,Wave", "RaiseHand");
	CHECK_RC(rc, "SessionManager::Initialize");

	g_pSessionManager->RegisterSession(NULL, SessionStarting, SessionEnding, FocusProgress);

	pointHandler = new PointHandler(20, g_DepthGenerator);
	g_pFlowRouter = new XnVFlowRouter;
	g_pFlowRouter->SetActive(pointHandler);
	g_pSessionManager->AddListener(g_pFlowRouter);

	pointHandler->RegisterNoPoints(NULL, NoHands);

	// Initialization done. Start generating
	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	return rc;
}
int main(int argc, char **argv) {
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH);
	CHECK_ERRORS(rc, errors, "InitFromXmlFile");
	CHECK_RC(rc, "InitFromXml");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	CHECK_RC(rc, "Find user generator");

	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON) ||
		!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) {
		printf("User generator doesn't support either skeleton or pose detection.\n");
		return XN_STATUS_ERROR;
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	XnCallbackHandle hUserCBs, hCalibrationCBs, hPoseCBs;
	g_UserGenerator.RegisterUserCallbacks(NewUser, LostUser, NULL, hUserCBs);
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(CalibrationStarted, CalibrationEnded, NULL, hCalibrationCBs);
	g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(PoseDetected, NULL, NULL, hPoseCBs);

#ifdef USE_GLUT
	glInit(&argc, argv);
	glutMainLoop();
#else
	if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context)) {
		printf("Error initing opengles\n");
		CleanupExit();
	}

	glDisable(GL_DEPTH_TEST);
	// glEnable(GL_TEXTURE_2D);
	glEnableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_COLOR_ARRAY);

	while ((!_kbhit()) && (!g_bQuit)) {
		glutDisplay();
		eglSwapBuffers(display, surface);
	}
	opengles_shutdown(display, surface, context);
	CleanupExit();
#endif
}
float gpuEncode(EntropyCodingTaskInfo *infos, type_image *img, int count, int targetSize)
{
	int codeBlocks = count;
	int maxOutLength = MAX_CODESTREAM_SIZE;

//	long int start_bebcot = start_measure();
	int n = 0;
	for(int i = 0; i < codeBlocks; i++)
		n += infos[i].width * infos[i].height;

	mem_mg_t *mem_mg = img->mem_mg;
	CodeBlockAdditionalInfo *h_infos = (CodeBlockAdditionalInfo *)mem_mg->alloc->host(sizeof(CodeBlockAdditionalInfo) * codeBlocks, mem_mg->ctx);
	CodeBlockAdditionalInfo *d_infos = (CodeBlockAdditionalInfo *)mem_mg->alloc->dev(sizeof(CodeBlockAdditionalInfo) * codeBlocks, mem_mg->ctx);
	byte *d_outbuf = (byte *)mem_mg->alloc->dev(sizeof(byte) * codeBlocks * maxOutLength, mem_mg->ctx);

	int magconOffset = 0;

	for(int i = 0; i < codeBlocks; i++)
	{
		h_infos[i].width = infos[i].width;
		h_infos[i].height = infos[i].height;
		h_infos[i].nominalWidth = infos[i].nominalWidth;
		h_infos[i].stripeNo = (int) ceil(infos[i].height / 4.0f);
		h_infos[i].subband = infos[i].subband;
		h_infos[i].magconOffset = magconOffset + infos[i].width;
		h_infos[i].magbits = infos[i].magbits;
		h_infos[i].coefficients = infos[i].coefficients;
		h_infos[i].compType = infos[i].compType;
		h_infos[i].dwtLevel = infos[i].dwtLevel;
		h_infos[i].stepSize = infos[i].stepSize;

		magconOffset += h_infos[i].width * (h_infos[i].stripeNo + 2);
	}

	GPU_JPEG2K::CoefficientState *d_stBuffors = (GPU_JPEG2K::CoefficientState *)mem_mg->alloc->dev(sizeof(GPU_JPEG2K::CoefficientState) * magconOffset, mem_mg->ctx);
	CHECK_ERRORS(cudaMemset((void *) d_stBuffors, 0, sizeof(GPU_JPEG2K::CoefficientState) * magconOffset));

	cuda_memcpy_htd(h_infos, d_infos, sizeof(CodeBlockAdditionalInfo) * codeBlocks);

//	cudaEvent_t start, end;
//	cudaEventCreate(&start);
//	cudaEventCreate(&end);
//	cudaEventRecord(start, 0);
//	printf("before launch encode: %d\n", stop_measure(start_bebcot));
//	long int start_ebcot = start_measure();
	if(targetSize == 0)
	{
		//printf("No pcrd\n");
		GPU_JPEG2K::launch_encode((int)ceil((double)codeBlocks/(double)THREADS), THREADS, d_stBuffors, d_outbuf, maxOutLength, d_infos, codeBlocks, mem_mg);
	}
	else
	{
		//printf("Pcrd\n");
		GPU_JPEG2K::launch_encode_pcrd((int)ceil((double)codeBlocks/(double)THREADS), THREADS, d_stBuffors, d_outbuf, maxOutLength, d_infos, codeBlocks, targetSize, mem_mg);
	}
	cudaThreadSynchronize();
//	printf("launch encode: %d\n", stop_measure(start_ebcot));
	CHECK_ERRORS();
//	cudaEventRecord(end, 0);

//	long int start_aebcot = start_measure();
//	long int start_copy = start_measure();
	cuda_memcpy_dth(d_infos, h_infos, sizeof(CodeBlockAdditionalInfo) * codeBlocks);
//	printf("copy: %d\n", stop_measure(start_copy));

	img->codestream = (byte *)mem_mg->alloc->host(sizeof(byte) * codeBlocks * maxOutLength, mem_mg->ctx);
	cuda_memcpy_dth(d_outbuf, img->codestream, sizeof(byte) * codeBlocks * maxOutLength);

//	long int start_cblk = start_measure();
	for(int i = 0; i < codeBlocks; i++)
	{
		infos[i].significantBits = h_infos[i].significantBits;
		infos[i].codingPasses = h_infos[i].codingPasses;

		if(h_infos[i].length > 0)
		{
			infos[i].length = h_infos[i].length;

			int len = h_infos[i].length;
			infos[i].codeStream = img->codestream + i * maxOutLength;
//			infos[i].codeStream = (byte *)mem_mg->alloc->host(sizeof(byte) * len, mem_mg->ctx);
//			cuda_memcpy_dth(d_outbuf + i * maxOutLength, infos[i].codeStream, sizeof(byte) * len);
		}
		else
		{
			infos[i].length = 0;
			infos[i].codeStream = NULL;
		}
	}
//	printf("cblk: %d\n", stop_measure(start_cblk));

//	long int start_free = start_measure();
//	cuda_d_free(d_outbuf);
	mem_mg->dealloc->dev(d_outbuf, mem_mg->ctx);
//	cuda_d_free(d_stBuffors);
	mem_mg->dealloc->dev(d_stBuffors, mem_mg->ctx);
//	cuda_d_free(d_infos);
	mem_mg->dealloc->dev(d_infos, mem_mg->ctx);
//	free(h_infos);
	mem_mg->dealloc->host(h_infos, mem_mg->ctx);
//	printf("free: %d\n", stop_measure(start_free));

	float elapsed = 0.0f;
//	cudaEventElapsedTime(&elapsed, start, end);

//	printf("after launch encode: %d\n", stop_measure(start_aebcot));

	return elapsed;
}
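// The two gpuEncode variants use CHECK_ERRORS both around calls that return a cudaError_t
// and with no argument at all (to pick up the last error after a kernel launch). A minimal
// sketch of a macro that covers both call sites follows; it assumes the project's real macro
// behaves like this, and it relies on the compiler accepting an empty variadic argument list
// (as GCC and MSVC do). The error handling shown (print and exit) is an assumption, not the
// project's actual policy.
#include <cstdio>
#include <cstdlib>
#include <cuda_runtime.h>

#define CHECK_ERRORS(...) \
	do { \
		__VA_ARGS__; \
		cudaError_t _err = cudaGetLastError(); \
		if (_err != cudaSuccess) { \
			fprintf(stderr, "CUDA error at %s:%d: %s\n", __FILE__, __LINE__, cudaGetErrorString(_err)); \
			exit(EXIT_FAILURE); \
		} \
	} while (0)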
float gpuEncode(EntropyCodingTaskInfo *infos, type_image *img, int count, int targetSize)
{
	int codeBlocks = count;
	int maxOutLength = /*MAX_CODESTREAM_SIZE*/(1 << img->cblk_exp_w) * (1 << img->cblk_exp_h) * 14;

//	long int start_bebcot = start_measure();
	int n = 0;
	for(int i = 0; i < codeBlocks; i++)
		n += infos[i].width * infos[i].height;

	mem_mg_t *mem_mg = img->mem_mg;
	CodeBlockAdditionalInfo *h_infos = (CodeBlockAdditionalInfo *)mem_mg->alloc->host(sizeof(CodeBlockAdditionalInfo) * codeBlocks, mem_mg->ctx);
	byte *d_cxd_pairs = (byte *)mem_mg->alloc->dev(sizeof(byte) * codeBlocks * maxOutLength, mem_mg->ctx);
	CodeBlockAdditionalInfo *d_infos = (CodeBlockAdditionalInfo *)mem_mg->alloc->dev(sizeof(CodeBlockAdditionalInfo) * codeBlocks, mem_mg->ctx);

	int magconOffset = 0;

	for(int i = 0; i < codeBlocks; i++)
	{
		h_infos[i].width = infos[i].width;
		h_infos[i].height = infos[i].height;
		h_infos[i].nominalWidth = infos[i].nominalWidth;
		h_infos[i].stripeNo = (int) ceil(infos[i].height / 4.0f);
		h_infos[i].subband = infos[i].subband;
		h_infos[i].magconOffset = magconOffset + infos[i].width;
		h_infos[i].magbits = infos[i].magbits;
		h_infos[i].coefficients = infos[i].coefficients;
		h_infos[i].compType = infos[i].compType;
		h_infos[i].dwtLevel = infos[i].dwtLevel;
		h_infos[i].stepSize = infos[i].stepSize;

		magconOffset += h_infos[i].width * (h_infos[i].stripeNo + 2);
	}

	GPU_JPEG2K::CoefficientState *d_stBuffors = (GPU_JPEG2K::CoefficientState *)mem_mg->alloc->dev(sizeof(GPU_JPEG2K::CoefficientState) * magconOffset, mem_mg->ctx);
	CHECK_ERRORS(cudaMemset((void *) d_stBuffors, 0, sizeof(GPU_JPEG2K::CoefficientState) * magconOffset));

	cuda_memcpy_htd(h_infos, d_infos, sizeof(CodeBlockAdditionalInfo) * codeBlocks);
//	printf("before launch encode: %d\n", stop_measure(start_bebcot));

	long int start_ebcot = start_measure();
	if(targetSize == 0)
	{
		//printf("No pcrd\n");
		CHECK_ERRORS(GPU_JPEG2K::launch_encode((int) ceil((float) codeBlocks / THREADS), THREADS, d_stBuffors, d_cxd_pairs, maxOutLength, d_infos, codeBlocks, mem_mg));
	}
	else
	{
//		printf("Pcrd\n");
		CHECK_ERRORS(GPU_JPEG2K::launch_encode_pcrd((int) ceil((float) codeBlocks / THREADS), THREADS, d_stBuffors, maxOutLength, d_infos, codeBlocks, targetSize, mem_mg));
	}
//	printf("launch encode: %d\n", stop_measure(start_ebcot));

//	long int start_mqc = start_measure();
	cuda_memcpy_dth(d_infos, h_infos, sizeof(CodeBlockAdditionalInfo) * codeBlocks);
	img->codestream = mqc_gpu_encode(infos, h_infos, codeBlocks, d_cxd_pairs, maxOutLength, mem_mg);
//	printf("mqc: %d\n", stop_measure(start_mqc));

//	long int start_aebcot = start_measure();
	for(int i = 0; i < codeBlocks; i++)
	{
		infos[i].significantBits = h_infos[i].significantBits;
		infos[i].codingPasses = h_infos[i].codingPasses;

		/*if(h_infos[i].length > 0)
		{
			infos[i].length = h_infos[i].length;

			int len = h_infos[i].length;
			infos[i].codeStream = (byte *) malloc(sizeof(byte) * len);
			cuda_memcpy_dth(d_outbuf + i * maxOutLength, infos[i].codeStream, sizeof(byte) * len);
		}
		else
		{
			infos[i].length = 0;
			infos[i].codeStream = NULL;
		}*/
	}

	mem_mg->dealloc->dev(d_stBuffors, mem_mg->ctx);
	mem_mg->dealloc->dev(d_infos, mem_mg->ctx);
	mem_mg->dealloc->dev(d_cxd_pairs, mem_mg->ctx);
	mem_mg->dealloc->host(h_infos, mem_mg->ctx);
//	printf("after launch encode: %d\n", stop_measure(start_aebcot));

	float elapsed = 0.0f;

	return elapsed;
}
int main(int argc, char ** argv) {
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	// Initialize OpenNI
	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH, g_ScriptNode, &errors);
	CHECK_ERRORS(rc, errors, "InitFromXmlFile");
	CHECK_RC(rc, "InitFromXmlFile");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_HANDS, g_HandsGenerator);
	CHECK_RC(rc, "Find hands generator");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_GESTURE, g_GestureGenerator);
	CHECK_RC(rc, "Find gesture generator");

	XnCallbackHandle h;
	if (g_HandsGenerator.IsCapabilitySupported(XN_CAPABILITY_HAND_TOUCHING_FOV_EDGE)) {
		g_HandsGenerator.GetHandTouchingFOVEdgeCap().RegisterToHandTouchingFOVEdge(TouchingCallback, NULL, h);
	}

	XnCallbackHandle hGestureIntermediateStageCompleted, hGestureProgress, hGestureReadyForNextIntermediateStage;
	g_GestureGenerator.RegisterToGestureIntermediateStageCompleted(GestureIntermediateStageCompletedHandler, NULL, hGestureIntermediateStageCompleted);
	g_GestureGenerator.RegisterToGestureReadyForNextIntermediateStage(GestureReadyForNextIntermediateStageHandler, NULL, hGestureReadyForNextIntermediateStage);
	g_GestureGenerator.RegisterGestureCallbacks(NULL, GestureProgressHandler, NULL, hGestureProgress);

	// Create NITE objects
	g_pSessionManager = new XnVSessionManager;
	rc = g_pSessionManager->Initialize(&g_Context, "Click,Wave", "RaiseHand");
	CHECK_RC(rc, "SessionManager::Initialize");

	g_pSessionManager->RegisterSession(NULL, SessionStarting, SessionEnding, FocusProgress);

	g_pDrawer = new XnVPointDrawer(20, g_DepthGenerator);
	g_pFlowRouter = new XnVFlowRouter;
	g_pFlowRouter->SetActive(g_pDrawer);
	g_pSessionManager->AddListener(g_pFlowRouter);

	g_pDrawer->RegisterNoPoints(NULL, NoHands);
	g_pDrawer->SetDepthMap(g_bDrawDepthMap);

	// Initialization done. Start generating
	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	// Mainloop
#ifdef USE_GLUT
	glInit(&argc, argv);
	glutMainLoop();
#elif defined(USE_GLES)
	if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context)) {
		printf("Error initializing opengles\n");
		CleanupExit();
	}

	glDisable(GL_DEPTH_TEST);
	glEnable(GL_TEXTURE_2D);
	glEnableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_COLOR_ARRAY);

	while ((!_kbhit()) && (!g_bQuit)) {
		glutDisplay();
		eglSwapBuffers(display, surface);
	}
	opengles_shutdown(display, surface, context);
	CleanupExit();
#endif
}