Example #1
void OSystem_3DS::destroyEvents() {
	threadJoin(_timerThread, U64_MAX);
	threadFree(_timerThread);

	threadJoin(_eventThread, U64_MAX);
	threadFree(_eventThread);
	delete eventMutex;
}
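Example #1 above and Example #2 below both tear down ScummVM's 3DS worker threads with libctru's threadJoin()/threadFree() pair. As a minimal, self-contained sketch of that lifecycle (the entry function, the s_running flag, and the stack size are illustrative, not taken from the examples):

#include <3ds.h>

static volatile bool s_running = true;

static void worker_main(void *arg)
{
	while (s_running) {
		// ... periodic work ...
		svcSleepThread(10 * 1000 * 1000LL); // sleep 10 ms
	}
}

int main(void)
{
	s32 prio = 0;
	svcGetThreadPriority(&prio, CUR_THREAD_HANDLE);

	// Joinable (detached = false) thread on the default core (-2).
	Thread worker = threadCreate(worker_main, NULL, 16 * 1024, prio - 1, -2, false);

	// ... application main loop ...

	s_running = false;
	threadJoin(worker, U64_MAX); // wait for worker_main to return
	threadFree(worker);          // only then release its stack and handle
	return 0;
}

threadFree() may only be called on a thread that has already finished (after threadJoin(), or via the detached-thread paths shown in Examples #3 and #4); freeing a running thread would release its stack out from under it.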
Example #2
void OSystem_3DS::destroyAudio() {
	if (hasAudio) {
		threadJoin(audioThread, U64_MAX);
		threadFree(audioThread);
		ndspExit();
	}

	delete _mixer;
	_mixer = 0;
}
Example #3
void threadDetach(Thread thread)
{
	if (!thread || thread->detached)
		return;
	if (thread->finished)
	{
		threadFree(thread);
		return;
	}
	thread->detached = true;
}
Example #4
void threadExit(int rc)
{
	Thread t = threadGetCurrent();
	if (!t)
		__panic();

	t->finished = true;
	if (t->detached)
		threadFree(t);
	else
		t->rc = rc;

	svcExitThread();
}
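Examples #3 and #4 (from libctru itself) show who ends up calling threadFree() for a detached thread: whichever of threadDetach() or threadExit() runs second observes both detached and finished set, and performs the free. For application code this means a thread created with detached = true is never joined or freed manually. A hedged sketch (the helper, its priority, and stack size are hypothetical):

// Fire-and-forget worker: libctru calls threadExit() when the entry
// function returns, and threadExit() frees a detached thread itself.
static void flush_logs_main(void *arg)
{
	// ... one-shot background work ...
}

void start_log_flush(void)
{
	s32 prio = 0;
	svcGetThreadPriority(&prio, CUR_THREAD_HANDLE);
	// detached = true: no threadJoin()/threadFree() afterwards.
	threadCreate(flush_logs_main, NULL, 8 * 1024, prio + 1, -2, true);
}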
Example #5
THREAD_HANDLE_T *threadInit( int ID, void *arg, void *(*start_routine)(THREAD_HANDLE_T*) )
{
    THREAD_HANDLE_T    *flag;
    int err;
#if !defined(_WINRT) && !defined(ARUTIL_DISABLE_PTHREADS)
    pthread_t           thread;
    pthread_attr_t      attr;
#endif
    if ((flag = malloc(sizeof(THREAD_HANDLE_T))) == NULL) return NULL;

    flag->ID     = ID;
    flag->startF = 0;
    flag->endF   = 0;
    flag->busyF  = 0;
    flag->arg    = arg;
    pthread_mutex_init( &(flag->mut), NULL );
    pthread_cond_init( &(flag->cond1), NULL );
    pthread_cond_init( &(flag->cond2), NULL );

#if !defined(_WINRT) && !defined(ARUTIL_DISABLE_PTHREADS)
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED); // Preclude the need to do pthread_join on the thread after it exits.
    err = pthread_create(&thread, &attr, (void *(*)(void*))start_routine, flag);
    pthread_attr_destroy(&attr);
#elif defined(_WIN32)
#  ifdef _WINRT
    err = arCreateDetachedThreadWinRT(start_routine, flag);
#  else
    struct start_routine_proxy_arg *srpa_p = malloc(sizeof(struct start_routine_proxy_arg));
    if (!srpa_p) {
        threadFree(&flag);
        return NULL;
    }
    srpa_p->start_routine = start_routine;
    srpa_p->arg = flag;
    err = (_beginthread(start_routine_proxy, 0, srpa_p) == -1L);
#  endif
#else
#  error No routine available to create a thread.
#endif
    if (err == 0) {
        return flag;
    } else {
        threadFree(&flag);
        return NULL;
    }

}
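Note that from Example #5 on, threadFree is a different function: ARToolKit's thread utility takes a THREAD_HANDLE_T ** so it can NULL the caller's pointer, which is why Examples #5, #6, #7, and #9 pass &flag or &nftDataLoadingThreadHandle rather than the handle itself. A sketch of the caller-side sequence, using only calls that appear in these examples (the start routine body and run_once() are illustrative):

static void *my_start_routine(THREAD_HANDLE_T *threadHandle)
{
    // ... worker loop driven by ARToolKit's thread signalling helpers ...
    return NULL;
}

void run_once(void *arg)
{
    THREAD_HANDLE_T *handle = threadInit(0, arg, my_start_routine);
    if (!handle) return;

    // ... signal work to the thread and poll it elsewhere ...

    threadWaitQuit(handle); // request quit and wait, as in Example #6
    threadFree(&handle);    // frees the handle and sets handle to NULL
}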
Example #6
int trackingInitQuit( THREAD_HANDLE_T **threadHandle_p )
{
    TrackingInitHandle  *trackingInitHandle;

    if (!threadHandle_p)  {
        ARLOGe("trackingInitQuit(): Error: NULL threadHandle_p.\n");
        return (-1);
    }
    if (!*threadHandle_p) return 0;
    
    threadWaitQuit( *threadHandle_p );
    trackingInitHandle = (TrackingInitHandle *)threadGetArg(*threadHandle_p);
    if (trackingInitHandle) {
        free( trackingInitHandle->imagePtr );
        free( trackingInitHandle );
    }
    threadFree( threadHandle_p );
    return 0;
}
Example #7
JNIEXPORT void JNICALL JNIFUNCTION_NATIVE(nativeVideoFrame(JNIEnv* env, jobject obj, jbyteArray pinArray))
{
    int i, j, k;
    jbyte* inArray;
        
    if (!videoInited) {
#ifdef DEBUG
        LOGD("nativeVideoFrame !VIDEO\n");
#endif        
        return; // No point in trying to track until video is inited.
    }
    if (!nftDataLoaded) {
        if (!nftDataLoadingThreadHandle || threadGetStatus(nftDataLoadingThreadHandle) < 1) {
#ifdef DEBUG
            LOGD("nativeVideoFrame !NFTDATA\n");
#endif        
            return;
        } else {
            nftDataLoaded = true;
            threadWaitQuit(nftDataLoadingThreadHandle);
            threadFree(&nftDataLoadingThreadHandle); // Clean up.
        }
    }
    if (!gARViewInited) {
#ifdef DEBUG
        LOGD("nativeVideoFrame !ARVIEW\n");
#endif
        return; // Also, we won't track until the ARView has been inited.
    }
#ifdef DEBUG
    LOGD("nativeVideoFrame\n");
#endif        
    
    // Copy the incoming YUV420 image from pinArray.
    env->GetByteArrayRegion(pinArray, 0, gVideoFrameSize, (jbyte *)gVideoFrame);
    
	// As of ARToolKit v5.0, NV21 format video frames are handled natively,
	// and no longer require colour conversion to RGBA.
	// If you still require RGBA format information from the video,
    // here is where you'd do the conversion:
    // color_convert_common(gVideoFrame, gVideoFrame + videoWidth*videoHeight, videoWidth, videoHeight, myRGBABuffer);

    videoFrameNeedsPixelBufferDataUpload = true; // Note that buffer needs uploading. (Upload must be done on OpenGL context's thread.)
    
    // Run marker detection on frame
    if (trackingThreadHandle) {
        // Perform NFT tracking.
        float            err;
        int              ret;
        int              pageNo;
        
        if( detectedPage == -2 ) {
            trackingInitStart( trackingThreadHandle, gVideoFrame );
            detectedPage = -1;
        }
        if( detectedPage == -1 ) {
            ret = trackingInitGetResult( trackingThreadHandle, trackingTrans, &pageNo);
            if( ret == 1 ) {
                if (pageNo >= 0 && pageNo < surfaceSetCount) {
#ifdef DEBUG
                    LOGE("Detected page %d.\n", pageNo);
#endif
                    detectedPage = pageNo;
                    ar2SetInitTrans(surfaceSet[detectedPage], trackingTrans);
                } else {
                    LOGE("Detected bad page %d.\n", pageNo);
                    detectedPage = -2;
                }
            } else if( ret < 0 ) {
#ifdef DEBUG
                LOGE("No page detected.\n");
#endif
                detectedPage = -2;
            }
        }
        if( detectedPage >= 0 && detectedPage < surfaceSetCount) {
            if( ar2Tracking(ar2Handle, surfaceSet[detectedPage], gVideoFrame, trackingTrans, &err) < 0 ) {
#ifdef DEBUG
                LOGE("Tracking lost.\n");
#endif
                detectedPage = -2;
            } else {
#ifdef DEBUG
                LOGE("Tracked page %d (max %d).\n", detectedPage, surfaceSetCount - 1);
#endif
            }
        }
    } else {
        LOGE("Error: trackingThreadHandle\n");
        detectedPage = -2;
    }
    
    // Update markers.
    for (i = 0; i < markersNFTCount; i++) {
        markersNFT[i].validPrev = markersNFT[i].valid;
        if (markersNFT[i].pageNo >= 0 && markersNFT[i].pageNo == detectedPage) {
            markersNFT[i].valid = TRUE;
            for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) markersNFT[i].trans[j][k] = trackingTrans[j][k];
        }
        else markersNFT[i].valid = FALSE;
        if (markersNFT[i].valid) {
            
            // Filter the pose estimate.
            if (markersNFT[i].ftmi) {
                if (arFilterTransMat(markersNFT[i].ftmi, markersNFT[i].trans, !markersNFT[i].validPrev) < 0) {
                    LOGE("arFilterTransMat error with marker %d.\n", i);
                }
            }
            
            if (!markersNFT[i].validPrev) {
                // Marker has become visible, tell any dependent objects.
                VirtualEnvironmentHandleARMarkerAppeared(i);
            }
    
            // We have a new pose, so set that.
            arglCameraViewRHf(markersNFT[i].trans, markersNFT[i].pose.T, 1.0f /*VIEW_SCALEFACTOR*/);
            // Tell any dependent objects about the update.
            VirtualEnvironmentHandleARMarkerWasUpdated(i, markersNFT[i].pose);
            
        } else {
            
            if (markersNFT[i].validPrev) {
                // Marker has ceased to be visible, tell any dependent objects.
                VirtualEnvironmentHandleARMarkerDisappeared(i);
            }
        }                    
    }
}
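Example #7's detection logic is a small state machine keyed on detectedPage. Naming the states makes the transitions easier to follow (the enum is illustrative; the values are the ones used in the example):

enum {
    PAGE_DETECTION_IDLE    = -2, // next frame: kick off trackingInitStart()
    PAGE_DETECTION_PENDING = -1, // poll trackingInitGetResult() for a page
    // >= 0: the page number currently tracked via ar2Tracking()
};

A failed trackingInitGetResult(), a bad page number, or a lost ar2Tracking() drops the state back to -2, restarting detection on the following frame.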
Example #8
bool stream_file(const std::string& filename)
{
    if (filename.empty())
    {
        print("No file selected\n");
        return true;
    }

    VGMSTREAM* vgmstream = init_vgmstream(filename.c_str());
    if (!vgmstream)
    {
        print("Bad file %s\n", filename.c_str());
        return true;
    }

    const int channels = vgmstream->channels;
    u32 buffer_size = max_samples * vgmstream->channels * sizeof(sample);

    rawSampleBuffer = static_cast<sample*>(linearAlloc(buffer_size));
    sample* buffer = static_cast<sample*>(linearAlloc(buffer_size));
    sample* buffer2 = static_cast<sample*>(linearAlloc(buffer_size));
    playBuffer1.samples = max_samples;
    playBuffer2.samples = max_samples;
    for (int i = 0; i < channels; i++)
    {
        playBuffer1.channels.push_back(buffer + i * max_samples);
        playBuffer2.channels.push_back(buffer2 + i * max_samples);
    }

    stream_filename strm_file;
    strm_file.filename = filename;
    strm_file.stream = vgmstream;

    runThreads = true;

    s32 prio = 0;
    Thread musicThread;
    Thread produceThread;
    svcGetThreadPriority(&prio, CUR_THREAD_HANDLE);
    musicThread = threadCreate(streamMusic, &strm_file, 4 * 1024, prio-1, -2, false);
    produceThread = threadCreate(decodeThread, &strm_file, 4 * 1024, prio-1, -2, false);

    bool ret = false;
    while (aptMainLoop())
    {
        hidScanInput();
        u32 kDown = hidKeysDown();
        if ((kDown & KEY_START) || (kDown & KEY_B))
        {
            ret = kDown & KEY_START;
            break;
        }
        gfxFlushBuffers();
        gfxSwapBuffers();

        gspWaitForVBlank();
    }

    runThreads = false;
    svcSignalEvent(bufferReadyProduceRequest);
    svcSignalEvent(bufferReadyConsumeRequest);
    threadJoin(musicThread, U64_MAX);
    threadJoin(produceThread, U64_MAX);
    threadFree(musicThread);
    threadFree(produceThread);
    svcClearEvent(bufferReadyConsumeRequest);
    svcClearEvent(bufferReadyProduceRequest);


    linearFree(rawSampleBuffer);
    linearFree(buffer);
    linearFree(buffer2);
    playBuffer1.channels.clear();
    playBuffer2.channels.clear();

    close_vgmstream(vgmstream);

    return ret;
}
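The shutdown ordering in Example #8 is the part worth copying: the music and decode threads may be blocked on events, so the code clears runThreads, signals both events to wake the threads, and only then joins and frees them. As a reusable sketch (the helper name and its single wakeEvent parameter are hypothetical; runThreads is the example's own flag):

// Hypothetical helper; wakeEvent is whatever event the worker blocks on.
static void stop_stream_thread(Thread t, Handle wakeEvent)
{
    runThreads = false;        // tell the worker loop to exit
    svcSignalEvent(wakeEvent); // unblock it if it is waiting on the event
    threadJoin(t, U64_MAX);    // wait until the entry function returns
    threadFree(t);             // release the stack only after the join
    svcClearEvent(wakeEvent);  // reset the event for the next stream
}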
Example #9
JNIEXPORT void JNICALL JNIFUNCTION_NATIVE(nativeDrawFrame(JNIEnv* env, jobject obj, jint movieWidth, jint movieHeight, jint movieTextureID, jfloatArray movieTextureMtx))
{
	float width, height;
	
	// Get the array contents.
	//jsize movieTextureMtxLen = env->GetArrayLength(movieTextureMtx);
	float movieTextureMtxUnpacked[16];
    env->GetFloatArrayRegion(movieTextureMtx, 0, /*movieTextureMtxLen*/ 16, movieTextureMtxUnpacked);
        
    if (!videoInited) {
#ifdef DEBUG
        LOGI("nativeDrawFrame !VIDEO\n");
#endif        
        return; // No point in trying to draw until video is inited.
    }
    if (!nftDataLoaded && nftDataLoadingThreadHandle) {
        // Check if NFT data loading has completed.
        if (threadGetStatus(nftDataLoadingThreadHandle) > 0) {
            nftDataLoaded = true;
            threadWaitQuit(nftDataLoadingThreadHandle);
            threadFree(&nftDataLoadingThreadHandle); // Clean up.
        } else {
#ifdef DEBUG
            LOGI("nativeDrawFrame !NFTDATA\n");
#endif        
            return; // No point in trying to draw until NFT data is loaded.
        }
    }
#ifdef DEBUG
    LOGI("nativeDrawFrame\n");
#endif        
    if (!gARViewInited) {
        if (!initARView()) return;
    }
    if (gARViewLayoutRequired) layoutARView();
    
    // Upload new video frame if required.
    if (videoFrameNeedsPixelBufferDataUpload) {
        pthread_mutex_lock(&gVideoFrameLock);
        arglPixelBufferDataUploadBiPlanar(gArglSettings, gVideoFrame, gVideoFrame + videoWidth*videoHeight);
        videoFrameNeedsPixelBufferDataUpload = false;
        pthread_mutex_unlock(&gVideoFrameLock);
    }
    
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the buffers for new frame.
    
    // Display the current frame
    arglDispImage(gArglSettings);
    
    // Set up 3D mode.
	glMatrixMode(GL_PROJECTION);
	glLoadMatrixf(cameraLens);
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
    glStateCacheEnableDepthTest();

    // Set any initial per-frame GL state you require here.
    // --->
    
    // Lighting and geometry that moves with the camera should be added here.
    // (I.e. should be specified before camera pose transform.)
    // --->
        
    // Draw an object on all valid markers.
    for (int i = 0; i < markersNFTCount; i++) {
        if (markersNFT[i].valid) {
            glLoadMatrixf(markersNFT[i].pose.T);
            
            //
            // Draw a rectangular surface textured with the movie texture.
            //
            float w = 80.0f;
            float h = w * (float)movieHeight/(float)movieWidth;
            GLfloat vertices[4][2] = { {0.0f, 0.0f}, {w, 0.0f}, {w, h}, {0.0f, h} };
            GLfloat normals[4][3] = { {0.0f, 0.0f, 1.0f}, {0.0f, 0.0f, 1.0f}, {0.0f, 0.0f, 1.0f}, {0.0f, 0.0f, 1.0f} };
            GLfloat texcoords[4][2] = { {0.0f, 0.0f},  {1.0f, 0.0f},  {1.0f, 1.0f},  {0.0f, 1.0f} };

            glStateCacheActiveTexture(GL_TEXTURE0);

            glMatrixMode(GL_TEXTURE);
            glPushMatrix();
            glLoadMatrixf(movieTextureMtxUnpacked);
            glMatrixMode(GL_MODELVIEW);
            
            glVertexPointer(2, GL_FLOAT, 0, vertices);
            glNormalPointer(GL_FLOAT, 0, normals);
            glStateCacheClientActiveTexture(GL_TEXTURE0);
            glTexCoordPointer(2, GL_FLOAT, 0, texcoords);
            glStateCacheEnableClientStateVertexArray();
            glStateCacheEnableClientStateNormalArray();
            glStateCacheEnableClientStateTexCoordArray();
            glStateCacheBindTexture2D(0);
            glStateCacheDisableTex2D();
            glStateCacheDisableLighting();

            glEnable(GL_TEXTURE_EXTERNAL_OES);
            glBindTexture(GL_TEXTURE_EXTERNAL_OES, movieTextureID);

            glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

            glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
            glDisable(GL_TEXTURE_EXTERNAL_OES);

            glMatrixMode(GL_TEXTURE);
            glPopMatrix();
            glMatrixMode(GL_MODELVIEW);
            //
            // End.
            //
        }
    }
    
    if (cameraPoseValid) {
        
        glMultMatrixf(cameraPose);
        
        // All lighting and geometry to be drawn in world coordinates goes here.
        // --->
    }
        
    // If you added external OpenGL code above, and that code doesn't use the glStateCache routines,
    // then uncomment the line below.
    //glStateCacheFlush();
    
    // Set up 2D mode.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
	width = (float)viewPort[viewPortIndexWidth];
	height = (float)viewPort[viewPortIndexHeight];
	glOrthof(0.0f, width, 0.0f, height, -1.0f, 1.0f);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glStateCacheDisableDepthTest();

    // Add your own 2D overlays here.
    // --->
    
    // If you added external OpenGL code above, and that code doesn't use the glStateCache routines,
    // then uncomment the line below.
    //glStateCacheFlush();

#ifdef DEBUG
    // Example of 2D drawing. It just draws a white border line (compiled in for DEBUG builds only).
    const GLfloat square_vertices [4][2] = { {0.5f, 0.5f}, {0.5f, height - 0.5f}, {width - 0.5f, height - 0.5f}, {width - 0.5f, 0.5f} };
    glStateCacheDisableLighting();
    glStateCacheDisableTex2D();
    glVertexPointer(2, GL_FLOAT, 0, square_vertices);
    glStateCacheEnableClientStateVertexArray();
    glColor4ub(255, 255, 255, 255);
    glDrawArrays(GL_LINE_LOOP, 0, 4);

    CHECK_GL_ERROR();
#endif
}