// Constructor: wires this object into the android_app glue, prepares
// accelerometer monitoring, and forces a fullscreen, keep-screen-on window.
gkAndroidApp::gkAndroidApp(android_app* state)
    : m_state(state),
      m_window(NULL)
{
    state->userData     = this;
    state->onAppCmd     = handleCmd;
    state->onInputEvent = handleInput;

    // Prepare to monitor the accelerometer.
    ASensorManager* mgr = ASensorManager_getInstance();
    m_accelerometerSensor =
        ASensorManager_getDefaultSensor(mgr, ASENSOR_TYPE_ACCELEROMETER);
    m_sensorEventQueue = ASensorManager_createEventQueue(
        mgr, state->looper, LOOPER_ID_USER, NULL, NULL);

    // Full-screen window that never dims while the app is up.
    ANativeActivity_setWindowFlags(
        state->activity,
        AWINDOW_FLAG_FULLSCREEN | AWINDOW_FLAG_KEEP_SCREEN_ON, 0);
}
// Binds the engine object to the native glue state, prepares the
// accelerometer event queue, and restores any previously saved state.
TCGvoid TCGClient::AndroidInit(struct android_app* state)
{
    state->userData     = &m_engineObj;
    state->onAppCmd     = EngineHandleCmd;
    state->onInputEvent = EngineHandleInput;
    m_engineObj.app     = state;

    // Prepare to monitor the accelerometer.
    m_engineObj.sensorManager = ASensorManager_getInstance();
    m_engineObj.accelerometerSensor = ASensorManager_getDefaultSensor(
        m_engineObj.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    m_engineObj.sensorEventQueue = ASensorManager_createEventQueue(
        m_engineObj.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    // A previous instance left a saved state behind; restore from it.
    if (state->savedState != NULL)
        m_engineObj.state = *(struct SavedState*)state->savedState;
}
int defaultInitializeApplication() { struct android_app *vid = main_view_id; if (is_valid(vid) == false) return Failed; gy::r3::initialize(GY_RENDERER_OPENGLES); asset_manager = vid->activity->assetManager; sensor_manager = ASensorManager_getInstance(); sensor_accelerometer = ASensorManager_getDefaultSensor(sensor_manager, ASENSOR_TYPE_ACCELEROMETER); sensor_gyroscope = ASensorManager_getDefaultSensor(sensor_manager, ASENSOR_TYPE_GYROSCOPE); sensor_event_queue = ASensorManager_createEventQueue(sensor_manager, vid->looper, LOOPER_ID_USER, NullPtr, NullPtr); vid->onAppCmd = gy__onApplicationCommand; vid->onInputEvent = gy__onInputEvent; return Success; }
void EventLoop::activate() { Pegas_log_info("EventLoop::activate"); if((!mEnabled) && (mApplication->window != NULL)) { mSensorPollSource.id = LOOPER_ID_USER; mSensorPollSource.app = mApplication; mSensorPollSource.process = callback_sensor; mSensorManager = ASensorManager_getInstance(); if (mSensorManager != NULL) { mSensorEventQueue = ASensorManager_createEventQueue(mSensorManager, mApplication->looper, LOOPER_ID_USER, NULL, &mSensorPollSource); if (mSensorEventQueue == NULL) { Pegas_log_warning("EventLoop::activate:"); Pegas_log_warning("ASensorManager_createEventQueue == NULL"); goto ERROR; } } mQuit = false; mEnabled = true; if(mActivityHandler->onActivate() != STATUS_OK) { Pegas_log_warning("EventLoop::activate:"); Pegas_log_warning("mActivityHandler->onActivate() != STATUS_OK"); goto ERROR; } } return; ERROR: Pegas_log_info("Application finish"); mQuit = true; deactivate(); ANativeActivity_finish(mApplication->activity); }
/*
 * ConfigureSampling: prepares the calling thread's looper and creates one
 * sensor event queue per configured sensor entry, then caches JNI global
 * references and the byte-array exchange buffer used to hand samples back
 * to Java, and initializes the state lock.
 *
 * NOTE(review): 0xDEF00ABC is a magic looper ident; events appear to be
 * delivered through SampleHandler (callback) rather than by polling this
 * ident -- confirm against the code that services the looper.
 */
void ConfigureSampling(JNIEnv *JNI, jobject Self)
{
    int I;

    /* Reuse this thread's looper if one exists; otherwise create one that
     * also permits non-callback (ident-based) polling. */
    State->Looper = ALooper_forThread();
    if (NULL == State->Looper) {
        State->Looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
    }

    for (I = 0; I < State->Count; I++) {
        /* Interval == 0 marks a sensor slot that was not requested. */
        if (0 == State->Info[I].Interval) {
            LOG(ANDROID_LOG_INFO, "Event queue for sensor #%d not created", I);
            continue;
        }
        /* Per-sensor queue; SampleHandler receives &State->Info[I] as its
         * user data so the callback can tell the sensors apart. */
        State->Info[I].Queue = ASensorManager_createEventQueue(State->Manager, State->Looper, 0xDEF00ABC, SampleHandler, &(State->Info[I]));
        LOG(ANDROID_LOG_INFO, "Event queue for sensor #%d created", I);
    }

    /* Global refs survive beyond this JNI call (and across threads);
     * Exchange is the Java byte[] used to marshal sample data out. */
    State->Data = (*JNI)->NewGlobalRef(JNI, CallSelfData(JNI, Self));
    State->Exchange = (*JNI)->NewGlobalRef(JNI, (*JNI)->NewByteArray(JNI, State->Maximum));
    pthread_mutex_init(&State->Lock, NULL);
}
// Loads the vertex and fragment shader sources from the APK assets and sets
// up accelerometer sampling on a fresh (non-callback) looper for this thread.
void init(AAssetManager *assetManager) {
    // Read an entire asset into a std::string; aborts (assert) on failure.
    auto readAsset = [assetManager](const char *path) {
        AAsset *asset = AAssetManager_open(assetManager, path, AASSET_MODE_BUFFER);
        assert(asset != NULL);
        const void *buf = AAsset_getBuffer(asset);
        assert(buf != NULL);
        off_t len = AAsset_getLength(asset);
        std::string text((const char *)buf, (size_t)len);
        AAsset_close(asset);
        return text;
    };

    vertexShaderSource   = readAsset("shader.glslv");
    fragmentShaderSource = readAsset("shader.glslf");

    // Accelerometer: manager -> default sensor -> looper -> event queue.
    sensorManager = AcquireASensorManagerInstance();
    assert(sensorManager != NULL);
    accelerometer = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    assert(accelerometer != NULL);
    looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
    assert(looper != NULL);
    accelerometerEventQueue = ASensorManager_createEventQueue(sensorManager, looper, LOOPER_ID_USER, NULL, NULL);
    assert(accelerometerEventQueue != NULL);

    // Both sensor calls return a negative value on failure.
    auto status = ASensorEventQueue_enableSensor(accelerometerEventQueue, accelerometer);
    assert(status >= 0);
    status = ASensorEventQueue_setEventRate(accelerometerEventQueue, accelerometer, SENSOR_REFRESH_PERIOD_US);
    assert(status >= 0);
    (void)status; // keep NDEBUG builds warning-free

    generateXPos();
}
// Starts gyroscope monitoring via a callback-driven event queue on the
// calling thread's looper. Accelerometer and magnetic-field handling is
// present but commented out throughout.
// NOTE(review): the gyro event rate of 100 (microseconds => 10 kHz) is very
// likely below the sensor's minimum delay -- confirm with
// ASensor_getMinDelay. Also the rate is set before the sensor is enabled;
// NDK samples enable first.
void startSensor() {
    // Use this thread's looper, creating one if the thread has none yet.
    ALooper* looper = ALooper_forThread();
    if (looper == NULL) {
        looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
    }
    engine.sensorManager = ASensorManager_getInstance();

    // get sensor
    // engine.accelerometerSensor = ASensorManager_getDefaultSensor(
    //     engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.gyroscopeSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_GYROSCOPE);
    // engine.magneticSensor = ASensorManager_getDefaultSensor(
    //     engine.sensorManager, ASENSOR_TYPE_MAGNETIC_FIELD);

    // Each sensor gets its own queue, looper ident, and callback;
    // gyroscopeCallback receives &engine as its user data.
    // engine.accelerometerEventQueue = ASensorManager_createEventQueue(
    //     engine.sensorManager, looper, LOOPER_ID_USER_ACCELEROMETER,
    //     accelerometerCallback, &engine);
    engine.gyroscopeEventQueue = ASensorManager_createEventQueue(engine.sensorManager, looper, LOOPER_ID_USER_GYROSCOPE, gyroscopeCallback, &engine);
    // engine.magneticEventQueue = ASensorManager_createEventQueue(
    //     engine.sensorManager, looper, LOOPER_ID_USER_MAGNETIC,
    //     magneticCallback, &engine);

    // enable sensor
    // int a = ASensor_getMinDelay(engine.accelerometerSensor);
    // int b = ASensor_getMinDelay(engine.gyroscopeSensor);
    // int c = ASensor_getMinDelay(engine.magneticSensor);
    // LOGI("min-delay: %d, %d, %d", a, b, c);
    // ASensorEventQueue_setEventRate(engine.accelerometerEventQueue,
    //     engine.accelerometerSensor, 1000);
    ASensorEventQueue_setEventRate(engine.gyroscopeEventQueue, engine.gyroscopeSensor, 100);
    // ASensorEventQueue_setEventRate(engine.magneticEventQueue,
    //     engine.magneticSensor, 1000);
    // ASensorEventQueue_enableSensor(engine.accelerometerEventQueue,
    //     engine.accelerometerSensor);
    ASensorEventQueue_enableSensor(engine.gyroscopeEventQueue, engine.gyroscopeSensor);
    // ASensorEventQueue_enableSensor(engine.magneticEventQueue,
    //     engine.magneticSensor);
}
// Loads the vertex and fragment shader sources from the APK assets and
// configures accelerometer sampling on a fresh (non-callback) looper.
//
// Fixes: the return value of ASensorEventQueue_setEventRate was stored but
// never checked (dead store); both sensor configuration results are now
// asserted, and the sensor is enabled before its rate is set, matching the
// order used in the NDK samples.
void init(AAssetManager *assetManager) {
    // --- Vertex shader source ---
    AAsset *vertexShaderAsset = AAssetManager_open(assetManager, "shader.glslv", AASSET_MODE_BUFFER);
    assert(vertexShaderAsset != NULL);
    const void *vertexShaderBuf = AAsset_getBuffer(vertexShaderAsset);
    assert(vertexShaderBuf != NULL);
    off_t vertexShaderLength = AAsset_getLength(vertexShaderAsset);
    vertexShaderSource = std::string((const char*)vertexShaderBuf, (size_t)vertexShaderLength);
    AAsset_close(vertexShaderAsset);

    // --- Fragment shader source ---
    AAsset *fragmentShaderAsset = AAssetManager_open(assetManager, "shader.glslf", AASSET_MODE_BUFFER);
    assert(fragmentShaderAsset != NULL);
    const void *fragmentShaderBuf = AAsset_getBuffer(fragmentShaderAsset);
    assert(fragmentShaderBuf != NULL);
    off_t fragmentShaderLength = AAsset_getLength(fragmentShaderAsset);
    fragmentShaderSource = std::string((const char*)fragmentShaderBuf, (size_t)fragmentShaderLength);
    AAsset_close(fragmentShaderAsset);

    // --- Accelerometer setup ---
    sensorManager = ASensorManager_getInstance();
    assert(sensorManager != NULL);
    accelerometer = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    assert(accelerometer != NULL);
    looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
    assert(looper != NULL);
    accelerometerEventQueue = ASensorManager_createEventQueue(sensorManager, looper, LOOPER_ID_USER, NULL, NULL);
    assert(accelerometerEventQueue != NULL);

    // Enable first, then configure delivery rate; both return <0 on failure.
    int enableSensorResult = ASensorEventQueue_enableSensor(accelerometerEventQueue, accelerometer);
    assert(enableSensorResult >= 0);
    int setEventRateResult = ASensorEventQueue_setEventRate(accelerometerEventQueue, accelerometer, int32_t(1000000 / SENSOR_REFRESH_RATE));
    assert(setEventRateResult >= 0);
    (void)enableSensorResult; // keep NDEBUG builds warning-free
    (void)setEventRateResult;

    generateXPos();
}
void SensorHandler::InitializeHandler() { // Set and prepare looper SensorHandler::tsInstance().looper = ALooper_forThread(); if (SensorHandler::tsInstance().looper == NULL) { SensorHandler::tsInstance().looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS); } SensorHandler::tsInstance().sensorManager = ASensorManager_getInstance(); SensorHandler::tsInstance().accelerometerSensor = ASensorManager_getDefaultSensor( SensorHandler::tsInstance().sensorManager, ASENSOR_TYPE_ACCELEROMETER); SensorHandler::tsInstance().magneticSensor = ASensorManager_getDefaultSensor( SensorHandler::tsInstance().sensorManager, ASENSOR_TYPE_MAGNETIC_FIELD); SensorHandler::tsInstance().gyroscopeSensor = ASensorManager_getDefaultSensor( SensorHandler::tsInstance().sensorManager, ASENSOR_TYPE_GYROSCOPE); // Create event queue for sensor events SensorHandler::tsInstance().sensorEventQueue = ASensorManager_createEventQueue( SensorHandler::tsInstance().sensorManager, SensorHandler::tsInstance().looper, 3, SensorCallback, sensor_data); ASensorEventQueue_enableSensor(SensorHandler::tsInstance().sensorEventQueue, SensorHandler::tsInstance().accelerometerSensor); ASensorEventQueue_enableSensor(SensorHandler::tsInstance().sensorEventQueue, SensorHandler::tsInstance().magneticSensor); ASensorEventQueue_enableSensor(SensorHandler::tsInstance().sensorEventQueue, SensorHandler::tsInstance().gyroscopeSensor); // Target sampling rate 100Hz int a = ASensor_getMinDelay(accelerometerSensor); int b = ASensor_getMinDelay(magneticSensor); int c = ASensor_getMinDelay(gyroscopeSensor); LOGI("Minimum delay; acceleration = %d magnetic = %d gyroscope = %d",a,b,c); ASensorEventQueue_setEventRate(SensorHandler::tsInstance().sensorEventQueue, SensorHandler::tsInstance().accelerometerSensor, 100000); ASensorEventQueue_setEventRate(SensorHandler::tsInstance().sensorEventQueue, SensorHandler::tsInstance().magneticSensor, 100000); ASensorEventQueue_setEventRate(SensorHandler::tsInstance().sensorEventQueue, 
SensorHandler::tsInstance().gyroscopeSensor, 100000); }
// Window: hooks the native glue callbacks into this object and, unless the
// "silent" config flag is set, prepares accelerometer monitoring.
Window::Window(WindowCreationData const& wcd)
    : m_app(wcd.app)
    , m_lstateready(false)
    , m_finishrequired(false)
    , m_finished(false)
{
    m_silent = utils::MainConf->boolean("silent", false);
    if (m_silent)
        return;

    m_app->userData     = this;
    m_app->onAppCmd     = &g_handle_cmd;
    m_app->onInputEvent = &g_handle_input;

    m_sensorManager = ASensorManager_getInstance();
    m_accelerometerSensor =
        ASensorManager_getDefaultSensor(m_sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    m_sensorEventQueue = ASensorManager_createEventQueue(
        m_sensorManager, m_app->looper, LOOPER_ID_USER, 0, 0);

    if (m_app->savedState) {
        // Saved state is intentionally ignored (body was empty in the
        // original as well).
    }
}
// Wires the glue's application callbacks and records the stop handler.
// The sensor block below is deliberately compiled out (if (0)); it is kept
// as a reference for re-enabling accelerometer monitoring later.
void gdk_android_setup_app_callbacks(struct android_app *state, void (*onStop)()) {
    if (0) {
        ASensorManager* sensorManager = NULL;
        sensorManager = ASensorManager_getInstance();
        accelerometerSensor = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_ACCELEROMETER);
        sensorEventQueue = ASensorManager_createEventQueue(sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);
    }

    gdk_android_stop = onStop;

    // Route app commands and native-window lifecycle events to gdk handlers.
    state->onAppCmd = gdk_android_handle_glue_cmd;
    state->activity->callbacks->onNativeWindowResized = onNativeWindowResized;
    state->activity->callbacks->onNativeWindowRedrawNeeded = onNativeWindowRedrawNeeded;
    state->activity->callbacks->onContentRectChanged = onContentRectChanged;
    state->activity->callbacks->onLowMemory = onLowMemory;

    // TODO: consider overriding state->inputPollSource.process instead of the following
    // or should we even get rid of this native_app for good?
    state->onInputEvent = android_handle_input;
}
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure glue isn't stripped.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Prepare to monitor accelerometer
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        engine.state = *(struct saved_state*)state->savedState;
    } else {
        // First launch: call com.oriku.Bridge.initOuya(activity) over JNI.
        // The class is loaded through NativeActivity's class loader because
        // FindClass on this native thread only sees system classes.
        JNIEnv *jni = state->activity->env;
        state->activity->vm->AttachCurrentThread(&jni, NULL);
        jclass activityClass = jni->FindClass("android/app/NativeActivity");
        jmethodID getClassLoader = jni->GetMethodID(activityClass,"getClassLoader", "()Ljava/lang/ClassLoader;");
        jobject cls = jni->CallObjectMethod(state->activity->clazz, getClassLoader);
        jclass classLoader = jni->FindClass("java/lang/ClassLoader");
        jmethodID findClass = jni->GetMethodID(classLoader, "loadClass", "(Ljava/lang/String;)Ljava/lang/Class;");
        jstring strClassName = jni->NewStringUTF("com/oriku/Bridge");
        jclass j_bridge = (jclass)jni->CallObjectMethod(cls, findClass, strClassName);
        jmethodID j_initOuya = jni->GetStaticMethodID(j_bridge, "initOuya","(Landroid/app/Activity;)V");
        jni->CallStaticVoidMethod(j_bridge, j_initOuya, state->activity->clazz);
        // Finished with the JVM.
        state->activity->vm->DetachCurrentThread();
    }

    // loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, (void**)&source)) >= 0) {
            // Process this event.
            if (source != NULL) {
                source->process(state, source);
            }

            // If a sensor has data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) {
                        LOGI("accelerometer: x=%f y=%f z=%f", event.acceleration.x, event.acceleration.y, event.acceleration.z);
                    }
                }
            }

            // Check if we are exiting.
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Done with events; draw next animation frame.
            engine.state.angle += .01f;
            if (engine.state.angle > 1) {
                engine.state.angle = 0;
            }
            // Drawing is throttled to the screen update rate, so there
            // is no need to do timing here.
            engine_draw_frame(&engine);
        }
    }
}
// Native entry point (android_native_app_glue). Attaches this thread to the
// JVM, wires the glue callbacks, optionally monitors the accelerometer, and
// runs the update/draw loop until the instance reports it is done.
void android_main(struct android_app* state) {
    app_dummy();
    //sleep(5); // Sleep a little so GDB can attach itself

    // Detach the JVM from this thread when the thread-local key is destroyed.
    pthread_key_create(&s_thread_key, detach_current_thread);
    JNIEnv* env;
    state->activity->vm->AttachCurrentThread(&env, nullptr);
    pthread_setspecific(s_thread_key, state->activity->vm);

    AInstance ainstance;
    state->userData = &ainstance;
    state->onAppCmd = android_handle_event;
    state->onInputEvent = android_handle_input;
    ainstance.app = state;
    g_native_activity = state->activity;
    Path::set_current();

    // Prepare to monitor accelerometer
    ainstance.sensorManager = ASensorManager_getInstance();
    ainstance.accelerometerSensor = ASensorManager_getDefaultSensor(ainstance.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    // Only create the queue when the device actually has an accelerometer.
    if (ainstance.accelerometerSensor) {
        ainstance.sensorEventQueue = ASensorManager_createEventQueue(ainstance.sensorManager, state->looper, LOOPER_ID_USER, nullptr, nullptr);
    }

    Chrono chrono;
    while (!ainstance.done) {
        int ident;
        int events;
        struct android_poll_source* source;

        // Block while inactive (-1 timeout); drain without blocking (0)
        // while active so frames keep coming.
        while ((ident = ALooper_pollAll((!ainstance.active ? -1 : 0), nullptr, &events, reinterpret_cast<void**>(&source))) >= 0) {
            if (source)
                source->process(state, source);
            if (ainstance.done)
                break;
            // If a sensor has data, process it now.
            if (ainstance.active && ident == LOOPER_ID_USER && ainstance.accelerometerSensor) {
                ASensorEvent event;
                while (ASensorEventQueue_getEvents(ainstance.sensorEventQueue, &event, 1) > 0) {
                    ainstance.director->input().accelerated(event.acceleration.x, event.acceleration.y, event.acceleration.z, event.timestamp);
                }
            }
        }
        chrono.update();
        // NOTE(review): bitwise '&' between what look like two flags works
        // only if both are strictly 0/1; '&&' is likely the intent -- confirm
        // the types of 'initialised' and 'active'.
        if (!(ainstance.initialised & ainstance.active))
            continue;
        ainstance.director->update(chrono.delta());
        ainstance.director->draw();
        eglSwapBuffers(ainstance.display, ainstance.surface);
    }
    android_destroy_display(&ainstance);
}
// Native entry point: builds the X2D engine with its Android back-ends,
// prepares accelerometer monitoring, then runs the glue event/draw loop
// until the activity is destroyed.
void android_main(struct android_app* app) {
    // Make sure glue isn't stripped
    app_dummy();

    // Create application manager
    AppManager manager;
    app->userData = &manager;
    app->onAppCmd = engine_handle_cmd;
    app->onInputEvent = engine_handle_input;

    // Create x2d engine and plug in the Android implementation of each
    // subsystem.
    X2DEngine *engine = CreateEngine(0);
    engine->app = new AndroidApp(&manager);
    engine->gfx = new AndroidRender(&manager);
    engine->sfx = new AndroidSound;
    engine->debug = new AndroidDebug;
    engine->assetLoader = new AndroidAssetLoader(&manager);

    // NOTE: engine->init() can be found in engine_handle_cmd
    manager.android = app;
    manager.engine = engine;
    manager.display = NULL;

    // Prepare to monitor accelerometer
    manager.sensorManager = ASensorManager_getInstance();
    manager.accelerometerSensor = ASensorManager_getDefaultSensor(manager.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    manager.sensorEventQueue = ASensorManager_createEventQueue(manager.sensorManager, app->looper, LOOPER_ID_USER, NULL, NULL);

    if(app->savedState != NULL) {
        // We are starting from a previous saved state; restore from it.
        manager.state = *(SaveState*)app->savedState;
    }

    // Loop waiting for stuff to do.
    while(true) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        while((ident = ALooper_pollAll(manager.animating ? 0 : -1, NULL, &events, (void**)&source)) >= 0) {
            // Process this event.
            if (source != NULL)
                source->process(app, source);

            // If a sensor has data, process it now.
            if(ident == LOOPER_ID_USER) {
                if(manager.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while(ASensorEventQueue_getEvents(manager.sensorEventQueue, &event, 1) > 0) {
                        iosystem::print("accelerometer: x=%f y=%f z=%f", event.acceleration.x, event.acceleration.y, event.acceleration.z);
                    }
                }
            }

            // Check if we are exiting.
            if(app->destroyRequested != 0) {
                engine_term_display(&manager);
                return;
            }
        }

        // Draw only once a display exists and the app is animating.
        if(manager.display != NULL && manager.animating) {
            // Drawing is throttled to the screen update rate, so there
            // is no need to do timing here.
            //engine_draw_frame(&manager);
            engine->draw();
        }
    }
}
/**
 * Application entry point.
 */
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure the glue library isn't stripped by the linker.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Initialization required to read data from the sensors.
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // Restore the previously saved state.
        engine.state = *(struct saved_state*) state->savedState;
    }

    while (1) {
        int ident;
        int events;
        struct android_poll_source* source;

        // While the application is animating, drain pending events without
        // blocking; otherwise block until an event (or sensor data) arrives.
        while ((ident = ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, (void**) &source)) >= 0) {
            // Process this event.
            if (source != NULL) {
                source->process(state, source);
            }

            // If the sensor produced any data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) {
                        LOGI("accelerometer: x=%f y=%f z=%f", event.acceleration.x, event.acceleration.y, event.acceleration.z);
                    }
                }
            }

            // Was destruction requested?
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Advance the state needed to draw the next frame.
            engine.state.angle += .01f;
            if (engine.state.angle > 1) {
                engine.state.angle = 0;
            }
            // Draw the frame.
            engine_draw_frame(&engine);
        }
    }
}
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state) {
    struct engine engine;

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Prepare to monitor the accelerometer.
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        engine.state = *(struct saved_state*)state->savedState;
    }

    // Unlike the stock sample, animation starts immediately.
    engine.animating = 1;

    // Loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, block forever waiting for events.
        // If animating, loop until all events are read, then continue
        // to draw the next frame of animation.
        while ((ident = ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, (void**)&source)) >= 0) {
            // Process this event.
            if (source != NULL) {
                source->process(state, source);
            }

            // If a sensor has data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) {
                        // LOGI("accelerometer: x=%f y=%f z=%f",
                        //     event.acceleration.x, event.acceleration.y,
                        //     event.acceleration.z);
                    }
                }
            }

            // Check whether we are exiting.
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Done with events; draw the next animation frame.
            engine.state.angle += .01f;
            if (engine.state.angle > 1) {
                engine.state.angle = 0;
            }
            // Drawing is throttled to the screen update rate, so there
            // is no need to do timing here.
            engine_draw_frame(&engine);
        }
    }
}
void Engine::initSensors() { mSensorManager = ASensorManager_getInstance(); mAccelerometerSensor = ASensorManager_getDefaultSensor( mSensorManager, ASENSOR_TYPE_ACCELEROMETER ); mSensorEventQueue = ASensorManager_createEventQueue( mSensorManager, mApp->looper, LOOPER_ID_USER, NULL, NULL ); }
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure glue isn't stripped.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Prepare to monitor accelerometer
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    // gpg-cpp: Set platform intiialization
    gpg::AndroidInitialization::android_main(state);

    // gpg-cpp: Here we create the callback on auth operations
    // NOTE(review): the lambda captures `engine` (a stack local of this
    // function) by reference and is handed to StateManager::InitServices;
    // android_main lives for the app's lifetime here, but confirm the
    // callback cannot fire after this frame unwinds.
    auto callback = [&](gpg::AuthOperation op, gpg::AuthStatus status) {
        LOGI("OnAuthActionFinished");
        if (IsSuccess(status)) {
            LOGI("You are logged in!");
        } else {
            LOGI("You are not logged in!");
        }
        engine.animating = 1;
    };

    // gpg-cpp: We need to check to see if there's a previous state.
    // If there was, we'll just continue, but if not we'll set up
    // gpg-cpp for the first time.
    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        engine.state = *(struct saved_state*)state->savedState;
    } else {
        LOGI("Setting up gpg-cpp");
        // Get the platform configuration.
        gpg::AndroidPlatformConfiguration platform_configuration;
        platform_configuration.SetActivity(state->activity->clazz);
        // Now, create the game service (see StateManager.cpp)
        // and pass in our callback
        StateManager::InitServices(platform_configuration, NULL, callback);
    }

    // loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        while ((ident = ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, (void**)&source)) >= 0) {
            // Process this event.
            if (source != NULL) {
                source->process(state, source);
            }

            // If a sensor has data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) {
                        /* LOGI("accelerometer: x=%f y=%f z=%f", event.acceleration.x, event.acceleration.y, event.acceleration.z); */
                    }
                }
            }

            // Check if we are exiting.
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Done with events; draw next animation frame.
            engine.state.angle += .01f;
            if (engine.state.angle > 1) {
                engine.state.angle = 0;
            }
            // Drawing is throttled to the screen update rate, so there
            // is no need to do timing here.
            engine_draw_frame(&engine);
        }
    }
}
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state) {
    la_window_t* window = la_memory_allocate(sizeof(la_window_t));
    int ident;
    int events;
    struct android_poll_source* source;

    // Make sure glue isn't stripped.
    app_dummy();

    state->userData = window;
    state->onAppCmd = window_handle_cmd;
    state->onInputEvent = window_handle_input;
    window->app = state;

    // Prepare to monitor accelerometer
    window->sensorManager = ASensorManager_getInstance();
    window->accelerometerSensor = ASensorManager_getDefaultSensor(window->sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    window->sensorEventQueue = ASensorManager_createEventQueue(window->sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    // if (state->savedState != NULL) {
    // We are starting with a previous saved state; restore from it.
    // window->state = *(struct saved_state*)state->savedState;
    // }

    // Run main():
    la_window = window; // TODO: is needed?
    SDL_AtomicSet(&la_rmcexit, 1);

    // Window thread ( Drawing + Events ). Loops until la_rmcexit is cleared.
    while (SDL_AtomicGet(&la_rmcexit)) {
        // Poll Events. Timeout 0 means this never blocks: the loop spins
        // once per frame whether or not events arrived.
        ident = ALooper_pollAll(0, NULL, &events, (void**)&source);
        // Process this event.
        if (source != NULL) {
            source->process(state, source);
        }
        // If a sensor has data, process it now: latch the newest
        // accelerometer reading into the window's input state.
        if (ident == LOOPER_ID_USER) {
            if (window->accelerometerSensor != NULL) {
                ASensorEvent event;
                while (ASensorEventQueue_getEvents(window->sensorEventQueue, &event, 1) > 0) {
                    window->input.accel.x = event.acceleration.x;
                    window->input.accel.y = event.acceleration.y;
                    window->input.accel.z = event.acceleration.z;
                }
            }
        }
        // Run the cross-platform window loop (only once a context exists).
        if(window->context) la_window_loop__(window);
        // Update the screen.
        la_port_swap_buffers(window);
    }
    la_print("port-android quitting....");
    // The cross-platform window kill.
    la_window_kill__(window);
    // The window is being hidden or closed, clean it up.
    window_term_display(window);
    la_print("port-android quitted....");
    // exit(0) ends the whole process; the trailing return is unreachable.
    exit(0);
    return;
}
/********************************************************************
 * This is the main entry point of a native application
 * that is using android_native_app_glue. It runs in
 * its own thread, with its own event loop for receiving
 * input events and doing other things.
 *******************************************************************/
void android_main(struct android_app* state) {
    // mgr = state->activity->assetManager;
    struct engine engine;
    engine.display = NULL; // redundant: the memset below zeroes the struct anyway

    // Make sure glue isn't stripped.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Prepare to monitor accelerometer
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        engine.state = *(struct saved_state*)state->savedState;
    }

    // loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // Timeout 0: drain pending events without blocking, then fall
        // through to draw a frame each pass (this loop never sleeps).
        while ((ident=ALooper_pollAll(0, NULL, &events, (void**)&source)) >= 0) {
            // Process this event.
            if (source != NULL) {
                source->process(state, source);
            }

            // If a sensor has data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) {
                        // LOGI("accelerometer: x=%f y=%f z=%f",
                        //     event.acceleration.x, event.acceleration.y,
                        //     event.acceleration.z);
                    }
                }
            }

            // Check if we are exiting.
            if (state->destroyRequested != 0) {
                LOGI("engine_term_display(&engine); state->destroyRequested");
                engine_term_display(&engine);
                return;
            }
        }

        // if(mgr == NULL)break;
        engine_draw_frame(&engine);
    }
}
// Application-command callback for android_native_app_glue.
// Drives OgreKit window creation/teardown and toggles accelerometer
// monitoring with the activity's focus.
static void handleCmd(struct android_app* app, int32_t cmd)
{
    switch (cmd)
    {
    case APP_CMD_SAVE_STATE:
        // Nothing is persisted for this app.
        break;
    case APP_CMD_INIT_WINDOW:
        if (app->window) // && mRoot)
        {
            // Read the current device configuration from the asset manager;
            // it is passed to the EGL window below and freed at the end of
            // this case.
            AConfiguration* config = AConfiguration_new();
            AConfiguration_fromAssetManager(config, app->activity->assetManager);

            if (!okit.isInited())
            {
                // First window: bootstrap OgreKit from scratch.
                LOG_FOOT;
                //okit.getPrefs().winsize.x = drawWidth;
                //okit.getPrefs().winsize.y = drawHeight;
                okit.assetMgr = app->activity->assetManager;
                // The native window and config pointers are smuggled to
                // OgreKit as stringified integers.
                // NOTE(review): casting a pointer to (int) truncates on
                // 64-bit — confirm these should be uintptr_t-sized.
                okit.getPrefs().extWinhandle = Ogre::StringConverter::toString((int)app->window);
                okit.getPrefs().androidConfig = Ogre::StringConverter::toString((int)config);
                okit.getPrefs().verbose = true;
                //gkLogger::enable("OgreKitDemo.log", true);
                //Ogre::LogManager::getSingleton().getDefaultLog()->addListener(&gLogListener);
                if (okit.init(""))
                {
                    LOG_FOOT;
                    m_window = gkWindowSystem::getSingleton().getMainWindow();
                    LOG_FOOT;
                    // Hook OIS input to the freshly created main window.
                    okit.m_input = static_cast<OIS::AndroidInputManager*>(m_window->getInputManager());
                    // Accelerometer plumbing: manager -> default sensor ->
                    // event queue on this activity's looper.
                    okit.sensorManager = ASensorManager_getInstance();
                    okit.accelerometerSensor = ASensorManager_getDefaultSensor(okit.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
                    okit.sensorEventQueue = ASensorManager_createEventQueue(okit.sensorManager, okit.state->looper, LOOPER_ID_USER, NULL, NULL);
                    gkEngine::getSingleton().initializeStepLoop();
                    LOGI("inited");
                    //okit.setWindowSize(drawWidth, drawHeight);
                }
            }
            else
            {
                // Window re-created after TERM_WINDOW: only the EGL surface
                // needs rebuilding, not the whole engine.
                LOG_FOOT;
                Ogre::RenderWindow* rwnd = gkWindowSystem::getSingleton().getMainRenderWindow();
                if (rwnd)
                    static_cast<Ogre::AndroidEGLWindow*>(rwnd)->_createInternalResources(app->window, config);
            }
            LOG_FOOT;
            AConfiguration_delete(config);
        }
        break;
    case APP_CMD_TERM_WINDOW:
        // The native window is going away: drop the EGL surface but keep
        // the engine alive for a later INIT_WINDOW.
        if (okit.isInited())
        {
            Ogre::RenderWindow* rwnd = gkWindowSystem::getSingleton().getMainRenderWindow();
            if (rwnd)
                static_cast<Ogre::AndroidEGLWindow*>(rwnd)->_destroyInternalResources();
        }
        LOGI("terminiate.");
        break;
    case APP_CMD_GAINED_FOCUS:
        // When our app gains focus, we start monitoring the accelerometer.
        if (okit.accelerometerSensor != NULL)
        {
            ASensorEventQueue_enableSensor(okit.sensorEventQueue, okit.accelerometerSensor);
            // We'd like to get 60 events per second (in us).
            ASensorEventQueue_setEventRate(okit.sensorEventQueue, okit.accelerometerSensor, (1000L/60)*1000);
        }
        break;
    case APP_CMD_LOST_FOCUS:
        // When our app loses focus, we stop monitoring the accelerometer.
        // This is to avoid consuming battery while not being used.
        if (okit.accelerometerSensor != NULL)
        {
            ASensorEventQueue_disableSensor(okit.sensorEventQueue, okit.accelerometerSensor);
        }
        break;
    case APP_CMD_CONFIG_CHANGED:
        break;
    }
}
/** * This is the main entry point of a native application that is using * android_native_app_glue. It runs in its own thread, with its own * event loop for receiving input events and doing other things. */ void android_main(struct android_app* state) { // Make sure glue isn't stripped. app_dummy(); memset(&engine, 0, sizeof(engine)); state->userData = &engine; state->onAppCmd = engine_handle_cmd; state->onInputEvent = engine_handle_input; engine.app = state; // Prepare to monitor accelerometer engine.sensorManager = ASensorManager_getInstance(); engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER); engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL); if (state->savedState != NULL) { // We are starting with a previous saved state; restore from it. engine.state = *(struct saved_state*)state->savedState; } // loop waiting for stuff to do. while (1) { // Read all pending events. int ident; int events; struct android_poll_source* source; // If not animating, we will block forever waiting for events. // If animating, we loop until all events are read, then continue // to draw the next frame of animation. while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, (void**)&source)) >= 0) { // Process this event. if (source != NULL) { source->process(state, source); } // If a sensor has data, process it now. 
if (ident == LOOPER_ID_USER) { if (engine.accelerometerSensor != NULL) { ASensorEvent event; while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) { LOG_EVENTS_DEBUG("accelerometer: x=%f y=%f z=%f", event.acceleration.x, event.acceleration.y, event.acceleration.z); AConfiguration* _currentconf = AConfiguration_new(); AConfiguration_fromAssetManager(_currentconf, state->activity->assetManager); static int32_t _orientation = AConfiguration_getOrientation(_currentconf); if (ACONFIGURATION_ORIENTATION_LAND != _orientation) { // ACONFIGURATION_ORIENTATION_ANY // ACONFIGURATION_ORIENTATION_PORT // ACONFIGURATION_ORIENTATION_SQUARE cocos2d::Acceleration acc; acc.x = -event.acceleration.x/10; acc.y = -event.acceleration.y/10; acc.z = event.acceleration.z/10; acc.timestamp = 0; cocos2d::EventAcceleration accEvent(acc); cocos2d::EventDispatcher::getInstance()->dispatchEvent(&accEvent); } else { // ACONFIGURATION_ORIENTATION_LAND // swap x and y parameters cocos2d::Acceleration acc; acc.x = event.acceleration.y/10; acc.y = -event.acceleration.x/10; acc.z = event.acceleration.z/10; acc.timestamp = 0; cocos2d::EventAcceleration accEvent(acc); cocos2d::EventDispatcher::getInstance()->dispatchEvent(&accEvent); } } } } // Check if we are exiting. if (state->destroyRequested != 0) { engine_term_display(&engine); memset(&engine, 0, sizeof(engine)); s_methodInitialized = false; return; } } if (engine.animating) { // Done with events; draw next animation frame. engine.state.angle += .01f; if (engine.state.angle > 1) { engine.state.angle = 0; } // Drawing is throttled to the screen update rate, so there // is no need to do timing here. LOG_RENDER_DEBUG("android_main : engine.animating"); engine_draw_frame(&engine); } else { LOG_RENDER_DEBUG("android_main : !engine.animating"); } } }
// Main entry point (android_native_app_glue): sets up callbacks,
// accelerometer + gyroscope monitoring, then runs the event loop,
// logging sensor samples and animating three rotation angles.
void android_main(struct android_app* state) {
    struct engine engine;

    // Keep the glue from being stripped by the linker.
    app_dummy();

    // Zero the application-state area.
    memset(&engine, 0, sizeof(engine));
    // Stash our state where the glue callbacks can reach it.
    state->userData = &engine;
    // Install the application-command handler.
    state->onAppCmd = engine_handle_cmd;
    // Install the input-event handler.
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Initialization needed to read sensor data.
    engine.sensorManager = ASensorManager_getInstance();
    // Prepare to read the accelerometer.
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(
            engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    // Prepare to read the gyroscope.
    engine.gyroscopeSensor = ASensorManager_getDefaultSensor(
            engine.sensorManager, ASENSOR_TYPE_GYROSCOPE );
    // Create the sensor event queue on this activity's looper.
    engine.sensorEventQueue = ASensorManager_createEventQueue(
            engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);
    // Grab the AssetManager.
    engine.assetManager = state->activity->assetManager;

    if (state->savedState != NULL) {
        // Restore the previously saved state.
        engine.state = *(struct saved_state*) state->savedState;
    }

    while (1) {
        int ident;
        int events;
        struct android_poll_source* source;

        // Poll mode matches the app state: block while idle (-1),
        // poll without waiting (0) while animating.
        while ((ident = ALooper_pollAll(engine.animating ?
                0 : -1, NULL, &events, (void**) &source)) >= 0) {
            // Process internal glue events.
            if (source != NULL) {
                source->process(state, source);
            }

            // Drain and log data from the sensor event queue.
            if (ident == LOOPER_ID_USER) {
                // Both sensors must exist; otherwise samples are ignored.
                if (engine.accelerometerSensor != NULL
                        && engine.gyroscopeSensor != NULL) {
                    ASensorEvent event[2];
                    int count;
                    int i;
                    // Fetch up to two events at a time (one per sensor).
                    while ((count = ASensorEventQueue_getEvents(
                            engine.sensorEventQueue, event, 2)) > 0) {
                        for (i = 0; i < count; i++){
                            switch(event[i].type){
                            case ASENSOR_TYPE_ACCELEROMETER:
                                // Log the accelerometer reading.
                                LOGI("accelerometer: x=%f y=%f z=%f",
                                    event[i].acceleration.x,
                                    event[i].acceleration.y,
                                    event[i].acceleration.z);
                                break;
                            case ASENSOR_TYPE_GYROSCOPE:
                                // Log the gyroscope reading.
                                LOGI("GYROSCOPE: x=%f y=%f z=%f",event[i].vector.azimuth,event[i].vector.pitch,event[i].vector.roll );
                                break;
                            }
                        }
                    }
                }
            }

            // Tear down EGL state when destruction is requested.
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Advance the animation for the next frame.
            // NOTE(review): `i` is declared but unused here, and
            // angle[2] is normalized without ever being advanced.
            int i = 0,j;
            engine.angle[0] += 3;
            engine.angle[1] += 1;
            // Wrap all three angles into [0, 360].
            for (j = 0; j < 3; j++){
                if (engine.angle[j] > 360) engine.angle[j] -= 360;
                if (engine.angle[j] < 0) engine.angle[j] += 360;
            }
            // Render the frame.
            engine_draw_frame(&engine);
        }
    }
}
/** * This is the main entry point of a native application that is using * android_native_app_glue. It runs in its own thread, with its own * event loop for receiving input events and doing other things. */ void android_main(struct android_app* state) { struct engine engine; // Make sure glue isn't stripped. app_dummy(); GAppOnStartup = &UCppLab::OnStartup; GAppOnTouched = &UCppLab::OnTouched; memset(&engine, 0, sizeof(engine)); state->userData = &engine; state->onAppCmd = engine_handle_cmd; state->onInputEvent = engine_handle_input; engine.app = state; // Prepare to monitor accelerometer engine.sensorManager = ASensorManager_getInstance(); engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER); engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL); if (state->savedState != NULL) { // We are starting with a previous saved state; restore from it. engine.state = *(struct saved_state*)state->savedState; } // loop waiting for stuff to do. while (1) { // Read all pending events. int ident; int events; struct android_poll_source* source; // If not animating, we will block forever waiting for events. // If animating, we loop until all events are read, then continue // to draw the next frame of animation. while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, (void**)&source)) >= 0) { // Process this event. if (source != NULL) { source->process(state, source); } // If a sensor has data, process it now. if (ident == LOOPER_ID_USER) { if (engine.accelerometerSensor != NULL) { ASensorEvent event; while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) { // LOGI("accelerometer: x=%f y=%f z=%f", // event.acceleration.x, event.acceleration.y, // event.acceleration.z); } } } // Check if we are exiting. 
if (state->destroyRequested != 0) { engine_term_display(&engine); return; } } if (engine.animating) { // Done with events; draw next animation frame. engine.state.angle += .01f; if (engine.state.angle > 1) { engine.state.angle = 0; } // Drawing is throttled to the screen update rate, so there // is no need to do timing here. engine_draw_frame(&engine); } } }
orxSTATUS orxFASTCALL orxJoystick_Android_Init() { orxSTATUS eResult = orxSTATUS_FAILURE; /* Wasn't already initialized? */ if (!(sstJoystick.u32Flags & orxJOYSTICK_KU32_STATIC_FLAG_READY)) { /* Cleans static controller */ orxMemory_Zero(&sstJoystick, sizeof(orxJOYSTICK_STATIC)); orxConfig_PushSection(KZ_CONFIG_ANDROID); sstJoystick.s32ScreenRotation = -1; sstJoystick.bAccelerometerEnabled = orxFALSE; sstJoystick.bUseJoystick = orxConfig_GetBool(KZ_CONFIG_USE_JOYSTICK); if(sstJoystick.bUseJoystick == orxTRUE) { orxAndroid_JNI_GetDeviceIds(sstJoystick.au32DeviceIds); orxEvent_AddHandler(orxANDROID_EVENT_TYPE_JOYSTICK, orxJoystick_Android_JoystickEventHandler); } else { sstJoystick.sensorManager = ASensorManager_getInstance(); sstJoystick.accelerometerSensor = ASensorManager_getDefaultSensor(sstJoystick.sensorManager, ASENSOR_TYPE_ACCELEROMETER); if(sstJoystick.accelerometerSensor != NULL) { /* Adds our Accelerometer event handlers */ if ((eResult = orxEvent_AddHandler(orxEVENT_TYPE_SYSTEM, orxJoystick_Android_AccelerometerEventHandler)) != orxSTATUS_FAILURE) { if ((eResult = orxEvent_AddHandler(orxANDROID_EVENT_TYPE_ACCELERATE, orxJoystick_Android_AccelerometerEventHandler)) != orxSTATUS_FAILURE) { if ((eResult = orxEvent_AddHandler(orxANDROID_EVENT_TYPE_SURFACE, orxJoystick_Android_AccelerometerEventHandler)) != orxSTATUS_FAILURE) { ALooper* looper = ALooper_forThread(); sstJoystick.sensorEventQueue = ASensorManager_createEventQueue(sstJoystick.sensorManager, looper, LOOPER_ID_SENSOR, NULL, NULL); if(orxConfig_HasValue(KZ_CONFIG_ACCELEROMETER_FREQUENCY)) { sstJoystick.u32Frequency = orxConfig_GetU32(KZ_CONFIG_ACCELEROMETER_FREQUENCY); } else { /* enable acceleromter with default rate */ sstJoystick.u32Frequency = 60; } /* enable sensor */ enableSensorManager(); } } } } } orxConfig_PopSection(); /* Updates status */ sstJoystick.u32Flags |= orxJOYSTICK_KU32_STATIC_FLAG_READY; } /* Done! */ return eResult; }
// Native entry point (android_native_app_glue) for the Godot engine
// runner: wires up callbacks, the accelerometer queue and the JNI
// thread attachment, then runs the event/render loop.  Shutdown is
// two-phase: destroyRequested only asks the OS main loop to quit;
// the actual teardown happens once engine.requested_quit is set.
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure glue isn't stripped.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;
    engine.requested_quit=false;
    engine.os=NULL;               // OS instance created later by the cmd handler
    engine.display_active=false;

    // Route Godot's file access through the APK's asset manager.
    FileAccessAndroid::asset_manager=state->activity->assetManager;

    // Prepare to monitor accelerometer
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
            ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager,
            state->looper, LOOPER_ID_USER, NULL, NULL);

    // Fullscreen + keep-screen-on window flags.
    ANativeActivity_setWindowFlags(state->activity,AWINDOW_FLAG_FULLSCREEN|AWINDOW_FLAG_KEEP_SCREEN_ON,0);

    // Attach this native thread to the JVM so engine.jni can call Java.
    state->activity->vm->AttachCurrentThread(&engine.jni, NULL);

    // loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        // Safety valve: after 50 consecutive source-less wakeups, break
        // out so rendering is not starved.
        int nullmax=50;

        while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events,
                (void**)&source)) >= 0) {

            // Process this event.
            if (source != NULL) {
                // LOGI("process\n");
                source->process(state, source);
            } else {
                nullmax--;
                if (nullmax<0)
                    break;
            }

            // If a sensor has data, process it now.
            // LOGI("events\n");
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) {
                        // Forward samples into the engine once the OS
                        // object exists; dropped before that.
                        if (engine.os) {
                            engine.os->process_accelerometer(Vector3(event.acceleration.x,
                                    event.acceleration.y, event.acceleration.z));
                        }
                    }
                }
            }

            // Check if we are exiting.  Godot handles this indirectly:
            // the request is forwarded to the main loop (which will set
            // requested_quit) and the flag is cleared so it is only
            // delivered once.
            if (state->destroyRequested != 0) {
                if (engine.os) {
                    engine.os->main_loop_request_quit();
                }
                state->destroyRequested=0;
            }

            // Actual teardown: terminate the display and exit the process.
            if (engine.requested_quit) {
                engine_term_display(&engine);
                exit(0);
                return;
            }

            // LOGI("end\n");
        }

        // LOGI("engine animating? %i\n",engine.animating);
        if (engine.animating) {
            //do os render
            engine_draw_frame(&engine);
            //LOGI("TERM WINDOW");
        }
    }
}
// try and open up the JoyWarrior file descriptor bool CSensorAndroidBuiltIn::detect() { setType(); setPort(); // get the singleton instance of the m_pSensorManager if (!m_pSensorManager) m_pSensorManager = (ASensorManager*) ASensorManager_getInstance(); /* // sensor listing ASensorList pSensorList = NULL; int iNum = ASensorManager_getSensorList(m_pSensorManager, &pSensorList); if (iNum && pSensorList) { fprintf(stdout, "\n\n%d Detected Sensors:\n", iNum); //int i = 0; for (int i=0; i < iNum; i++) { //while (i<10 && (pSensorList+i)) { //for (int i=0; i < iNum; i++) { fprintf(stdout, " %s\n", ASensor_getName(*(pSensorList+i))); i++; } } else { fprintf(stdout, "\n\nNo Sensor List? %d\n\n", iNum); } */ // create looper m_pLooper = ALooper_forThread(); // get existing looper if (!m_pLooper) { // make new looper //m_pLooper = ALooper_prepare(0); m_pLooper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS); } if (!m_pLooper) { // no existing or new looper -- error fprintf(stderr, "can't create Looper\n"); return false; // can't create looper } // setup event queue //m_pSensorEventQueue = ASensorManager_createEventQueue(m_pSensorManager, m_pLooper, // LOOPER_ID_QCN, QCN_ALooper_callback, &l_SensorVector); m_pSensorEventQueue = ASensorManager_createEventQueue(m_pSensorManager, m_pLooper, LOOPER_ID_QCN, NULL, NULL); if (!m_pSensorEventQueue) { fprintf(stderr, "can't create SensorEventQueue\n"); return false; // can't setup queue } // get the default accelerometer m_pSensor = (ASensor*) ASensorManager_getDefaultSensor(m_pSensorManager, ASENSOR_TYPE_ACCELEROMETER); if (!m_pSensor) { //fprintf(stdout, "No Android accelerometer detected.\n"); return false; // no sensor } int iRetVal = 0; if ((iRetVal = ASensorEventQueue_enableSensor(m_pSensorEventQueue, m_pSensor)) < 0) { fprintf(stderr, "Error in enableSensor %d\n", iRetVal); return false; }; m_fResolution = ASensor_getResolution(m_pSensor); m_minDelayMsec = ASensor_getMinDelay(m_pSensor); int rateMsec = (int)((sm->dt > 0. 
? sm->dt : g_DT) * 1000.); //fprintf(stdout, "Rates: m_minDelayMSec = %d raceMsec = %d\n", m_minDelayMsec, rateMsec); //if (rateMsec > m_minDelayMsec) m_minDelayMsec = rateMsec; if (rateMsec < m_minDelayMsec) m_minDelayMsec = rateMsec; fprintf(stdout, "Setting data rate to %d Hz\n", 1000L/m_minDelayMsec); strlcpy(m_strSensor, ASensor_getName(m_pSensor), _MAX_PATH); strlcpy(m_strVendor, ASensor_getVendor(m_pSensor), _MAX_PATH); // NB: the rate is in microseconds! if ((iRetVal = ASensorEventQueue_setEventRate(m_pSensorEventQueue, m_pSensor, m_minDelayMsec * 1000L)) < 0) { fprintf(stderr, "Error in setEventRate %d\n", iRetVal); // return false; // not really a fatal error } fprintf(stdout, "Android Default Sensor Detected: \n\n %s - %s\n" " Res = %f --- Min Delay msec = %d\n" " m_pSensor=%x m_pSensorEventQueue=%x\n", m_strVendor, m_strSensor, m_fResolution, m_minDelayMsec, m_pSensor, m_pSensorEventQueue); setType(SENSOR_ANDROID); setSingleSampleDT(true); // set to true in raw mode so we don't get any interpolated/avg points (i.e. just the "integer" value hopefully) return (bool)(getTypeEnum() == SENSOR_ANDROID); }