//------------------------------------------------------------------------- // Sensor handlers //------------------------------------------------------------------------- void Engine::InitSensors() { sensor_manager_ = ASensorManager_getInstance(); accelerometer_sensor_ = ASensorManager_getDefaultSensor( sensor_manager_, ASENSOR_TYPE_ACCELEROMETER); sensor_event_queue_ = ASensorManager_createEventQueue( sensor_manager_, app_->looper, LOOPER_ID_USER, NULL, NULL); }
// Bind this sensor wrapper to the given glue app: cache the manager handle,
// look up the default accelerometer, and create an event queue on the app's
// looper (ident LOOPER_ID_USER, no callback).
void SensorManager::Init(android_app *app) {
    sensorManager_ = ASensorManager_getInstance();
    accelerometerSensor_ =
        ASensorManager_getDefaultSensor(sensorManager_, ASENSOR_TYPE_ACCELEROMETER);
    sensorEventQueue_ = ASensorManager_createEventQueue(
        sensorManager_, app->looper, LOOPER_ID_USER, nullptr, nullptr);
}
/*
 * Class:     io_quadroid_ndk_QuadroidLib
 * Method:    init
 * Signature: ()V
 *
 * Creates (or reuses) a looper on the calling thread, attaches a sensor
 * event queue driven by the get_sensor_events callback, and enables every
 * sensor selected by the USE_* compile-time switches. Blocks once at the
 * end until the first event batch has been dispatched.
 */
JNIEXPORT void JNICALL Java_io_quadroid_UDPSensor_ndk_QuadroidLib_init(JNIEnv *env, jclass clazz) {
    /* BUG FIX: ASensorEventQueue_setEventRate() expects the sampling PERIOD
     * in microseconds, not a frequency. The old code passed SAMP_PER_SEC
     * directly, requesting an absurdly short period instead of the intended
     * SAMP_PER_SEC samples per second. */
    const int32_t period_us = 1000000 / SAMP_PER_SEC;
    /* Pressure sensor type; named instead of the bare magic number 6
     * (ASENSOR_TYPE_PRESSURE is missing from older NDK sensor.h headers). */
    const int kSensorTypePressure = 6;

    sensorManager = ASensorManager_getInstance();
    looper = ALooper_forThread();
    if (looper == NULL) {
        looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
    }
    queue = ASensorManager_createEventQueue(sensorManager, looper, LOOPER_ID,
                                            get_sensor_events, NULL);

    if (USE_PRESSURE == 1) {
        pressureSensor = ASensorManager_getDefaultSensor(sensorManager, kSensorTypePressure);
        ASensorEventQueue_enableSensor(queue, pressureSensor);
        ASensorEventQueue_setEventRate(queue, pressureSensor, period_us);
    }
    if (USE_ACCELEROMETER == 1) {
        acceSensor = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_ACCELEROMETER);
        ASensorEventQueue_enableSensor(queue, acceSensor);
        ASensorEventQueue_setEventRate(queue, acceSensor, period_us);
        log_acce = ASensor_getMinDelay(acceSensor);
    }
    if (USE_GYROSCOPE == 1) {
        gyroSensor = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_GYROSCOPE);
        ASensorEventQueue_enableSensor(queue, gyroSensor);
        ASensorEventQueue_setEventRate(queue, gyroSensor, period_us);
        log_gyro = ASensor_getMinDelay(gyroSensor);
    }
    if (USE_MAGNETIC == 1) {
        magnSensor = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_MAGNETIC_FIELD);
        ASensorEventQueue_enableSensor(queue, magnSensor);
        ASensorEventQueue_setEventRate(queue, magnSensor, period_us);
        log_magn = ASensor_getMinDelay(magnSensor);
    }
    if (USE_DISTANCE == 1) {
        proxSensor = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_PROXIMITY);
        ASensorEventQueue_enableSensor(queue, proxSensor);
        ASensorEventQueue_setEventRate(queue, proxSensor, period_us);
    }

    currentTime = time(0);
    /* Wait (timeout -1) for the first looper wakeup; events are delivered
     * through the get_sensor_events queue callback. */
    ALooper_pollAll(-1, NULL, &events, NULL);
}
// Starts magnetic-field sampling: resolves the Java onSensorChanged callback,
// enables the default magnetometer on a fresh event queue, then pumps the
// looper, pushing each magnetic sample into the magneticfield_* globals and
// notifying the Java handler.
JNIEXPORT void JNICALL Java_org_nzdis_sensorspeed_CMagneticField_magneticFieldStartup
  (JNIEnv *e, jclass c, jobject handler) {
    updateHandler = handler;
    env = e;

    // Resolve the Java-side callback up front so failures are visible early.
    jclass handlerClass = env->FindClass("org/nzdis/sensorspeed/CMagneticField");
    if (handlerClass == NULL) {
        LOGI("big error 1");
    }
    mid = env->GetMethodID(handlerClass, "onSensorChanged", "()V");
    if (mid == NULL) {
        LOGI("big error 2");
    }

    int events, ident;
    ASensorManager * sensorManager;
    const ASensor* magSensor;
    // NOTE(review): this buffer is handed to the queue callback and never
    // freed -- it leaks if startup runs more than once. TODO confirm the
    // intended lifetime before freeing it here.
    void* sensor_data = malloc(10000);
    SENSORS_ENABLED = 1;

    ALooper* looper = ALooper_forThread();
    if (looper == NULL) {
        looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
    }
    sensorManager = ASensorManager_getInstance();
    magSensor = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_MAGNETIC_FIELD);
    // NOTE(review): the queue is created WITH a callback (get_sensorevents);
    // when a callback fires, ALooper_pollAll returns ALOOPER_POLL_CALLBACK
    // (negative), so the ident == LOOPER_ID branch below may never be taken.
    // Confirm whether the callback or the manual loop is the intended path.
    sensorEventQueue = ASensorManager_createEventQueue(sensorManager, looper, LOOPER_ID,
                                                       (ALooper_callbackFunc)get_sensorevents, sensor_data);
    ASensorEventQueue_enableSensor(sensorEventQueue, magSensor);
    int minDelay = ASensor_getMinDelay(magSensor);
    //LOGI("min-delay: %d", minDelay);
    // Sampling period in microseconds: (1000 / samples-per-second) ms * 1000.
    ASensorEventQueue_setEventRate(sensorEventQueue, magSensor, (1000L/SAMP_PER_SEC)*1000);

    // BUG FIX: the original condition was
    //     while ((ident = ALooper_pollAll(...) >= 0))
    // which, by precedence, assigned the COMPARISON result (0/1) to ident, so
    // "ident == LOOPER_ID" could never match. Parenthesize the assignment so
    // ident receives the looper's return value.
    while ((ident = ALooper_pollAll(-1, NULL, &events, NULL)) >= 0) {
        // If a sensor has data, process it now.
        if (ident == LOOPER_ID) {
            LOGI("magneticFieldStartup() - LOOPER!!!!!!!!");
            ASensorEvent event;
            while (ASensorEventQueue_getEvents(sensorEventQueue, &event, 1) > 0) {
                if (event.type == ASENSOR_TYPE_MAGNETIC_FIELD) {
                    env->CallVoidMethod(updateHandler, mid);
                    magneticfield_x = event.magnetic.x;
                    magneticfield_y = event.magnetic.y;
                    magneticfield_z = event.magnetic.z;
                }
            }
        } else {
            LOGI("magneticFieldStartup() - else!!!!!!!!!!!!!");
        }
    }
}
int defaultInitializeApplication() { struct android_app *vid = main_view_id; if (is_valid(vid) == false) return Failed; gy::r3::initialize(GY_RENDERER_OPENGLES); asset_manager = vid->activity->assetManager; sensor_manager = ASensorManager_getInstance(); sensor_accelerometer = ASensorManager_getDefaultSensor(sensor_manager, ASENSOR_TYPE_ACCELEROMETER); sensor_gyroscope = ASensorManager_getDefaultSensor(sensor_manager, ASENSOR_TYPE_GYROSCOPE); sensor_event_queue = ASensorManager_createEventQueue(sensor_manager, vid->looper, LOOPER_ID_USER, NullPtr, NullPtr); vid->onAppCmd = gy__onApplicationCommand; vid->onInputEvent = gy__onInputEvent; return Success; }
// Enables the default sensor of Android type `s` on the shared event queue
// and requests a sampling period of `usec` microseconds.
// Returns 0 on success, 1 when the device has no sensor of that type.
int GoAndroid_enableSensor(int s, int32_t usec) {
    ASensorManager* manager = ASensorManager_getInstance();
    const ASensor* target = ASensorManager_getDefaultSensor(manager, s);
    if (target == NULL) {
        return 1;
    }
    ASensorEventQueue_enableSensor(queue, target);
    ASensorEventQueue_setEventRate(queue, target, usec);
    return 0;
}
/**
 * Application entry point (android_native_app_glue).
 * (Original comments were mojibake-encoded Japanese; translated to English.)
 */
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure the glue code isn't stripped by the linker.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Initialization needed to receive data from the sensors: default
    // accelerometer plus an event queue on the glue looper (LOOPER_ID_USER).
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(
            engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(
            engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // Restore the previously saved state.
        engine.state = *(struct saved_state*) state->savedState;
    }

    while (1) {
        int ident;
        int events;
        struct android_poll_source* source;

        // While the application is running, service these sensors:
        // block (-1) when idle, poll without blocking (0) when animating.
        while ((ident = ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events,
                (void**) &source)) >= 0) {
            // Process the event.
            if (source != NULL) {
                source->process(state, source);
            }
            // If a sensor has any data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(
                            engine.sensorEventQueue, &event, 1) > 0) {
                        LOGI("accelerometer: x=%f y=%f z=%f",
                                event.acceleration.x, event.acceleration.y,
                                event.acceleration.z);
                    }
                }
            }
            // Was destruction requested?
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }
// Translate the engine's Sensor::Type index into the corresponding Android
// sensor constant and fetch the device's default sensor of that kind.
ASensor const* SensorImpl::getDefaultSensor(Sensor::Type sensor)
{
    // Table is indexed by Sensor::Type; the order must mirror that enum.
    static int typeTable[] = {ASENSOR_TYPE_ACCELEROMETER, ASENSOR_TYPE_GYROSCOPE,
                              ASENSOR_TYPE_MAGNETIC_FIELD, ASENSOR_TYPE_GRAVITY,
                              ASENSOR_TYPE_LINEAR_ACCELERATION, ASENSOR_TYPE_ORIENTATION};

    // Retrieve the default sensor matching this type.
    return ASensorManager_getDefaultSensor(sensorManager, typeTable[sensor]);
}
void SensorHandler::InitializeHandler() { // Set and prepare looper SensorHandler::tsInstance().looper = ALooper_forThread(); if (SensorHandler::tsInstance().looper == NULL) { SensorHandler::tsInstance().looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS); } SensorHandler::tsInstance().sensorManager = ASensorManager_getInstance(); SensorHandler::tsInstance().accelerometerSensor = ASensorManager_getDefaultSensor( SensorHandler::tsInstance().sensorManager, ASENSOR_TYPE_ACCELEROMETER); SensorHandler::tsInstance().magneticSensor = ASensorManager_getDefaultSensor( SensorHandler::tsInstance().sensorManager, ASENSOR_TYPE_MAGNETIC_FIELD); SensorHandler::tsInstance().gyroscopeSensor = ASensorManager_getDefaultSensor( SensorHandler::tsInstance().sensorManager, ASENSOR_TYPE_GYROSCOPE); // Create event queue for sensor events SensorHandler::tsInstance().sensorEventQueue = ASensorManager_createEventQueue( SensorHandler::tsInstance().sensorManager, SensorHandler::tsInstance().looper, 3, SensorCallback, sensor_data); ASensorEventQueue_enableSensor(SensorHandler::tsInstance().sensorEventQueue, SensorHandler::tsInstance().accelerometerSensor); ASensorEventQueue_enableSensor(SensorHandler::tsInstance().sensorEventQueue, SensorHandler::tsInstance().magneticSensor); ASensorEventQueue_enableSensor(SensorHandler::tsInstance().sensorEventQueue, SensorHandler::tsInstance().gyroscopeSensor); // Target sampling rate 100Hz int a = ASensor_getMinDelay(accelerometerSensor); int b = ASensor_getMinDelay(magneticSensor); int c = ASensor_getMinDelay(gyroscopeSensor); LOGI("Minimum delay; acceleration = %d magnetic = %d gyroscope = %d",a,b,c); ASensorEventQueue_setEventRate(SensorHandler::tsInstance().sensorEventQueue, SensorHandler::tsInstance().accelerometerSensor, 100000); ASensorEventQueue_setEventRate(SensorHandler::tsInstance().sensorEventQueue, SensorHandler::tsInstance().magneticSensor, 100000); ASensorEventQueue_setEventRate(SensorHandler::tsInstance().sensorEventQueue, 
SensorHandler::tsInstance().gyroscopeSensor, 100000); }
// Native entry point (android_native_app_glue): wires the engine into the
// glue state, attaches an accelerometer event queue to the glue looper, then
// runs the poll/draw loop until the activity is destroyed.
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure glue isn't stripped.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_command;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Accelerometer events arrive on the glue looper under LOOPER_ID_USER.
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, 0, 0);

    ANativeActivity* nativeActivity = state->activity;
    android_pre_init_filesystem(state);

    while (1) {
        int ident;
        int events;
        struct android_poll_source* source;

        // Block while idle (-1 timeout); poll without blocking (0) while animating.
        while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, 0, &events, (void**)&source)) >= 0) {
            if (source != nullptr) {
                source->process(state, source);
            }
            // Drain queued accelerometer samples and forward each one to the callback.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != nullptr) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) {
                        accelerometer_input_callback(event.acceleration.x, event.acceleration.y, event.acceleration.z);
                    }
                }
            }
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }
        if (engine.animating) {
            engine_draw_frame(&engine);
        }
    }
}
// Binds this app object to the glue state, prepares accelerometer
// monitoring on the glue looper, and forces a fullscreen, always-on window.
gkAndroidApp::gkAndroidApp(android_app* state)
    : m_state(state),
      m_window(NULL)
{
    // Route glue callbacks back into this object.
    state->userData = this;
    state->onAppCmd = handleCmd;
    state->onInputEvent = handleInput;

    // Prepare to monitor the accelerometer.
    ASensorManager *manager = ASensorManager_getInstance();
    m_accelerometerSensor = ASensorManager_getDefaultSensor(manager, ASENSOR_TYPE_ACCELEROMETER);
    m_sensorEventQueue = ASensorManager_createEventQueue(manager, state->looper,
                                                         LOOPER_ID_USER, NULL, NULL);

    // Fullscreen + keep-screen-on for the lifetime of the activity window.
    ANativeActivity_setWindowFlags(state->activity,
                                   AWINDOW_FLAG_FULLSCREEN | AWINDOW_FLAG_KEEP_SCREEN_ON, 0);
}
// Hooks the engine object into the android_native_app_glue state, prepares
// accelerometer monitoring, and restores a previously saved engine state
// when the activity is being recreated.
TCGvoid TCGClient::AndroidInit(struct android_app* state)
{
    state->userData = &m_engineObj;
    state->onAppCmd = EngineHandleCmd;
    state->onInputEvent = EngineHandleInput;
    m_engineObj.app = state;

    // Prepare to monitor the accelerometer on the glue looper.
    m_engineObj.sensorManager = ASensorManager_getInstance();
    m_engineObj.accelerometerSensor =
        ASensorManager_getDefaultSensor(m_engineObj.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    m_engineObj.sensorEventQueue =
        ASensorManager_createEventQueue(m_engineObj.sensorManager, state->looper,
                                        LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        m_engineObj.state = *(struct SavedState*)state->savedState;
    }
}
// Loads the GLSL vertex/fragment shader sources out of the APK's assets and
// brings up the accelerometer event queue, sampling every
// SENSOR_REFRESH_PERIOD_US microseconds.
void init(AAssetManager *assetManager) {
    // --- vertex shader ---
    AAsset *vsAsset = AAssetManager_open(assetManager, "shader.glslv", AASSET_MODE_BUFFER);
    assert(vsAsset != NULL);
    const void *vsData = AAsset_getBuffer(vsAsset);
    assert(vsData != NULL);
    off_t vsLen = AAsset_getLength(vsAsset);
    vertexShaderSource = std::string((const char*)vsData, (size_t)vsLen);
    AAsset_close(vsAsset);

    // --- fragment shader ---
    AAsset *fsAsset = AAssetManager_open(assetManager, "shader.glslf", AASSET_MODE_BUFFER);
    assert(fsAsset != NULL);
    const void *fsData = AAsset_getBuffer(fsAsset);
    assert(fsData != NULL);
    off_t fsLen = AAsset_getLength(fsAsset);
    fragmentShaderSource = std::string((const char*)fsData, (size_t)fsLen);
    AAsset_close(fsAsset);

    // --- accelerometer ---
    sensorManager = AcquireASensorManagerInstance();
    assert(sensorManager != NULL);
    accelerometer = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    assert(accelerometer != NULL);
    looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
    assert(looper != NULL);
    accelerometerEventQueue = ASensorManager_createEventQueue(sensorManager, looper,
                                                              LOOPER_ID_USER, NULL, NULL);
    assert(accelerometerEventQueue != NULL);

    auto rc = ASensorEventQueue_enableSensor(accelerometerEventQueue, accelerometer);
    assert(rc >= 0);
    rc = ASensorEventQueue_setEventRate(accelerometerEventQueue, accelerometer,
                                        SENSOR_REFRESH_PERIOD_US);
    assert(rc >= 0);
    (void)rc; // silence unused-variable warning in NDEBUG builds

    generateXPos();
}
// Starts gyroscope sampling on a callback-driven event queue attached to the
// current thread's looper. The accelerometer/magnetometer variants are kept
// below as commented-out alternatives.
void startSensor() {
    ALooper* looper = ALooper_forThread();
    if (looper == NULL) {
        looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
    }
    engine.sensorManager = ASensorManager_getInstance();
    // get sensor
    // engine.accelerometerSensor = ASensorManager_getDefaultSensor(
    //     engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.gyroscopeSensor = ASensorManager_getDefaultSensor(
        engine.sensorManager, ASENSOR_TYPE_GYROSCOPE);
    // engine.magneticSensor = ASensorManager_getDefaultSensor(
    //     engine.sensorManager, ASENSOR_TYPE_MAGNETIC_FIELD);
    // engine.accelerometerEventQueue = ASensorManager_createEventQueue(
    //     engine.sensorManager, looper, LOOPER_ID_USER_ACCELEROMETER,
    //     accelerometerCallback, &engine);
    // Events are dispatched through gyroscopeCallback with &engine as context.
    engine.gyroscopeEventQueue = ASensorManager_createEventQueue(
        engine.sensorManager, looper, LOOPER_ID_USER_GYROSCOPE,
        gyroscopeCallback, &engine);
    // engine.magneticEventQueue = ASensorManager_createEventQueue(
    //     engine.sensorManager, looper, LOOPER_ID_USER_MAGNETIC,
    //     magneticCallback, &engine);
    // enable sensor
    // int a = ASensor_getMinDelay(engine.accelerometerSensor);
    // int b = ASensor_getMinDelay(engine.gyroscopeSensor);
    // int c = ASensor_getMinDelay(engine.magneticSensor);
    // LOGI("min-delay: %d, %d, %d", a, b, c);
    // ASensorEventQueue_setEventRate(engine.accelerometerEventQueue,
    //     engine.accelerometerSensor, 1000);
    // NOTE(review): the rate argument is a period in MICROSECONDS; 100 usec
    // requests 10 kHz, far below any sensor's min delay -- confirm intent.
    ASensorEventQueue_setEventRate(engine.gyroscopeEventQueue,
        engine.gyroscopeSensor, 100);
    // ASensorEventQueue_setEventRate(engine.magneticEventQueue,
    //     engine.magneticSensor, 1000);
    // ASensorEventQueue_enableSensor(engine.accelerometerEventQueue,
    //     engine.accelerometerSensor);
    ASensorEventQueue_enableSensor(engine.gyroscopeEventQueue,
        engine.gyroscopeSensor);
    // ASensorEventQueue_enableSensor(engine.magneticEventQueue,
    //     engine.magneticSensor);
}
// Loads the GLSL vertex/fragment shader sources from the APK assets and
// prepares the accelerometer event queue at SENSOR_REFRESH_RATE Hz.
void init(AAssetManager *assetManager) {
    AAsset *vertexShaderAsset = AAssetManager_open(assetManager, "shader.glslv", AASSET_MODE_BUFFER);
    assert(vertexShaderAsset != NULL);
    const void *vertexShaderBuf = AAsset_getBuffer(vertexShaderAsset);
    assert(vertexShaderBuf != NULL);
    off_t vertexShaderLength = AAsset_getLength(vertexShaderAsset);
    vertexShaderSource = std::string((const char*)vertexShaderBuf, (size_t)vertexShaderLength);
    AAsset_close(vertexShaderAsset);

    AAsset *fragmentShaderAsset = AAssetManager_open(assetManager, "shader.glslf", AASSET_MODE_BUFFER);
    assert(fragmentShaderAsset != NULL);
    const void *fragmentShaderBuf = AAsset_getBuffer(fragmentShaderAsset);
    assert(fragmentShaderBuf != NULL);
    off_t fragmentShaderLength = AAsset_getLength(fragmentShaderAsset);
    fragmentShaderSource = std::string((const char*)fragmentShaderBuf, (size_t)fragmentShaderLength);
    AAsset_close(fragmentShaderAsset);

    sensorManager = ASensorManager_getInstance();
    assert(sensorManager != NULL);
    accelerometer = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    assert(accelerometer != NULL);
    looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
    assert(looper != NULL);
    accelerometerEventQueue = ASensorManager_createEventQueue(sensorManager, looper, LOOPER_ID_USER, NULL, NULL);
    assert(accelerometerEventQueue != NULL);

    // Enable first, then request the sampling period (usec).
    // BUG FIX: setEventRateResult was captured but never checked, so a
    // failed rate request went unnoticed (and the variable was unused in
    // NDEBUG builds). Assert it like enableSensorResult.
    int enableSensorResult = ASensorEventQueue_enableSensor(accelerometerEventQueue, accelerometer);
    assert(enableSensorResult >= 0);
    int setEventRateResult = ASensorEventQueue_setEventRate(accelerometerEventQueue, accelerometer,
                                                            int32_t(1000000 / SENSOR_REFRESH_RATE));
    assert(setEventRateResult >= 0);

    generateXPos();
}
// Constructs the window wrapper around the glue app. In "silent" mode
// (config key "silent") the window stays headless: no glue callbacks and no
// sensor plumbing are installed.
Window::Window( WindowCreationData const& wcd )
    : m_app( wcd.app )
    , m_lstateready( false )
    , m_finishrequired( false )
    , m_finished( false )
{
    m_silent = utils::MainConf->boolean( "silent", false );
    if( m_silent )
    {
        return;
    }

    m_app->userData = this;
    m_app->onAppCmd = &g_handle_cmd;
    m_app->onInputEvent = &g_handle_input;

    // Prepare accelerometer monitoring on the glue looper (no callback).
    m_sensorManager = ASensorManager_getInstance();
    m_accelerometerSensor = ASensorManager_getDefaultSensor(
        m_sensorManager, ASENSOR_TYPE_ACCELEROMETER );
    m_sensorEventQueue = ASensorManager_createEventQueue(
        m_sensorManager, m_app->looper, LOOPER_ID_USER, 0, 0 );

    if( m_app->savedState )
    {
        // NOTE(review): saved-state restoration is intentionally empty here.
    }
}
// Installs the gdk glue/activity callbacks on the app state and records the
// stop handler. The sensor bootstrap at the top is disabled (`if (0)`) and
// kept only for reference.
void gdk_android_setup_app_callbacks(struct android_app *state, void (*onStop)()) {
    // Dead code: never executes. NOTE(review): delete or re-enable once
    // accelerometer support is decided.
    if (0) {
        ASensorManager* sensorManager = NULL;
        sensorManager = ASensorManager_getInstance();
        accelerometerSensor = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_ACCELEROMETER);
        sensorEventQueue = ASensorManager_createEventQueue(sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);
    }

    gdk_android_stop = onStop;
    state->onAppCmd = gdk_android_handle_glue_cmd;

    // Forward native-window lifecycle callbacks to the gdk handlers.
    state->activity->callbacks->onNativeWindowResized = onNativeWindowResized;
    state->activity->callbacks->onNativeWindowRedrawNeeded = onNativeWindowRedrawNeeded;
    state->activity->callbacks->onContentRectChanged = onContentRectChanged;
    state->activity->callbacks->onLowMemory = onLowMemory;

    // TODO: consider overriding state->inputPollSource.process instead of the following
    // or should we even get rid of this native_app for good?
    state->onInputEvent = android_handle_input;
}
// Native entry point: wires the engine into the glue state, attaches the JNI
// environment and accelerometer queue, then runs the poll/draw loop. Quit is
// two-phase: destroyRequested asks the OS main loop to quit; the loop only
// tears down once engine.requested_quit is set.
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure glue isn't stripped.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;
    engine.requested_quit=false;
    engine.os=NULL;
    engine.display_active=false;

    FileAccessAndroid::asset_manager=state->activity->assetManager;

    // Prepare to monitor accelerometer
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    ANativeActivity_setWindowFlags(state->activity,AWINDOW_FLAG_FULLSCREEN|AWINDOW_FLAG_KEEP_SCREEN_ON,0);

    state->activity->vm->AttachCurrentThread(&engine.jni, NULL);

    // loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        // nullmax bails out after 50 consecutive source-less wakeups so the
        // frame loop below still gets a chance to run.
        int nullmax=50;
        while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, (void**)&source)) >= 0) {

            // Process this event.
            if (source != NULL) {
                // LOGI("process\n");
                source->process(state, source);
            } else {
                nullmax--;
                if (nullmax<0)
                    break;
            }

            // If a sensor has data, process it now.
            // LOGI("events\n");
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) {
                        if (engine.os) {
                            engine.os->process_accelerometer(Vector3(event.acceleration.x, event.acceleration.y, event.acceleration.z));
                        }
                    }
                }
            }

            // Check if we are exiting: forward the request to the OS main
            // loop and clear the glue flag; actual teardown happens when the
            // engine sets requested_quit.
            if (state->destroyRequested != 0) {
                if (engine.os) {
                    engine.os->main_loop_request_quit();
                }
                state->destroyRequested=0;
            }
            if (engine.requested_quit) {
                engine_term_display(&engine);
                exit(0);
                return;
            }
            // LOGI("end\n");
        }

        // LOGI("engine animating? %i\n",engine.animating);
        if (engine.animating) {
            //do os render
            engine_draw_frame(&engine);
            //LOGI("TERM WINDOW");
        }
    }
}
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state) {
    la_window_t* window = la_memory_allocate(sizeof(la_window_t));
    int ident;
    int events;
    struct android_poll_source* source;

    // Make sure glue isn't stripped.
    app_dummy();

    state->userData = window;
    state->onAppCmd = window_handle_cmd;
    state->onInputEvent = window_handle_input;
    window->app = state;

    // Prepare to monitor accelerometer
    window->sensorManager = ASensorManager_getInstance();
    window->accelerometerSensor = ASensorManager_getDefaultSensor(
        window->sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    window->sensorEventQueue = ASensorManager_createEventQueue(
        window->sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    // if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        // window->state = *(struct saved_state*)state->savedState;
    // }

    // Run main():
    la_window = window; // TODO: is needed?
    // la_rmcexit stays 1 until another thread requests shutdown.
    SDL_AtomicSet(&la_rmcexit, 1);
    // Window thread ( Drawing + Events ).
    while (SDL_AtomicGet(&la_rmcexit)) {
        // Poll Events (non-blocking: timeout 0 keeps the frame loop running).
        ident = ALooper_pollAll(0, NULL, &events, (void**)&source);
        // Process this event.
        if (source != NULL) {
            source->process(state, source);
        }
        // If a sensor has data, process it now.
        if (ident == LOOPER_ID_USER) {
            if (window->accelerometerSensor != NULL) {
                ASensorEvent event;
                while (ASensorEventQueue_getEvents(
                        window->sensorEventQueue, &event, 1) > 0) {
                    // The latest sample becomes the window's input state.
                    window->input.accel.x = event.acceleration.x;
                    window->input.accel.y = event.acceleration.y;
                    window->input.accel.z = event.acceleration.z;
                }
            }
        }
        // Run the cross-platform window loop.
        if(window->context)
            la_window_loop__(window);
        // Update the screen.
        la_port_swap_buffers(window);
    }
    la_print("port-android quitting....");
    // The cross-platform window kill.
    la_window_kill__(window);
    // The window is being hidden or closed, clean it up.
    window_term_display(window);
    la_print("port-android quitted....");
    exit(0);
    return;
}
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure glue isn't stripped.
    app_dummy();

    // Hook the app-level lifecycle callbacks before the loop starts.
    GAppOnStartup = &UCppLab::OnStartup;
    GAppOnTouched = &UCppLab::OnTouched;

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Prepare to monitor accelerometer
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        engine.state = *(struct saved_state*)state->savedState;
    }

    // loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, (void**)&source)) >= 0) {

            // Process this event.
            if (source != NULL) {
                source->process(state, source);
            }

            // If a sensor has data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) {
                        // LOGI("accelerometer: x=%f y=%f z=%f",
                        // event.acceleration.x, event.acceleration.y,
                        // event.acceleration.z);
                    }
                }
            }

            // Check if we are exiting.
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Done with events; draw next animation frame.
            engine.state.angle += .01f;
            if (engine.state.angle > 1) {
                engine.state.angle = 0;
            }

            // Drawing is throttled to the screen update rate, so there
            // is no need to do timing here.
            engine_draw_frame(&engine);
        }
    }
}
/** * This is the main entry point of a native application that is using * android_native_app_glue. It runs in its own thread, with its own * event loop for receiving input events and doing other things. */ void android_main(struct android_app* state) { // Make sure glue isn't stripped. app_dummy(); memset(&engine, 0, sizeof(engine)); state->userData = &engine; state->onAppCmd = engine_handle_cmd; state->onInputEvent = engine_handle_input; engine.app = state; // Prepare to monitor accelerometer engine.sensorManager = ASensorManager_getInstance(); engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER); engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL); if (state->savedState != NULL) { // We are starting with a previous saved state; restore from it. engine.state = *(struct saved_state*)state->savedState; } // loop waiting for stuff to do. while (1) { // Read all pending events. int ident; int events; struct android_poll_source* source; // If not animating, we will block forever waiting for events. // If animating, we loop until all events are read, then continue // to draw the next frame of animation. while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, (void**)&source)) >= 0) { // Process this event. if (source != NULL) { source->process(state, source); } // If a sensor has data, process it now. 
if (ident == LOOPER_ID_USER) { if (engine.accelerometerSensor != NULL) { ASensorEvent event; while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) { LOG_EVENTS_DEBUG("accelerometer: x=%f y=%f z=%f", event.acceleration.x, event.acceleration.y, event.acceleration.z); AConfiguration* _currentconf = AConfiguration_new(); AConfiguration_fromAssetManager(_currentconf, state->activity->assetManager); static int32_t _orientation = AConfiguration_getOrientation(_currentconf); if (ACONFIGURATION_ORIENTATION_LAND != _orientation) { // ACONFIGURATION_ORIENTATION_ANY // ACONFIGURATION_ORIENTATION_PORT // ACONFIGURATION_ORIENTATION_SQUARE cocos2d::Acceleration acc; acc.x = -event.acceleration.x/10; acc.y = -event.acceleration.y/10; acc.z = event.acceleration.z/10; acc.timestamp = 0; cocos2d::EventAcceleration accEvent(acc); cocos2d::EventDispatcher::getInstance()->dispatchEvent(&accEvent); } else { // ACONFIGURATION_ORIENTATION_LAND // swap x and y parameters cocos2d::Acceleration acc; acc.x = event.acceleration.y/10; acc.y = -event.acceleration.x/10; acc.z = event.acceleration.z/10; acc.timestamp = 0; cocos2d::EventAcceleration accEvent(acc); cocos2d::EventDispatcher::getInstance()->dispatchEvent(&accEvent); } } } } // Check if we are exiting. if (state->destroyRequested != 0) { engine_term_display(&engine); memset(&engine, 0, sizeof(engine)); s_methodInitialized = false; return; } } if (engine.animating) { // Done with events; draw next animation frame. engine.state.angle += .01f; if (engine.state.angle > 1) { engine.state.angle = 0; } // Drawing is throttled to the screen update rate, so there // is no need to do timing here. LOG_RENDER_DEBUG("android_main : engine.animating"); engine_draw_frame(&engine); } else { LOG_RENDER_DEBUG("android_main : !engine.animating"); } } }
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure glue isn't stripped.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Prepare to monitor accelerometer
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        engine.state = *(struct saved_state*)state->savedState;
    } else {
        // Cold start: invoke com.oriku.Bridge.initOuya(activity) over JNI.
        // NativeActivity's own class loader cannot see application classes,
        // so the Bridge class is loaded through the activity's ClassLoader.
        JNIEnv *jni = state->activity->env;
        state->activity->vm->AttachCurrentThread(&jni, NULL);
        jclass activityClass = jni->FindClass("android/app/NativeActivity");
        jmethodID getClassLoader = jni->GetMethodID(activityClass,"getClassLoader", "()Ljava/lang/ClassLoader;");
        jobject cls = jni->CallObjectMethod(state->activity->clazz, getClassLoader);
        jclass classLoader = jni->FindClass("java/lang/ClassLoader");
        jmethodID findClass = jni->GetMethodID(classLoader, "loadClass", "(Ljava/lang/String;)Ljava/lang/Class;");
        jstring strClassName = jni->NewStringUTF("com/oriku/Bridge");
        jclass j_bridge = (jclass)jni->CallObjectMethod(cls, findClass, strClassName);
        jmethodID j_initOuya = jni->GetStaticMethodID(j_bridge, "initOuya","(Landroid/app/Activity;)V");
        jni->CallStaticVoidMethod(j_bridge, j_initOuya, state->activity->clazz);
        // Finished with the JVM.
        state->activity->vm->DetachCurrentThread();
    }

    // loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, (void**)&source)) >= 0) {

            // Process this event.
            if (source != NULL) {
                source->process(state, source);
            }

            // If a sensor has data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue, &event, 1) > 0) {
                        LOGI("accelerometer: x=%f y=%f z=%f", event.acceleration.x, event.acceleration.y, event.acceleration.z);
                    }
                }
            }

            // Check if we are exiting.
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Done with events; draw next animation frame.
            engine.state.angle += .01f;
            if (engine.state.angle > 1) {
                engine.state.angle = 0;
            }

            // Drawing is throttled to the screen update rate, so there
            // is no need to do timing here.
            engine_draw_frame(&engine);
        }
    }
}
// Native entry point: attaches the thread to the JVM (with a pthread key so
// the attachment is released on thread exit), wires the AInstance into the
// glue state, optionally sets up the accelerometer, and runs the
// poll/update/draw loop until the instance signals done.
void android_main(struct android_app* state) {
    app_dummy();
    //sleep(5); // Sleep a little so GDB can attach itself

    // Ensure the JVM attachment is detached when this thread exits.
    pthread_key_create(&s_thread_key, detach_current_thread);
    JNIEnv* env;
    state->activity->vm->AttachCurrentThread(&env, nullptr);
    pthread_setspecific(s_thread_key, state->activity->vm);

    AInstance ainstance;
    state->userData = &ainstance;
    state->onAppCmd = android_handle_event;
    state->onInputEvent = android_handle_input;
    ainstance.app = state;
    g_native_activity = state->activity;
    Path::set_current();

    // Prepare to monitor accelerometer. The event queue is only created when
    // the device actually has one; every later use is guarded the same way.
    ainstance.sensorManager = ASensorManager_getInstance();
    ainstance.accelerometerSensor = ASensorManager_getDefaultSensor(
        ainstance.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    if (ainstance.accelerometerSensor) {
        ainstance.sensorEventQueue = ASensorManager_createEventQueue(
            ainstance.sensorManager, state->looper, LOOPER_ID_USER, nullptr, nullptr);
    }

    Chrono chrono;
    while (!ainstance.done) {
        int ident;
        int events;
        struct android_poll_source* source;

        // Block while inactive (-1 timeout); poll without blocking when active.
        while ((ident = ALooper_pollAll(
                   (!ainstance.active ? -1 : 0),
                   nullptr,
                   &events,
                   reinterpret_cast<void**>(&source))) >= 0)
        {
            if (source)
                source->process(state, source);
            if (ainstance.done)
                break;

            // If a sensor has data, process it now.
            if (ainstance.active && ident == LOOPER_ID_USER && ainstance.accelerometerSensor)
            {
                ASensorEvent event;
                while (ASensorEventQueue_getEvents(
                           ainstance.sensorEventQueue, &event, 1) > 0)
                {
                    ainstance.director->input().accelerated(
                        event.acceleration.x,
                        event.acceleration.y,
                        event.acceleration.z,
                        event.timestamp);
                }
            }
        }
        chrono.update();
        // NOTE(review): bitwise '&' on the two flags -- equivalent to '&&'
        // only if both are strictly 0/1; confirm intent.
        if (!(ainstance.initialised & ainstance.active))
            continue;

        ainstance.director->update(chrono.delta());
        ainstance.director->draw();
        eglSwapBuffers(ainstance.display, ainstance.surface);
    }
    android_destroy_display(&ainstance);
}
//accelerometer static void addAccelerometer(){ sensors.accelerometerSensor = ASensorManager_getDefaultSensor( sensors.sensorManager, ASENSOR_TYPE_ACCELEROMETER); }
void GoAndroid_disableSensor(int s) { ASensorManager* manager = ASensorManager_getInstance(); const ASensor* sensor = ASensorManager_getDefaultSensor(manager, s); ASensorEventQueue_disableSensor(queue, sensor); }
/**
 * This is the main entry point of a native application that uses
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state)
{
    struct engine engine;

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Prepare to monitor the accelerometer.
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
            ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager,
            state->looper, LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // We are starting from a previously saved state; restore it.
        engine.state = *(struct saved_state*)state->savedState;
    }

    engine.animating = 1;

    // Loop waiting for things to process.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, block forever waiting for events.
        // If animating, loop until all events are read, then continue
        // to draw the next frame of animation.
        while ((ident = ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events,
                (void**)&source)) >= 0) {
            // Process this event.
            if (source != NULL) {
                source->process(state, source);
            }

            // If a sensor has data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue,
                            &event, 1) > 0) {
                        // LOGI("accelerometer: x=%f y=%f z=%f",
                        //         event.acceleration.x, event.acceleration.y,
                        //         event.acceleration.z);
                    }
                }
            }

            // Check whether we are exiting.
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Done with events; draw the next animation frame.
            engine.state.angle += .01f;
            if (engine.state.angle > 1) {
                engine.state.angle = 0;
            }

            // Drawing is throttled to the screen update rate,
            // so there is no need to do timing here.
            engine_draw_frame(&engine);
        }
    }
}
// Main function: entry point of the native activity.
void android_main(struct android_app* state)
{
    struct engine engine;

    // Keep the native app glue from being stripped by the linker.
    app_dummy();

    // Zero out the application state area.
    memset(&engine, 0, sizeof(engine));
    // Install the user data pointer.
    state->userData = &engine;
    // Set the application-command handler.
    state->onAppCmd = engine_handle_cmd;
    // Set the input-event handler.
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Initialization required to read data from the sensors.
    engine.sensorManager = ASensorManager_getInstance();
    // Prepare to read accelerometer data.
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(
            engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    // Prepare to read gyroscope data.
    engine.gyroscopeSensor = ASensorManager_getDefaultSensor(
            engine.sensorManager, ASENSOR_TYPE_GYROSCOPE );
    // Create a new sensor event queue.
    engine.sensorEventQueue = ASensorManager_createEventQueue(
            engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);
    // Obtain the AssetManager.
    engine.assetManager = state->activity->assetManager;

    if (state->savedState != NULL) {
        // Restore the previously saved state.
        engine.state = *(struct saved_state*) state->savedState;
    }

    while (1) {
        int ident;
        int events;
        struct android_poll_source* source;

        // Process sensor data according to the application's state:
        // block (-1) when idle, drain events (0) when animating.
        while ((ident = ALooper_pollAll(engine.animating ?
                0 : -1, NULL, &events, (void**) &source)) >= 0) {

            // Process internal glue events.
            if (source != NULL) {
                source->process(state, source);
            }

            // Process the data waiting in the sensor event queue.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL
                        && engine.gyroscopeSensor != NULL) {
                    ASensorEvent event[2];
                    int count;
                    int i;
                    // Both sensors share one queue; up to two events are
                    // fetched per call and dispatched on event type.
                    while ((count = ASensorEventQueue_getEvents(
                            engine.sensorEventQueue, event, 2)) > 0) {
                        for (i = 0; i < count; i++){
                            switch(event[i].type){
                            case ASENSOR_TYPE_ACCELEROMETER:
                                // Log the accelerometer values.
                                LOGI("accelerometer: x=%f y=%f z=%f",
                                        event[i].acceleration.x,
                                        event[i].acceleration.y,
                                        event[i].acceleration.z);
                                break;
                            case ASENSOR_TYPE_GYROSCOPE:
                                // Log the gyroscope values.
                                LOGI("GYROSCOPE: x=%f y=%f z=%f",event[i].vector.azimuth,event[i].vector.pitch,event[i].vector.roll );
                                break;
                            }
                        }
                    }
                }
            }

            // Tear down the EGL state when destruction is requested.
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Do the work needed to draw the next frame:
            // advance the rotation angles and wrap them into [0, 360].
            int i = 0,j;
            engine.angle[0] += 3;
            engine.angle[1] += 1;
            for (j = 0; j < 3; j++){
                if (engine.angle[j] > 360) engine.angle[j] -= 360;
                if (engine.angle[j] < 0) engine.angle[j] += 360;
            }
            // Render the frame.
            engine_draw_frame(&engine);
        }
    }
}
// Acquire the system sensor manager, look up the default accelerometer,
// and attach a sensor event queue to the app's looper under LOOPER_ID_USER.
// No callback is installed, so sensor events must be drained by polling
// the queue from the event loop.
void Engine::initSensors()
{
    mSensorManager = ASensorManager_getInstance();

    mAccelerometerSensor =
        ASensorManager_getDefaultSensor(mSensorManager,
                                        ASENSOR_TYPE_ACCELEROMETER);

    mSensorEventQueue =
        ASensorManager_createEventQueue(mSensorManager,
                                        mApp->looper,
                                        LOOPER_ID_USER,
                                        NULL,
                                        NULL);
}
// android_app command callback: handles lifecycle commands delivered by the
// native app glue. Initializes OgreKit and the accelerometer on first window
// creation, recreates/destroys EGL resources on window changes, and toggles
// accelerometer delivery with focus to save battery.
static void handleCmd(struct android_app* app, int32_t cmd)
{
    switch (cmd)
    {
    case APP_CMD_SAVE_STATE:
        break;
    case APP_CMD_INIT_WINDOW:
        if (app->window) // && mRoot)
        {
            AConfiguration* config = AConfiguration_new();
            AConfiguration_fromAssetManager(config, app->activity->assetManager);

            if (!okit.isInited())
            {
                LOG_FOOT;
                //okit.getPrefs().winsize.x = drawWidth;
                //okit.getPrefs().winsize.y = drawHeight;
                okit.assetMgr = app->activity->assetManager;
                // NOTE(review): (int) casts of pointers truncate on 64-bit
                // targets -- confirm these prefs are only ever read back as
                // 32-bit handles.
                okit.getPrefs().extWinhandle = Ogre::StringConverter::toString((int)app->window);
                okit.getPrefs().androidConfig = Ogre::StringConverter::toString((int)config);
                okit.getPrefs().verbose = true;

                //gkLogger::enable("OgreKitDemo.log", true);
                //Ogre::LogManager::getSingleton().getDefaultLog()->addListener(&gLogListener);

                // First-time engine init: window, input manager, sensors,
                // then start the step loop.
                if (okit.init(""))
                {
                    LOG_FOOT;
                    m_window = gkWindowSystem::getSingleton().getMainWindow();
                    LOG_FOOT;
                    okit.m_input = static_cast<OIS::AndroidInputManager*>(m_window->getInputManager());

                    // Polled sensor queue on the app's looper (no callback).
                    okit.sensorManager = ASensorManager_getInstance();
                    okit.accelerometerSensor = ASensorManager_getDefaultSensor(okit.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
                    okit.sensorEventQueue = ASensorManager_createEventQueue(okit.sensorManager, okit.state->looper, LOOPER_ID_USER, NULL, NULL);

                    gkEngine::getSingleton().initializeStepLoop();

                    LOGI("inited");
                    //okit.setWindowSize(drawWidth, drawHeight);
                }
            }
            else
            {
                // Already initialized: the window came back, so only the
                // EGL internal resources need to be recreated.
                LOG_FOOT;
                Ogre::RenderWindow* rwnd = gkWindowSystem::getSingleton().getMainRenderWindow();
                if (rwnd)
                    static_cast<Ogre::AndroidEGLWindow*>(rwnd)->_createInternalResources(app->window, config);
            }

            LOG_FOOT;
            AConfiguration_delete(config);
        }
        break;
    case APP_CMD_TERM_WINDOW:
        // Window going away: release EGL resources but keep the engine alive
        // so INIT_WINDOW can restore them later.
        if (okit.isInited())
        {
            Ogre::RenderWindow* rwnd = gkWindowSystem::getSingleton().getMainRenderWindow();
            if (rwnd)
                static_cast<Ogre::AndroidEGLWindow*>(rwnd)->_destroyInternalResources();
        }
        LOGI("terminiate.");
        break;
    case APP_CMD_GAINED_FOCUS:
        // When our app gains focus, we start monitoring the accelerometer.
        if (okit.accelerometerSensor != NULL)
        {
            ASensorEventQueue_enableSensor(okit.sensorEventQueue, okit.accelerometerSensor);
            // We'd like to get 60 events per second (in us).
            ASensorEventQueue_setEventRate(okit.sensorEventQueue, okit.accelerometerSensor, (1000L/60)*1000);
        }
        break;
    case APP_CMD_LOST_FOCUS:
        // When our app loses focus, we stop monitoring the accelerometer.
        // This is to avoid consuming battery while not being used.
        if (okit.accelerometerSensor != NULL)
        {
            ASensorEventQueue_disableSensor(okit.sensorEventQueue, okit.accelerometerSensor);
        }
        break;
    case APP_CMD_CONFIG_CHANGED:
        break;
    }
}
// try and open up the JoyWarrior file descriptor bool CSensorAndroidBuiltIn::detect() { setType(); setPort(); // get the singleton instance of the m_pSensorManager if (!m_pSensorManager) m_pSensorManager = (ASensorManager*) ASensorManager_getInstance(); /* // sensor listing ASensorList pSensorList = NULL; int iNum = ASensorManager_getSensorList(m_pSensorManager, &pSensorList); if (iNum && pSensorList) { fprintf(stdout, "\n\n%d Detected Sensors:\n", iNum); //int i = 0; for (int i=0; i < iNum; i++) { //while (i<10 && (pSensorList+i)) { //for (int i=0; i < iNum; i++) { fprintf(stdout, " %s\n", ASensor_getName(*(pSensorList+i))); i++; } } else { fprintf(stdout, "\n\nNo Sensor List? %d\n\n", iNum); } */ // create looper m_pLooper = ALooper_forThread(); // get existing looper if (!m_pLooper) { // make new looper //m_pLooper = ALooper_prepare(0); m_pLooper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS); } if (!m_pLooper) { // no existing or new looper -- error fprintf(stderr, "can't create Looper\n"); return false; // can't create looper } // setup event queue //m_pSensorEventQueue = ASensorManager_createEventQueue(m_pSensorManager, m_pLooper, // LOOPER_ID_QCN, QCN_ALooper_callback, &l_SensorVector); m_pSensorEventQueue = ASensorManager_createEventQueue(m_pSensorManager, m_pLooper, LOOPER_ID_QCN, NULL, NULL); if (!m_pSensorEventQueue) { fprintf(stderr, "can't create SensorEventQueue\n"); return false; // can't setup queue } // get the default accelerometer m_pSensor = (ASensor*) ASensorManager_getDefaultSensor(m_pSensorManager, ASENSOR_TYPE_ACCELEROMETER); if (!m_pSensor) { //fprintf(stdout, "No Android accelerometer detected.\n"); return false; // no sensor } int iRetVal = 0; if ((iRetVal = ASensorEventQueue_enableSensor(m_pSensorEventQueue, m_pSensor)) < 0) { fprintf(stderr, "Error in enableSensor %d\n", iRetVal); return false; }; m_fResolution = ASensor_getResolution(m_pSensor); m_minDelayMsec = ASensor_getMinDelay(m_pSensor); int rateMsec = (int)((sm->dt > 0. 
? sm->dt : g_DT) * 1000.); //fprintf(stdout, "Rates: m_minDelayMSec = %d raceMsec = %d\n", m_minDelayMsec, rateMsec); //if (rateMsec > m_minDelayMsec) m_minDelayMsec = rateMsec; if (rateMsec < m_minDelayMsec) m_minDelayMsec = rateMsec; fprintf(stdout, "Setting data rate to %d Hz\n", 1000L/m_minDelayMsec); strlcpy(m_strSensor, ASensor_getName(m_pSensor), _MAX_PATH); strlcpy(m_strVendor, ASensor_getVendor(m_pSensor), _MAX_PATH); // NB: the rate is in microseconds! if ((iRetVal = ASensorEventQueue_setEventRate(m_pSensorEventQueue, m_pSensor, m_minDelayMsec * 1000L)) < 0) { fprintf(stderr, "Error in setEventRate %d\n", iRetVal); // return false; // not really a fatal error } fprintf(stdout, "Android Default Sensor Detected: \n\n %s - %s\n" " Res = %f --- Min Delay msec = %d\n" " m_pSensor=%x m_pSensorEventQueue=%x\n", m_strVendor, m_strSensor, m_fResolution, m_minDelayMsec, m_pSensor, m_pSensorEventQueue); setType(SENSOR_ANDROID); setSingleSampleDT(true); // set to true in raw mode so we don't get any interpolated/avg points (i.e. just the "integer" value hopefully) return (bool)(getTypeEnum() == SENSOR_ANDROID); }