void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, int ioHandle, void *param2) { LOGV("ioConfigChanged() event %d", event); OutputDescriptor *desc; uint32_t stream; if (ioHandle == 0) return; Mutex::Autolock _l(AudioSystem::gLock); switch (event) { case STREAM_CONFIG_CHANGED: if (param2 == 0) break; stream = *(uint32_t *)param2; LOGV("ioConfigChanged() STREAM_CONFIG_CHANGED stream %d, output %d", stream, ioHandle); if (gStreamOutputMap.indexOfKey(stream) >= 0) { gStreamOutputMap.replaceValueFor(stream, ioHandle); } break; case OUTPUT_OPENED: { if (gOutputs.indexOfKey(ioHandle) >= 0) { LOGV("ioConfigChanged() opening already existing output! %d", ioHandle); break; } if (param2 == 0) break; desc = (OutputDescriptor *)param2; OutputDescriptor *outputDesc = new OutputDescriptor(*desc); gOutputs.add(ioHandle, outputDesc); LOGV("ioConfigChanged() new output samplingRate %d, format %d channels %d frameCount %d latency %d", outputDesc->samplingRate, outputDesc->format, outputDesc->channels, outputDesc->frameCount, outputDesc->latency); } break; case OUTPUT_CLOSED: { if (gOutputs.indexOfKey(ioHandle) < 0) { LOGW("ioConfigChanged() closing unknow output! %d", ioHandle); break; } LOGV("ioConfigChanged() output %d closed", ioHandle); gOutputs.removeItem(ioHandle); for (int i = gStreamOutputMap.size() - 1; i >= 0 ; i--) { if (gStreamOutputMap.valueAt(i) == ioHandle) { gStreamOutputMap.removeItemsAt(i); } } } break; case OUTPUT_CONFIG_CHANGED: { int index = gOutputs.indexOfKey(ioHandle); if (index < 0) { LOGW("ioConfigChanged() modifying unknow output! 
%d", ioHandle); break; } if (param2 == 0) break; desc = (OutputDescriptor *)param2; LOGV("ioConfigChanged() new config for output %d samplingRate %d, format %d channels %d frameCount %d latency %d", ioHandle, desc->samplingRate, desc->format, desc->channels, desc->frameCount, desc->latency); OutputDescriptor *outputDesc = gOutputs.valueAt(index); delete outputDesc; outputDesc = new OutputDescriptor(*desc); gOutputs.replaceValueFor(ioHandle, outputDesc); } break; case INPUT_OPENED: case INPUT_CLOSED: case INPUT_CONFIG_CHANGED: break; } }
/*
 * Scan every device node under /dev/input and open the three event devices
 * whose EVIOCGNAME reports "compass", "proximity" and "LightSensor".
 *
 * mode    - flags passed straight to open(2) (e.g. O_RDONLY)
 * akm_fd  - out: fd of the "compass" device, or -1 if not found
 * p_fd    - out: fd of the "proximity" device, or -1 if not found
 * l_fd    - out: fd of the "LightSensor" device, or -1 if not found
 *
 * Returns 0 when all three devices were opened, -1 otherwise (including when
 * /dev/input cannot be opened). NOTE(review): on partial failure the fds that
 * DID open are left open and reported through the out parameters — the caller
 * is responsible for closing them.
 */
static int open_inputs(int mode, int *akm_fd, int *p_fd, int *l_fd)
{
    /* scan all input drivers and look for "compass" */
    int fd = -1;
    const char *dirname = "/dev/input";
    char devname[PATH_MAX];
    char *filename;
    DIR *dir;
    struct dirent *de;

    dir = opendir(dirname);
    if(dir == NULL)
        return -1;

    /* Build "/dev/input/" once; each entry name is appended at 'filename'. */
    strcpy(devname, dirname);
    filename = devname + strlen(devname);
    *filename++ = '/';
    *akm_fd = *p_fd = *l_fd = -1;

    while((de = readdir(dir))) {
        /* skip "." and ".." */
        if(de->d_name[0] == '.' &&
           (de->d_name[1] == '\0' ||
            (de->d_name[1] == '.' && de->d_name[2] == '\0')))
            continue;
        strcpy(filename, de->d_name);
        fd = open(devname, mode);
        if (fd>=0) {
            char name[80];
            /* Query the driver-reported device name; treat ioctl failure
             * as an empty name so the strcmp checks below fall through. */
            if (ioctl(fd, EVIOCGNAME(sizeof(name) - 1), &name) < 1) {
                name[0] = '\0';
            }
            if (!strcmp(name, "compass")) {
                LOGV("using %s (name=%s)", devname, name);
                *akm_fd = fd;
            }
            else if (!strcmp(name, "proximity")) {
                LOGV("using %s (name=%s)", devname, name);
                *p_fd = fd;
            }
            else if (!strcmp(name, "LightSensor")) {
                LOGV("using %s (name=%s)", devname, name);
                *l_fd = fd;
            }
            else
                close(fd);  /* not one of ours */
        }
    }
    closedir(dir);

    /* fd doubles as the return code from here on: 0 = all found. */
    fd = 0;
    /* NOTE(review): errno here may be stale (set by the last readdir/open/
     * closedir), so the strerror text below can be misleading — confirm. */
    if (*akm_fd < 0) {
        LOGE("Couldn't find or open 'compass' driver (%s)", strerror(errno));
        fd = -1;
    }
    if (*p_fd < 0) {
        LOGE("Couldn't find or open 'proximity' driver (%s)", strerror(errno));
        fd = -1;
    }
    if (*l_fd < 0) {
        LOGE("Couldn't find or open 'light' driver (%s)", strerror(errno));
        fd = -1;
    }
    return fd;
}
/*
 * Block until a complete event set is available from one of the three sensor
 * event devices (compass/akm, proximity/gp, light/ls), accumulate it into
 * dev->pendingSensors, and hand the next ready sensor sample to the caller
 * via pick_sensor().
 *
 * Returns:
 *   - the value of pick_sensor() when a sensor sample is ready,
 *   - 0x7FFFFFFF when a SYN_CONFIG event is seen (used as the "wake up and
 *     exit the poll loop" signal),
 *   - -1 on invalid fds or select() error.
 */
static int data__poll(struct sensors_data_context_t *dev, sensors_data_t* values)
{
    int akm_fd = dev->events_fd[0];
    int gp_fd = dev->events_fd[1];
    int ls_fd = dev->events_fd[2];

    if (akm_fd < 0) {
        LOGE("invalid compass file descriptor, fd=%d", akm_fd);
        return -1;
    }
    if (gp_fd < 0) {
        LOGE("invalid proximity-sensor file descriptor, fd=%d", gp_fd);
        return -1;
    }
    if (ls_fd < 0) {
        LOGE("invalid light-sensor file descriptor, fd=%d", ls_fd);
        return -1;
    }

    // there are pending sensors, returns them now...
    if (dev->pendingSensors) {
        LOGV("pending sensors 0x%08x", dev->pendingSensors);
        return pick_sensor(dev, values);
    }

    // wait until we get a complete event for an enabled sensor
    uint32_t new_sensors = 0;
    while (1) {
        /* read the next event; first, read the compass event, then the
           proximity event */
        struct input_event event;
        int got_syn = 0;   /* saw an EV_SYN on any fd this iteration */
        int exit = 0;      /* saw SYN_CONFIG -> caller must exit */
        int nread;
        fd_set rfds;
        int n;

        FD_ZERO(&rfds);
        FD_SET(akm_fd, &rfds);
        FD_SET(gp_fd, &rfds);
        FD_SET(ls_fd, &rfds);
        /* Block with no timeout until any of the three fds is readable. */
        n = select(__MAX(akm_fd, __MAX(gp_fd, ls_fd)) + 1,
                   &rfds, NULL, NULL, NULL);
        LOGV("return from select: %d\n", n);
        if (n < 0) {
            LOGE("%s: error from select(%d, %d): %s",
                 __FUNCTION__, akm_fd, gp_fd, strerror(errno));
            return -1;
        }

        /* One input_event is read per ready fd per loop iteration; the same
         * 'event' struct is reused, so ordering of these blocks matters. */
        if (FD_ISSET(akm_fd, &rfds)) {
            nread = read(akm_fd, &event, sizeof(event));
            if (nread == sizeof(event)) {
                new_sensors |= data__poll_process_akm_abs(dev, akm_fd, &event);
                LOGV("akm abs %08x", new_sensors);
                got_syn = event.type == EV_SYN;
                exit = got_syn && event.code == SYN_CONFIG;
                if (got_syn) {
                    LOGV("akm syn %08x", new_sensors);
                    data__poll_process_syn(dev, &event, new_sensors);
                    new_sensors = 0;
                }
            } else LOGE("akm read too small %d", nread);
        } else LOGV("akm fd is not set");

        if (FD_ISSET(gp_fd, &rfds)) {
            nread = read(gp_fd, &event, sizeof(event));
            if (nread == sizeof(event)) {
                new_sensors |= data__poll_process_gp_abs(dev, gp_fd, &event);
                LOGV("gp abs %08x", new_sensors);
                /* |= : keep any syn/exit already seen on an earlier fd */
                got_syn |= event.type == EV_SYN;
                exit |= got_syn && event.code == SYN_CONFIG;
                if (got_syn) {
                    LOGV("gp syn %08x", new_sensors);
                    data__poll_process_syn(dev, &event, new_sensors);
                    new_sensors = 0;
                }
            } else LOGE("gp read too small %d", nread);
        } else LOGV("gp fd is not set");

        if (FD_ISSET(ls_fd, &rfds)) {
            nread = read(ls_fd, &event, sizeof(event));
            if (nread == sizeof(event)) {
                new_sensors |= data__poll_process_ls_abs(dev, ls_fd, &event);
                LOGV("ls abs %08x", new_sensors);
                got_syn |= event.type == EV_SYN;
                exit |= got_syn && event.code == SYN_CONFIG;
                if (got_syn) {
                    LOGV("ls syn %08x", new_sensors);
                    data__poll_process_syn(dev, &event, new_sensors);
                    new_sensors = 0;
                }
            } else LOGE("ls read too small %d", nread);
        } else LOGV("ls fd is not set");

        if (exit) {
            // we use SYN_CONFIG to signal that we need to exit the
            // main loop.
            //LOGV("got empty message: value=%d", event->value);
            LOGV("exit");
            return 0x7FFFFFFF;
        }

        if (got_syn && dev->pendingSensors) {
            LOGV("got syn, picking sensor");
            return pick_sensor(dev, values);
        }
    }
}
//------------------------------------------------------------------------------------------------- int JetPlayer::clearQueue() { LOGV("JetPlayer::clearQueue"); Mutex::Autolock lock(mMutex); return JET_Clear_Queue(mEasData); }
//------------------------------------------------------------------------------------------------- int JetPlayer::init() { //Mutex::Autolock lock(&mMutex); EAS_RESULT result; // retrieve the EAS library settings if (pLibConfig == NULL) pLibConfig = EAS_Config(); if (pLibConfig == NULL) { LOGE("JetPlayer::init(): EAS library configuration could not be retrieved, aborting."); return EAS_FAILURE; } // init the EAS library result = EAS_Init(&mEasData); if( result != EAS_SUCCESS) { LOGE("JetPlayer::init(): Error initializing Sonivox EAS library, aborting."); mState = EAS_STATE_ERROR; return result; } // init the JET library with the default app event controller range result = JET_Init(mEasData, NULL, sizeof(S_JET_CONFIG)); if( result != EAS_SUCCESS) { LOGE("JetPlayer::init(): Error initializing JET library, aborting."); mState = EAS_STATE_ERROR; return result; } // create the output AudioTrack mAudioTrack = new AudioTrack(); mAudioTrack->set(AUDIO_STREAM_MUSIC, //TODO parametrize this pLibConfig->sampleRate, 1, // format = PCM 16bits per sample, (pLibConfig->numChannels == 2) ? AUDIO_CHANNEL_OUT_STEREO : AUDIO_CHANNEL_OUT_MONO, mTrackBufferSize, #ifdef WITH_QCOM_LPA 0, 0, #endif 0); // create render and playback thread { Mutex::Autolock l(mMutex); LOGV("JetPlayer::init(): trying to start render thread"); createThreadEtc(renderThread, this, "jetRenderThread", ANDROID_PRIORITY_AUDIO); mCondition.wait(mMutex); } if (mTid > 0) { // render thread started, we're ready LOGV("JetPlayer::init(): render thread(%d) successfully started.", mTid); mState = EAS_STATE_READY; } else { LOGE("JetPlayer::init(): failed to start render thread."); mState = EAS_STATE_ERROR; return EAS_FAILURE; } return EAS_SUCCESS; }
// NativeActivity callback: the window is going away, so detach it from the
// android_app state (blocks until the app thread has released the surface).
static void onNativeWindowDestroyed(ANativeActivity* activity, ANativeWindow* window) {
    LOGV("NativeWindowDestroyed: %p -- %p\n", activity, window);
    struct android_app* app = (struct android_app*)activity->instance;
    android_app_set_window(app, NULL);
}
//-------------------------------------------------------------------------------------------------
// Body of the render thread started by init(). Loops forever: waits (on
// mCondition) until mRender is set, renders MIX_NUM_BUFFERS buffers of MIDI
// audio through EAS_Render, publishes JET status changes, then writes the PCM
// data to mAudioTrack outside the lock. Exits when mEasData becomes NULL or
// the AudioTrack disappears; on exit it stops/flushes the track, frees the
// render buffer, resets mTid and signals mCondition so joiners wake up.
// Returns the last EAS result code (or the failed write() count).
int JetPlayer::render() {
    EAS_RESULT result = EAS_FAILURE;
    EAS_I32 count;
    int temp;
    bool audioStarted = false;

    LOGV("JetPlayer::render(): entering");

    // allocate render buffer
    mAudioBuffer = new EAS_PCM[pLibConfig->mixBufferSize * pLibConfig->numChannels * MIX_NUM_BUFFERS];
    if (!mAudioBuffer) {
        LOGE("JetPlayer::render(): mAudioBuffer allocate failed");
        goto threadExit;
    }

    // signal main thread that we started
    {
        Mutex::Autolock l(mMutex);
        mTid = gettid();
        LOGV("JetPlayer::render(): render thread(%d) signal", mTid);
        mCondition.signal();
    }

    while (1) {
        mMutex.lock(); // [[[[[[[[ LOCK ---------------------------------------

        if (mEasData == NULL) {
            mMutex.unlock();
            LOGV("JetPlayer::render(): NULL EAS data, exiting render.");
            goto threadExit;
        }

        // nothing to render, wait for client thread to wake us up
        while (!mRender) {
            LOGV("JetPlayer::render(): signal wait");
            if (audioStarted) {
                mAudioTrack->pause();
                // we have to restart the playback once we start rendering again
                audioStarted = false;
            }
            mCondition.wait(mMutex);
            LOGV("JetPlayer::render(): signal rx'd");
        }

        // render midi data into the input buffer
        int num_output = 0;   // bytes produced this pass
        EAS_PCM* p = mAudioBuffer;
        for (int i = 0; i < MIX_NUM_BUFFERS; i++) {
            result = EAS_Render(mEasData, p, pLibConfig->mixBufferSize, &count);
            if (result != EAS_SUCCESS) {
                LOGE("JetPlayer::render(): EAS_Render returned error %ld", result);
            }
            p += count * pLibConfig->numChannels;
            num_output += count * pLibConfig->numChannels * sizeof(EAS_PCM);

            // send events that were generated (if any) to the event callback
            fireEventsFromJetQueue();
        }

        // update playback state
        //LOGV("JetPlayer::render(): updating state");
        JET_Status(mEasData, &mJetStatus);
        fireUpdateOnStatusChange();
        mPaused = mJetStatus.paused;

        mMutex.unlock(); // UNLOCK ]]]]]]]] -----------------------------------

        // check audio output track
        if (mAudioTrack == NULL) {
            LOGE("JetPlayer::render(): output AudioTrack was not created");
            goto threadExit;
        }

        // Write data to the audio hardware
        // (done outside the lock so rendering and playback don't serialize)
        //LOGV("JetPlayer::render(): writing to audio output");
        if ((temp = mAudioTrack->write(mAudioBuffer, num_output)) < 0) {
            LOGE("JetPlayer::render(): Error in writing:%d",temp);
            return temp;
        }

        // start audio output if necessary
        if (!audioStarted) {
            LOGV("JetPlayer::render(): starting audio playback");
            mAudioTrack->start();
            audioStarted = true;
        }

    }//while (1)

threadExit:
    if (mAudioTrack) {
        mAudioTrack->stop();
        mAudioTrack->flush();
    }
    if (mAudioBuffer) {
        delete [] mAudioBuffer;
        mAudioBuffer = NULL;
    }
    // Announce thread exit so init()/joiners waiting on mCondition proceed.
    mMutex.lock();
    mTid = -1;
    mCondition.signal();
    mMutex.unlock();
    return result;
}
// Stops audio rendering by tearing down the OpenSL ES engine.
void AndroidAudioRenderer::stop()
{
    LOGV("AndroidAudioRenderer::stop");
    OpenSLShutdown();
    LOGV("AndroidAudioRenderer::stop complete");
}
/* * Block forever, waiting for a debugger to connect to us. Called from the * JDWP thread. * * This needs to un-block and return "false" if the VM is shutting down. It * should return "true" when it successfully accepts a connection. */ static bool acceptConnection(struct JdwpState* state) { JdwpNetState* netState = state->netState; int retryCount = 0; /* first, ensure that we get a connection to the ADB daemon */ retry: if (netState->shuttingDown) return false; if (netState->controlSock < 0) { int sleep_ms = 500; const int sleep_max_ms = 2*1000; char buff[5]; netState->controlSock = socket(PF_UNIX, SOCK_STREAM, 0); if (netState->controlSock < 0) { LOGE("Could not create ADB control socket:%s\n", strerror(errno)); return false; } if (pipe(netState->wakeFds) < 0) { LOGE("pipe failed"); return false; } snprintf(buff, sizeof(buff), "%04x", getpid()); buff[4] = 0; for (;;) { /* * If adbd isn't running, because USB debugging was disabled or * perhaps the system is restarting it for "adb root", the * connect() will fail. We loop here forever waiting for it * to come back. * * Waking up and polling every couple of seconds is generally a * bad thing to do, but we only do this if the application is * debuggable *and* adbd isn't running. Still, for the sake * of battery life, we should consider timing out and giving * up after a few minutes in case somebody ships an app with * the debuggable flag set. 
*/ int ret = connect(netState->controlSock, &netState->controlAddr.controlAddrPlain, netState->controlAddrLen); if (!ret) { /* now try to send our pid to the ADB daemon */ do { ret = send( netState->controlSock, buff, 4, 0 ); } while (ret < 0 && errno == EINTR); if (ret >= 0) { LOGV("PID sent as '%.*s' to ADB\n", 4, buff); break; } LOGE("Weird, can't send JDWP process pid to ADB: %s\n", strerror(errno)); return false; } LOGV("Can't connect to ADB control socket:%s\n", strerror(errno)); usleep( sleep_ms*1000 ); sleep_ms += (sleep_ms >> 1); if (sleep_ms > sleep_max_ms) sleep_ms = sleep_max_ms; } }
// Destructor: tear down the client connection before the object goes away.
MetadataRetrieverClient::~MetadataRetrieverClient()
{
    LOGV("MetadataRetrieverClient destructor");
    disconnect();
}
// Pauses (true) or resumes (false) playback. Records the state in mPaused
// and mirrors it into the OpenSL mute control.
void AndroidAudioRenderer::pause(bool pause)
{
    LOGV("AndroidAudioRenderer::pause");
    mPaused = pause;
    OpenSLSetMute(pause);
}
static void handle_keydown(DisplayState *ds, SDL_Event *ev) { int mod_state; int keycode; if (alt_grab) { // LOGV("Found alt grab\n"); mod_state = (SDL_GetModState() & (gui_grab_code | KMOD_LSHIFT)) == (gui_grab_code | KMOD_LSHIFT); } else if (ctrl_grab) { // LOGV("Found ctrl grab\n"); mod_state = (SDL_GetModState() & KMOD_RCTRL) == KMOD_RCTRL; } else { // LOGV("Default grab\n"); mod_state = (SDL_GetModState() & gui_grab_code) == gui_grab_code; } gui_key_modifier_pressed = mod_state; if (gui_key_modifier_pressed) { keycode = sdl_keyevent_to_keycode(&ev->key); // LOGV("Found modifier pressed for key/keycode = %d/%d\n", ev->key.keysym.sym, keycode); switch (keycode) { case 1: /* 'f' key on US keyboard */ LOGV("Keycode Pressed 'f' Fullscreen\n"); toggle_full_screen(ds); gui_keysym = 1; break; case 16: /* 'u' key on US keyboard */ LOGV("Keycode Pressed 'u' unset Scale\n"); if (scaling_active) { LOGV("Found scaling active Unsetting...\n"); scaling_active = 0; sdl_resize(ds); vga_hw_invalidate(); vga_hw_update(); reset_keys(); } gui_keysym = 1; break; case 22 ... 23: /* '1' to '9' keys */ //MK hack /* Reset the modifiers sent to the current console */ LOGV("Keycode Pressed '1-9' console\n"); reset_keys(); console_select(keycode - 22); gui_keysym = 1; // if (gui_fullscreen) { // LOGV("Found fullscreen breaking...\n"); // break; // } if (!is_graphic_console()) { /* release grab if going to a text console */ LOGV("Found text console releasing grab...\n"); if (gui_grab) { LOGV("Found grab, grab ending...\n"); sdl_grab_end(); } else if (absolute_enabled) { LOGV("Found absolute_enabled, show cursor...\n"); sdl_show_cursor(); } } else if (absolute_enabled) { LOGV("Found absolute_enabled, hiding cursor and grabing mouse...\n"); sdl_hide_cursor(); absolute_mouse_grab(); } break; case 24: /* '4' Zoom In */ case 25: /* '3' Zoom Out*/ LOGV("Keycode Pressed '3/4' Zoom\n"); // if (!gui_fullscreen) { { int width = MAX(real_screen->w + (keycode == 25 ? 
50 : -50), 160); int height = (ds_get_height(ds) * width) / ds_get_width(ds); LOGV("Found no fullscreen, scaling to: %dx%d \n", width, height); sdl_scale(ds, width, height); vga_hw_invalidate(); vga_hw_update(); reset_keys(); gui_keysym = 1; } // } break; case 26: /* Fit to Screen */ LOGV("Keycode Pressed '5' Fit to Screen\n"); // if (!gui_fullscreen) { { int width; int height; AndroidGetWindowSize(&width, &height); LOGV("Got Android window size=%dx%d", width, height); LOGV("Got VM resolution=%dx%d", ds_get_width(ds), ds_get_height(ds)); float aspectRatio = (float) ds_get_height(ds) / (float) ds_get_width(ds); LOGV("Got aspectRatio=%f", aspectRatio); int new_width = (int) (height / aspectRatio); if(new_width > width){ LOGV("Width is overrun, modifying height"); new_width = width; height = width * aspectRatio; } LOGV("Found no fullscreen, Fit To Screen: %dx%d \n", new_width, height); sdl_scale(ds, new_width, height); vga_hw_invalidate(); vga_hw_update(); reset_keys(); gui_keysym = 1; } // } break; case 27: /* Stretch to Screen */ LOGV("Keycode Pressed '6' Fit to Screen\n"); // if (!gui_fullscreen) { { int width; int height; AndroidGetWindowSize(&width, &height); LOGV("Found no fullscreen, Fit To Screen: %dx%d \n", width, height); sdl_scale(ds, width, height); vga_hw_invalidate(); vga_hw_update(); reset_keys(); gui_keysym = 1; } // } break; default: LOGV("Default\n"); break; } } else if (!is_graphic_console()) {
// Loads the fallback textures plus the per-submesh material textures
// (diffuse/albedo, bump/normal, alpha mask) for every submesh in d_sm.
// Textures are de-duplicated through d_textures (filename -> owning smart
// pointer); the raw pointers stored in d_materials alias those entries.
// Returns early (leaving remaining textures unloaded) if either fallback
// texture fails to load.
void SObjModel::LoadTextures() {
    // Fallback textures used when a material has no texture of its own.
    texDiffuse = (new STexture("AssetBase/empty_texture.png"));
    if (!texDiffuse->IsReady) {
        LOGE(" diffuse texture file not found");
        return;
    }
    // second argument false: presumably "not sRGB"/no color correction for
    // normal maps — TODO confirm against the STexture constructor.
    texNormal = (new STexture("AssetBase/empty_normal.png",false));
    if (!texNormal->IsReady) {
        LOGE("normal texture file not found");
        return;
    }
    for (auto it = d_sm.begin(); it != d_sm.end();++it) {
        auto &submesh = (*it);
        if (d_materials.find(submesh->m_name) == d_materials.end()) {
            LOGE("no material found - \"%s\" ",submesh->m_name.c_str());
        } else {
            auto &material = d_materials[submesh->m_name];

            // Diffuse / albedo map: load only if this filename is not cached yet.
            std::string &diffuse = material.albedoTexFileName;
            if (d_textures.find(diffuse) == d_textures.end()) {
                LOGV("material %s Diffuse %s Bump %s Alpha %s",submesh->m_name.c_str(),
                     material.albedoTexFileName.c_str(),
                     material.bumpMapTexFileName.c_str(),
                     material.alphaMaskTexFileName.c_str());
                // Raw pointer in the material; ownership transferred to d_textures.
                d_materials[submesh->m_name].albedoTex = new STexture(submesh->m_dir+diffuse);
                d_textures[diffuse].reset( d_materials[submesh->m_name].albedoTex);
                if (!d_textures[diffuse]->IsReady) {
                    LOGE("OBJ:Diffuse texture load failed %s",(submesh->m_dir+diffuse).c_str());
                }
            }

            // Bump / normal map (loaded with the same 'false' flag as texNormal).
            std::string &bump = material.bumpMapTexFileName;
            if (d_textures.find(bump) == d_textures.end()) {
                d_materials[submesh->m_name].bumpMapTex = new STexture(submesh->m_dir+bump,false);
                d_textures[bump].reset(d_materials[submesh->m_name].bumpMapTex);
                if (!d_textures[bump]->IsReady) {
                    LOGE("OBJ:Bump texture load failed %s",(submesh->m_dir+bump).c_str());
                }
            }

            // Alpha mask.
            std::string &alpha = material.alphaMaskTexFileName;
            if (d_textures.find(alpha) == d_textures.end()) {
                d_materials[submesh->m_name].alphaMaskTex = new STexture(submesh->m_dir+alpha);
                d_textures[alpha].reset(d_materials[submesh->m_name].alphaMaskTex);
                if (!d_textures[alpha]->IsReady) {
                    LOGE("OBJ:Alpha mask texture load failed %s",(submesh->m_dir+alpha).c_str());
                }
            }
        }
    }
}
SSBSIP_MFC_ERROR_CODE SsbSipMfcEncInit(void *openHandle, void *param) { int ret_code; int dpbBufSize; _MFCLIB *pCTX; mfc_common_args EncArg; mfc_common_args user_addr_arg, phys_addr_arg; SSBSIP_MFC_ENC_H264_PARAM *h264_arg; SSBSIP_MFC_ENC_MPEG4_PARAM *mpeg4_arg; SSBSIP_MFC_ENC_H263_PARAM *h263_arg; SSBSIP_MFC_CODEC_TYPE codec_type; pCTX = (_MFCLIB *)openHandle; memset(&EncArg, 0, sizeof(mfc_common_args)); LOGV("SsbSipMfcEncInit: Encode Init start\n"); mpeg4_arg = (SSBSIP_MFC_ENC_MPEG4_PARAM *)param; codec_type = mpeg4_arg->codecType; if ((codec_type != MPEG4_ENC) && (codec_type != H264_ENC) && (codec_type != H263_ENC)) { LOGE("SsbSipMfcEncOpen: Undefined codec type.\n"); return MFC_RET_INVALID_PARAM; } pCTX->codec_type = codec_type; switch (pCTX->codec_type) { case MPEG4_ENC: LOGV("SsbSipMfcEncInit: MPEG4 Encode\n"); mpeg4_arg = (SSBSIP_MFC_ENC_MPEG4_PARAM *)param; pCTX->width = mpeg4_arg->SourceWidth; pCTX->height = mpeg4_arg->SourceHeight; break; case H263_ENC: LOGV("SsbSipMfcEncInit: H263 Encode\n"); h263_arg = (SSBSIP_MFC_ENC_H263_PARAM *)param; pCTX->width = h263_arg->SourceWidth; pCTX->height = h263_arg->SourceHeight; break; case H264_ENC: LOGV("SsbSipMfcEncInit: H264 Encode\n"); h264_arg = (SSBSIP_MFC_ENC_H264_PARAM *)param; pCTX->width = h264_arg->SourceWidth; pCTX->height = h264_arg->SourceHeight; break; default: break; } switch (pCTX->codec_type) { case MPEG4_ENC: mpeg4_arg = (SSBSIP_MFC_ENC_MPEG4_PARAM*)param; EncArg.args.enc_init_mpeg4.in_codec_type = pCTX->codec_type; EncArg.args.enc_init_mpeg4.in_profile_level = ENC_PROFILE_LEVEL(mpeg4_arg->ProfileIDC, mpeg4_arg->LevelIDC); EncArg.args.enc_init_mpeg4.in_width = mpeg4_arg->SourceWidth; EncArg.args.enc_init_mpeg4.in_height = mpeg4_arg->SourceHeight; EncArg.args.enc_init_mpeg4.in_gop_num = mpeg4_arg->IDRPeriod; if (mpeg4_arg->DisableQpelME) EncArg.args.enc_init_mpeg4.in_qpelME_enable = 0; else EncArg.args.enc_init_mpeg4.in_qpelME_enable = 1; EncArg.args.enc_init_mpeg4.in_MS_mode = mpeg4_arg->SliceMode; 
EncArg.args.enc_init_mpeg4.in_MS_size = mpeg4_arg->SliceArgument; if (mpeg4_arg->NumberBFrames > 2) { LOGE("SsbSipMfcEncInit: No such BframeNum is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_mpeg4.in_BframeNum = mpeg4_arg->NumberBFrames; EncArg.args.enc_init_mpeg4.in_mb_refresh = mpeg4_arg->RandomIntraMBRefresh; /* rate control*/ EncArg.args.enc_init_mpeg4.in_RC_frm_enable = mpeg4_arg->EnableFRMRateControl; if ((mpeg4_arg->QSCodeMin > 51) || (mpeg4_arg->QSCodeMax > 51)) { LOGE("SsbSipMfcEncInit: No such Min/Max QP is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_mpeg4.in_RC_qbound = ENC_RC_QBOUND(mpeg4_arg->QSCodeMin, mpeg4_arg->QSCodeMax); EncArg.args.enc_init_mpeg4.in_RC_rpara = mpeg4_arg->CBRPeriodRf; /* pad control */ EncArg.args.enc_init_mpeg4.in_pad_ctrl_on = mpeg4_arg->PadControlOn; if ((mpeg4_arg->LumaPadVal > 255) || (mpeg4_arg->CbPadVal > 255) || (mpeg4_arg->CrPadVal > 255)) { LOGE("SsbSipMfcEncInit: No such Pad value is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_mpeg4.in_luma_pad_val = mpeg4_arg->LumaPadVal; EncArg.args.enc_init_mpeg4.in_cb_pad_val = mpeg4_arg->CbPadVal; EncArg.args.enc_init_mpeg4.in_cr_pad_val = mpeg4_arg->CrPadVal; EncArg.args.enc_init_mpeg4.in_time_increament_res = mpeg4_arg->TimeIncreamentRes; EncArg.args.enc_init_mpeg4.in_time_vop_time_increament = mpeg4_arg->VopTimeIncreament; EncArg.args.enc_init_mpeg4.in_RC_framerate = (mpeg4_arg->TimeIncreamentRes / mpeg4_arg->VopTimeIncreament); EncArg.args.enc_init_mpeg4.in_RC_bitrate = mpeg4_arg->Bitrate; if ((mpeg4_arg->FrameQp > 51) || (mpeg4_arg->FrameQp_P) > 51 || (mpeg4_arg->FrameQp_B > 51)) { LOGE("SsbSipMfcEncInit: No such FrameQp is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_mpeg4.in_frame_qp = mpeg4_arg->FrameQp; if (mpeg4_arg->FrameQp_P) EncArg.args.enc_init_mpeg4.in_frame_P_qp = mpeg4_arg->FrameQp_P; else EncArg.args.enc_init_mpeg4.in_frame_P_qp = mpeg4_arg->FrameQp; if 
(mpeg4_arg->FrameQp_B) EncArg.args.enc_init_mpeg4.in_frame_B_qp = mpeg4_arg->FrameQp_B; else EncArg.args.enc_init_mpeg4.in_frame_B_qp = mpeg4_arg->FrameQp; break; case H263_ENC: h263_arg = (SSBSIP_MFC_ENC_H263_PARAM *)param; EncArg.args.enc_init_mpeg4.in_codec_type = pCTX->codec_type; EncArg.args.enc_init_mpeg4.in_profile_level = ENC_PROFILE_LEVEL(66, 40); EncArg.args.enc_init_mpeg4.in_width = h263_arg->SourceWidth; EncArg.args.enc_init_mpeg4.in_height = h263_arg->SourceHeight; EncArg.args.enc_init_mpeg4.in_gop_num = h263_arg->IDRPeriod; EncArg.args.enc_init_mpeg4.in_mb_refresh = h263_arg->RandomIntraMBRefresh; EncArg.args.enc_init_mpeg4.in_MS_mode = h263_arg->SliceMode; EncArg.args.enc_init_mpeg4.in_MS_size = 0; /* rate control*/ EncArg.args.enc_init_mpeg4.in_RC_frm_enable = h263_arg->EnableFRMRateControl; if ((h263_arg->QSCodeMin > 51) || (h263_arg->QSCodeMax > 51)) { LOGE("SsbSipMfcEncInit: No such Min/Max QP is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_mpeg4.in_RC_qbound = ENC_RC_QBOUND(h263_arg->QSCodeMin, h263_arg->QSCodeMax); EncArg.args.enc_init_mpeg4.in_RC_rpara = h263_arg->CBRPeriodRf; /* pad control */ EncArg.args.enc_init_mpeg4.in_pad_ctrl_on = h263_arg->PadControlOn; if ((h263_arg->LumaPadVal > 255) || (h263_arg->CbPadVal > 255) || (h263_arg->CrPadVal > 255)) { LOGE("SsbSipMfcEncInit: No such Pad value is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_mpeg4.in_luma_pad_val = h263_arg->LumaPadVal; EncArg.args.enc_init_mpeg4.in_cb_pad_val = h263_arg->CbPadVal; EncArg.args.enc_init_mpeg4.in_cr_pad_val = h263_arg->CrPadVal; EncArg.args.enc_init_mpeg4.in_RC_framerate = h263_arg->FrameRate; EncArg.args.enc_init_mpeg4.in_RC_bitrate = h263_arg->Bitrate; if (h263_arg->FrameQp > 51) { LOGE("SsbSipMfcEncInit: No such FrameQp is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_mpeg4.in_frame_qp = h263_arg->FrameQp; if (h263_arg->FrameQp_P) EncArg.args.enc_init_mpeg4.in_frame_P_qp = 
h263_arg->FrameQp_P; else EncArg.args.enc_init_mpeg4.in_frame_P_qp = h263_arg->FrameQp; break; case H264_ENC: h264_arg = (SSBSIP_MFC_ENC_H264_PARAM *)param; EncArg.args.enc_init_h264.in_codec_type = H264_ENC; EncArg.args.enc_init_h264.in_profile_level = ENC_PROFILE_LEVEL(h264_arg->ProfileIDC, h264_arg->LevelIDC); EncArg.args.enc_init_h264.in_width = h264_arg->SourceWidth; EncArg.args.enc_init_h264.in_height = h264_arg->SourceHeight; EncArg.args.enc_init_h264.in_gop_num = h264_arg->IDRPeriod; if ((h264_arg->NumberRefForPframes > 2) || (h264_arg->NumberReferenceFrames > 2)) { LOGE("SsbSipMfcEncInit: No such ref Num is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_h264.in_reference_num = h264_arg->NumberReferenceFrames; EncArg.args.enc_init_h264.in_ref_num_p = h264_arg->NumberRefForPframes; if ((h264_arg->SliceMode == 0) || (h264_arg->SliceMode == 1) || (h264_arg->SliceMode == 2) || (h264_arg->SliceMode == 4)) { EncArg.args.enc_init_h264.in_MS_mode = h264_arg->SliceMode; } else { LOGE("SsbSipMfcEncInit: No such slice mode is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_h264.in_MS_size = h264_arg->SliceArgument; if (h264_arg->NumberBFrames > 2) { LOGE("SsbSipMfcEncInit: No such BframeNum is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_h264.in_BframeNum = h264_arg->NumberBFrames; EncArg.args.enc_init_h264.in_deblock_filt = h264_arg->LoopFilterDisable; if ((abs(h264_arg->LoopFilterAlphaC0Offset) > 6) || (abs(h264_arg->LoopFilterBetaOffset) > 6)) { LOGE("SsbSipMfcEncInit: No such AlphaC0Offset or BetaOffset is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_h264.in_deblock_alpha_C0 = h264_arg->LoopFilterAlphaC0Offset; EncArg.args.enc_init_h264.in_deblock_beta = h264_arg->LoopFilterBetaOffset; EncArg.args.enc_init_h264.in_symbolmode = h264_arg->SymbolMode; EncArg.args.enc_init_h264.in_interlace_mode = h264_arg->PictureInterlace; EncArg.args.enc_init_h264.in_transform8x8_mode = 
h264_arg->Transform8x8Mode; EncArg.args.enc_init_h264.in_mb_refresh = h264_arg->RandomIntraMBRefresh; /* pad control */ EncArg.args.enc_init_h264.in_pad_ctrl_on = h264_arg->PadControlOn; if ((h264_arg->LumaPadVal > 255) || (h264_arg->CbPadVal > 255) || (h264_arg->CrPadVal > 255)) { LOGE("SsbSipMfcEncInit: No such Pad value is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_h264.in_luma_pad_val = h264_arg->LumaPadVal; EncArg.args.enc_init_h264.in_cb_pad_val = h264_arg->CbPadVal; EncArg.args.enc_init_h264.in_cr_pad_val = h264_arg->CrPadVal; /* rate control*/ EncArg.args.enc_init_h264.in_RC_frm_enable = h264_arg->EnableFRMRateControl; EncArg.args.enc_init_h264.in_RC_mb_enable = h264_arg->EnableMBRateControl; EncArg.args.enc_init_h264.in_RC_framerate = h264_arg->FrameRate; EncArg.args.enc_init_h264.in_RC_bitrate = h264_arg->Bitrate; if (h264_arg->FrameQp > 51) { LOGE("SsbSipMfcEncInit: No such FrameQp is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_h264.in_frame_qp = h264_arg->FrameQp; if (h264_arg->FrameQp_P) EncArg.args.enc_init_h264.in_frame_P_qp = h264_arg->FrameQp_P; else EncArg.args.enc_init_h264.in_frame_P_qp = h264_arg->FrameQp; if (h264_arg->FrameQp_B) EncArg.args.enc_init_h264.in_frame_B_qp = h264_arg->FrameQp_B; else EncArg.args.enc_init_h264.in_frame_B_qp = h264_arg->FrameQp; if ((h264_arg->QSCodeMin > 51) || (h264_arg->QSCodeMax > 51)) { LOGE("SsbSipMfcEncInit: No such Min/Max QP is supported.\n"); return MFC_RET_INVALID_PARAM; } EncArg.args.enc_init_h264.in_RC_qbound = ENC_RC_QBOUND(h264_arg->QSCodeMin, h264_arg->QSCodeMax); EncArg.args.enc_init_h264.in_RC_rpara = h264_arg->CBRPeriodRf; EncArg.args.enc_init_h264.in_RC_mb_dark_disable = h264_arg->DarkDisable; EncArg.args.enc_init_h264.in_RC_mb_smooth_disable = h264_arg->SmoothDisable; EncArg.args.enc_init_h264.in_RC_mb_static_disable = h264_arg->StaticDisable; EncArg.args.enc_init_h264.in_RC_mb_activity_disable = h264_arg->ActivityDisable; /* default setting */ 
EncArg.args.enc_init_h264.in_md_interweight_pps = 0; EncArg.args.enc_init_h264.in_md_intraweight_pps = 0; break; default: break; } EncArg.args.enc_init_mpeg4.in_mapped_addr = pCTX->mapped_addr; ret_code = ioctl(pCTX->hMFC, IOCTL_MFC_ENC_INIT, &EncArg); if (EncArg.ret_code != MFC_RET_OK) { LOGE("SsbSipMfcEncInit: IOCTL_MFC_ENC_INIT (%d) failed\n", EncArg.ret_code); return MFC_RET_ENC_INIT_FAIL; } pCTX->virStrmBuf = EncArg.args.enc_init_mpeg4.out_u_addr.strm_ref_y; pCTX->phyStrmBuf = EncArg.args.enc_init_mpeg4.out_p_addr.strm_ref_y; pCTX->sizeStrmBuf = MAX_ENCODER_OUTPUT_BUFFER_SIZE; pCTX->encodedHeaderSize = EncArg.args.enc_init_mpeg4.out_header_size; pCTX->virMvRefYC = EncArg.args.enc_init_mpeg4.out_u_addr.mv_ref_yc; pCTX->inter_buff_status |= MFC_USE_STRM_BUFF; return MFC_RET_OK; }
// NativeActivity callback: forward the system low-memory notification to the
// app thread's command pipe.
static void onLowMemory(ANativeActivity* activity) {
    LOGV("LowMemory: %p\n", activity);
    android_app_write_cmd((struct android_app*)activity->instance, APP_CMD_LOW_MEMORY);
}
/* public native void dbopen(String path, int flags, String locale); */
// Opens (optionally creating) the SQLite database at 'pathString', configures
// it (soft heap limit, 1000ms busy timeout, custom Android functions, and an
// optional integrity check), and stores the native handle into the Java
// object's db-handle field. Throws SQLiteException (via
// throw_sqlite3_exception) on any SQLite failure; all natively allocated
// resources are released through the 'done' cleanup path.
static void dbopen(JNIEnv* env, jobject object, jstring pathString, jint flags)
{
    int err;
    sqlite3 * handle = NULL;
    sqlite3_stmt * statement = NULL;
    char const * path8 = env->GetStringUTFChars(pathString, NULL);
    int sqliteFlags;

    // BUGFIX: GetStringUTFChars may return NULL (it has then already thrown
    // OutOfMemoryError); previously we would pass NULL into sqlite.
    if (path8 == NULL) {
        return;
    }

    // register the logging func on sqlite. needs to be done BEFORE any sqlite3 func is called.
    registerLoggingFunc(path8);

    // convert our flags into the sqlite flags
    if (flags & CREATE_IF_NECESSARY) {
        sqliteFlags = SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE;
    } else if (flags & OPEN_READONLY) {
        sqliteFlags = SQLITE_OPEN_READONLY;
    } else {
        sqliteFlags = SQLITE_OPEN_READWRITE;
    }

    err = sqlite3_open_v2(path8, &handle, sqliteFlags, NULL);
    if (err != SQLITE_OK) {
        LOGE("sqlite3_open_v2(\"%s\", &handle, %d, NULL) failed\n", path8, sqliteFlags);
        throw_sqlite3_exception(env, handle);
        goto done;
    }

    // The soft heap limit prevents the page cache allocations from growing
    // beyond the given limit, no matter what the max page cache sizes are
    // set to. The limit does not, as of 3.5.0, affect any other allocations.
    sqlite3_soft_heap_limit(sSqliteSoftHeapLimit);

    // Set the default busy handler to retry for 1000ms and then return SQLITE_BUSY
    err = sqlite3_busy_timeout(handle, 1000 /* ms */);
    if (err != SQLITE_OK) {
        LOGE("sqlite3_busy_timeout(handle, 1000) failed for \"%s\"\n", path8);
        throw_sqlite3_exception(env, handle);
        goto done;
    }

#ifdef DB_INTEGRITY_CHECK
    static const char* integritySql = "pragma integrity_check(1);";
    err = sqlite3_prepare_v2(handle, integritySql, -1, &statement, NULL);
    if (err != SQLITE_OK) {
        LOGE("sqlite_prepare_v2(handle, \"%s\") failed for \"%s\"\n", integritySql, path8);
        throw_sqlite3_exception(env, handle);
        goto done;
    }

    // first is OK or error message
    err = sqlite3_step(statement);
    if (err != SQLITE_ROW) {
        // BUGFIX: format string previously had one %s for two arguments,
        // silently dropping the database path from the log.
        LOGE("integrity check \"%s\" failed for \"%s\"\n", integritySql, path8);
        throw_sqlite3_exception(env, handle);
        goto done;
    } else {
        const char *text = (const char*)sqlite3_column_text(statement, 0);
        if (strcmp(text, "ok") != 0) {
            // BUGFIX: format string previously had two %s for three arguments,
            // so the error text was never printed.
            LOGE("integrity check \"%s\" failed for \"%s\": %s\n", integritySql, path8, text);
            jniThrowException(env, "android/database/sqlite/SQLiteDatabaseCorruptException", text);
            goto done;
        }
    }
#endif

    err = register_android_functions(handle, UTF16_STORAGE);
    if (err) {
        throw_sqlite3_exception(env, handle);
        goto done;
    }
#ifdef MTK_DIALER_SEARCH_SUPPORT
    err = register_dialer_search_custom_functions(handle);
    if (err) {
        err = register_dialer_search_android_functions(handle);
        if (err) {
            throw_sqlite3_exception(env, handle);
            goto done;
        }
    }
#endif

    LOGV("Opened '%s' - %p\n", path8, handle);
    env->SetIntField(object, offset_db_handle, (int) handle);
    handle = NULL;  // The caller owns the handle now.

done:
    // Release allocated resources
    if (path8 != NULL) env->ReleaseStringUTFChars(pathString, path8);
    if (statement != NULL) sqlite3_finalize(statement);
    if (handle != NULL) sqlite3_close(handle);
}
/* NativeActivity callback: forward a window-focus transition to the app
 * thread as either APP_CMD_GAINED_FOCUS or APP_CMD_LOST_FOCUS. */
static void onWindowFocusChanged(ANativeActivity* activity, int focused) {
    LOGV("WindowFocusChanged: %p -- %d\n", activity, focused);
    struct android_app* app = (struct android_app*)activity->instance;
    int8_t cmd = focused ? APP_CMD_GAINED_FOCUS : APP_CMD_LOST_FOCUS;
    android_app_write_cmd(app, cmd);
}
/*
 * Entry point for JDWP thread. The thread was created through the VM
 * mechanisms, so there is a java/lang/Thread associated with us.
 *
 * Publishes itself to the creating thread via threadStartLock/Cond, then
 * loops accepting (server mode) or initiating (client mode) debugger
 * connections until state->run goes false.  Returns NULL on exit.
 */
static void* jdwpThreadStart(void* arg)
{
    JdwpState* state = (JdwpState*) arg;

    LOGV("JDWP: thread running\n");

    /*
     * Finish initializing "state", then notify the creating thread that
     * we're running.
     */
    state->debugThreadHandle = dvmThreadSelf()->handle;
    state->run = true;
    /* ensure the fields above are visible before the started flag */
    MEM_BARRIER();
    state->debugThreadStarted = true;       // touch this last

    dvmDbgLockMutex(&state->threadStartLock);
    dvmDbgCondBroadcast(&state->threadStartCond);
    dvmDbgUnlockMutex(&state->threadStartLock);

    /* set the thread state to VMWAIT so GCs don't wait for us */
    dvmDbgThreadWaiting();

    /*
     * Loop forever if we're in server mode, processing connections. In
     * non-server mode, we bail out of the thread when the debugger drops
     * us.
     *
     * We broadcast a notification when a debugger attaches, after we
     * successfully process the handshake.
     */
    while (state->run) {
        bool first;
        int cc;  /* NOTE(review): unused here -- candidate for removal */

        if (state->params.server) {
            /*
             * Block forever, waiting for a connection. To support the
             * "timeout=xxx" option we'll need to tweak this.
             */
            if (!dvmJdwpAcceptConnection(state))
                break;
        } else {
            /*
             * If we're not acting as a server, we need to connect out to the
             * debugger. To support the "timeout=xxx" option we need to
             * have a timeout if the handshake reply isn't received in a
             * reasonable amount of time.
             */
            if (!dvmJdwpEstablishConnection(state)) {
                /* wake anybody who was waiting for us to succeed */
                dvmDbgLockMutex(&state->attachLock);
                dvmDbgCondBroadcast(&state->attachCond);
                dvmDbgUnlockMutex(&state->attachLock);
                break;
            }
        }

        /* prep debug code to handle the new connection */
        dvmDbgConnected();

        /* process requests until the debugger drops */
        first = true;
        while (true) {
            // sanity check -- shouldn't happen?
            if (dvmThreadSelf()->status != THREAD_VMWAIT) {
                LOGE("JDWP thread no longer in VMWAIT (now %d); resetting\n",
                    dvmThreadSelf()->status);
                dvmDbgThreadWaiting();
            }

            if (!dvmJdwpProcessIncoming(state))     /* blocking read */
                break;

            if (first && !dvmJdwpAwaitingHandshake(state)) {
                /* handshake worked, tell the interpreter that we're active */
                first = false;

                /* set thread ID; requires object registry to be active */
                state->debugThreadId = dvmDbgGetThreadSelfId();

                /* wake anybody who's waiting for us */
                dvmDbgLockMutex(&state->attachLock);
                dvmDbgCondBroadcast(&state->attachCond);
                dvmDbgUnlockMutex(&state->attachLock);
            }
        }

        dvmJdwpCloseConnection(state);

        if (state->ddmActive) {
            state->ddmActive = false;

            /* broadcast the disconnect; must be in RUNNING state */
            dvmDbgThreadRunning();
            dvmDbgDdmDisconnected();
            dvmDbgThreadWaiting();
        }

        /* interpreter can ignore breakpoints */
        dvmDbgDisconnected();

        /* if we had stuff suspended, resume it now */
        dvmUndoDebuggerSuspensions();

        dvmJdwpResetState(state);

        /* if we connected out, this was a one-shot deal */
        if (!state->params.server)
            state->run = false;
    }

    /* back to running, for thread shutdown */
    dvmDbgThreadRunning();

    LOGV("JDWP: thread exiting\n");
    return NULL;
}
/* NativeActivity callback: the input queue is going away; detach it from
 * the app thread by installing a NULL queue. */
static void onInputQueueDestroyed(ANativeActivity* activity, AInputQueue* queue) {
    LOGV("InputQueueDestroyed: %p -- %p\n", activity, queue);
    struct android_app* app = (struct android_app*)activity->instance;
    android_app_set_input(app, NULL);
}
/* NativeActivity callback: tear down the app thread's state. */
static void onDestroy(ANativeActivity* activity) {
    LOGV("Destroy: %p\n", activity);
    struct android_app* app = (struct android_app*)activity->instance;
    android_app_free(app);
}
//------------------------------------------------------------------------------------------------- int JetPlayer::triggerClip(int clipId) { LOGV("JetPlayer::triggerClip clipId=%d", clipId); Mutex::Autolock lock(mMutex); return JET_TriggerClip(mEasData, clipId); }
/* NativeActivity callback: report the RESUME lifecycle transition. */
static void onResume(ANativeActivity* activity) {
    LOGV("Resume: %p\n", activity);
    struct android_app* app = (struct android_app*)activity->instance;
    android_app_set_activity_state(app, APP_CMD_RESUME);
}
//------------------------------------------------------------------------------------------------- JetPlayer::~JetPlayer() { LOGV("~JetPlayer"); release(); }
/* NativeActivity callback: report the PAUSE lifecycle transition. */
static void onPause(ANativeActivity* activity) {
    LOGV("Pause: %p\n", activity);
    struct android_app* app = (struct android_app*)activity->instance;
    android_app_set_activity_state(app, APP_CMD_PAUSE);
}
// Scans a single media file: dispatches MIDI-family and FLAC files to
// dedicated handlers, otherwise extracts metadata via mRetriever and
// reports each known tag to the client.  Returns OK on success,
// UNKNOWN_ERROR for files without an acceptable extension.
status_t StagefrightMediaScanner::processFile(
        const char *path, const char *mimeType,
        MediaScannerClient &client) {
    LOGV("processFile '%s'.", path);

    client.setLocale(locale());
    client.beginFile();

    const char *extension = strrchr(path, '.');

    if (!extension) {
        // BUG FIX: beginFile() was already called above, but this early
        // return skipped the matching endFile() that the other local
        // exit paths perform, leaving the client in mid-file state.
        client.endFile();
        return UNKNOWN_ERROR;
    }

    if (!FileHasAcceptableExtension(extension)) {
        client.endFile();
        return UNKNOWN_ERROR;
    }

    if (!strcasecmp(extension, ".mid")
            || !strcasecmp(extension, ".smf")
            || !strcasecmp(extension, ".imy")
            || !strcasecmp(extension, ".midi")
            || !strcasecmp(extension, ".xmf")
            || !strcasecmp(extension, ".rtttl")
            || !strcasecmp(extension, ".rtx")
            || !strcasecmp(extension, ".ota")) {
        // NOTE(review): HandleMIDI/HandleFLAC take the client pointer;
        // presumably they balance beginFile() themselves -- confirm
        // against their definitions.
        return HandleMIDI(path, &client);
    }

    if (!strcasecmp(extension, ".flac")) {
        return HandleFLAC(path, &client);
    }

    if (mRetriever->setDataSource(path) == OK) {
        const char *value;
        if ((value = mRetriever->extractMetadata(
                        METADATA_KEY_MIMETYPE)) != NULL) {
            client.setMimeType(value);
        }

        struct KeyMap {
            const char *tag;
            int key;
        };
        static const KeyMap kKeyMap[] = {
            { "tracknumber", METADATA_KEY_CD_TRACK_NUMBER },
            { "discnumber", METADATA_KEY_DISC_NUMBER },
            { "album", METADATA_KEY_ALBUM },
            { "artist", METADATA_KEY_ARTIST },
            { "albumartist", METADATA_KEY_ALBUMARTIST },
            { "composer", METADATA_KEY_COMPOSER },
            { "genre", METADATA_KEY_GENRE },
            { "title", METADATA_KEY_TITLE },
            { "year", METADATA_KEY_YEAR },
            { "duration", METADATA_KEY_DURATION },
            { "writer", METADATA_KEY_WRITER },
            { "compilation", METADATA_KEY_COMPILATION },
        };
        static const size_t kNumEntries = sizeof(kKeyMap) / sizeof(kKeyMap[0]);

        for (size_t i = 0; i < kNumEntries; ++i) {
            // Renamed from 'value' -- the inner declaration shadowed the
            // outer one above.
            const char *tagValue;
            if ((tagValue = mRetriever->extractMetadata(kKeyMap[i].key)) != NULL) {
                client.addStringTag(kKeyMap[i].tag, tagValue);
            }
        }
    }

    client.endFile();

    return OK;
}
/* NativeActivity callback: report the STOP lifecycle transition. */
static void onStop(ANativeActivity* activity) {
    LOGV("Stop: %p\n", activity);
    struct android_app* app = (struct android_app*)activity->instance;
    android_app_set_activity_state(app, APP_CMD_STOP);
}
static uint32_t data__poll_process_akm_abs(struct sensors_data_context_t *dev, int fd __attribute__((unused)), struct input_event *event) { uint32_t new_sensors = 0; if (event->type == EV_ABS) { LOGV("compass type: %d code: %d value: %-5d time: %ds", event->type, event->code, event->value, (int)event->time.tv_sec); switch (event->code) { case EVENT_TYPE_ACCEL_X: new_sensors |= SENSORS_AKM_ACCELERATION; dev->sensors[ID_A].acceleration.x = event->value * CONVERT_A_X; break; case EVENT_TYPE_ACCEL_Y: new_sensors |= SENSORS_AKM_ACCELERATION; dev->sensors[ID_A].acceleration.y = event->value * CONVERT_A_Y; break; case EVENT_TYPE_ACCEL_Z: new_sensors |= SENSORS_AKM_ACCELERATION; dev->sensors[ID_A].acceleration.z = event->value * CONVERT_A_Z; break; case EVENT_TYPE_MAGV_X: new_sensors |= SENSORS_AKM_MAGNETIC_FIELD; dev->sensors[ID_M].magnetic.x = event->value * CONVERT_M_X; break; case EVENT_TYPE_MAGV_Y: new_sensors |= SENSORS_AKM_MAGNETIC_FIELD; dev->sensors[ID_M].magnetic.y = event->value * CONVERT_M_Y; break; case EVENT_TYPE_MAGV_Z: new_sensors |= SENSORS_AKM_MAGNETIC_FIELD; dev->sensors[ID_M].magnetic.z = event->value * CONVERT_M_Z; break; case EVENT_TYPE_YAW: new_sensors |= SENSORS_AKM_ORIENTATION; dev->sensors[ID_O].orientation.azimuth = event->value * CONVERT_O_A; break; case EVENT_TYPE_PITCH: new_sensors |= SENSORS_AKM_ORIENTATION; dev->sensors[ID_O].orientation.pitch = event->value * CONVERT_O_P; break; case EVENT_TYPE_ROLL: new_sensors |= SENSORS_AKM_ORIENTATION; dev->sensors[ID_O].orientation.roll = -event->value * CONVERT_O_R; break; case EVENT_TYPE_TEMPERATURE: new_sensors |= SENSORS_AKM_TEMPERATURE; dev->sensors[ID_T].temperature = event->value; break; case EVENT_TYPE_STEP_COUNT: // step count (only reported in MODE_FFD) // we do nothing with it for now. break; case EVENT_TYPE_ACCEL_STATUS: // accuracy of the calibration (never returned!) 
//LOGV("G-Sensor status %d", event->value); break; case EVENT_TYPE_ORIENT_STATUS: { // accuracy of the calibration uint32_t v = (uint32_t)(event->value & SENSOR_STATE_MASK); LOGV_IF(dev->sensors[ID_O].orientation.status != (uint8_t)v, "M-Sensor status %d", v); dev->sensors[ID_O].orientation.status = (uint8_t)v; } break; } } return new_sensors; }
/* NativeActivity callback: notify the app thread that the device
 * configuration (locale, orientation, ...) changed. */
static void onConfigurationChanged(ANativeActivity* activity) {
    LOGV("ConfigurationChanged: %p\n", activity);
    android_app_write_cmd((struct android_app*)activity->instance,
            APP_CMD_CONFIG_CHANGED);
}
static void* android_app_entry(void* param) { LOGV("+android_app_entry"); struct android_app* android_app = (struct android_app*)param; android_app->config = AConfiguration_new(); AConfiguration_fromAssetManager(android_app->config, android_app->activity->assetManager); print_cur_config(android_app); android_app->cmdPollSource.id = LOOPER_ID_MAIN; android_app->cmdPollSource.app = android_app; android_app->cmdPollSource.process = process_cmd; android_app->inputPollSource.id = LOOPER_ID_INPUT; android_app->inputPollSource.app = android_app; android_app->inputPollSource.process = process_input; ALooper* looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS); ALooper_addFd(looper, android_app->msgread, LOOPER_ID_MAIN, ALOOPER_EVENT_INPUT, NULL, &android_app->cmdPollSource); android_app->looper = looper; pthread_mutex_lock(&android_app->mutex); android_app->running = 1; pthread_cond_broadcast(&android_app->cond); pthread_mutex_unlock(&android_app->mutex); std::string sargv; // Load command line from ARGV parameter JNIEnv *env = GetEnvAttachThread(android_app->activity->vm); if(env) { jobject me = android_app->activity->clazz; jclass acl = env->GetObjectClass(me); //class pointer of NativeActivity jmethodID giid = env->GetMethodID(acl, "getIntent", "()Landroid/content/Intent;"); jobject intent = env->CallObjectMethod(me, giid); //Got our intent jclass icl = env->GetObjectClass(intent); //class pointer of Intent jmethodID gseid = env->GetMethodID(icl, "getStringExtra", "(Ljava/lang/String;)Ljava/lang/String;"); jstring jsARGV = (jstring)env->CallObjectMethod(intent, gseid, env->NewStringUTF("ARGV")); if(jsARGV) { const char *chARGV = env->GetStringUTFChars(jsARGV, 0); if(chARGV) { sargv = std::string(chARGV); LOGI("ARGV: pango %s", chARGV); } env->ReleaseStringUTFChars(jsARGV, chARGV); } android_app->activity->vm->DetachCurrentThread(); } // Set up argv/argc to pass to users main std::vector<std::string> vargv; vargv.push_back("pango"); // Parse parameters from 
ARGV android intent parameter std::istringstream iss(sargv); std::copy(std::istream_iterator<std::string>(iss), std::istream_iterator<std::string>(), std::back_inserter<std::vector<std::string> >(vargv)); char* argv[vargv.size()+1]; for(size_t ac = 0; ac < vargv.size(); ++ac) { argv[ac] = new char[vargv[ac].size()]; strcpy( argv[ac], vargv[ac].c_str() ); } argv[vargv.size()] = NULL; // Find main symbol void (*main)(int, char**); *(void **) (&main) = dlsym( dlopen(android_app->application_so, RTLD_NOW), "main"); if (!main) { LOGE( "undefined symbol main, crap" ); exit(1); } // Call users standard main entry point. (*main)(vargv.size(), argv); // Clean up parameters for(size_t ac = 0; ac < vargv.size(); ++ac) { delete[] argv[ac]; } android_app_destroy(android_app); LOGV("-android_app_entry"); return NULL; }
// This source always reports that its video buffers hold metadata.
bool SurfaceMediaSource::isMetaDataStoredInVideoBuffers() const {
    LOGV("isMetaDataStoredInVideoBuffers");
    const bool storesMetaData = true;
    return storesMetaData;
}