// Constructs a buffer provider that downmixes |inputChannelMask| audio to
// |outputChannelMask| using the platform downmix effect.  On any failure the
// effect handle mDownmixHandle is left NULL, which callers use as the
// "construction failed" signal; the constructor never throws.
DownmixerBufferProvider::DownmixerBufferProvider(
        audio_channel_mask_t inputChannelMask,
        audio_channel_mask_t outputChannelMask,
        audio_format_t format,
        uint32_t sampleRate, int32_t sessionId, size_t bufferFrameCount) :
        CopyBufferProvider(
            audio_bytes_per_sample(format) *
                    audio_channel_count_from_out_mask(inputChannelMask),
            audio_bytes_per_sample(format) *
                    audio_channel_count_from_out_mask(outputChannelMask),
            bufferFrameCount)  // set bufferFrameCount to 0 to do in-place
{
    ALOGV("DownmixerBufferProvider(%p)(%#x, %#x, %#x %u %d)",
            this, inputChannelMask, outputChannelMask, format,
            sampleRate, sessionId);
    // Bail out early if the downmix effect is unavailable or creation fails;
    // a NULL mDownmixHandle marks this provider as unusable.
    if (!sIsMultichannelCapable
            || EffectCreate(&sDwnmFxDesc.uuid,
                    sessionId,
                    SESSION_ID_INVALID_AND_IGNORED,
                    &mDownmixHandle) != 0) {
        ALOGE("DownmixerBufferProvider() error creating downmixer effect");
        mDownmixHandle = NULL;
        return;
    }
    // channel input configuration will be overridden per-track
    mDownmixConfig.inputCfg.channels = inputChannelMask;   // FIXME: Should be bits
    mDownmixConfig.outputCfg.channels = outputChannelMask; // FIXME: should be bits
    mDownmixConfig.inputCfg.format = format;
    mDownmixConfig.outputCfg.format = format;
    mDownmixConfig.inputCfg.samplingRate = sampleRate;
    mDownmixConfig.outputCfg.samplingRate = sampleRate;
    mDownmixConfig.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
    mDownmixConfig.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_WRITE;
    // input and output buffer provider, and frame count will not be used as the downmix effect
    // process() function is called directly (see DownmixerBufferProvider::getNextBuffer())
    mDownmixConfig.inputCfg.mask = EFFECT_CONFIG_SMP_RATE | EFFECT_CONFIG_CHANNELS |
            EFFECT_CONFIG_FORMAT | EFFECT_CONFIG_ACC_MODE;
    mDownmixConfig.outputCfg.mask = mDownmixConfig.inputCfg.mask;

    int cmdStatus;
    uint32_t replySize = sizeof(int);

    // Configure downmixer.  Each effect command reports failure through BOTH
    // the call's return status and the cmdStatus reply, so check both.
    status_t status = (*mDownmixHandle)->command(mDownmixHandle,
            EFFECT_CMD_SET_CONFIG /*cmdCode*/, sizeof(effect_config_t) /*cmdSize*/,
            &mDownmixConfig /*pCmdData*/,
            &replySize, &cmdStatus /*pReplyData*/);
    if (status != 0 || cmdStatus != 0) {
        ALOGE("DownmixerBufferProvider() error %d cmdStatus %d while configuring downmixer",
                status, cmdStatus);
        // Release the half-initialized effect so we do not leak it.
        EffectRelease(mDownmixHandle);
        mDownmixHandle = NULL;
        return;
    }

    // Enable downmixer
    replySize = sizeof(int);
    status = (*mDownmixHandle)->command(mDownmixHandle,
            EFFECT_CMD_ENABLE /*cmdCode*/, 0 /*cmdSize*/, NULL /*pCmdData*/,
            &replySize, &cmdStatus /*pReplyData*/);
    if (status != 0 || cmdStatus != 0) {
        ALOGE("DownmixerBufferProvider() error %d cmdStatus %d while enabling downmixer",
                status, cmdStatus);
        EffectRelease(mDownmixHandle);
        mDownmixHandle = NULL;
        return;
    }

    // Set downmix type
    // parameter size rounded for padding on 32bit boundary
    const int psizePadded = ((sizeof(downmix_params_t) - 1)/sizeof(int) + 1) * sizeof(int);
    const int downmixParamSize =
            sizeof(effect_param_t) + psizePadded + sizeof(downmix_type_t);
    effect_param_t * const param = (effect_param_t *) malloc(downmixParamSize);
    CHECK(param != NULL);
    param->psize = sizeof(downmix_params_t);
    const downmix_params_t downmixParam = DOWNMIX_PARAM_TYPE;
    memcpy(param->data, &downmixParam, param->psize);
    const downmix_type_t downmixType = DOWNMIX_TYPE_FOLD;
    param->vsize = sizeof(downmix_type_t);
    // Value starts at the padded offset, not param->psize, to keep it aligned.
    memcpy(param->data + psizePadded, &downmixType, param->vsize);
    replySize = sizeof(int);
    status = (*mDownmixHandle)->command(mDownmixHandle,
            EFFECT_CMD_SET_PARAM /* cmdCode */, downmixParamSize /* cmdSize */,
            param /*pCmdData*/, &replySize, &cmdStatus /*pReplyData*/);
    free(param);
    if (status != 0 || cmdStatus != 0) {
        ALOGE("DownmixerBufferProvider() error %d cmdStatus %d while setting downmix type",
                status, cmdStatus);
        EffectRelease(mDownmixHandle);
        mDownmixHandle = NULL;
        return;
    }
    ALOGV("DownmixerBufferProvider() downmix type set to %d", (int) downmixType);
}
// Draws one quad per context, switching EGL contexts between draws and using
// EGL fence syncs to serialize the work.  Supports workloads up to 8 only.
void ContextSwitchRenderer::drawWorkload() {
    SCOPED_TRACE();
    if (mWorkload > 8) {
        return; // This test does not support higher workloads.
    }
    // Set the background clear color to black.
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
    // No culling of back faces
    glDisable(GL_CULL_FACE);
    // No depth testing
    glDisable(GL_DEPTH_TEST);

    // Fence inserted into the current context's stream; each loop iteration
    // waits on the previous iteration's fence before drawing.
    EGLSyncKHR fence = eglCreateSyncKHR(mEglDisplay, EGL_SYNC_FENCE_KHR, NULL);

    const int TOTAL_NUM_CONTEXTS = NUM_WORKER_CONTEXTS + 1;
    const float TRANSLATION = 0.9f - (TOTAL_NUM_CONTEXTS * 0.2f);
    for (int i = 0; i < TOTAL_NUM_CONTEXTS; i++) {
        // Wait for (then recycle) the fence from the previous context's work.
        eglWaitSyncKHR(mEglDisplay, fence, 0);
        eglDestroySyncKHR(mEglDisplay, fence);
        glUseProgram(mProgramId);

        // Set the texture.
        glActiveTexture (GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, mTextureId);
        glUniform1i(mTextureUniformHandle, 0);

        // Set the x translate.
        glUniform1f(mTranslateUniformHandle, (i * 0.2f) + TRANSLATION);

        glEnableVertexAttribArray(mPositionHandle);
        glEnableVertexAttribArray(mTexCoordHandle);
        glVertexAttribPointer(mPositionHandle, 3, GL_FLOAT, false, 0, CS_VERTICES);
        glVertexAttribPointer(mTexCoordHandle, 2, GL_FLOAT, false, 0, CS_TEX_COORDS);
        glDrawArrays(GL_TRIANGLES, 0, CS_NUM_VERTICES);

        fence = eglCreateSyncKHR(mEglDisplay, EGL_SYNC_FENCE_KHR, NULL);

        // Switch to next context.
        // NOTE(review): the guard uses (mWorkload - 1) while the loop runs
        // TOTAL_NUM_CONTEXTS iterations — confirm this mismatch is intended
        // (i.e. only mWorkload-1 switches are wanted regardless of context count).
        if (i < (mWorkload - 1)) {
            eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mContexts[i]);
            // Switch to FBO and re-attach.
            if (mOffscreen) {
                glBindFramebuffer(GL_FRAMEBUFFER, mFboIds[i]);
                glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT,
                        GL_RENDERBUFFER, mFboDepthId);
                glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D,
                        mFboTexId, 0);
                glViewport(0, 0, mFboWidth, mFboHeight);
            }
        }
        GLuint err = glGetError();
        if (err != GL_NO_ERROR) {
            ALOGE("GLError %d in drawWorkload", err);
            break;
        }
    }
    // Drain the last fence, then restore the main context and its FBO state.
    eglWaitSyncKHR(mEglDisplay, fence, 0);
    eglDestroySyncKHR(mEglDisplay, fence);

    // Switch back to the main context.
    eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext);
    if (mOffscreen) {
        glBindFramebuffer(GL_FRAMEBUFFER, mFboId);
        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER,
                mFboDepthId);
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D,
                mFboTexId, 0);
        glViewport(0, 0, mFboWidth, mFboHeight);
    }
}
// Tears down the modem speech control flow for the active call in the reverse
// order of setup: VM recorder, downlink device, side tone, modem PCM, uplink
// device, AP digital path, then the modem-side speech/VT features, AFE, and
// finally the analog clock.  Returns NO_ERROR.
//
// Fix: the "Set mVtNeedOn = false" ALOGD used a "%s" format specifier with no
// matching argument (undefined behavior / garbage output); the missing
// __FUNCTION__ argument is now supplied.
status_t SpeechPhoneCallController::CloseModemSpeechControlFlow(const audio_mode_t audio_mode) {
    Mutex::Autolock _l(mLock);
    ALOGD("+%s(), audio_mode = %d", __FUNCTION__, audio_mode);

    // The active modem must match the audio mode being closed.
    const modem_index_t modem_index = mSpeechDriverFactory->GetActiveModemIndex();
    ASSERT((modem_index == MODEM_1 && audio_mode == AUDIO_MODE_IN_CALL) ||
           (modem_index == MODEM_2 && audio_mode == AUDIO_MODE_IN_CALL_2));

    // check VM need close
    SpeechVMRecorder *pSpeechVMRecorder = SpeechVMRecorder::GetInstance();
    if (pSpeechVMRecorder->GetVMRecordStatus() == true) {
        ALOGD("%s(), Close VM/EPL record", __FUNCTION__);
        pSpeechVMRecorder->Close();
    }

    // Stop PMIC digital/analog part - downlink
    mAudioResourceManager->StopOutputDevice();

    // Stop Side Tone Filter
    mAudioDigitalInstance->EnableSideToneFilter(false);

    // Stop MODEM_PCM
    mAudioDigitalInstance->SetModemPcmEnable(modem_index, false);

    // Stop PMIC digital/analog part - uplink
    mAudioResourceManager->StopInputDevice();

    // Stop AP side digital part
    CloseModemSpeechDigitalPart(modem_index,
            (audio_devices_t)mAudioResourceManager->getDlOutputDevice());

    // Get current active speech driver
    SpeechDriverInterface *pSpeechDriver = mSpeechDriverFactory->GetSpeechDriver();

    // check BGS need close
    if (pSpeechDriver->GetApSideModemStatus(BGS_STATUS_MASK) == true) {
        pSpeechDriver->BGSoundOff();
    }

    // Speech/VT off
    if (pSpeechDriver->GetApSideModemStatus(VT_STATUS_MASK) == true) {
        pSpeechDriver->PCM2WayOff();
        pSpeechDriver->VideoTelephonyOff();
    } else if (pSpeechDriver->GetApSideModemStatus(SPEECH_STATUS_MASK) == true) {
        if (pSpeechDriver->GetApSideModemStatus(TTY_STATUS_MASK) == true) {
            pSpeechDriver->TtyCtmOff();
        }
        pSpeechDriver->SpeechOff();
    } else {
        // Neither speech nor VT was on: log and assert, as closing an already
        // closed flow indicates a state-tracking bug.
        ALOGE("%s(), audio_mode = %d, Speech & VT are already closed!!", __FUNCTION__, audio_mode);
        ASSERT(pSpeechDriver->GetApSideModemStatus(VT_STATUS_MASK) == true ||
               pSpeechDriver->GetApSideModemStatus(SPEECH_STATUS_MASK) == true);
    }

    // AFE_ON = false
    mAudioDigitalInstance->SetAfeEnable(false);

    // recover sampling rate
    mAudioAnalogInstance->SetFrequency(AudioAnalogType::DEVICE_OUT_DAC, 44100);
    mAudioAnalogInstance->SetFrequency(AudioAnalogType::DEVICE_IN_ADC, 44100);

    // disable clock
    SetAfeAnalogClock(false);

    // clean VT status
    if (mVtNeedOn == true) {
        ALOGD("%s(), Set mVtNeedOn = false", __FUNCTION__);  // fixed: supply the %s argument
        mVtNeedOn = false;
    }

    ALOGD("-%s(), audio_mode = %d", __FUNCTION__, audio_mode);
    return NO_ERROR;
}
/* Helpers */
// Configures the primary display's video overlay pipe (OV_PIPE0) for the given
// layer: source dimensions/flags, crop, transform, and position, then commits.
// Returns false only if the overlay commit fails.
bool configPrimVid(hwc_context_t *ctx, hwc_layer_t *layer) {
    overlay::Overlay& ov = *(ctx->mOverlay);
    private_handle_t *hnd = (private_handle_t *)layer->handle;
    ovutils::Whf info(hnd->width, hnd->height, hnd->format, hnd->size);

    ovutils::eMdpFlags mdpFlags = ovutils::OV_MDP_FLAGS_NONE;
    // Secure buffers must run in a secure overlay session.
    if (hnd->flags & private_handle_t::PRIV_FLAGS_SECURE_BUFFER) {
        ovutils::setMdpFlags(mdpFlags,
                ovutils::OV_MDP_SECURE_OVERLAY_SESSION);
    }

    // NOTE(review): metadata is dereferenced without a NULL check — confirm
    // base_metadata is guaranteed non-NULL for video handles.
    MetaData_t *metadata = (MetaData_t *)hnd->base_metadata;
    if (metadata->paramType == PP_PARAM_INTERLACED && metadata->paramValue) {
        ovutils::setMdpFlags(mdpFlags, ovutils::OV_MDP_DEINTERLACE);
    }

    // A single layer means the video is the foreground surface.
    ovutils::eIsFg isFgFlag = ovutils::IS_FG_OFF;
    if (ctx->numHwLayers == 1) {
        isFgFlag = ovutils::IS_FG_SET;
    }

    ovutils::PipeArgs parg(mdpFlags,
            info,
            ovutils::ZORDER_0,
            isFgFlag,
            ovutils::ROT_FLAG_DISABLED);
    // Same args for all pipes; only OV_PIPE0 is actually configured below.
    ovutils::PipeArgs pargs[ovutils::MAX_PIPES] = { parg, parg, parg };
    ov.setSource(pargs, ovutils::OV_PIPE0);

    hwc_rect_t sourceCrop = layer->sourceCrop;
    // x,y,w,h
    ovutils::Dim dcrop(sourceCrop.left, sourceCrop.top,
            sourceCrop.right - sourceCrop.left,
            sourceCrop.bottom - sourceCrop.top);

    ovutils::Dim dpos;
    hwc_rect_t displayFrame = layer->displayFrame;
    dpos.x = displayFrame.left;
    dpos.y = displayFrame.top;
    dpos.w = (displayFrame.right - displayFrame.left);
    dpos.h = (displayFrame.bottom - displayFrame.top);

    //Calculate the rect for primary based on whether the supplied position
    //is within or outside bounds.
    const int fbWidth = ovutils::FrameBufferInfo::getInstance()->getWidth();
    const int fbHeight = ovutils::FrameBufferInfo::getInstance()->getHeight();

    if( displayFrame.left < 0 ||
            displayFrame.top < 0 ||
            displayFrame.right > fbWidth ||
            displayFrame.bottom > fbHeight) {
        // Destination exceeds the framebuffer: clamp crop/position in place.
        calculate_crop_rects(sourceCrop, displayFrame, fbWidth, fbHeight);

        //Update calculated width and height
        dcrop.w = sourceCrop.right - sourceCrop.left;
        dcrop.h = sourceCrop.bottom - sourceCrop.top;

        dpos.x = displayFrame.left;
        dpos.y = displayFrame.top;
        dpos.w = displayFrame.right - displayFrame.left;
        dpos.h = displayFrame.bottom - displayFrame.top;
    }

    //Only for Primary
    ov.setCrop(dcrop, ovutils::OV_PIPE0);

    int transform = layer->transform & FINAL_TRANSFORM_MASK;
    ovutils::eTransform orient =
            static_cast<ovutils::eTransform>(transform);
    ov.setTransform(orient, ovutils::OV_PIPE0);

    ov.setPosition(dpos, ovutils::OV_PIPE0);
    if (!ov.commit(ovutils::OV_PIPE0)) {
        ALOGE("%s: commit fails", __FUNCTION__);
        return false;
    }
    return true;
}
SSBSIP_MFC_ERROR_CODE SsbSipMfcEncInit(void *openHandle, void *param) { int ret, i, j,index; _MFCLIB *pCTX; enum v4l2_buf_type type; struct v4l2_format fmt; struct v4l2_plane planes[MFC_ENC_NUM_PLANES]; struct v4l2_buffer buf; struct v4l2_requestbuffers reqbuf; struct v4l2_control ctrl; struct pollfd poll_events; int poll_state; struct v4l2_ext_control ext_ctrl_mpeg4[23]; struct v4l2_ext_control ext_ctrl_h263[17]; struct v4l2_ext_control ext_ctrl[38]; struct v4l2_ext_controls ext_ctrls; int pad_value = 0; SSBSIP_MFC_ENC_H264_PARAM *h264_arg; SSBSIP_MFC_ENC_MPEG4_PARAM *mpeg4_arg; SSBSIP_MFC_ENC_H263_PARAM *h263_arg; if (openHandle == NULL) { return MFC_RET_INVALID_PARAM; } pCTX = (_MFCLIB *) openHandle; mpeg4_arg = (SSBSIP_MFC_ENC_MPEG4_PARAM*)param; if (mpeg4_arg->codecType == MPEG4_ENC) { pCTX->codecType= MPEG4_ENC; pCTX->width = mpeg4_arg->SourceWidth; pCTX->height = mpeg4_arg->SourceHeight; pCTX->framemap = mpeg4_arg->FrameMap; } else { h263_arg = (SSBSIP_MFC_ENC_H263_PARAM*)param; if (h263_arg->codecType == H263_ENC) { pCTX->codecType = H263_ENC; pCTX->width = h263_arg->SourceWidth; pCTX->height = h263_arg->SourceHeight; pCTX->framemap = h263_arg->FrameMap; } else { h264_arg = (SSBSIP_MFC_ENC_H264_PARAM*)param; if (h264_arg->codecType == H264_ENC) { pCTX->codecType = H264_ENC; pCTX->width = h264_arg->SourceWidth; pCTX->height = h264_arg->SourceHeight; pCTX->framemap = h264_arg->FrameMap; } else { ALOGE("[%s] Undefined codec type \n",__func__); ret = MFC_RET_INVALID_PARAM; goto error_case1; } } } switch (pCTX->codecType) { case MPEG4_ENC: ext_ctrl_mpeg4[0].id = V4L2_CID_MPEG_VIDEO_MPEG4_PROFILE; ext_ctrl_mpeg4[0].value = mpeg4_arg->ProfileIDC; ext_ctrl_mpeg4[1].id = V4L2_CID_MPEG_VIDEO_MPEG4_LEVEL; ext_ctrl_mpeg4[1].value = mpeg4_arg->LevelIDC; ext_ctrl_mpeg4[2].id = V4L2_CID_MPEG_VIDEO_GOP_SIZE; ext_ctrl_mpeg4[2].value = mpeg4_arg->IDRPeriod; ext_ctrl_mpeg4[3].id = V4L2_CID_MPEG_VIDEO_MPEG4_QPEL; ext_ctrl_mpeg4[3].value = mpeg4_arg->DisableQpelME; 
ext_ctrl_mpeg4[4].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE; ext_ctrl_mpeg4[4].value = mpeg4_arg->SliceMode; /* 0: one, 1: fixed #mb, 3: fixed #bytes */ if (mpeg4_arg->SliceMode == 0) { ext_ctrl_mpeg4[5].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; ext_ctrl_mpeg4[5].value = 1; /* default */ ext_ctrl_mpeg4[6].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; ext_ctrl_mpeg4[6].value = 1900; /* default */ } else if (mpeg4_arg->SliceMode == 1) { ext_ctrl_mpeg4[5].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; ext_ctrl_mpeg4[5].value = mpeg4_arg->SliceArgument; ext_ctrl_mpeg4[6].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; ext_ctrl_mpeg4[6].value = 1900; /* default */ } else if (mpeg4_arg->SliceMode == 3) { ext_ctrl_mpeg4[5].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; ext_ctrl_mpeg4[5].value = 1; /* default */ ext_ctrl_mpeg4[6].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; ext_ctrl_mpeg4[6].value = mpeg4_arg->SliceArgument; } /* It should be set using mpeg4_arg->NumberBFrames after being handled by appl. 
*/ ext_ctrl_mpeg4[7].id = V4L2_CID_MPEG_VIDEO_B_FRAMES; ext_ctrl_mpeg4[7].value = mpeg4_arg->NumberBFrames; ext_ctrl_mpeg4[8].id = V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB; ext_ctrl_mpeg4[8].value = mpeg4_arg->RandomIntraMBRefresh; ext_ctrl_mpeg4[9].id = V4L2_CID_MPEG_MFC51_VIDEO_PADDING; ext_ctrl_mpeg4[9].value = mpeg4_arg->PadControlOn; /* TODO: Verify the padding values assignment */ pad_value |= mpeg4_arg->CrPadVal; pad_value |= mpeg4_arg->CbPadVal << 8; pad_value |= mpeg4_arg->LumaPadVal << 16; ext_ctrl_mpeg4[10].id = V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV; ext_ctrl_mpeg4[10].value = pad_value; ext_ctrl_mpeg4[11].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE; ext_ctrl_mpeg4[11].value = mpeg4_arg->EnableFRMRateControl; ext_ctrl_mpeg4[12].id = V4L2_CID_MPEG_VIDEO_BITRATE; ext_ctrl_mpeg4[12].value = mpeg4_arg->Bitrate; ext_ctrl_mpeg4[13].id = V4L2_CID_MPEG_VIDEO_MPEG4_I_FRAME_QP; ext_ctrl_mpeg4[13].value = mpeg4_arg->FrameQp; ext_ctrl_mpeg4[14].id = V4L2_CID_MPEG_VIDEO_MPEG4_P_FRAME_QP; ext_ctrl_mpeg4[14].value = mpeg4_arg->FrameQp_P; ext_ctrl_mpeg4[15].id = V4L2_CID_MPEG_VIDEO_MPEG4_B_FRAME_QP; ext_ctrl_mpeg4[15].value = mpeg4_arg->FrameQp_B; ext_ctrl_mpeg4[16].id = V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP; ext_ctrl_mpeg4[16].value = mpeg4_arg->QSCodeMax; ext_ctrl_mpeg4[17].id = V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP; ext_ctrl_mpeg4[17].value = mpeg4_arg->QSCodeMin; ext_ctrl_mpeg4[18].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; ext_ctrl_mpeg4[18].value = mpeg4_arg->CBRPeriodRf; if (V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT == pCTX->enc_frameskip) { ext_ctrl_mpeg4[19].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; ext_ctrl_mpeg4[19].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT; } else if(V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT == pCTX->enc_frameskip) { ext_ctrl_mpeg4[19].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; ext_ctrl_mpeg4[19].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT; } else { /* ENC_FRAME_SKIP_MODE_DISABLE (default) */ 
ext_ctrl_mpeg4[19].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; ext_ctrl_mpeg4[19].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED; } ext_ctrl_mpeg4[20].id = V4L2_CID_MPEG_VIDEO_VBV_SIZE; ext_ctrl_mpeg4[20].value = 0; ext_ctrl_mpeg4[21].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; ext_ctrl_mpeg4[21].value = 0; ext_ctrl_mpeg4[22].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT; ext_ctrl_mpeg4[22].value = 1; break; case H263_ENC: ext_ctrl_h263[0].id = V4L2_CID_MPEG_VIDEO_GOP_SIZE; ext_ctrl_h263[0].value = h263_arg->IDRPeriod; ext_ctrl_h263[1].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE; ext_ctrl_h263[1].value = h263_arg->SliceMode; /* 0: one, Check is needed if h264 support multi-slice */ ext_ctrl_h263[2].id = V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB; ext_ctrl_h263[2].value = h263_arg->RandomIntraMBRefresh; ext_ctrl_h263[3].id = V4L2_CID_MPEG_MFC51_VIDEO_PADDING; ext_ctrl_h263[3].value = h263_arg->PadControlOn; /* TODO: Verify the padding values assignment */ pad_value |= h263_arg->CrPadVal; pad_value |= h263_arg->CbPadVal << 8; pad_value |= h263_arg->LumaPadVal << 16; ext_ctrl_h263[4].id = V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV; ext_ctrl_h263[4].value = pad_value; ext_ctrl_h263[5].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE; ext_ctrl_h263[5].value = h263_arg->EnableFRMRateControl; ext_ctrl_h263[6].id = V4L2_CID_MPEG_VIDEO_BITRATE; ext_ctrl_h263[6].value = h263_arg->Bitrate; ext_ctrl_h263[7].id = V4L2_CID_MPEG_VIDEO_H263_I_FRAME_QP; ext_ctrl_h263[7].value = h263_arg->FrameQp; ext_ctrl_h263[8].id = V4L2_CID_MPEG_VIDEO_H263_P_FRAME_QP; ext_ctrl_h263[8].value = h263_arg->FrameQp_P; ext_ctrl_h263[9].id = V4L2_CID_MPEG_VIDEO_H263_MAX_QP; ext_ctrl_h263[9].value = h263_arg->QSCodeMax; ext_ctrl_h263[10].id = V4L2_CID_MPEG_VIDEO_H263_MIN_QP; ext_ctrl_h263[10].value = h263_arg->QSCodeMin; ext_ctrl_h263[11].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; ext_ctrl_h263[11].value = h263_arg->CBRPeriodRf; if (V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT == 
pCTX->enc_frameskip) { ext_ctrl_h263[12].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; ext_ctrl_h263[12].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT; } else if(V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT == pCTX->enc_frameskip) { ext_ctrl_h263[12].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; ext_ctrl_h263[12].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT; } else { /* ENC_FRAME_SKIP_MODE_DISABLE (default) */ ext_ctrl_h263[12].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; ext_ctrl_h263[12].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED; } ext_ctrl_h263[13].id = V4L2_CID_MPEG_VIDEO_VBV_SIZE; ext_ctrl_h263[13].value = 0; ext_ctrl_h263[14].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; ext_ctrl_h263[14].value = 0; ext_ctrl_h263[15].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT; ext_ctrl_h263[15].value = 1; ext_ctrl_h263[16].id = V4L2_CID_MPEG_VIDEO_B_FRAMES; ext_ctrl_h263[16].value = 2; break; case H264_ENC: ext_ctrl[0].id = V4L2_CID_MPEG_VIDEO_H264_PROFILE; ext_ctrl[0].value = h264_arg->ProfileIDC; ext_ctrl[1].id = V4L2_CID_MPEG_VIDEO_H264_LEVEL; ext_ctrl[1].value = h264_arg->LevelIDC; ext_ctrl[2].id = V4L2_CID_MPEG_VIDEO_GOP_SIZE; ext_ctrl[2].value = h264_arg->IDRPeriod; ext_ctrl[3].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_NUM_REF_PIC_FOR_P; ext_ctrl[3].value = h264_arg->NumberRefForPframes; ext_ctrl[4].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE; ext_ctrl[4].value = h264_arg->SliceMode; /* 0: one, 1: fixed #mb, 3: fixed #bytes */ if (h264_arg->SliceMode == 0) { ext_ctrl[5].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; ext_ctrl[5].value = 1; /* default */ ext_ctrl[6].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; ext_ctrl[6].value = 1900; /* default */ } else if (h264_arg->SliceMode == 1) { ext_ctrl[5].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; ext_ctrl[5].value = h264_arg->SliceArgument; ext_ctrl[6].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; ext_ctrl[6].value = 1900; /* default */ } else if (h264_arg->SliceMode == 3) { 
ext_ctrl[5].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; ext_ctrl[5].value = 1; /* default */ ext_ctrl[6].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; ext_ctrl[6].value = h264_arg->SliceArgument; } /* It should be set using h264_arg->NumberBFrames after being handled by appl. */ ext_ctrl[7].id = V4L2_CID_MPEG_VIDEO_B_FRAMES; ext_ctrl[7].value = h264_arg->NumberBFrames; ext_ctrl[8].id = V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_MODE; ext_ctrl[8].value = h264_arg->LoopFilterDisable; ext_ctrl[9].id = V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_ALPHA; ext_ctrl[9].value = h264_arg->LoopFilterAlphaC0Offset; ext_ctrl[10].id = V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_BETA; ext_ctrl[10].value = h264_arg->LoopFilterBetaOffset; ext_ctrl[11].id = V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE; ext_ctrl[11].value = h264_arg->SymbolMode; ext_ctrl[12].id = V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM; ext_ctrl[12].value = h264_arg->Transform8x8Mode; ext_ctrl[13].id = V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB; ext_ctrl[13].value = h264_arg->RandomIntraMBRefresh; ext_ctrl[14].id = V4L2_CID_MPEG_MFC51_VIDEO_PADDING; ext_ctrl[14].value = h264_arg->PadControlOn; pad_value |= h264_arg->CrPadVal; pad_value |= h264_arg->CbPadVal << 8; pad_value |= h264_arg->LumaPadVal << 16; ext_ctrl[15].id = V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV; ext_ctrl[15].value = pad_value; ext_ctrl[16].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE; ext_ctrl[16].value = h264_arg->EnableFRMRateControl; ext_ctrl[17].id = V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE; ext_ctrl[17].value = h264_arg->EnableMBRateControl; ext_ctrl[18].id = V4L2_CID_MPEG_VIDEO_BITRATE; /* FIXME temporary fix */ if (h264_arg->Bitrate) ext_ctrl[18].value = h264_arg->Bitrate; else ext_ctrl[18].value = 1; /* just for testing Movi studio */ ext_ctrl[19].id = V4L2_CID_MPEG_VIDEO_H264_I_FRAME_QP; ext_ctrl[19].value = h264_arg->FrameQp; ext_ctrl[20].id = V4L2_CID_MPEG_VIDEO_H264_P_FRAME_QP; ext_ctrl[20].value = h264_arg->FrameQp_P; ext_ctrl[21].id = V4L2_CID_MPEG_VIDEO_H264_B_FRAME_QP; 
ext_ctrl[21].value = h264_arg->FrameQp_B; ext_ctrl[22].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP; ext_ctrl[22].value = h264_arg->QSCodeMax; ext_ctrl[23].id = V4L2_CID_MPEG_VIDEO_H264_MIN_QP; ext_ctrl[23].value = h264_arg->QSCodeMin; ext_ctrl[24].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; ext_ctrl[24].value = h264_arg->CBRPeriodRf; ext_ctrl[25].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_DARK; ext_ctrl[25].value = h264_arg->DarkDisable; ext_ctrl[26].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_SMOOTH; ext_ctrl[26].value = h264_arg->SmoothDisable; ext_ctrl[27].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_STATIC; ext_ctrl[27].value = h264_arg->StaticDisable; ext_ctrl[28].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_ACTIVITY; ext_ctrl[28].value = h264_arg->ActivityDisable; /* doesn't have to be set */ ext_ctrl[29].id = V4L2_CID_MPEG_VIDEO_GOP_CLOSURE; ext_ctrl[29].value = 0; ext_ctrl[30].id = V4L2_CID_MPEG_VIDEO_H264_I_PERIOD; ext_ctrl[30].value = 10; if (V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT == pCTX->enc_frameskip) { ext_ctrl[31].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; ext_ctrl[31].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT; } else if(V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT == pCTX->enc_frameskip) { ext_ctrl[31].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; ext_ctrl[31].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT; } else { /* ENC_FRAME_SKIP_MODE_DISABLE (default) */ ext_ctrl[31].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; ext_ctrl[31].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED; } ext_ctrl[32].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; ext_ctrl[32].value = 0; /* 0: seperated header 1: header + first frame */ ext_ctrl[33].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT; ext_ctrl[33].value = 1; ext_ctrl[34].id = V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE; ext_ctrl[34].value = 0; ext_ctrl[35].id = V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC; ext_ctrl[35].value = 0; ext_ctrl[36].id = 
V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_WIDTH; ext_ctrl[36].value = 0; ext_ctrl[37].id = V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_HEIGHT; ext_ctrl[37].value = 0; break; default: ALOGE("[%s] Undefined codec type",__func__); ret = MFC_RET_INVALID_PARAM; goto error_case1; } ext_ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG; if (pCTX->codecType == MPEG4_ENC) { ext_ctrls.count = 23; ext_ctrls.controls = ext_ctrl_mpeg4; } else if (pCTX->codecType == H264_ENC) { ext_ctrls.count = 38; ext_ctrls.controls = ext_ctrl; } else if (pCTX->codecType == H263_ENC) { ext_ctrls.count = 17; ext_ctrls.controls = ext_ctrl_h263; } ret = ioctl(pCTX->hMFC, VIDIOC_S_EXT_CTRLS, &ext_ctrls); if (ret != 0) { ALOGE("[%s] Failed to set extended controls",__func__); ret = MFC_RET_ENC_INIT_FAIL; goto error_case1; } memset(&fmt, 0, sizeof(fmt)); fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; fmt.fmt.pix_mp.width = pCTX->width; fmt.fmt.pix_mp.height = pCTX->height; fmt.fmt.pix_mp.num_planes = 2; fmt.fmt.pix_mp.plane_fmt[0].bytesperline = Align(fmt.fmt.pix_mp.width, 128); fmt.fmt.pix_mp.plane_fmt[1].bytesperline = Align(fmt.fmt.pix_mp.width, 128); if (NV12_TILE == pCTX->framemap) { fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12MT; /* 4:2:0, 2 Planes, 64x32 Tiles */ fmt.fmt.pix_mp.plane_fmt[0].sizeimage = Align(Align(fmt.fmt.pix_mp.width, 128) * Align(fmt.fmt.pix_mp.height, 32), 8192); /* tiled mode */ fmt.fmt.pix_mp.plane_fmt[1].sizeimage = Align(Align(fmt.fmt.pix_mp.width, 128) * Align(fmt.fmt.pix_mp.height >> 1, 32), 8192); /* tiled mode */ } else { /* NV12_LINEAR (default) */
// One-time initialization of the HW composer: gralloc module, DRM, vsync
// handlers, buffer managers, plane manager, and per-display devices.
// On failure, the goto ladder unwinds in reverse creation order and
// returns false; on success sets mInitialized and returns true.
bool IntelHWComposer::initialize() {
    bool ret = true;

    //TODO: replace the hard code buffer type later
    // NOTE(review): bufferType is currently unused below.
    int bufferType = IntelBufferManager::TTM_BUFFER;

    ALOGD_IF(ALLOW_HWC_PRINT, "%s\n", __func__);

    if (hw_get_module(GRALLOC_HARDWARE_MODULE_ID,
                (const hw_module_t**)&mGrallocModule) != 0) {
        ALOGE("%s: failed to open IMG GRALLOC module\n", __func__);
        goto err;
    }

    //create new DRM object if not exists
    if (!mDrm) {
        mDrm = &IntelHWComposerDrm::getInstance();
        if (!mDrm) {
            ALOGE("%s: Invalid DRM object\n", __func__);
            goto err;
        }

        ret = mDrm->initialize(this);
        if (ret == false) {
            ALOGE("%s: failed to initialize DRM instance\n", __func__);
            goto drm_err;
        }
    }

    //create Vsync Event Handler
    mVsync = new IntelVsyncEventHandler(this, mDrm->getDrmFd());

    mFakeVsync = new IntelFakeVsyncEvent(this);

    //create new buffer manager and initialize it
    if (!mBufferManager) {
        //mBufferManager = new IntelTTMBufferManager(mDrm->getDrmFd());
        mBufferManager = new IntelBCDBufferManager(mDrm->getDrmFd());
        if (!mBufferManager) {
            ALOGE("%s: Failed to create buffer manager\n", __func__);
            goto drm_err;
        }
        // do initialization
        ret = mBufferManager->initialize();
        if (ret == false) {
            ALOGE("%s: Failed to initialize buffer manager\n", __func__);
            goto bm_err;
        }
    }

    // create buffer manager for gralloc buffer
    if (!mGrallocBufferManager) {
        //mGrallocBufferManager = new IntelPVRBufferManager(mDrm->getDrmFd());
        mGrallocBufferManager = new IntelGraphicBufferManager(mDrm->getDrmFd());
        if (!mGrallocBufferManager) {
            ALOGE("%s: Failed to create Gralloc buffer manager\n", __func__);
            goto bm_err;
        }

        ret = mGrallocBufferManager->initialize();
        if (ret == false) {
            ALOGE("%s: Failed to initialize Gralloc buffer manager\n", __func__);
            goto gralloc_bm_err;
        }
    }

    // create new display plane manager
    if (!mPlaneManager) {
        mPlaneManager =
            new IntelDisplayPlaneManager(mDrm->getDrmFd(),
                    mBufferManager, mGrallocBufferManager);
        if (!mPlaneManager) {
            ALOGE("%s: Failed to create plane manager\n", __func__);
            goto gralloc_bm_err;
        }
    }

    // create display devices
    memset(mDisplayDevice, 0, sizeof(mDisplayDevice));
    for (size_t i=0; i<DISPLAY_NUM; i++) {
        if (i == HWC_DISPLAY_PRIMARY)
            mDisplayDevice[i] =
                new IntelMIPIDisplayDevice(mBufferManager, mGrallocBufferManager,
                        mPlaneManager, mDrm, &mExtendedModeInfo, i);
#ifdef TARGET_HAS_MULTIPLE_DISPLAY
        else if (i == HWC_DISPLAY_EXTERNAL)
            mDisplayDevice[i] =
                new IntelHDMIDisplayDevice(mBufferManager, mGrallocBufferManager,
                        mPlaneManager, mDrm, i);
#endif
#ifdef INTEL_WIDI
        else if (i == HWC_DISPLAY_VIRTUAL)
            mDisplayDevice[i] =
                new WidiDisplayDevice(mBufferManager, mGrallocBufferManager,
                        mPlaneManager, mDrm, &mExtendedModeInfo, i);
#endif
        else
            continue;
        if (!mDisplayDevice[i]) {
            // NOTE(review): message says "plane manager" but this is the
            // display-device creation path — looks copy-pasted; confirm.
            ALOGE("%s: Failed to create plane manager\n", __func__);
            goto device_err;
        }
    }

    // init mHDMIBuffers
    memset(&mHDMIFBHandle, 0, sizeof(mHDMIFBHandle));
    // NOTE(review): mExtendedModeInfo is zeroed AFTER its address was handed
    // to the display devices above — safe only if they store the pointer and
    // read it later; confirm.
    memset(&mExtendedModeInfo, 0, sizeof(mExtendedModeInfo));

    // do mode setting in HWC if HDMI is connected when boot up
    if (mDrm->detectDisplayConnection(OUTPUT_HDMI))
        handleHotplugEvent(1, NULL);

    char value[PROPERTY_VALUE_MAX];
    property_get("hwcomposer.debug.dumpPost2", value, "0");
    if (atoi(value))
        mForceDumpPostBuffer = true;

    // startObserver();
    mInitialized = true;

    ALOGD_IF(ALLOW_HWC_PRINT, "%s: successfully\n", __func__);
    return true;

device_err:
    for (size_t i=0; i<DISPLAY_NUM; i++) {
        if (mDisplayDevice[i])
            delete mDisplayDevice[i];
        mDisplayDevice[i] = 0;
    }
// NOTE(review): pm_err is never the target of a goto; kept for fall-through
// cleanup from device_err.
pm_err:
    if (mPlaneManager)
        delete mPlaneManager;
    mPlaneManager = 0;
gralloc_bm_err:
    if (mGrallocBufferManager)
        delete mGrallocBufferManager;
    mGrallocBufferManager = 0;
bm_err:
    if (mBufferManager)
        delete mBufferManager;
    mBufferManager = 0;
drm_err:
    // NOTE(review): mDrm points at IntelHWComposerDrm::getInstance() — deleting
    // a singleton obtained by reference looks suspect; confirm ownership.
    if (mDrm)
        delete mDrm;
    mDrm = 0;
err:
    return false;
}
bool IntelHWComposer::setFramecount(int cmd, int count, int x, int y) { struct drm_psb_register_rw_arg arg; struct psb_gtt_mapping_arg gttarg; uint32_t fb_size = 0; uint8_t* pDatabuff = NULL; uint8_t w = 128; uint8_t h = 128; int ret = 0; void *virtAddr; uint32_t size; PVR2D_ULONG uFlags = 0; PVR2DERROR err; switch(cmd){ case 0: if (mCursorBufferManager) { if (cursorDataBuffer) { ret = mCursorBufferManager->updateCursorReg(count, cursorDataBuffer, 0, 0, w, h, true); if (ret == false) { ALOGE("%s: Failed to update Cursor content\n", __func__); return false; } } } break; case 1: if (!mCursorBufferManager) { mCursorBufferManager = new IntelPVRBufferManager(mDrm->getDrmFd()); if (!mCursorBufferManager) { ALOGE("%s: Failed to create Cursor buffer manager\n", __func__); ret = false; goto gralloc_bm_err; } ret = mCursorBufferManager->initialize(); if (ret == false) { ALOGE("%s: Failed to initialize Cursor buffer manager\n", __func__); goto gralloc_bm_err; } cursorDataBuffer = mCursorBufferManager->curAlloc(w, h); if (!cursorDataBuffer) { ALOGE("%s: Failed to alloc Cursor buffer memory\n", __func__); ret = false; goto gralloc_bm_err; } ret = mCursorBufferManager->updateCursorReg(0, cursorDataBuffer, x, y, w, h, true); if (ret == false) { ALOGE("%s: Failed to update Cursor content\n", __func__); return false; } } break; case 2: if (mCursorBufferManager) { if (cursorDataBuffer) { ret = mCursorBufferManager->updateCursorReg(0, cursorDataBuffer, x, y, w, h, false); if (ret == false) { ALOGE("%s: Failed to update Cursor content\n", __func__); return false; } } mCursorBufferManager->curFree(cursorDataBuffer); mCursorBufferManager = 0; delete mCursorBufferManager; } break; } return ret; gralloc_bm_err: mCursorBufferManager = 0; delete mCursorBufferManager; return false; }
// Logs an error for an invalid media-codec <Limit> entry and returns -EINVAL.
// name: the limit's name attribute; msg: description of what is wrong with it.
// Changed to take name by const reference: the original by-value AString
// forced a copy on every call just to log it.
static status_t limitError(const AString &name, const char *msg) {
    ALOGE("limit '%s' %s", name.c_str(), msg);
    return -EINVAL;
}
// Logs an error for a <Limit> entry carrying an invalid attribute value and
// returns -EINVAL.  name: the limit's name; attr: the offending attribute;
// value: its rejected value.
// Changed to take the AString parameters by const reference: the original
// by-value copies were needless for a log-and-return helper (consistent with
// limitError above).
static status_t limitInvalidAttr(const AString &name, const char *attr, const AString &value) {
    ALOGE("limit '%s' with invalid '%s' attribute (%s)", name.c_str(),
            attr, value.c_str());
    return -EINVAL;
}
/*
 * Pump one message from the player's message queue.
 *
 * mp    : player instance (must be non-NULL).
 * msg   : out-parameter receiving the dequeued message.
 * block : passed through to msg_queue_get(); when non-zero the call
 *         blocks until a message arrives.
 *
 * Returns the value from msg_queue_get() (<= 0 on failure/abort, > 0 on
 * success). Internal request messages (FFP_REQ_*) are handled here and
 * swallowed: the loop continues instead of returning them to the caller.
 */
int ijkmp_get_msg(IjkMediaPlayer *mp, AVMessage *msg, int block)
{
    assert(mp);
    while (1) {
        /* Set to 1 when the message is consumed internally and must not
         * be surfaced to the caller. */
        int continue_wait_next_msg = 0;
        int retval = msg_queue_get(&mp->ffplayer->msg_queue, msg, block);
        if (retval <= 0)
            return retval;
        switch (msg->what) {
        case FFP_MSG_PREPARED:
            MPTRACE("ijkmp_get_msg: FFP_MSG_PREPARED\n");
            pthread_mutex_lock(&mp->mutex);
            /* Only a player that is still async-preparing may move to
             * PREPARED; anything else indicates a state-machine bug. */
            if (mp->mp_state == MP_STATE_ASYNC_PREPARING) {
                ijkmp_change_state_l(mp, MP_STATE_PREPARED);
            } else {
                // FIXME: 1: onError() ?
                ALOGE("FFP_MSG_PREPARED: expecting mp_state==MP_STATE_ASYNC_PREPARING\n");
            }
            pthread_mutex_unlock(&mp->mutex);
            break;
        case FFP_MSG_COMPLETED:
            MPTRACE("ijkmp_get_msg: FFP_MSG_COMPLETED\n");
            pthread_mutex_lock(&mp->mutex);
            /* A completed stream restarts from position 0 on next start. */
            mp->restart_from_beginning = 1;
            ijkmp_change_state_l(mp, MP_STATE_COMPLETED);
            pthread_mutex_unlock(&mp->mutex);
            break;
        case FFP_MSG_SEEK_COMPLETE:
            MPTRACE("ijkmp_get_msg: FFP_MSG_SEEK_COMPLETE\n");
            pthread_mutex_lock(&mp->mutex);
            /* Clear the pending-seek bookkeeping. */
            mp->seek_req = 0;
            mp->seek_msec = 0;
            pthread_mutex_unlock(&mp->mutex);
            break;
        case FFP_REQ_START:
            MPTRACE("ijkmp_get_msg: FFP_REQ_START\n");
            continue_wait_next_msg = 1;
            pthread_mutex_lock(&mp->mutex);
            if (0 == ikjmp_chkst_start_l(mp->mp_state)) {
                // FIXME: 8 check seekable
                if (mp->mp_state == MP_STATE_COMPLETED) {
                    /* Restart either from 0 or from the last seek pos,
                     * depending on whether playback ran to completion. */
                    if (mp->restart_from_beginning) {
                        ALOGD("ijkmp_get_msg: FFP_REQ_START: restart from beginning\n");
                        retval = ffp_start_from_l(mp->ffplayer, 0);
                        if (retval == 0)
                            ijkmp_change_state_l(mp, MP_STATE_STARTED);
                    } else {
                        ALOGD("ijkmp_get_msg: FFP_REQ_START: restart from seek pos\n");
                        retval = ffp_start_l(mp->ffplayer);
                        if (retval == 0)
                            ijkmp_change_state_l(mp, MP_STATE_STARTED);
                    }
                    mp->restart_from_beginning = 0;
                } else {
                    ALOGD("ijkmp_get_msg: FFP_REQ_START: start on fly\n");
                    retval = ffp_start_l(mp->ffplayer);
                    if (retval == 0)
                        ijkmp_change_state_l(mp, MP_STATE_STARTED);
                }
            }
            pthread_mutex_unlock(&mp->mutex);
            break;
        case FFP_REQ_PAUSE:
            MPTRACE("ijkmp_get_msg: FFP_REQ_PAUSE\n");
            continue_wait_next_msg = 1;
            pthread_mutex_lock(&mp->mutex);
            if (0 == ikjmp_chkst_pause_l(mp->mp_state)) {
                int pause_ret = ffp_pause_l(mp->ffplayer);
                if (pause_ret == 0)
                    ijkmp_change_state_l(mp, MP_STATE_PAUSED);
            }
            pthread_mutex_unlock(&mp->mutex);
            break;
        case FFP_REQ_SEEK:
            MPTRACE("ijkmp_get_msg: FFP_REQ_SEEK\n");
            continue_wait_next_msg = 1;
            pthread_mutex_lock(&mp->mutex);
            if (0 == ikjmp_chkst_seek_l(mp->mp_state)) {
                /* arg1 carries the target position in msec. */
                if (0 == ffp_seek_to_l(mp->ffplayer, msg->arg1)) {
                    ALOGD("ijkmp_get_msg: FFP_REQ_SEEK: seek to %d\n", (int)msg->arg1);
                    mp->restart_from_beginning = 0;
                }
            }
            pthread_mutex_unlock(&mp->mutex);
            break;
        }
        if (continue_wait_next_msg)
            continue;
        return retval;
    }
    /* Unreachable: the loop only exits via return. */
    return -1;
}
// Reports a <Limit> element that either carries an attribute it must not
// have (found == true) or lacks one it requires (found == false).
static status_t limitFoundMissingAttr(AString name, const char *attr, bool found = true) {
    const char *presence = found ? "" : "no ";
    ALOGE("limit '%s' with %s'%s' attribute", name.c_str(), presence, attr);
    return -EINVAL;
}
// Builds and realizes an OpenSL ES audio player for either a file
// descriptor (SL_DATALOCATOR_ANDROIDFD) or a URI (SL_DATALOCATOR_URI)
// source, then acquires the play/seek/volume interfaces and registers
// the head-at-end playback callback.
//
// Returns true on success; false on unsupported locator type or any
// OpenSL ES failure.
bool UrlAudioPlayer::prepare(const std::string &url, SLuint32 locatorType,
                             std::shared_ptr<AssetFd> assetFd, int start, int length)
{
    _url = url;
    _assetFd = assetFd;

    ALOGV("UrlAudioPlayer::prepare: %s, %d, %d, %d, %d", _url.c_str(), (int)locatorType,
          assetFd->getFd(), start, length);
    SLDataSource audioSrc;

    SLDataFormat_MIME formatMime = {SL_DATAFORMAT_MIME, nullptr, SL_CONTAINERTYPE_UNSPECIFIED};
    audioSrc.pFormat = &formatMime;

    //Note: locFd & locUri should be outside of the following if/else block
    // Although locFd & locUri are only used inside if/else block, its lifecycle
    // will be destroyed right after '}' block. And since we pass a pointer to
    // 'audioSrc.pLocator=&locFd/&locUri', pLocator will point to an invalid address
    // while invoking Engine::createAudioPlayer interface. So be care of change the position
    // of these two variables.
    SLDataLocator_AndroidFD locFd;
    SLDataLocator_URI locUri;

    if (locatorType == SL_DATALOCATOR_ANDROIDFD) {
        locFd = {locatorType, _assetFd->getFd(), start, length};
        audioSrc.pLocator = &locFd;
    } else if (locatorType == SL_DATALOCATOR_URI) {
        locUri = {locatorType, (SLchar *) _url.c_str()};
        audioSrc.pLocator = &locUri;
        ALOGV("locUri: locatorType: %d", (int)locUri.locatorType);
    } else {
        ALOGE("Oops, invalid locatorType: %d", (int)locatorType);
        return false;
    }

    // configure audio sink
    SLDataLocator_OutputMix locOutmix = {SL_DATALOCATOR_OUTPUTMIX, _outputMixObj};
    SLDataSink audioSnk = {&locOutmix, nullptr};

    // create audio player; request seek, prefetch-status and volume
    // interfaces as mandatory (all SL_BOOLEAN_TRUE).
    const SLInterfaceID ids[3] = {SL_IID_SEEK, SL_IID_PREFETCHSTATUS, SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    SLresult result = (*_engineItf)->CreateAudioPlayer(_engineItf, &_playObj, &audioSrc, &audioSnk,
                                                       3, ids, req);
    SL_RETURN_VAL_IF_FAILED(result, false, "CreateAudioPlayer failed");

    // realize the player (synchronously: SL_BOOLEAN_FALSE means not async)
    result = (*_playObj)->Realize(_playObj, SL_BOOLEAN_FALSE);
    SL_RETURN_VAL_IF_FAILED(result, false, "Realize failed");

    // get the play interface
    result = (*_playObj)->GetInterface(_playObj, SL_IID_PLAY, &_playItf);
    SL_RETURN_VAL_IF_FAILED(result, false, "GetInterface SL_IID_PLAY failed");

    // get the seek interface
    result = (*_playObj)->GetInterface(_playObj, SL_IID_SEEK, &_seekItf);
    SL_RETURN_VAL_IF_FAILED(result, false, "GetInterface SL_IID_SEEK failed");

    // get the volume interface
    result = (*_playObj)->GetInterface(_playObj, SL_IID_VOLUME, &_volumeItf);
    SL_RETURN_VAL_IF_FAILED(result, false, "GetInterface SL_IID_VOLUME failed");

    // get notified when playback reaches the end of the stream
    result = (*_playItf)->RegisterCallback(_playItf,
                                           SLUrlAudioPlayerCallbackProxy::playEventCallback, this);
    SL_RETURN_VAL_IF_FAILED(result, false, "RegisterCallback failed");

    result = (*_playItf)->SetCallbackEventsMask(_playItf, SL_PLAYEVENT_HEADATEND);
    SL_RETURN_VAL_IF_FAILED(result, false, "SetCallbackEventsMask SL_PLAYEVENT_HEADATEND failed");

    setState(State::INITIALIZED);

    setVolume(1.0f);

    return true;
}
/*
 * Look up an entry.
 *
 * We probe on collisions, wrapping around the table.
 *
 * pHashTable : table to search (size must be a power of two; the index
 *              mask `tableSize-1` relies on it).
 * itemHash   : precomputed hash of `item`.
 * item       : item to find; must not be NULL or the tombstone marker.
 * cmpFunc    : comparator returning 0 on match.
 * doAdd      : when true, insert `item` if no match is found.
 *
 * Returns the matching entry's data, the newly added `item` (doAdd),
 * or NULL when absent and doAdd is false.
 */
void* dvmHashTableLookup(HashTable* pHashTable, u4 itemHash, void* item,
    HashCompareFunc cmpFunc, bool doAdd)
{
    HashEntry* pEntry;
    HashEntry* pEnd;
    void* result = NULL;

    assert(pHashTable->tableSize > 0);
    assert(item != HASH_TOMBSTONE);
    assert(item != NULL);

    /* jump to the first entry and probe for a match */
    pEntry = &pHashTable->pEntries[itemHash & (pHashTable->tableSize-1)];
    pEnd = &pHashTable->pEntries[pHashTable->tableSize];
    while (pEntry->data != NULL) {
        /* tombstones are skipped but do not terminate the probe chain */
        if (pEntry->data != HASH_TOMBSTONE &&
            pEntry->hashValue == itemHash &&
            (*cmpFunc)(pEntry->data, item) == 0)
        {
            /* match */
            //ALOGD("+++ match on entry %d", pEntry - pHashTable->pEntries);
            break;
        }

        pEntry++;
        if (pEntry == pEnd) {     /* wrap around to start */
            if (pHashTable->tableSize == 1)
                break;      /* edge case - single-entry table */
            pEntry = pHashTable->pEntries;
        }

        //ALOGI("+++ look probing %d...", pEntry - pHashTable->pEntries);
    }

    if (pEntry->data == NULL) {
        if (doAdd) {
            pEntry->hashValue = itemHash;
            pEntry->data = item;
            pHashTable->numEntries++;

            /*
             * We've added an entry. See if this brings us too close to full.
             * Dead (tombstoned) entries count toward the load factor because
             * they still lengthen probe chains.
             */
            if ((pHashTable->numEntries+pHashTable->numDeadEntries) * LOAD_DENOM
                > pHashTable->tableSize * LOAD_NUMER)
            {
                if (!resizeHash(pHashTable, pHashTable->tableSize * 2)) {
                    /* don't really have a way to indicate failure */
                    ALOGE("Dalvik hash resize failure");
                    dvmAbort();
                }
                /* note "pEntry" is now invalid */
            } else {
                //ALOGW("okay %d/%d/%d",
                //    pHashTable->numEntries, pHashTable->tableSize,
                //    (pHashTable->tableSize * LOAD_NUMER) / LOAD_DENOM);
            }

            /* full table is bad -- search for nonexistent never halts */
            assert(pHashTable->numEntries < pHashTable->tableSize);
            result = item;
        } else {
            assert(result == NULL);
        }
    } else {
        result = pEntry->data;
    }

    return result;
}
// Time-stretches audio from srcBuffer into dstBuffer via the sonic
// library, or applies a fallback (cut/repeat, mute, or fail) when the
// requested playback rate is not valid.
//
// dstFrames (in/out): requested output frames; updated to frames produced.
// srcFrames (in/out): available input frames; updated to frames consumed.
void TimestretchBufferProvider::processFrames(void *dstBuffer, size_t *dstFrames,
        const void *srcBuffer, size_t *srcFrames)
{
    ALOGV("processFrames(%zu %zu)  remaining(%zu)", *dstFrames, *srcFrames, mRemaining);
    // Note dstFrames is the required number of frames.

    // Ensure consumption from src is as expected.
    //TODO: add logic to track "very accurate" consumption related to speed, original sampling
    //rate, actual frames processed.

    // Expected source consumption for the requested output at this speed.
    const size_t targetSrc = *dstFrames * mPlaybackRate.mSpeed;
    if (*srcFrames < targetSrc) {
        // limit dst frames to that possible
        *dstFrames = *srcFrames / mPlaybackRate.mSpeed;
    } else if (*srcFrames > targetSrc + 1) {
        // cap consumption to one frame past the target
        *srcFrames = targetSrc + 1;
    }

    if (!mAudioPlaybackRateValid) {
        //fallback mode
        if (*dstFrames > 0) {
            switch(mPlaybackRate.mFallbackMode) {
            case AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT:
                if (*dstFrames <= *srcFrames) {
                    // cut: copy only the frames that fit
                    size_t copySize = mFrameSize * *dstFrames;
                    memcpy(dstBuffer, srcBuffer, copySize);
                } else {
                    // cyclically repeat the source.
                    for (size_t count = 0; count < *dstFrames; count += *srcFrames) {
                        size_t remaining = min(*srcFrames, *dstFrames - count);
                        memcpy((uint8_t*)dstBuffer + mFrameSize * count,
                                srcBuffer, mFrameSize * remaining);
                    }
                }
                break;
            case AUDIO_TIMESTRETCH_FALLBACK_DEFAULT:
            case AUDIO_TIMESTRETCH_FALLBACK_MUTE:
                // emit silence
                memset(dstBuffer,0, mFrameSize * *dstFrames);
                break;
            case AUDIO_TIMESTRETCH_FALLBACK_FAIL:
            default:
                // log once to avoid spamming on every process() call
                if(!mFallbackFailErrorShown) {
                    ALOGE("invalid parameters in TimestretchBufferProvider fallbackMode:%d",
                            mPlaybackRate.mFallbackMode);
                    mFallbackFailErrorShown = true;
                }
                break;
            }
        }
    } else {
        // normal path: push src into sonic, pull stretched frames out
        switch (mFormat) {
        case AUDIO_FORMAT_PCM_FLOAT:
            if (sonicWriteFloatToStream(mSonicStream, (float*)srcBuffer, *srcFrames) != 1) {
                ALOGE("sonicWriteFloatToStream cannot realloc");
                *srcFrames = 0; // cannot consume all of srcBuffer
            }
            *dstFrames = sonicReadFloatFromStream(mSonicStream, (float*)dstBuffer, *dstFrames);
            break;
        case AUDIO_FORMAT_PCM_16_BIT:
            if (sonicWriteShortToStream(mSonicStream, (short*)srcBuffer, *srcFrames) != 1) {
                ALOGE("sonicWriteShortToStream cannot realloc");
                *srcFrames = 0; // cannot consume all of srcBuffer
            }
            *dstFrames = sonicReadShortFromStream(mSonicStream, (short*)dstBuffer, *dstFrames);
            break;
        default:
            // could also be caught on construction
            LOG_ALWAYS_FATAL("invalid format %#x for TimestretchBufferProvider", mFormat);
        }
    }
}
/* Camera Preview Hint
 *
 * Parses the camera-preview hint metadata and, when the interactive
 * governor is active, applies (state == 1) or removes (state == 0) the
 * preview perf-lock resource settings.
 */
static void process_cam_preview_hint(void *metadata)
{
    char governor[80];
    struct cam_preview_metadata_t cam_preview_metadata;

    /* BUGFIX: log said "process_video_encode_hint" (copy-paste error). */
    ALOGI("Got process_cam_preview_hint");

    /* Try each core in turn until one reports its scaling governor. */
    if (get_scaling_governor_check_cores(governor, sizeof(governor), CPU0) == -1 &&
        get_scaling_governor_check_cores(governor, sizeof(governor), CPU1) == -1 &&
        get_scaling_governor_check_cores(governor, sizeof(governor), CPU2) == -1 &&
        get_scaling_governor_check_cores(governor, sizeof(governor), CPU3) == -1) {
        ALOGE("Can't obtain scaling governor.");
        /* BUGFIX: original did `return HINT_HANDLED;` from a void function. */
        return;
    }

    /* Initialize cam preview metadata struct fields. */
    memset(&cam_preview_metadata, 0, sizeof(struct cam_preview_metadata_t));
    cam_preview_metadata.state = -1;
    cam_preview_metadata.hint_id = CAM_PREVIEW_HINT_ID;

    if (!metadata)
        return;
    if (parse_cam_preview_metadata((char *)metadata, &cam_preview_metadata) == -1) {
        ALOGE("Error occurred while parsing metadata.");
        return;
    }

    if (cam_preview_metadata.state == 1) {
        /* Exact-match check for the interactive governor. */
        if ((strncmp(governor, INTERACTIVE_GOVERNOR,
                     strlen(INTERACTIVE_GOVERNOR)) == 0) &&
            (strlen(governor) == strlen(INTERACTIVE_GOVERNOR))) {
            /* Sched_load and migration_notification disable
             * timer rate - 40mS*/
            int resource_values[] = {0x41430000, 0x1, 0x41434000, 0x1,
                                     0x41424000, 0x28, };
            if (!cam_preview_hint_sent) {
                perform_hint_action(cam_preview_metadata.hint_id,
                                    resource_values,
                                    sizeof(resource_values)/sizeof(resource_values[0]));
                cam_preview_hint_sent = 1;
            }
        }
    } else if (cam_preview_metadata.state == 0) {
        if ((strncmp(governor, INTERACTIVE_GOVERNOR,
                     strlen(INTERACTIVE_GOVERNOR)) == 0) &&
            (strlen(governor) == strlen(INTERACTIVE_GOVERNOR))) {
            undo_hint_action(cam_preview_metadata.hint_id);
            cam_preview_hint_sent = 0;
            return;
        }
    }
    return;
}
// Parses one <Limit> element's attribute list into detail entries on
// mCurrentInfo. `attrs` is a NULL-terminated array of alternating
// attribute-name / attribute-value strings.
//
// Returns OK on success, -EINVAL on malformed input. The `^ (found = ...)`
// pattern enforces that an attribute is present exactly when the limit
// name requires it (and absent otherwise).
status_t MediaCodecList::addLimit(const char **attrs) {
    sp<AMessage> msg = new AMessage();

    size_t i = 0;
    while (attrs[i] != NULL) {
        // attribute names must be followed by a value
        if (attrs[i + 1] == NULL) {
            return -EINVAL;
        }

        // attributes with values
        if (!strcmp(attrs[i], "name")
                || !strcmp(attrs[i], "default")
                || !strcmp(attrs[i], "in")
                || !strcmp(attrs[i], "max")
                || !strcmp(attrs[i], "min")
                || !strcmp(attrs[i], "range")
                || !strcmp(attrs[i], "ranges")
                || !strcmp(attrs[i], "scale")
                || !strcmp(attrs[i], "value")) {
            msg->setString(attrs[i], attrs[i + 1]);
            ++i;
        } else {
            return -EINVAL;
        }
        ++i;
    }

    AString name;
    if (!msg->findString("name", &name)) {
        ALOGE("limit with no 'name' attribute");
        return -EINVAL;
    }

    // size, blocks, bitrate, frame-rate, blocks-per-second, aspect-ratio: range
    // quality: range + default + [scale]
    // complexity: range + default
    bool found;

    if (name == "aspect-ratio" || name == "bitrate" || name == "block-count"
            || name == "blocks-per-second" || name == "complexity"
            || name == "frame-rate" || name == "quality" || name == "size") {
        AString min, max;
        if (msg->findString("min", &min) && msg->findString("max", &max)) {
            // min+max are folded into a "min-max" range string
            min.append("-");
            min.append(max);
            if (msg->contains("range") || msg->contains("value")) {
                return limitError(name, "has 'min' and 'max' as well as 'range' or "
                        "'value' attributes");
            }
            msg->setString("range", min);
        } else if (msg->contains("min") || msg->contains("max")) {
            return limitError(name, "has only 'min' or 'max' attribute");
        } else if (msg->findString("value", &max)) {
            // a single value becomes the degenerate range "value-value"
            min = max;
            min.append("-");
            min.append(max);
            if (msg->contains("range")) {
                return limitError(name, "has both 'range' and 'value' attributes");
            }
            msg->setString("range", min);
        }

        AString range, scale = "linear", def, in_;
        if (!msg->findString("range", &range)) {
            return limitError(name, "with no 'range', 'value' or 'min'/'max' attributes");
        }

        // 'default' is required iff the limit is quality/complexity
        if ((name == "quality" || name == "complexity") ^
                (found = msg->findString("default", &def))) {
            return limitFoundMissingAttr(name, "default", found);
        }
        // 'scale' is only allowed on quality
        if (name != "quality" && msg->findString("scale", &scale)) {
            return limitFoundMissingAttr(name, "scale");
        }
        // 'in' is required iff the limit is aspect-ratio
        if ((name == "aspect-ratio") ^ (found = msg->findString("in", &in_))) {
            return limitFoundMissingAttr(name, "in", found);
        }

        if (name == "aspect-ratio") {
            if (!(in_ == "pixels") && !(in_ == "blocks")) {
                return limitInvalidAttr(name, "in", in_);
            }
            in_.erase(5, 1); // (pixel|block)-aspect-ratio
            in_.append("-");
            in_.append(name);
            name = in_;
        }
        if (name == "quality") {
            mCurrentInfo->addDetail("quality-scale", scale);
        }
        if (name == "quality" || name == "complexity") {
            AString tag = name;
            tag.append("-default");
            mCurrentInfo->addDetail(tag, def);
        }
        AString tag = name;
        tag.append("-range");
        mCurrentInfo->addDetail(tag, range);
    } else {
        AString max, value, ranges;
        if (msg->contains("default")) {
            return limitFoundMissingAttr(name, "default");
        } else if (msg->contains("in")) {
            return limitFoundMissingAttr(name, "in");
        } else if ((name == "channel-count") ^ (found = msg->findString("max", &max))) {
            return limitFoundMissingAttr(name, "max", found);
        } else if (msg->contains("min")) {
            return limitFoundMissingAttr(name, "min");
        } else if (msg->contains("range")) {
            return limitFoundMissingAttr(name, "range");
        } else if ((name == "sample-rate") ^ (found = msg->findString("ranges", &ranges))) {
            return limitFoundMissingAttr(name, "ranges", found);
        } else if (msg->contains("scale")) {
            return limitFoundMissingAttr(name, "scale");
        } else if ((name == "alignment" || name == "block-size") ^
                (found = msg->findString("value", &value))) {
            return limitFoundMissingAttr(name, "value", found);
        }

        // exactly one of max / value / ranges was captured above
        if (max.size()) {
            AString tag = "max-";
            tag.append(name);
            mCurrentInfo->addDetail(tag, max);
        } else if (value.size()) {
            mCurrentInfo->addDetail(name, value);
        } else if (ranges.size()) {
            AString tag = name;
            tag.append("-ranges");
            mCurrentInfo->addDetail(tag, ranges);
        } else {
            ALOGW("Ignoring unrecognized limit '%s'", name.c_str());
        }
    }
    return OK;
}
bool SFWatchDog::threadLoop() { XLOGV("[%s]", __func__); { Mutex::Autolock _l(mScreenLock); } nsecs_t stopTime = 1; if (isSFThreadHang(&stopTime)) { char cmds[256]; static uint32_t rtt_ct = SW_WATCHDOG_RTTCOUNT; if (rtt_ct > 0) { rtt_ct --; } else { XLOGD("[SF-WD] swap rtt dump file"); // swap rtt dump file snprintf(cmds, sizeof(cmds), "mv %s.txt %s_1.txt", RTT_DUMP, RTT_DUMP); system(cmds); rtt_ct = SW_WATCHDOG_RTTCOUNT; } // append SurfaceFlinger rtt information to rtt file char filename[100]; snprintf(filename, sizeof(filename), "%s.txt", RTT_DUMP); int fd = open(filename, O_CREAT | O_WRONLY | O_NOFOLLOW, 0666); /* -rw-rw-rw- */ if (fd < 0) { ALOGE("Can't open %s: %s\n", filename, strerror(errno)); return true; } if (lseek(fd, 0, SEEK_END) < 0) { fprintf(stderr, "lseek: %s\n", strerror(errno)); } else { dump_backtrace_to_file(getpid(), fd); } close(fd); XLOGD("[SF-WD] dump rtt file: %s.txt", RTT_DUMP); XLOGW("[SF-WD] ============================================"); } else { stopTime = 1; } getProperty(); char value[PROPERTY_VALUE_MAX]; snprintf(value, sizeof(value), "%" PRId64 " ", stopTime); if (stopTime < 0 || stopTime >= 2147483247) { volatile nsecs_t tmpStopTime = stopTime; XLOGD("[SF-WD] tmpStopTime=(%" PRId64 ", %" PRId64 ")", tmpStopTime, stopTime); abort(); } uint32_t ret = property_set("service.sf.status", value); if (mUpdateCount) { if (mShowLog) XLOGV("[SF-WD] mUpdateCount: %d", mUpdateCount); #if 0 aee_ioctl_wdt_kick(WDT_SETBY_SF); #endif mUpdateCount = 0; } //else { // XLOGV("[SF-WD] mUpdateCount not update!!!!!: %d", mUpdateCount); // aee_ioctl_wdt_kick(WDT_SETBY_SF_NEED_NOT_UPDATE); //} usleep(mTimer * 1000); return true; }
void SoftMPEG4::onQueueFilled(OMX_U32 /* portIndex */) { if (mSignalledError || mOutputPortSettingsChange != NONE) { return; } List<BufferInfo *> &inQueue = getPortQueue(0); List<BufferInfo *> &outQueue = getPortQueue(1); while (!inQueue.empty() && outQueue.size() == kNumOutputBuffers) { BufferInfo *inInfo = *inQueue.begin(); OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; if (inHeader == NULL) { inQueue.erase(inQueue.begin()); inInfo->mOwnedByUs = false; continue; } PortInfo *port = editPortInfo(1); OMX_BUFFERHEADERTYPE *outHeader = port->mBuffers.editItemAt(mNumSamplesOutput & 1).mHeader; if (inHeader->nFilledLen == 0) { inQueue.erase(inQueue.begin()); inInfo->mOwnedByUs = false; notifyEmptyBufferDone(inHeader); ++mInputBufferCount; if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { outHeader->nFilledLen = 0; outHeader->nFlags = OMX_BUFFERFLAG_EOS; List<BufferInfo *>::iterator it = outQueue.begin(); while ((*it)->mHeader != outHeader) { ++it; } BufferInfo *outInfo = *it; outInfo->mOwnedByUs = false; outQueue.erase(it); outInfo = NULL; notifyFillBufferDone(outHeader); outHeader = NULL; } return; } uint8_t *bitstream = inHeader->pBuffer + inHeader->nOffset; uint32_t *start_code = (uint32_t *)bitstream; bool volHeader = *start_code == 0xB0010000; if (volHeader) { PVCleanUpVideoDecoder(mHandle); mInitialized = false; } if (!mInitialized) { uint8_t *vol_data[1]; int32_t vol_size = 0; vol_data[0] = NULL; if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) || volHeader) { vol_data[0] = bitstream; vol_size = inHeader->nFilledLen; } MP4DecodingMode mode = (mMode == MODE_MPEG4) ? MPEG4_MODE : H263_MODE; Bool success = PVInitVideoDecoder( mHandle, vol_data, &vol_size, 1, outputBufferWidth(), outputBufferHeight(), mode); if (!success) { ALOGW("PVInitVideoDecoder failed. 
Unsupported content?"); notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); mSignalledError = true; return; } MP4DecodingMode actualMode = PVGetDecBitstreamMode(mHandle); if (mode != actualMode) { notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); mSignalledError = true; return; } PVSetPostProcType((VideoDecControls *) mHandle, 0); bool hasFrameData = false; if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) { inInfo->mOwnedByUs = false; inQueue.erase(inQueue.begin()); inInfo = NULL; notifyEmptyBufferDone(inHeader); inHeader = NULL; } else if (volHeader) { hasFrameData = true; } mInitialized = true; if (mode == MPEG4_MODE && handlePortSettingsChange()) { return; } if (!hasFrameData) { continue; } } if (!mFramesConfigured) { PortInfo *port = editPortInfo(1); OMX_BUFFERHEADERTYPE *outHeader = port->mBuffers.editItemAt(1).mHeader; OMX_U32 yFrameSize = sizeof(uint8) * mHandle->size; if ((outHeader->nAllocLen < yFrameSize) || (outHeader->nAllocLen - yFrameSize < yFrameSize / 2)) { ALOGE("Too small output buffer for reference frame: %lu bytes", (unsigned long)outHeader->nAllocLen); android_errorWriteLog(0x534e4554, "30033990"); notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); mSignalledError = true; return; } PVSetReferenceYUV(mHandle, outHeader->pBuffer); mFramesConfigured = true; } uint32_t useExtTimestamp = (inHeader->nOffset == 0); // decoder deals in ms (int32_t), OMX in us (int64_t) // so use fake timestamp instead uint32_t timestamp = 0xFFFFFFFF; if (useExtTimestamp) { mPvToOmxTimeMap.add(mPvTime, inHeader->nTimeStamp); timestamp = mPvTime; mPvTime++; } int32_t bufferSize = inHeader->nFilledLen; int32_t tmp = bufferSize; OMX_U32 frameSize; OMX_U64 yFrameSize = (OMX_U64)mWidth * (OMX_U64)mHeight; if (yFrameSize > ((OMX_U64)UINT32_MAX / 3) * 2) { ALOGE("Frame size too large"); notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); mSignalledError = true; return; } frameSize = (OMX_U32)(yFrameSize + (yFrameSize / 2)); if (outHeader->nAllocLen < frameSize) { 
android_errorWriteLog(0x534e4554, "27833616"); ALOGE("Insufficient output buffer size"); notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); mSignalledError = true; return; } // The PV decoder is lying to us, sometimes it'll claim to only have // consumed a subset of the buffer when it clearly consumed all of it. // ignore whatever it says... if (PVDecodeVideoFrame( mHandle, &bitstream, ×tamp, &tmp, &useExtTimestamp, outHeader->pBuffer) != PV_TRUE) { ALOGE("failed to decode video frame."); notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); mSignalledError = true; return; } // H263 doesn't have VOL header, the frame size information is in short header, i.e. the // decoder may detect size change after PVDecodeVideoFrame. if (handlePortSettingsChange()) { return; } // decoder deals in ms, OMX in us. outHeader->nTimeStamp = mPvToOmxTimeMap.valueFor(timestamp); mPvToOmxTimeMap.removeItem(timestamp); inHeader->nOffset += bufferSize; inHeader->nFilledLen = 0; if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { outHeader->nFlags = OMX_BUFFERFLAG_EOS; } else { outHeader->nFlags = 0; } if (inHeader->nFilledLen == 0) { inInfo->mOwnedByUs = false; inQueue.erase(inQueue.begin()); inInfo = NULL; notifyEmptyBufferDone(inHeader); inHeader = NULL; } ++mInputBufferCount; outHeader->nOffset = 0; outHeader->nFilledLen = frameSize; List<BufferInfo *>::iterator it = outQueue.begin(); while ((*it)->mHeader != outHeader) { ++it; } BufferInfo *outInfo = *it; outInfo->mOwnedByUs = false; outQueue.erase(it); outInfo = NULL; notifyFillBufferDone(outHeader); outHeader = NULL; ++mNumSamplesOutput; } }
// Commits the prepared layer lists of all displays to the hardware:
// collects per-plane buffer handles from each display device, posts
// them through the gralloc Post2 path, then distributes release fences
// and closes the acquire fences we consumed.
//
// Returns false if initialization is missing or the post fails.
bool IntelHWComposer::commitDisplays(size_t numDisplays, hwc_display_contents_1_t** displays)
{
    if (!initCheck()) {
        ALOGE("%s: failed to initialize HWComposer\n", __func__);
        return false;
    }

    android::Mutex::Autolock _l(mLock);

    // need to reset the plane contexts before each commit
    mPlaneManager->resetPlaneContexts();

    size_t disp;
    buffer_handle_t bufferHandles[INTEL_DISPLAY_PLANE_NUM];
    int acquireFenceFd[INTEL_DISPLAY_PLANE_NUM];
    int* releaseFenceFd[INTEL_DISPLAY_PLANE_NUM]={0};
    int i,j, numBuffers = 0;
    bool ret = true;

    // First pass: let each display device fill in buffer handles and
    // fence slots; release fences start out invalid (-1).
    for (disp = 0; disp < numDisplays && disp < DISPLAY_NUM; disp++) {
        hwc_display_contents_1_t *list = displays[disp];
        if (list && mDisplayDevice[disp]) {
            for (int i = 0; i < list->numHwLayers; i++) {
                list->hwLayers[i].releaseFenceFd = -1;
            }
            mDisplayDevice[disp]->commit(list, bufferHandles,
                                         acquireFenceFd, releaseFenceFd, numBuffers);
        }
    }

    void *context = mPlaneManager->getPlaneContexts();

    // commit plane contexts (single Post2 call covering all planes)
    if (numBuffers) {
        ALOGD_IF(ALLOW_HWC_PRINT, "%s: commits %d buffers\n", __func__, numBuffers);
        int err = mGrallocModule->PostBuffers(mGrallocModule,
                                              bufferHandles,
                                              acquireFenceFd,
                                              releaseFenceFd,
                                              numBuffers,
                                              context,
                                              mPlaneManager->getContextLength());
        if (err) {
            ALOGE("%s: Post2 failed with errno %d\n", __func__, err);
            ret = false;
        }
    }

    // Second pass: hand release fences to layers that asked for one and
    // close/clear every acquire fence we took ownership of.
    for (disp = 0; disp < numDisplays && disp < DISPLAY_NUM; disp++) {
        hwc_display_contents_1_t *list = displays[disp];
        if (list) {
            for (i = 0; i < list->numHwLayers; i++) {
                if (list->hwLayers[i].releaseFenceFd == LAYER_SAME_RGB_BUFFER_SKIP_RELEASEFENCEFD){
                    for(j = 0; j < INTEL_DISPLAY_PLANE_NUM; j++){
                        if(!releaseFenceFd[j])
                            continue;
                        if(*releaseFenceFd[j] >= 0){
                            //every layer relase fence fd dup from a same fd
                            list->hwLayers[i].releaseFenceFd = dup(*releaseFenceFd[j]);
                            break;
                        }
                    }
                }
                if (list->hwLayers[i].acquireFenceFd >= 0)
                    close(list->hwLayers[i].acquireFenceFd);
                list->hwLayers[i].acquireFenceFd = -1;
            }
            if (list->outbufAcquireFenceFd != -1) {
                close(list->outbufAcquireFenceFd);
                list->outbufAcquireFenceFd = -1;
            }
        }
    }

    // Dump post state on failure (or when forced) for debugging.
    if ( ret == false || mForceDumpPostBuffer) {
        dumpPost2Buffers(numBuffers, bufferHandles);
        dumpLayerLists(numDisplays, displays);
    }

    return ret;
}
// Reads and validates one debugger request from the client socket.
//
// Obtains the peer's credentials via SO_PEERCRED, reads the
// debugger_msg_t with a 3-second poll timeout, and fills *out_request.
// For crash requests, verifies the reported tid really belongs to the
// crashing pid; for explicit dump requests, only root (or system, for
// backtraces only) may target arbitrary processes.
//
// Returns 0 on success, -1 on any validation or I/O failure.
static int read_request(int fd, debugger_request_t* out_request) {
  ucred cr;
  socklen_t len = sizeof(cr);
  // Kernel-supplied peer credentials: cannot be forged by the client.
  int status = getsockopt(fd, SOL_SOCKET, SO_PEERCRED, &cr, &len);
  if (status != 0) {
    ALOGE("cannot get credentials\n");
    return -1;
  }

  ALOGV("reading tid\n");
  fcntl(fd, F_SETFL, O_NONBLOCK);

  // Bounded wait so a stuck client cannot hang debuggerd.
  pollfd pollfds[1];
  pollfds[0].fd = fd;
  pollfds[0].events = POLLIN;
  pollfds[0].revents = 0;
  status = TEMP_FAILURE_RETRY(poll(pollfds, 1, 3000));
  if (status != 1) {
    ALOGE("timed out reading tid (from pid=%d uid=%d)\n", cr.pid, cr.uid);
    return -1;
  }

  debugger_msg_t msg;
  memset(&msg, 0, sizeof(msg));
  status = TEMP_FAILURE_RETRY(read(fd, &msg, sizeof(msg)));
  if (status < 0) {
    ALOGE("read failure? %s (pid=%d uid=%d)\n", strerror(errno), cr.pid, cr.uid);
    return -1;
  }
  // A short read means a malformed request; reject it.
  if (status != sizeof(debugger_msg_t)) {
    ALOGE("invalid crash request of size %d (from pid=%d uid=%d)\n", status, cr.pid, cr.uid);
    return -1;
  }

  out_request->action = msg.action;
  out_request->tid = msg.tid;
  out_request->pid = cr.pid;
  out_request->uid = cr.uid;
  out_request->gid = cr.gid;
  out_request->abort_msg_address = msg.abort_msg_address;
  out_request->original_si_code = msg.original_si_code;

  if (msg.action == DEBUGGER_ACTION_CRASH) {
    // Ensure that the tid reported by the crashing process is valid.
    char buf[64];
    struct stat s;
    snprintf(buf, sizeof buf, "/proc/%d/task/%d", out_request->pid, out_request->tid);
    if (stat(buf, &s)) {
      ALOGE("tid %d does not exist in pid %d. ignoring debug request\n",
          out_request->tid, out_request->pid);
      return -1;
    }
  } else if (cr.uid == 0
            || (cr.uid == AID_SYSTEM && msg.action == DEBUGGER_ACTION_DUMP_BACKTRACE)) {
    // Only root or system can ask us to attach to any process and dump it explicitly.
    // However, system is only allowed to collect backtraces but cannot dump tombstones.
    status = get_process_info(out_request->tid, &out_request->pid,
                              &out_request->uid, &out_request->gid);
    if (status < 0) {
      ALOGE("tid %d does not exist. ignoring explicit dump request\n", out_request->tid);
      return -1;
    }
  } else {
    // No one else is allowed to dump arbitrary processes.
    return -1;
  }
  return 0;
}
/*
 * Blocking message pump for the Java-facing player: translates native
 * FFP_MSG_* events into MEDIA_* events posted to the Java side via
 * post_event(). Runs until airstashmp_get_msg() reports an error.
 */
static void message_loop_n(JNIEnv *env, AirStashMediaPlayer *mp)
{
    jobject weak_thiz = (jobject) airstashmp_get_weak_thiz(mp);
    JNI_CHECK_GOTO(weak_thiz, env, NULL, "mpjni: message_loop_n: null weak_thiz", LABEL_RETURN);

    while (1) {
        AVMessage msg;

        /* block == 1: wait until a message is available */
        int retval = airstashmp_get_msg(mp, &msg, 1);
        if (retval < 0)
            break;

        // block-get should never return 0
        assert(retval > 0);

        switch (msg.what) {
        case FFP_MSG_FLUSH:
            MPTRACE("FFP_MSG_FLUSH:\n");
            post_event(env, weak_thiz, MEDIA_NOP, 0, 0);
            break;
        case FFP_MSG_ERROR:
            MPTRACE("FFP_MSG_ERROR: %d\n", msg.arg1);
            post_event(env, weak_thiz, MEDIA_ERROR, MEDIA_ERROR_AIRSTASH_PLAYER, msg.arg1);
            break;
        case FFP_MSG_PREPARED:
            MPTRACE("FFP_MSG_PREPARED:\n");
            post_event(env, weak_thiz, MEDIA_PREPARED, 0, 0);
            break;
        case FFP_MSG_COMPLETED:
            MPTRACE("FFP_MSG_COMPLETED:\n");
            post_event(env, weak_thiz, MEDIA_PLAYBACK_COMPLETE, 0, 0);
            break;
        case FFP_MSG_VIDEO_SIZE_CHANGED:
            MPTRACE("FFP_MSG_VIDEO_SIZE_CHANGED: %d, %d\n", msg.arg1, msg.arg2);
            post_event(env, weak_thiz, MEDIA_SET_VIDEO_SIZE, msg.arg1, msg.arg2);
            break;
        case FFP_MSG_SAR_CHANGED:
            MPTRACE("FFP_MSG_SAR_CHANGED: %d, %d\n", msg.arg1, msg.arg2);
            post_event(env, weak_thiz, MEDIA_SET_VIDEO_SAR, msg.arg1, msg.arg2);
            break;
        case FFP_MSG_VIDEO_RENDERING_START:
            MPTRACE("FFP_MSG_VIDEO_RENDERING_START:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_RENDERING_START, 0);
            break;
        case FFP_MSG_AUDIO_RENDERING_START:
            MPTRACE("FFP_MSG_AUDIO_RENDERING_START:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_AUDIO_RENDERING_START, 0);
            break;
        case FFP_MSG_VIDEO_ROTATION_CHANGED:
            MPTRACE("FFP_MSG_VIDEO_ROTATION_CHANGED: %d\n", msg.arg1);
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_ROTATION_CHANGED, msg.arg1);
            break;
        case FFP_MSG_BUFFERING_START:
            MPTRACE("FFP_MSG_BUFFERING_START:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0);
            break;
        case FFP_MSG_BUFFERING_END:
            MPTRACE("FFP_MSG_BUFFERING_END:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_BUFFERING_END, 0);
            break;
        case FFP_MSG_BUFFERING_UPDATE:
            /* high-frequency event: trace deliberately disabled */
            // MPTRACE("FFP_MSG_BUFFERING_UPDATE: %d, %d", msg.arg1, msg.arg2);
            post_event(env, weak_thiz, MEDIA_BUFFERING_UPDATE, msg.arg1, msg.arg2);
            break;
        case FFP_MSG_BUFFERING_BYTES_UPDATE:
            /* not forwarded to Java */
            break;
        case FFP_MSG_BUFFERING_TIME_UPDATE:
            /* not forwarded to Java */
            break;
        case FFP_MSG_SEEK_COMPLETE:
            MPTRACE("FFP_MSG_SEEK_COMPLETE:\n");
            post_event(env, weak_thiz, MEDIA_SEEK_COMPLETE, 0, 0);
            break;
        case FFP_MSG_PLAYBACK_STATE_CHANGED:
            /* not forwarded to Java */
            break;
        default:
            ALOGE("unknown FFP_MSG_xxx(%d)\n", msg.what);
            break;
        }
    }

LABEL_RETURN:
    ;
}
// Services one debugger client connection: validates the request,
// ptrace-attaches to the target thread, waits for it to stop, dumps a
// tombstone or backtrace as requested, optionally hands the process off
// to gdbserver, then detaches and resumes the target.
//
// Takes ownership of `fd` and closes it before returning.
static void handle_request(int fd) {
  ALOGV("handle_request(%d)\n", fd);

  debugger_request_t request;
  memset(&request, 0, sizeof(request));
  int status = read_request(fd, &request);
  if (!status) {
    ALOGV("BOOM: pid=%d uid=%d gid=%d tid=%d\n",
         request.pid, request.uid, request.gid, request.tid);

    // At this point, the thread that made the request is blocked in
    // a read() call.  If the thread has crashed, then this gives us
    // time to PTRACE_ATTACH to it before it has a chance to really fault.
    //
    // The PTRACE_ATTACH sends a SIGSTOP to the target process, but it
    // won't necessarily have stopped by the time ptrace() returns.  (We
    // currently assume it does.)  We write to the file descriptor to
    // ensure that it can run as soon as we call PTRACE_CONT below.
    // See details in bionic/libc/linker/debugger.c, in function
    // debugger_signal_handler().
    if (ptrace(PTRACE_ATTACH, request.tid, 0, 0)) {
      ALOGE("ptrace attach failed: %s\n", strerror(errno));
    } else {
      bool detach_failed = false;
      bool attach_gdb = should_attach_gdb(&request);
      // Unblock the target's read() so it can proceed to the fault.
      if (TEMP_FAILURE_RETRY(write(fd, "\0", 1)) != 1) {
        ALOGE("failed responding to client: %s\n", strerror(errno));
      } else {
        char* tombstone_path = NULL;

        if (request.action == DEBUGGER_ACTION_CRASH) {
          // Crash dumps don't write back over the socket; close it now.
          close(fd);
          fd = -1;
        }

        int total_sleep_time_usec = 0;
        for (;;) {
          int signal = wait_for_signal(request.tid, &total_sleep_time_usec);
          if (signal < 0) {
            break;
          }

          switch (signal) {
            case SIGSTOP:
              // SIGSTOP from our own attach: act per requested action.
              if (request.action == DEBUGGER_ACTION_DUMP_TOMBSTONE) {
                ALOGV("stopped -- dumping to tombstone\n");
                tombstone_path = engrave_tombstone(request.pid, request.tid,
                                                  signal, request.original_si_code,
                                                  request.abort_msg_address, true,
                                                  &detach_failed, &total_sleep_time_usec);
              } else if (request.action == DEBUGGER_ACTION_DUMP_BACKTRACE) {
                ALOGV("stopped -- dumping to fd\n");
                dump_backtrace(fd, -1, request.pid, request.tid, &detach_failed,
                               &total_sleep_time_usec);
              } else {
                ALOGV("stopped -- continuing\n");
                status = ptrace(PTRACE_CONT, request.tid, 0, 0);
                if (status) {
                  ALOGE("ptrace continue failed: %s\n", strerror(errno));
                }
                continue; // loop again
              }
              break;

            case SIGABRT:
            case SIGBUS:
            case SIGFPE:
            case SIGILL:
            case SIGPIPE:
            case SIGSEGV:
#ifdef SIGSTKFLT
            case SIGSTKFLT:
#endif
            case SIGTRAP:
              ALOGV("stopped -- fatal signal\n");
              // Send a SIGSTOP to the process to make all of
              // the non-signaled threads stop moving.  Without
              // this we get a lot of "ptrace detach failed:
              // No such process".
              kill(request.pid, SIGSTOP);
              // don't dump sibling threads when attaching to GDB because it
              // makes the process less reliable, apparently...
              tombstone_path = engrave_tombstone(request.pid, request.tid,
                                                signal, request.original_si_code,
                                                request.abort_msg_address, !attach_gdb,
                                                &detach_failed, &total_sleep_time_usec);
              break;

            default:
              ALOGE("process stopped due to unexpected signal %d\n", signal);
              break;
          }
          break;
        }

        if (request.action == DEBUGGER_ACTION_DUMP_TOMBSTONE) {
          // Explicit tombstone requests get the path echoed back.
          if (tombstone_path) {
            write(fd, tombstone_path, strlen(tombstone_path));
          }
          close(fd);
          fd = -1;
        }
        free(tombstone_path);
      }

      ALOGV("detaching\n");
      if (attach_gdb) {
        // stop the process so we can debug
        kill(request.pid, SIGSTOP);

        // detach so we can attach gdbserver
        if (ptrace(PTRACE_DETACH, request.tid, 0, 0)) {
          ALOGE("ptrace detach from %d failed: %s\n", request.tid, strerror(errno));
          detach_failed = true;
        }

        // if debug.db.uid is set, its value indicates if we should wait
        // for user action for the crashing process.
        // in this case, we log a message and turn the debug LED on
        // waiting for a gdb connection (for instance)
        wait_for_user_action(request);
      } else {
        // just detach
        if (ptrace(PTRACE_DETACH, request.tid, 0, 0)) {
          ALOGE("ptrace detach from %d failed: %s\n", request.tid, strerror(errno));
          detach_failed = true;
        }
      }

      // resume stopped process (so it can crash in peace).
      kill(request.pid, SIGCONT);

      // If we didn't successfully detach, we're still the parent, and the
      // actual parent won't receive a death notification via wait(2).  At this point
      // there's not much we can do about that.
      if (detach_failed) {
        ALOGE("debuggerd committing suicide to free the zombie!\n");
        kill(getpid(), SIGKILL);
      }
    }
  }
  if (fd >= 0) {
    close(fd);
  }
}
bool VideoOverlay::draw(hwc_context_t *ctx, hwc_layer_list_t *list) { if(!sIsModeOn || sYuvLayerIndex == -1) { return true; } private_handle_t *hnd = (private_handle_t *) list->hwLayers[sYuvLayerIndex].handle; private_handle_t *cchnd = NULL; if(sCCLayerIndex != -1) { cchnd = (private_handle_t *)list->hwLayers[sCCLayerIndex].handle; ctx->qbuf->lockAndAdd(cchnd); } // Lock this buffer for read. ctx->qbuf->lockAndAdd(hnd); bool ret = true; overlay::Overlay& ov = *(ctx->mOverlay); ovutils::eOverlayState state = ov.getState(); switch (state) { case ovutils::OV_2D_VIDEO_ON_PANEL_TV: // Play external if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE1)) { ALOGE("%s: queueBuffer failed for external", __FUNCTION__); ret = false; } //Play CC on external if (cchnd && !ov.queueBuffer(cchnd->fd, cchnd->offset, ovutils::OV_PIPE2)) { ALOGE("%s: queueBuffer failed for cc external", __FUNCTION__); ret = false; } // Play primary if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE0)) { ALOGE("%s: queueBuffer failed for primary", __FUNCTION__); ret = false; } break; case ovutils::OV_2D_VIDEO_ON_PANEL: // Play primary if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE0)) { ALOGE("%s: queueBuffer failed for primary", __FUNCTION__); ret = false; } break; case ovutils::OV_2D_VIDEO_ON_TV: // Play external if (!ov.queueBuffer(hnd->fd, hnd->offset, ovutils::OV_PIPE1)) { ALOGE("%s: queueBuffer failed for external", __FUNCTION__); ret = false; } //Play CC on external if (cchnd && !ov.queueBuffer(cchnd->fd, cchnd->offset, ovutils::OV_PIPE2)) { ALOGE("%s: queueBuffer failed for cc external", __FUNCTION__); ret = false; } break; default: ALOGE("%s Unused state %s", __FUNCTION__, ovutils::getStateString(state)); break; } return ret; }
/**
 * Blocks until the next kernel wakeup, then merges the wakeup reasons from
 * LAST_RESUME_REASON into the caller-supplied direct ByteBuffer.
 *
 * Output format: "irq[:irq...]:reason[:reason...]" (or "Abort" entries).
 * Returns the number of bytes written, 0 to signal "loop again without
 * results" (sem_wait interrupted), or -1 on error (with a Java exception
 * pending where applicable).
 */
static jint nativeWaitWakeup(JNIEnv *env, jobject clazz, jobject outBuf)
{
    if (outBuf == NULL) {
        jniThrowException(env, "java/lang/NullPointerException", "null argument");
        return -1;
    }

    // Register our wakeup callback if not yet done (one-time lazy init).
    if (!wakeup_init) {
        wakeup_init = true;
        ALOGV("Creating semaphore...");
        int ret = sem_init(&wakeup_sem, 0, 0);
        if (ret < 0) {
            char buf[80];
            strerror_r(errno, buf, sizeof(buf));
            ALOGE("Error creating semaphore: %s\n", buf);
            jniThrowException(env, "java/lang/IllegalStateException", buf);
            return -1;
        }
        ALOGV("Registering callback...");
        set_wakeup_callback(&wakeup_callback);
    }

    // Wait for wakeup - the callback posts the semaphore.
    ALOGV("Waiting for wakeup...");
    int ret = sem_wait(&wakeup_sem);
    if (ret < 0) {
        char buf[80];
        strerror_r(errno, buf, sizeof(buf));
        ALOGE("Error waiting on semaphore: %s\n", buf);
        // Return 0 here to let it continue looping but not return results.
        return 0;
    }

    FILE *fp = fopen(LAST_RESUME_REASON, "r");
    if (fp == NULL) {
        ALOGE("Failed to open %s", LAST_RESUME_REASON);
        return -1;
    }

    char* mergedreason = (char*)env->GetDirectBufferAddress(outBuf);
    int remainreasonlen = (int)env->GetDirectBufferCapacity(outBuf);

    // FIX: GetDirectBufferAddress returns NULL when outBuf is not a direct
    // buffer (or direct access is unsupported). The original code dereferenced
    // it unconditionally, crashing the runtime, and also leaked fp.
    if (mergedreason == NULL) {
        fclose(fp);
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "outBuf must be a direct buffer");
        return -1;
    }

    ALOGV("Reading wakeup reasons");
    char* mergedreasonpos = mergedreason;
    char reasonline[128];
    int i = 0;
    while (fgets(reasonline, sizeof(reasonline), fp) != NULL) {
        char* pos = reasonline;
        char* endPos;
        int len;
        // First field is the index or 'Abort'.
        int irq = (int)strtol(pos, &endPos, 10);
        if (pos != endPos) {
            // Write the irq number to the merged reason string.
            len = snprintf(mergedreasonpos, remainreasonlen, i == 0 ? "%d" : ":%d", irq);
        } else {
            // The first field is not an irq, it may be the word Abort.
            const size_t abortPrefixLen = strlen("Abort:");
            if (strncmp(pos, "Abort:", abortPrefixLen) != 0) {
                // Ooops.
                ALOGE("Bad reason line: %s", reasonline);
                continue;
            }

            // Write 'Abort' to the merged reason string.
            len = snprintf(mergedreasonpos, remainreasonlen, i == 0 ? "Abort" : ":Abort");
            endPos = pos + abortPrefixLen;
        }
        pos = endPos;

        // Only advance if the number/Abort token actually fit in the buffer.
        if (len >= 0 && len < remainreasonlen) {
            mergedreasonpos += len;
            remainreasonlen -= len;
        }

        // Skip whitespace; rest of the buffer is the reason string.
        while (*pos == ' ') {
            pos++;
        }

        // Chop newline at end.
        char* endpos = pos;
        while (*endpos != 0) {
            if (*endpos == '\n') {
                *endpos = 0;
                break;
            }
            endpos++;
        }

        len = snprintf(mergedreasonpos, remainreasonlen, ":%s", pos);
        if (len >= 0 && len < remainreasonlen) {
            mergedreasonpos += len;
            remainreasonlen -= len;
        }
        i++;
    }

    ALOGV("Got %d reasons", i);
    if (i > 0) {
        *mergedreasonpos = 0;
    }

    if (fclose(fp) != 0) {
        ALOGE("Failed to close %s", LAST_RESUME_REASON);
        return -1;
    }
    return mergedreasonpos - mergedreason;
}
void *SsbSipMfcEncOpen(void) { int hMFCOpen; _MFCLIB *pCTX; char mfc_dev_name[64]; int ret; struct v4l2_capability cap; getMFCName(mfc_dev_name, 64); ALOGI("[%s] dev name is %s\n",__func__,mfc_dev_name); if (access(mfc_dev_name, F_OK) != 0) { ALOGE("[%s] MFC device node not exists",__func__); return NULL; } hMFCOpen = open(mfc_dev_name, O_RDWR | O_NONBLOCK, 0); if (hMFCOpen < 0) { ALOGE("[%s] Failed to open MFC device",__func__); return NULL; } pCTX = (_MFCLIB *)malloc(sizeof(_MFCLIB)); if (pCTX == NULL) { ALOGE("[%s] malloc failed.",__func__); return NULL; } memset(pCTX, 0, sizeof(_MFCLIB)); pCTX->hMFC = hMFCOpen; memset(&cap, 0, sizeof(cap)); ret = ioctl(pCTX->hMFC, VIDIOC_QUERYCAP, &cap); if (ret != 0) { ALOGE("[%s] VIDIOC_QUERYCAP failed",__func__); close(pCTX->hMFC); free(pCTX); return NULL; } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)) { ALOGE("[%s] Device does not support capture",__func__); close(pCTX->hMFC); free(pCTX); return NULL; } if (!(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)) { ALOGE("[%s] Device does not support output",__func__); close(pCTX->hMFC); free(pCTX); return NULL; } if (!(cap.capabilities & V4L2_CAP_STREAMING)) { ALOGE("[%s] Device does not support streaming",__func__); close(pCTX->hMFC); free(pCTX); return NULL; } pCTX->v4l2_enc.bRunning = 0; /* physical address is used for Input source */ pCTX->v4l2_enc.bInputPhyVir = 1; pCTX->cacheablebuffer = NO_CACHE; return (void *)pCTX; }
// Tears down the OMX component: walks its state machine down to Loaded,
// frees active buffers, destroys the component instance, and finally
// deletes this node object. After this call the instance is gone.
// NOTE(review): ends with `delete this` — callers must not touch the node
// after freeNode() returns.
status_t OMXNodeInstance::freeNode(OMXMaster *master) {
    // Upper bound on 100 ms polls while waiting for a state transition
    // (~1 s total) before giving up and treating the component as Invalid.
    static int32_t kMaxNumIterations = 10;

    // Transition the node from its current state all the way down
    // to "Loaded".
    // This ensures that all active buffers are properly freed even
    // for components that don't do this themselves on a call to
    // "FreeHandle".
    // The code below may trigger some more events to be dispatched
    // by the OMX component - we want to ignore them as our client
    // does not expect them.
    mDying = true;

    OMX_STATETYPE state;
    CHECK_EQ(OMX_GetState(mHandle, &state), OMX_ErrorNone);
    switch (state) {
        case OMX_StateExecuting:
        {
            ALOGV("forcing Executing->Idle");
            sendCommand(OMX_CommandStateSet, OMX_StateIdle);
            OMX_ERRORTYPE err;
            int32_t iteration = 0;
            // Poll until the component reaches Idle (or goes Invalid).
            while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone
                    && state != OMX_StateIdle
                    && state != OMX_StateInvalid) {
                if (++iteration > kMaxNumIterations) {
                    ALOGE("component failed to enter Idle state, aborting.");
                    state = OMX_StateInvalid;
                    break;
                }
                usleep(100000);
            }
            CHECK_EQ(err, OMX_ErrorNone);
            if (state == OMX_StateInvalid) {
                // No further transitions possible from Invalid; skip to teardown.
                break;
            }
            // fall through
        }

        case OMX_StateIdle:
        {
            ALOGV("forcing Idle->Loaded");
            sendCommand(OMX_CommandStateSet, OMX_StateLoaded);
            // Buffers must be released while transitioning Idle->Loaded,
            // otherwise the component cannot complete the transition.
            freeActiveBuffers();
            OMX_ERRORTYPE err;
            int32_t iteration = 0;
            while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone
                    && state != OMX_StateLoaded
                    && state != OMX_StateInvalid) {
                if (++iteration > kMaxNumIterations) {
                    ALOGE("component failed to enter Loaded state, aborting.");
                    state = OMX_StateInvalid;
                    break;
                }
                ALOGV("waiting for Loaded state...");
                usleep(100000);
            }
            CHECK_EQ(err, OMX_ErrorNone);
            // fall through
        }

        case OMX_StateLoaded:
        case OMX_StateInvalid:
            break;

        default:
            CHECK(!"should not be here, unknown state.");
            break;
    }

    ALOGV("calling destroyComponentInstance");
    OMX_ERRORTYPE err = master->destroyComponentInstance(
            static_cast<OMX_COMPONENTTYPE *>(mHandle));
    ALOGV("destroyComponentInstance returned err %d", err);

    mHandle = NULL;

    if (err != OMX_ErrorNone) {
        ALOGE("FreeHandle FAILED with error 0x%08x.", err);
    }

    mOwner->invalidateNodeID(mNodeID);
    mNodeID = NULL;

    ALOGV("OMXNodeInstance going away.");
    // Self-destruction: this object owns itself once the client frees the node.
    delete this;

    return StatusFromOMXError(err);
}
// Brings up the full modem speech path for a voice/VT call: analog clock,
// DAC/ADC sampling rates, routing devices, modem PCM, speech driver mode,
// and optional TTY / sidetone / VM recording.
// The bring-up ORDER below is hardware-mandated; do not reorder stages.
// Returns NO_ERROR on success, INVALID_OPERATION if audio_mode is not an
// in-call mode.
status_t SpeechPhoneCallController::OpenModemSpeechControlFlow(const audio_mode_t audio_mode) {
    Mutex::Autolock _l(mLock);

    ALOGD("+%s(), audio_mode = %d", __FUNCTION__, audio_mode);

    if (IsModeIncall(audio_mode) == false) {
        ALOGE("-%s() new_mode(%d) != MODE_IN_CALL / MODE_IN_CALL_2", __FUNCTION__, audio_mode);
        return INVALID_OPERATION;
    }

    // get speech driver instance for the modem that serves this audio mode
    mSpeechDriverFactory->SetActiveModemIndexByAudioMode(audio_mode);
    const modem_index_t modem_index = mSpeechDriverFactory->GetActiveModemIndex();
    SpeechDriverInterface *pSpeechDriver = mSpeechDriverFactory->GetSpeechDriver();

    // check BT device - BT SCO routing constrains the sampling rate below
    const bool bt_device_on = android_audio_legacy::AudioSystem::isBluetoothScoDevice(
            (android_audio_legacy::AudioSystem::audio_devices)mAudioResourceManager->getDlOutputDevice());
#if 1
    // Narrow-band BT SCO (mBTMode == 0) runs at 8 kHz; everything else
    // (wide-band BT or non-BT devices) runs at 16 kHz.
    int sample_rate;
    if (bt_device_on == true) {
        if (mBTMode == 0) // NB BTSCO
        {
            sample_rate = 8000;
        } else {
            sample_rate = 16000;
        }
    } else {
        sample_rate = 16000;
    }
    ALOGD("+%s(), bt_device_on = %d, sample_rate = %d", __FUNCTION__, bt_device_on, sample_rate);
#else
    const int sample_rate = (bt_device_on == true) ? 8000 : 16000; // TODO: MT6628 BT only use NB
#endif

    // enable clock - AFE analog clock must be on before touching the codec
    SetAfeAnalogClock(true);

    // set sampling rate for both playback (DAC) and capture (ADC) paths
    mAudioAnalogInstance->SetFrequency(AudioAnalogType::DEVICE_OUT_DAC, sample_rate);
    mAudioAnalogInstance->SetFrequency(AudioAnalogType::DEVICE_IN_ADC, sample_rate);

    // set device - TTY routing takes precedence over normal routing
    if (CheckTtyNeedOn() == true) {
        SetTtyInOutDevice(GetRoutingForTty(), mTty_Ctm, audio_mode);
    } else {
        // Note: set output device in phone call will also assign input device
        mAudioResourceManager->setDlOutputDevice(mAudioResourceManager->getDlOutputDevice());
    }

    // get device (re-read after routing above may have changed them)
    const audio_devices_t output_device = (audio_devices_t)mAudioResourceManager->getDlOutputDevice();
    const audio_devices_t input_device = (audio_devices_t)mAudioResourceManager->getUlInputDevice();
    ALOGD("%s(), output_device = 0x%x, input_device = 0x%x", __FUNCTION__, output_device, input_device);

    // Open ADC/DAC I2S, or DAIBT
    OpenModemSpeechDigitalPart(modem_index, output_device);

    // AFE_ON - enable the audio front end once digital paths are configured
    mAudioDigitalInstance->SetAfeEnable(true);

    // Clean Side Tone Filter gain
    pSpeechDriver->SetSidetoneGain(0);

    // Set PMIC digital/analog part - uplink has pop, open first
    mAudioResourceManager->StartInputDevice();
    if (bt_device_on == false) {
        // let the PMIC HW pulse settle before enabling modem PCM
        usleep(kDelayForUplinkPulseMs * 1000); // PMIC HW pulse
    }

    // set MODEM_PCM - open modem pcm here s.t. modem/DSP can learn the
    // uplink background noise, but not zero
    SetModemPcmAttribute(modem_index, sample_rate);
    mAudioDigitalInstance->SetModemPcmEnable(modem_index, true);

    // Set MD side sampling rate
    pSpeechDriver->SetModemSideSamplingRate(sample_rate);

    // Set speech mode
    pSpeechDriver->SetSpeechMode(input_device, output_device);

    // Speech/VT on
    if (mVtNeedOn == true) {
        pSpeechDriver->VideoTelephonyOn();

        // turn on P2W for Video Telephony
        bool wideband_on = false; // VT default use Narrow Band (8k), modem side will SRC to 16K
        pSpeechDriver->PCM2WayOn(wideband_on);
    } else {
        pSpeechDriver->SpeechOn();

        // turn on TTY
        if (CheckTtyNeedOn() == true) {
            pSpeechDriver->TtyCtmOn(BAUDOT_MODE);
        }
    }

    // Set PMIC digital/analog part - DL need trim code.
    mAudioResourceManager->StartOutputDevice();

    // start Side Tone Filter
    if (CheckSideToneFilterNeedOn(output_device) == true) {
        mAudioDigitalInstance->EnableSideToneFilter(true);
    }

    // check VM need open - voice memo / EPL debug recording
    SpeechVMRecorder *pSpeechVMRecorder = SpeechVMRecorder::GetInstance();
    if (pSpeechVMRecorder->GetVMRecordCapability() == true) {
        ALOGD("%s(), Open VM/EPL record", __FUNCTION__);
        pSpeechVMRecorder->Open();
    }

    ALOGD("-%s(), audio_mode = %d", __FUNCTION__, audio_mode);
    return NO_ERROR;
}
// Called when the client-side observer's binder dies (client process crashed
// or exited). The node would otherwise leak, so force a teardown.
// NOTE: freeNode() ends with `delete this`, so this object is destroyed
// before onObserverDied() returns.
void OMXNodeInstance::onObserverDied(OMXMaster *master) {
    ALOGE("!!! Observer died. Quickly, do something, ... anything...");

    // Try to force shutdown of the node and hope for the best.
    freeNode(master);
}
static int JpegEncode(struct uvc_device *dev, const void* frame, void* pOutBuffer, unsigned int* pOutBufferSize) { V4L2BUF_t * pbuf = (V4L2BUF_t *)frame; int result = 0; int src_format = 0; unsigned int src_addr_phy = 0; int src_width = 0; int src_height = 0; char dataTime[64]; int nVbvBufferSize = 2*1024*1024; JpegEncInfo sjpegInfo; EXIFInfo exifInfo; VideoEncoder* pVideoEnc = NULL; VencInputBuffer inputBuffer; VencOutputBuffer outputBuffer; src_format = pbuf->format; src_addr_phy = pbuf->addrPhyY; src_width = pbuf->crop_rect.width; src_height = pbuf->crop_rect.height; memset(&sjpegInfo, 0, sizeof(JpegEncInfo)); memset(&exifInfo, 0, sizeof(EXIFInfo)); sjpegInfo.sBaseInfo.nInputWidth = src_width; sjpegInfo.sBaseInfo.nInputHeight = src_height; sjpegInfo.sBaseInfo.nDstWidth = dev->width; sjpegInfo.sBaseInfo.nDstHeight = dev->height; sjpegInfo.sBaseInfo.eInputFormat = (src_format == V4L2_PIX_FMT_NV21) ? VENC_PIXEL_YVU420SP: VENC_PIXEL_YUV420SP; sjpegInfo.quality = 80;//90; sjpegInfo.pAddrPhyY = (unsigned char*)src_addr_phy; sjpegInfo.pAddrPhyC = (unsigned char*)src_addr_phy + ALIGN_16B(src_width) * ALIGN_16B(src_height); sjpegInfo.bEnableCorp = 0; sjpegInfo.sCropInfo.nLeft = 0; sjpegInfo.sCropInfo.nTop = 0; sjpegInfo.sCropInfo.nWidth = src_width; sjpegInfo.sCropInfo.nHeight = src_height; //getCurrentDateTime(dataTime); //strcpy((char*)exifInfo.CameraMake, "MID MAKE"); //strcpy((char*)exifInfo.CameraModel, "MID MODEL"); //strcpy((char*)exifInfo.DateTime, dataTime); exifInfo.ThumbWidth = 320; exifInfo.ThumbHeight = 240; exifInfo.Orientation = 0; exifInfo.enableGpsInfo = 0; exifInfo.WhiteBalance = 0; pVideoEnc = VideoEncCreate(VENC_CODEC_JPEG); if (pVideoEnc == NULL) { ALOGE("<F:%s, L:%d> VideoEncCreate failed!", __FUNCTION__, __LINE__); return -1; } //VideoEncSetParameter(pVideoEnc, VENC_IndexParamJpegExifInfo, &exifInfo); VideoEncSetParameter(pVideoEnc, VENC_IndexParamJpegQuality, &sjpegInfo.quality); VideoEncSetParameter(pVideoEnc, VENC_IndexParamSetVbvSize, 
&nVbvBufferSize); if (VideoEncInit(pVideoEnc, &sjpegInfo.sBaseInfo)< 0) { ALOGE("VideoEncInit failed"); return -1; } memset(&inputBuffer, 0, sizeof(VencInputBuffer)); #ifdef UVC_DEBUG VideoEncDestroy(pVideoEnc); memcpy(pOutBuffer, dev->imgdata, dev->imgsize); *pOutBufferSize = dev->imgsize; return 0; #endif inputBuffer.pAddrPhyY = sjpegInfo.pAddrPhyY; inputBuffer.pAddrPhyC = sjpegInfo.pAddrPhyC; inputBuffer.bEnableCorp = sjpegInfo.bEnableCorp; inputBuffer.sCropInfo.nLeft = sjpegInfo.sCropInfo.nLeft; inputBuffer.sCropInfo.nTop = sjpegInfo.sCropInfo.nTop; inputBuffer.sCropInfo.nWidth = sjpegInfo.sCropInfo.nWidth; inputBuffer.sCropInfo.nHeight = sjpegInfo.sCropInfo.nHeight; AddOneInputBuffer(pVideoEnc, &inputBuffer); if (VideoEncodeOneFrame(pVideoEnc)!= 0) { ALOGE("(f:%s, l:%d) jpeg encoder error", __FUNCTION__, __LINE__); } AlreadyUsedInputBuffer(pVideoEnc,&inputBuffer); memset(&outputBuffer, 0, sizeof(VencOutputBuffer)); result = GetOneBitstreamFrame(pVideoEnc, &outputBuffer); if (result < 0) { ALOGE("GetOneBitstreamFrame return ret(%d)", result); goto Exit; } //ALOGV("pData0(%p), nSize0(%u), pData1(%p), nSize1(%u)", // outputBuffer.pData0, outputBuffer.nSize0, outputBuffer.pData1, outputBuffer.nSize1); if (outputBuffer.nSize0 + outputBuffer.nSize1 > dev->imgsize) { ALOGE("nSize0(%d) + nSize1(%d) > imagsize(%d)", outputBuffer.nSize0, outputBuffer.nSize1, dev->imgsize); result = -1; } else { memcpy(pOutBuffer, outputBuffer.pData0, outputBuffer.nSize0); if(outputBuffer.nSize1) { memcpy(((unsigned char*)pOutBuffer + outputBuffer.nSize0), outputBuffer.pData1, outputBuffer.nSize1); *pOutBufferSize = outputBuffer.nSize0 + outputBuffer.nSize1; } else { *pOutBufferSize = outputBuffer.nSize0; } } FreeOneBitStreamFrame(pVideoEnc, &outputBuffer); Exit: if (pVideoEnc) { VideoEncDestroy(pVideoEnc); } return result; }
void signalExceptionForError(JNIEnv* env, jobject obj, status_t err, bool canThrowRemoteException) { switch (err) { case UNKNOWN_ERROR: jniThrowException(env, "java/lang/RuntimeException", "Unknown error"); break; case NO_MEMORY: jniThrowException(env, "java/lang/OutOfMemoryError", NULL); break; case INVALID_OPERATION: jniThrowException(env, "java/lang/UnsupportedOperationException", NULL); break; case BAD_VALUE: jniThrowException(env, "java/lang/IllegalArgumentException", NULL); break; case BAD_INDEX: jniThrowException(env, "java/lang/IndexOutOfBoundsException", NULL); break; case BAD_TYPE: jniThrowException(env, "java/lang/IllegalArgumentException", NULL); break; case NAME_NOT_FOUND: jniThrowException(env, "java/util/NoSuchElementException", NULL); break; case PERMISSION_DENIED: jniThrowException(env, "java/lang/SecurityException", NULL); break; case NOT_ENOUGH_DATA: jniThrowException(env, "android/os/ParcelFormatException", "Not enough data"); break; case NO_INIT: jniThrowException(env, "java/lang/RuntimeException", "Not initialized"); break; case ALREADY_EXISTS: jniThrowException(env, "java/lang/RuntimeException", "Item already exists"); break; case DEAD_OBJECT: // DeadObjectException is a checked exception, only throw from certain methods. jniThrowException(env, canThrowRemoteException ? "android/os/DeadObjectException" : "java/lang/RuntimeException", NULL); break; case UNKNOWN_TRANSACTION: jniThrowException(env, "java/lang/RuntimeException", "Unknown transaction code"); break; case FAILED_TRANSACTION: ALOGE("!!! FAILED BINDER TRANSACTION !!!"); // TransactionTooLargeException is a checked exception, only throw from certain methods. // FIXME: Transaction too large is the most common reason for FAILED_TRANSACTION // but it is not the only one. The Binder driver can return BR_FAILED_REPLY // for other reasons also, such as if the transaction is malformed or // refers to an FD that has been closed. 
We should change the driver // to enable us to distinguish these cases in the future. jniThrowException(env, canThrowRemoteException ? "android/os/TransactionTooLargeException" : "java/lang/RuntimeException", NULL); break; case FDS_NOT_ALLOWED: jniThrowException(env, "java/lang/RuntimeException", "Not allowed to write file descriptors here"); break; default: ALOGE("Unknown binder error code. 0x%x", err); String8 msg; msg.appendFormat("Unknown binder error code. 0x%x", err); // RemoteException is a checked exception, only throw from certain methods. jniThrowException(env, canThrowRemoteException ? "android/os/RemoteException" : "java/lang/RuntimeException", msg.string()); break; } }