/**
 * pre-condition: gp != 0
 */
// Applies an OpenMAX AL / OpenSL ES play-state request to the GenericPlayer,
// tracking the asynchronous Android-side object state in *pObjState:
// a transition out of ANDROID_UNINITIALIZED first triggers prepare(), and the
// object is marked ANDROID_PREPARING until the prepare completion event arrives.
XAresult android_Player_setPlayState(const android::sp<android::GenericPlayer> &gp,
        SLuint32 playState,
        AndroidObjectState* pObjState)
{
    XAresult result = XA_RESULT_SUCCESS;
    // snapshot of the current state; *pObjState is only written, never re-read, below
    AndroidObjectState objState = *pObjState;

    switch (playState) {
     case SL_PLAYSTATE_STOPPED: {
        SL_LOGV("setting AVPlayer to SL_PLAYSTATE_STOPPED");
        // stop is legal in any object state
        gp->stop();
        }
        break;
     case SL_PLAYSTATE_PAUSED: {
        SL_LOGV("setting AVPlayer to SL_PLAYSTATE_PAUSED");
        switch(objState) {
        case ANDROID_UNINITIALIZED:
            // kick off asynchronous preparation; actual pause is implicit
            // (the player does not start until play() is requested)
            *pObjState = ANDROID_PREPARING;
            gp->prepare();
            break;
        case ANDROID_PREPARING:
            // prepare already in flight, nothing more to do
            break;
        case ANDROID_READY:
            gp->pause();
            break;
        default:
            SL_LOGE("Android object in invalid state");
            break;
        }
        }
        break;
     case SL_PLAYSTATE_PLAYING: {
        SL_LOGV("setting AVPlayer to SL_PLAYSTATE_PLAYING");
        switch(objState) {
        case ANDROID_UNINITIALIZED:
            *pObjState = ANDROID_PREPARING;
            gp->prepare();
            // intended fall through
        case ANDROID_PREPARING:
            // intended fall through: play() is requested even while preparing;
            // GenericPlayer queues the request until prepare completes
        case ANDROID_READY:
            gp->play();
            break;
        default:
            SL_LOGE("Android object in invalid state");
            break;
        }
        }
        break;
     default:
        // checked by caller, should not happen
        break;
    }

    return result;
}
//-----------------------------------------------------------------------------
// Realizes a CAudioRecorder: creates and configures the underlying
// android::AudioRecord from the recorder's PCM sink parameters.
// Returns SL_RESULT_SUCCESS, or SL_RESULT_CONTENT_UNSUPPORTED when the sink is
// not an Android simple buffer queue or when the AudioRecord cannot be created.
// NOTE(review): the 'async' parameter is unused here — realize appears to
// complete synchronously; confirm against the caller's realize protocol.
SLresult android_audioRecorder_realize(CAudioRecorder* ar, SLboolean async) {
    SL_LOGV("android_audioRecorder_realize(%p) entering", ar);

    SLresult result = SL_RESULT_SUCCESS;

    // initialize platform-independent CAudioRecorder fields
    if (SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE != ar->mDataSink.mLocator.mLocatorType) {
        SL_LOGE(ERROR_RECORDER_SINK_MUST_BE_ANDROIDSIMPLEBUFFERQUEUE);
        return SL_RESULT_CONTENT_UNSUPPORTED;
    }
    // the following platform-independent fields have been initialized in CreateAudioRecorder()
    //    ar->mNumChannels
    //    ar->mSampleRateMilliHz

    SL_LOGV("new AudioRecord %u channels, %u mHz", ar->mNumChannels, ar->mSampleRateMilliHz);

    // currently nothing analogous to canUseFastTrack() for recording
    audio_input_flags_t policy = AUDIO_INPUT_FLAG_FAST;

    // initialize platform-specific CAudioRecorder fields
    ar->mAudioRecord = new android::AudioRecord();
    ar->mAudioRecord->set(ar->mRecordSource, // source
            sles_to_android_sampleRate(ar->mSampleRateMilliHz), // sample rate in Hertz
            AUDIO_FORMAT_PCM_16_BIT,   //FIXME use format from buffer queue sink
            sles_to_android_channelMaskIn(ar->mNumChannels, 0 /*no channel mask*/),
                                   // channel config
            0,                     //frameCount min
            audioRecorder_callback,// callback_t
            (void*)ar,             // user, callback data, here the AudioRecorder
            0,                     // notificationFrames
            false,                 // threadCanCallJava, note: this will prevent direct Java
                                   //   callbacks, but we don't want them in the recording loop
            0,                     // session ID
            android::AudioRecord::TRANSFER_CALLBACK, // transfer type
            policy);               // audio_input_flags_t

    // report failure at realize time rather than waiting for the first read
    if (android::NO_ERROR != ar->mAudioRecord->initCheck()) {
        SL_LOGE("android_audioRecorder_realize(%p) error creating AudioRecord object", ar);
        result = SL_RESULT_CONTENT_UNSUPPORTED;
    }

#ifdef MONITOR_RECORDING
    // debug-only tee of the recorded data to a file
    gMonitorFp = fopen(MONITOR_TARGET, "w");
    if (NULL == gMonitorFp) {
        SL_LOGE("error opening %s", MONITOR_TARGET);
    } else {
        SL_LOGE("recording to %s", MONITOR_TARGET); // SL_LOGE so it's always displayed
    }
#endif

    return result;
}
//----------------------------------------------------------------------------- SLresult android_audioRecorder_create(CAudioRecorder* ar) { SL_LOGV("android_audioRecorder_create(%p) entering", ar); const SLDataSource *pAudioSrc = &ar->mDataSource.u.mSource; const SLDataSink *pAudioSnk = &ar->mDataSink.u.mSink; SLresult result = SL_RESULT_SUCCESS; const SLuint32 sourceLocatorType = *(SLuint32 *)pAudioSrc->pLocator; const SLuint32 sinkLocatorType = *(SLuint32 *)pAudioSnk->pLocator; // the following platform-independent fields have been initialized in CreateAudioRecorder() // ar->mNumChannels // ar->mSampleRateMilliHz if ((SL_DATALOCATOR_IODEVICE == sourceLocatorType) && (SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE == sinkLocatorType)) { // microphone to simple buffer queue ar->mAndroidObjType = AUDIORECORDER_FROM_MIC_TO_PCM_BUFFERQUEUE; ar->mAudioRecord.clear(); ar->mCallbackProtector = new android::CallbackProtector(); ar->mRecordSource = AUDIO_SOURCE_DEFAULT; } else { result = SL_RESULT_CONTENT_UNSUPPORTED; } return result; }
//----------------------------------------------------------------------------- SLresult android_genericFx_createEffect(IAndroidEffect* iae, SLInterfaceID pUuid, audio_session_t sessionId) { SLresult result = SL_RESULT_SUCCESS; // does this effect already exist? if (0 <= iae->mEffects->indexOfKey(KEY_FROM_GUID(pUuid))) { return result; } // create new effect android::sp<android::AudioEffect> pFx = new android::AudioEffect( NULL, // not using type to create effect android::String16(), (const effect_uuid_t*)pUuid, 0,// priority 0,// effect callback 0,// callback data sessionId, 0 );// output // verify effect was successfully created before storing it android::status_t status = pFx->initCheck(); if (android::NO_ERROR != status) { SL_LOGE("AudioEffect initCheck() returned %d, effect will not be stored", status); result = SL_RESULT_RESOURCE_ERROR; } else { SL_LOGV("AudioEffect successfully created on session %d", sessionId); iae->mEffects->add(KEY_FROM_GUID(pUuid), pFx); } return result; }
//----------------------------------------------------------------------------- void android_audioRecorder_setRecordState(CAudioRecorder* ar, SLuint32 state) { SL_LOGV("android_audioRecorder_setRecordState(%p, %u) entering", ar, state); if (ar->mAudioRecord == 0) { return; } switch (state) { case SL_RECORDSTATE_STOPPED: ar->mAudioRecord->stop(); break; case SL_RECORDSTATE_PAUSED: // Note that pausing is treated like stop as this implementation only records to a buffer // queue, so there is no notion of destination being "opened" or "closed" (See description // of SL_RECORDSTATE in specification) ar->mAudioRecord->stop(); break; case SL_RECORDSTATE_RECORDING: ar->mAudioRecord->start(); break; default: break; } }
//----------------------------------------------------------------------------- SLresult audioRecorder_setPerformanceMode(CAudioRecorder* ar, SLuint32 mode) { SLresult result = SL_RESULT_SUCCESS; SL_LOGV("performance mode set to %d", mode); SLuint32 perfMode = ANDROID_PERFORMANCE_MODE_DEFAULT; switch (mode) { case SL_ANDROID_PERFORMANCE_LATENCY: perfMode = ANDROID_PERFORMANCE_MODE_LATENCY; break; case SL_ANDROID_PERFORMANCE_LATENCY_EFFECTS: perfMode = ANDROID_PERFORMANCE_MODE_LATENCY_EFFECTS; break; case SL_ANDROID_PERFORMANCE_NONE: perfMode = ANDROID_PERFORMANCE_MODE_NONE; break; case SL_ANDROID_PERFORMANCE_POWER_SAVING: perfMode = ANDROID_PERFORMANCE_MODE_POWER_SAVING; break; default: SL_LOGE(ERROR_CONFIG_PERF_MODE_UNKNOWN); result = SL_RESULT_PARAMETER_INVALID; break; } // performance mode needs to be set before the object is realized // (ar->mAudioRecord is supposed to be NULL until then) if (SL_OBJECT_STATE_UNREALIZED != ar->mObject.mState) { SL_LOGE(ERROR_CONFIG_PERF_MODE_REALIZED); result = SL_RESULT_PRECONDITIONS_VIOLATED; } else { ar->mPerformanceMode = perfMode; } return result; }
// Called with a lock on MediaPlayer, and blocks until safe to destroy XAresult android_Player_preDestroy(CMediaPlayer *mp) { SL_LOGV("android_Player_preDestroy(%p)", mp); // Not yet clear why this order is important, but it reduces detected deadlocks object_unlock_exclusive(&mp->mObject); if (mp->mCallbackProtector != 0) { mp->mCallbackProtector->requestCbExitAndWait(); } object_lock_exclusive(&mp->mObject); if (mp->mAVPlayer != 0) { mp->mAVPlayer->preDestroy(); } SL_LOGV("android_Player_preDestroy(%p) after mAVPlayer->preDestroy()", mp); return XA_RESULT_SUCCESS; }
//----------------------------------------------------------------------------- void android_eq_init(audio_session_t sessionId, IEqualizer* ieq) { SL_LOGV("android_eq_init on session %d", sessionId); if (!android_fx_initEffectObj(sessionId, ieq->mEqEffect, &ieq->mEqDescriptor.type)) { SL_LOGE("Equalizer effect initialization failed"); return; } // initialize number of bands, band level range, and number of presets uint16_t num = 0; if (android::NO_ERROR == android_eq_getParam(ieq->mEqEffect, EQ_PARAM_NUM_BANDS, 0, &num)) { ieq->mNumBands = num; } int16_t range[2] = {0, 0}; if (android::NO_ERROR == android_eq_getParam(ieq->mEqEffect, EQ_PARAM_LEVEL_RANGE, 0, range)) { ieq->mBandLevelRangeMin = range[0]; ieq->mBandLevelRangeMax = range[1]; } SL_LOGV(" EQ init: num bands = %u, band range=[%d %d]mB", num, range[0], range[1]); // FIXME don't store presets names, they can be queried each time they're needed // initialize preset number and names, store in IEngine uint16_t numPresets = 0; if (android::NO_ERROR == android_eq_getParam(ieq->mEqEffect, EQ_PARAM_GET_NUM_OF_PRESETS, 0, &numPresets)) { ieq->mThis->mEngine->mEqNumPresets = numPresets; ieq->mNumPresets = numPresets; } object_lock_exclusive(&ieq->mThis->mEngine->mObject); char name[EFFECT_STRING_LEN_MAX]; if ((0 < numPresets) && (NULL == ieq->mThis->mEngine->mEqPresetNames)) { ieq->mThis->mEngine->mEqPresetNames = (char **)new char *[numPresets]; for(uint32_t i = 0 ; i < numPresets ; i++) { if (android::NO_ERROR == android_eq_getParam(ieq->mEqEffect, EQ_PARAM_GET_PRESET_NAME, i, name)) { ieq->mThis->mEngine->mEqPresetNames[i] = new char[strlen(name) + 1]; strcpy(ieq->mThis->mEngine->mEqPresetNames[i], name); SL_LOGV(" EQ init: presets = %u is %s", i, ieq->mThis->mEngine->mEqPresetNames[i]); } } } object_unlock_exclusive(&ieq->mThis->mEngine->mObject); }
//----------------------------------------------------------------------------- void android_agc_init(audio_session_t sessionId, IAndroidAutomaticGainControl* iagc) { SL_LOGV("android_agc_init on session %d", sessionId); if (!android_fx_initEffectObj(sessionId, iagc->mAGCEffect, &iagc->mAGCDescriptor.type)) { SL_LOGE("AGC effect initialization failed"); return; } }
//----------------------------------------------------------------------------- void android_aec_init(audio_session_t sessionId, IAndroidAcousticEchoCancellation* iaec) { SL_LOGV("android_aec_init on session %d", sessionId); if (!android_fx_initEffectObj(sessionId, iaec->mAECEffect, &iaec->mAECDescriptor.type)) { SL_LOGE("AEC effect initialization failed"); return; } }
// Non-blocking destroy protection: marks the protector so that no new callback
// may enter (enterCb() will return false), but does NOT wait for an in-progress
// callback to return.
void CallbackProtector::requestCbExit() {
    Mutex::Autolock _l(mLock);
    mSafeToEnterCb = false;
#ifdef USE_DEBUG
    // remember who requested destruction, for the diagnostics in exitCb()/enterCb()
    mRequesterThread = pthread_self();
    mRequesterTid = gettid();
#endif
    if (mCbCount) {
        // a callback is still running; its return will be logged by exitCb()
#ifdef USE_DEBUG
        SL_LOGV("Callback protector detected in-progress callback by thread %p tid %d during"
                " non-blocking destroy requested by thread %p tid %d",
                (void *) mCallbackThread, mCallbackTid, (void *) pthread_self(), gettid());
#else
        SL_LOGV("Callback protector detected in-progress callback during non-blocking destroy");
#endif
    }
}
//----------------------------------------------------------------------------- void android_ns_init(audio_session_t sessionId, IAndroidNoiseSuppression* ins) { SL_LOGV("android_ns_init on session %d", sessionId); if (!android_fx_initEffectObj(sessionId, ins->mNSEffect, &ins->mNSDescriptor.type)) { SL_LOGE("NS effect initialization failed"); return; } }
//----------------------------------------------------------------------------- void android_audioRecorder_destroy(CAudioRecorder* ar) { SL_LOGV("android_audioRecorder_destroy(%p) entering", ar); if (ar->mAudioRecord != 0) { ar->mAudioRecord->stop(); ar->mAudioRecord.clear(); } // explicit destructor ar->mAudioRecord.~sp(); ar->mCallbackProtector.~sp(); }
//----------------------------------------------------------------------------- void android_audioRecorder_useRecordEventMask(CAudioRecorder *ar) { IRecord *pRecordItf = &ar->mRecord; SLuint32 eventFlags = pRecordItf->mCallbackEventsMask; if (ar->mAudioRecord == 0) { return; } if ((eventFlags & SL_RECORDEVENT_HEADATMARKER) && (pRecordItf->mMarkerPosition != 0)) { ar->mAudioRecord->setMarkerPosition((uint32_t)((((int64_t)pRecordItf->mMarkerPosition * sles_to_android_sampleRate(ar->mSampleRateMilliHz)))/1000)); } else { // clear marker ar->mAudioRecord->setMarkerPosition(0); } if (eventFlags & SL_RECORDEVENT_HEADATNEWPOS) { SL_LOGV("pos update period %d", pRecordItf->mPositionUpdatePeriod); ar->mAudioRecord->setPositionUpdatePeriod( (uint32_t)((((int64_t)pRecordItf->mPositionUpdatePeriod * sles_to_android_sampleRate(ar->mSampleRateMilliHz)))/1000)); } else { // clear periodic update ar->mAudioRecord->setPositionUpdatePeriod(0); } if (eventFlags & SL_RECORDEVENT_HEADATLIMIT) { // FIXME support SL_RECORDEVENT_HEADATLIMIT SL_LOGD("[ FIXME: IRecord_SetCallbackEventsMask(SL_RECORDEVENT_HEADATLIMIT) on an " "SL_OBJECTID_AUDIORECORDER to be implemented ]"); } if (eventFlags & SL_RECORDEVENT_HEADMOVING) { // FIXME support SL_RECORDEVENT_HEADMOVING SL_LOGD("[ FIXME: IRecord_SetCallbackEventsMask(SL_RECORDEVENT_HEADMOVING) on an " "SL_OBJECTID_AUDIORECORDER to be implemented ]"); } if (eventFlags & SL_RECORDEVENT_BUFFER_FULL) { // nothing to do for SL_RECORDEVENT_BUFFER_FULL since this will not be encountered on // recording to buffer queues } if (eventFlags & SL_RECORDEVENT_HEADSTALLED) { // nothing to do for SL_RECORDEVENT_HEADSTALLED, callback event will be checked against mask // when AudioRecord::EVENT_OVERRUN is encountered } }
//----------------------------------------------------------------------------- SLresult android_audioRecorder_checkSourceSinkSupport(CAudioRecorder* ar) { const SLDataSource *pAudioSrc = &ar->mDataSource.u.mSource; const SLDataSink *pAudioSnk = &ar->mDataSink.u.mSink; // Sink check: // only buffer queue sinks are supported, regardless of the data source if (SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE != *(SLuint32 *)pAudioSnk->pLocator) { SL_LOGE(ERROR_RECORDER_SINK_MUST_BE_ANDROIDSIMPLEBUFFERQUEUE); return SL_RESULT_PARAMETER_INVALID; } else { // only PCM buffer queues are supported SLuint32 formatType = *(SLuint32 *)pAudioSnk->pFormat; if (SL_DATAFORMAT_PCM == formatType) { SLDataFormat_PCM *df_pcm = (SLDataFormat_PCM *)ar->mDataSink.u.mSink.pFormat; ar->mSampleRateMilliHz = df_pcm->samplesPerSec; ar->mNumChannels = df_pcm->numChannels; SL_LOGV("AudioRecorder requested sample rate = %u mHz, %u channel(s)", ar->mSampleRateMilliHz, ar->mNumChannels); } else { SL_LOGE(ERROR_RECORDER_SINK_FORMAT_MUST_BE_PCM); return SL_RESULT_PARAMETER_INVALID; } } // Source check: // only input device sources are supported // check it's an IO device if (SL_DATALOCATOR_IODEVICE != *(SLuint32 *)pAudioSrc->pLocator) { SL_LOGE(ERROR_RECORDER_SOURCE_MUST_BE_IODEVICE); return SL_RESULT_PARAMETER_INVALID; } else { // check it's an input device SLDataLocator_IODevice *dl_iod = (SLDataLocator_IODevice *) pAudioSrc->pLocator; if (SL_IODEVICE_AUDIOINPUT != dl_iod->deviceType) { SL_LOGE(ERROR_RECORDER_IODEVICE_MUST_BE_AUDIOINPUT); return SL_RESULT_PARAMETER_INVALID; } // check it's the default input device, others aren't supported here if (SL_DEFAULTDEVICEID_AUDIOINPUT != dl_iod->deviceID) { SL_LOGE(ERROR_RECORDER_INPUT_ID_MUST_BE_DEFAULT); return SL_RESULT_PARAMETER_INVALID; } } return SL_RESULT_SUCCESS; }
// Marks the end of a callback that was admitted by enterCb().
// When the last (possibly nested) callback returns after a destroy has been
// requested, wakes up any thread blocked waiting for callbacks to drain.
void CallbackProtector::exitCb() {
    Mutex::Autolock _l(mLock);

    // enterCb()/exitCb() must be balanced
    CHECK(mCbCount > 0);
    mCbCount--;
    if (mCbCount == 0) {
        if (!mSafeToEnterCb) {
            // a destroy is pending: signal that callbacks have fully drained
#ifdef USE_DEBUG
            SL_LOGV("Callback protector detected return from callback by thread %p tid %d during"
                    " destroy requested by thread %p tid %d",
                    (void *) mCallbackThread, mCallbackTid,
                    (void *) mRequesterThread, mRequesterTid);
#else
            SL_LOGV("Callback protector detected return from callback during destroy");
#endif
            mCbExitedCondition.broadcast();
        }
#ifdef USE_DEBUG
        // no thread is inside a callback anymore
        mCallbackThread = (pthread_t) NULL;
        mCallbackTid = 0;
#endif
    }
}
// Gate at the top of every callback: returns true and counts the entry when
// the protected object is not being destroyed; returns false (callback must
// not run) once destroy protection has been requested.
bool CallbackProtector::enterCb() {
    Mutex::Autolock _l(mLock);
    if (mSafeToEnterCb) {
        mCbCount++;
#ifdef USE_DEBUG
        if (mCbCount > 1) {
            SL_LOGV("Callback protector allowed multiple or nested callback entry: %u", mCbCount);
        } else {
            // record the first entering thread for the diagnostics in exitCb()
            mCallbackThread = pthread_self();
            mCallbackTid = gettid();
        }
#endif
    } else {
#ifdef USE_DEBUG
        SL_LOGV("Callback protector denied callback entry by thread %p tid %d during destroy"
                " requested by thread %p tid %d",
                (void *) pthread_self(), gettid(),
                (void *) mRequesterThread, mRequesterTid);
#else
        SL_LOGV("Callback protector denied callback entry during destroy");
#endif
    }
    return mSafeToEnterCb;
}
// Bridges the application-facing Android buffer queue to the media framework's
// streaming source. The player is deliberately a raw pointer, see below.
StreamSourceAppProxy::StreamSourceAppProxy(
        IAndroidBufferQueue *androidBufferQueue,
        const sp<CallbackProtector> &callbackProtector,
        // sp<StreamPlayer> would cause StreamPlayer's destructor to run during it's own
        // construction. If you pass in a sp<> to 'this' inside a constructor, then first the
        // refcount is increased from 0 to 1, then decreased from 1 to 0, which causes the object's
        // destructor to run from inside it's own constructor.
        StreamPlayer * /* const sp<StreamPlayer> & */ player) :
    mBuffersHasBeenSet(false),
    mAndroidBufferQueue(androidBufferQueue),
    mCallbackProtector(callbackProtector),
    mPlayer(player)
{
    SL_LOGV("StreamSourceAppProxy::StreamSourceAppProxy()");
}
//----------------------------------------------------------------------------- void android_virt_init(audio_session_t sessionId, IVirtualizer* ivi) { SL_LOGV("android_virt_init on session %d", sessionId); if (!android_fx_initEffectObj(sessionId, ivi->mVirtualizerEffect, &ivi->mVirtualizerDescriptor.type)) { SL_LOGE("Virtualizer effect initialization failed"); return; } // initialize strength int16_t strength; if (android::NO_ERROR == android_virt_getParam(ivi->mVirtualizerEffect, VIRTUALIZER_PARAM_STRENGTH, &strength)) { ivi->mStrength = (SLpermille) strength; } }
//----------------------------------------------------------------------------- XAresult android_Player_destroy(CMediaPlayer *mp) { SL_LOGV("android_Player_destroy(%p)", mp); mp->mAVPlayer.clear(); // placeholder: not necessary yet as session ID lifetime doesn't extend beyond player // android::AudioSystem::releaseAudioSessionId(mp->mSessionId); mp->mCallbackProtector.clear(); // explicit destructor mp->mAVPlayer.~sp(); mp->mCallbackProtector.~sp(); return XA_RESULT_SUCCESS; }
//----------------------------------------------------------------------------- void android_bb_init(audio_session_t sessionId, IBassBoost* ibb) { SL_LOGV("session %d", sessionId); if (!android_fx_initEffectObj(sessionId, ibb->mBassBoostEffect, &ibb->mBassBoostDescriptor.type)) { SL_LOGE("BassBoost effect initialization failed"); return; } // initialize strength int16_t strength; if (android::NO_ERROR == android_bb_getParam(ibb->mBassBoostEffect, BASSBOOST_PARAM_STRENGTH, &strength)) { ibb->mStrength = (SLpermille) strength; } }
//----------------------------------------------------------------------------- void android_prev_init(IPresetReverb* ipr) { SL_LOGV("session is implicitly %d (aux effect)", AUDIO_SESSION_OUTPUT_MIX); if (!android_fx_initEffectObj(AUDIO_SESSION_OUTPUT_MIX /*sessionId*/, ipr->mPresetReverbEffect, &ipr->mPresetReverbDescriptor.type)) { SL_LOGE("PresetReverb effect initialization failed"); return; } // initialize preset uint16_t preset; if (android::NO_ERROR == android_prev_getPreset(ipr->mPresetReverbEffect, &preset)) { ipr->mPreset = preset; // enable the effect if it has a preset loaded ipr->mPresetReverbEffect->setEnabled(SL_REVERBPRESET_NONE != preset); } }
//----------------------------------------------------------------------------- void android_audioRecorder_destroy(CAudioRecorder* ar) { SL_LOGV("android_audioRecorder_destroy(%p) entering", ar); if (ar->mAudioRecord != 0) { ar->mAudioRecord->stop(); ar->mAudioRecord.clear(); } // explicit destructor ar->mAudioRecord.~sp(); ar->mCallbackProtector.~sp(); #ifdef MONITOR_RECORDING if (NULL != gMonitorFp) { fclose(gMonitorFp); gMonitorFp = NULL; } #endif }
//----------------------------------------------------------------------------- void android_erev_init(IEnvironmentalReverb* ier) { SL_LOGV("session is implicitly %d (aux effect)", AUDIO_SESSION_OUTPUT_MIX); if (!android_fx_initEffectObj(AUDIO_SESSION_OUTPUT_MIX /*sessionId*/, ier->mEnvironmentalReverbEffect, &ier->mEnvironmentalReverbDescriptor.type)) { SL_LOGE("EnvironmentalReverb effect initialization failed"); return; } // enable env reverb: other SL ES effects have an explicit SetEnabled() function, and the // preset reverb state depends on the selected preset. ier->mEnvironmentalReverbEffect->setEnabled(true); // initialize reverb properties SLEnvironmentalReverbSettings properties; if (android::NO_ERROR == android_erev_getParam(ier->mEnvironmentalReverbEffect, REVERB_PARAM_PROPERTIES, &properties)) { ier->mProperties = properties; } }
/*
 * pre-conditions:
 *    mp != NULL
 *    mp->mAVPlayer != 0 (player is realized)
 * nativeWindow can be NULL, but if NULL it is treated as an error
 */
// Attaches the given native window as the player's video output. Only windows
// whose concrete type is NATIVE_WINDOW_SURFACE are supported; anything else
// (including framebuffer windows) yields SL_RESULT_PARAMETER_INVALID.
SLresult android_Player_setNativeWindow(CMediaPlayer *mp, ANativeWindow *nativeWindow)
{
    assert(mp != NULL);
    assert(mp->mAVPlayer != 0);
    if (nativeWindow == NULL) {
        SL_LOGE("ANativeWindow is NULL");
        return SL_RESULT_PARAMETER_INVALID;
    }
    SLresult result;
    int err;
    int value;
    // this could crash if app passes in a bad parameter, but that's OK
    err = (*nativeWindow->query)(nativeWindow, NATIVE_WINDOW_CONCRETE_TYPE, &value);
    if (0 != err) {
        SL_LOGE("Query NATIVE_WINDOW_CONCRETE_TYPE on ANativeWindow * %p failed; "
                "errno %d", nativeWindow, err);
        result = SL_RESULT_PARAMETER_INVALID;
    } else {
        switch (value) {
        case NATIVE_WINDOW_SURFACE: { // Surface
            SL_LOGV("Displaying on ANativeWindow of type NATIVE_WINDOW_SURFACE");
            // the concrete-type query above guarantees this is a Surface,
            // so the static downcast is safe
            android::sp<android::Surface> surface(
                    static_cast<android::Surface *>(nativeWindow));
            android::sp<android::IGraphicBufferProducer> nativeSurfaceTexture(
                    surface->getIGraphicBufferProducer());
            mp->mAVPlayer->setVideoSurfaceTexture(nativeSurfaceTexture);
            result = SL_RESULT_SUCCESS;
            } break;
        case NATIVE_WINDOW_FRAMEBUFFER: // FramebufferNativeWindow
            // fall through
        default:
            SL_LOGE("ANativeWindow * %p has unknown or unsupported concrete type %d",
                    nativeWindow, value);
            result = SL_RESULT_PARAMETER_INVALID;
            break;
        }
    }
    return result;
}
// Delivers the currently decoded buffer to the application through the decode
// callback (possibly in several chunks if the callback consumes less than it
// is offered), then releases the buffer and, while playing, requests the next
// decode.
void AudioToCbRenderer::onRender() {
    SL_LOGV("AudioToCbRenderer::onRender");

    Mutex::Autolock _l(mBufferSourceLock);

    if (NULL == mDecodeBuffer) {
        // nothing to render, move along
        //SL_LOGV("AudioToCbRenderer::onRender NULL buffer, exiting");
        return;
    }

    if (mStateFlags & kFlagPlaying) {
        if (NULL != mDecodeCbf) {
            size_t full = mDecodeBuffer->range_length();
            size_t consumed = 0;
            size_t offset = 0;
            // keep offering the remainder until the callback has taken it all
            while (offset < full) {
                consumed = mDecodeCbf(
                        (const uint8_t *)mDecodeBuffer->data()
                                + offset + mDecodeBuffer->range_offset(),
                        mDecodeBuffer->range_length() - offset,
                        mDecodeUser);
                offset += consumed;
                //SL_LOGV("consumed=%u, offset=%u, full=%u", consumed, offset, full);
                if (consumed == 0) {
                    // decoded data is not being consumed, skip this buffer
                    break;
                }
            }
        }
        // schedule decoding of the next buffer
        (new AMessage(kWhatDecode, id()))->post();
    }
    // the buffer is released even when not playing, so it is never re-delivered
    mDecodeBuffer->release();
    mDecodeBuffer = NULL;

    updateOneShot();
}
//----------------------------------------------------------------------------- bool android_fx_initEffectDescriptor(const SLInterfaceID effectId, effect_descriptor_t* fxDescrLoc) { uint32_t numEffects = 0; effect_descriptor_t descriptor; bool foundEffect = false; // any effects? android::status_t res = android::AudioEffect::queryNumberEffects(&numEffects); if (android::NO_ERROR != res) { SL_LOGE("unable to find any effects."); goto effectError; } // request effect in the effects? for (uint32_t i=0 ; i < numEffects ; i++) { res = android::AudioEffect::queryEffect(i, &descriptor); if ((android::NO_ERROR == res) && (0 == memcmp(effectId, &descriptor.type, sizeof(effect_uuid_t)))) { SL_LOGV("found effect %d %s", i, descriptor.name); foundEffect = true; break; } } if (foundEffect) { memcpy(fxDescrLoc, &descriptor, sizeof(effect_descriptor_t)); } else { SL_LOGE("unable to find an implementation for the requested effect."); goto effectError; } return true; effectError: // the requested effect wasn't found memset(fxDescrLoc, 0, sizeof(effect_descriptor_t)); return false; }
//--------------------------------------------------
// Event handlers
// Prepares the AAC ADTS buffer-queue player: publishes the statically known
// PCM format fields, builds the extractor + decoder chain from the buffer
// queue source, starts the decoder, then publishes the decoder-reported
// sample rate / channel count and signals prepare completion.
void AacBqToPcmCbRenderer::onPrepare() {
    SL_LOGD("AacBqToPcmCbRenderer::onPrepare()");
    Mutex::Autolock _l(mBufferSourceLock);

    // Initialize the PCM format info with the known parameters before the start of the decode
    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE] = SL_PCMSAMPLEFORMAT_FIXED_16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE] = 16;
        //FIXME not true on all platforms
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS] = SL_BYTEORDER_LITTLEENDIAN;
        // initialization with the default values: they will be replaced by the actual values
        //   once the decoder has figured them out
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = UNKNOWN_NUMCHANNELS;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = UNKNOWN_SAMPLERATE;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = SL_ANDROID_UNKNOWN_CHANNELMASK;
    }

    sp<MediaExtractor> extractor = new AacAdtsExtractor(mBqSource);

    // only decoding a single track of data
    const size_t kTrackToDecode = 0;

    sp<IMediaSource> source = extractor->getTrack(kTrackToDecode);
    if (source == 0) {
        SL_LOGE("AacBqToPcmCbRenderer::onPrepare: error getting source from extractor");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    // the audio content is not raw PCM, so we need a decoder
    source = SimpleDecodingSource::Create(source);
    if (source == NULL) {
        SL_LOGE("AacBqToPcmCbRenderer::onPrepare: Could not instantiate decoder.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    sp<MetaData> meta = source->getFormat();

    SL_LOGD("AacBqToPcmCbRenderer::onPrepare() after instantiating decoder");

    if (source->start() != OK) {
        SL_LOGE("AacBqToPcmCbRenderer::onPrepare() Failed to start source/decoder.");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;
    }

    //---------------------------------
    // retrieve the format the decoder settled on
    int32_t channelCount;
    CHECK(meta->findInt32(kKeyChannelCount, &channelCount));
    int32_t sr;
    CHECK(meta->findInt32(kKeySampleRate, &sr));
    // FIXME similar to AudioSfDecoder::onPrepare()

    // already "good to go" (compare to AudioSfDecoder::onPrepare)
    mCacheStatus = kStatusHigh;
    mCacheFill = 1000;
    notifyStatus();
    notifyCacheFill();

    // publish the decoder-reported values in place of the earlier defaults
    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = sr;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = channelCount;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] =
                sles_channel_out_mask_from_count(channelCount);
    }
    SL_LOGV("AacBqToPcmCbRenderer::onPrepare() channel count=%d SR=%d", channelCount, sr);

    //---------------------------------
    // The data source, and audio source (a decoder) are ready to be used
    mDataSource = mBqSource;
    mAudioSource = source;
    mAudioSourceStarted = true;

    //-------------------------------------
    // signal successful completion of prepare
    mStateFlags |= kFlagPrepared;

    // skipping past AudioToCbRenderer and AudioSfDecoder
    GenericPlayer::onPrepare();

    SL_LOGD("AacBqToPcmCbRenderer::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}
//-----------------------------------------------------------------------------
/**
 * Event notification callback installed on the GenericPlayer (mAVPlayer) of a
 * CMediaPlayer (see android_Player_realize). Translates GenericPlayer::kEvent*
 * notifications into the corresponding OpenMAX AL interface callbacks.
 *
 * @param event one of the android::GenericPlayer::kEvent* codes (also a FourCC,
 *              logged character by character below)
 * @param data1 event-specific payload (e.g. prepare status, fill level, width)
 * @param data2 event-specific payload (e.g. video height)
 * @param user  the CMediaPlayer* registered at init time; ignored if NULL
 *
 * Locking protocol used throughout: take the object/interface lock, snapshot the
 * client callback pointer and context, update state, release the lock, and only
 * then invoke the client callback — never call out with a lock held.
 */
static void player_handleMediaPlayerEventNotifications(int event, int data1, int data2, void* user)
{
    // FIXME This code is derived from similar code in sfplayer_handlePrefetchEvent. The two
    // versions are quite similar, but still different enough that they need to be separate.
    // At some point they should be re-factored and merged if feasible.
    // As with other OpenMAX AL implementation code, this copy mostly uses SL_ symbols
    // rather than XA_ unless the difference is significant.

    if (NULL == user) {
        return;
    }

    CMediaPlayer* mp = (CMediaPlayer*) user;
    if (!android::CallbackProtector::enterCbIfOk(mp->mCallbackProtector)) {
        // it is not safe to enter the callback (the media player is about to go away)
        return;
    }

    // Reinterpret the event code as four characters for readable logging
    // (event codes are FourCC-style integers).
    union {
        char c[sizeof(int)];
        int i;
    } u;
    u.i = event;
    SL_LOGV("player_handleMediaPlayerEventNotifications(event='%c%c%c%c' (%d), data1=%d, data2=%d, "
            "user=%p) from AVPlayer", u.c[3], u.c[2], u.c[1], u.c[0], event, data1, data2, user);

    switch(event) {

      case android::GenericPlayer::kEventPrepared: {
        SL_LOGV("Received GenericPlayer::kEventPrepared for CMediaPlayer %p", mp);

        // assume no callback; callbackPContext and events are only read when
        // callback has been set non-NULL below, so they stay uninitialized here
        slPrefetchCallback callback = NULL;
        void* callbackPContext;
        XAuint32 events;

        object_lock_exclusive(&mp->mObject);

        // mark object as prepared; same state is used for successful or unsuccessful prepare
        assert(mp->mAndroidObjState == ANDROID_PREPARING);
        mp->mAndroidObjState = ANDROID_READY;

        if (PLAYER_SUCCESS == data1) {
            // Most of successful prepare completion for mp->mAVPlayer
            // is handled by GenericPlayer and its subclasses.
        } else {
            // AVPlayer prepare() failed prefetching, there is no event in XAPrefetchStatus to
            // indicate a prefetch error, so we signal it by sending simultaneously two events:
            // - SL_PREFETCHEVENT_FILLLEVELCHANGE with a level of 0
            // - SL_PREFETCHEVENT_STATUSCHANGE with a status of SL_PREFETCHSTATUS_UNDERFLOW
            SL_LOGE(ERROR_PLAYER_PREFETCH_d, data1);
            if (IsInterfaceInitialized(&mp->mObject, MPH_XAPREFETCHSTATUS)) {
                mp->mPrefetchStatus.mLevel = 0;
                mp->mPrefetchStatus.mStatus = SL_PREFETCHSTATUS_UNDERFLOW;
                // only notify if the client enabled BOTH events (mask test: no bit of the
                // pair may be missing from mCallbackEventsMask)
                if (!(~mp->mPrefetchStatus.mCallbackEventsMask &
                        (SL_PREFETCHEVENT_FILLLEVELCHANGE | SL_PREFETCHEVENT_STATUSCHANGE))) {
                    callback = mp->mPrefetchStatus.mCallback;
                    callbackPContext = mp->mPrefetchStatus.mContext;
                    events = SL_PREFETCHEVENT_FILLLEVELCHANGE | SL_PREFETCHEVENT_STATUSCHANGE;
                }
            }
        }

        object_unlock_exclusive(&mp->mObject);

        // callback with no lock held
        if (NULL != callback) {
            (*callback)(&mp->mPrefetchStatus.mItf, callbackPContext, events);
        }
        break;
      }

      case android::GenericPlayer::kEventHasVideoSize: {
        SL_LOGV("Received AVPlayer::kEventHasVideoSize (%d,%d) for CMediaPlayer %p",
                data1, data2, mp);

        object_lock_exclusive(&mp->mObject);

        // remove an existing video info entry (here we only have one video stream)
        for(size_t i=0 ; i < mp->mStreamInfo.mStreamInfoTable.size() ; i++) {
            if (XA_DOMAINTYPE_VIDEO == mp->mStreamInfo.mStreamInfoTable.itemAt(i).domain) {
                mp->mStreamInfo.mStreamInfoTable.removeAt(i);
                break;
            }
        }
        // update the stream information with a new video info entry
        StreamInfo streamInfo;
        streamInfo.domain = XA_DOMAINTYPE_VIDEO;
        streamInfo.videoInfo.codecId = 0;// unknown, we don't have that info FIXME
        streamInfo.videoInfo.width = (XAuint32)data1;
        streamInfo.videoInfo.height = (XAuint32)data2;
        streamInfo.videoInfo.bitRate = 0;// unknown, we don't have that info FIXME
        streamInfo.videoInfo.frameRate = 0;
        streamInfo.videoInfo.duration = XA_TIME_UNKNOWN;
        // entry 0 is the container info; it now advertises the single video stream
        StreamInfo &contInfo = mp->mStreamInfo.mStreamInfoTable.editItemAt(0);
        contInfo.containerInfo.numStreams = 1;
        ssize_t index = mp->mStreamInfo.mStreamInfoTable.add(streamInfo);

        // callback is unconditional; there is no bitmask of enabled events
        xaStreamEventChangeCallback callback = mp->mStreamInfo.mCallback;
        void* callbackPContext = mp->mStreamInfo.mContext;

        object_unlock_exclusive(&mp->mObject);

        // enqueue notification (outside of lock) that the stream information has been updated
        // (index < 0 means the table add failed, so there is nothing to report)
        if ((NULL != callback) && (index >= 0)) {
#ifndef USE_ASYNCHRONOUS_STREAMCBEVENT_PROPERTYCHANGE_CALLBACK
            (*callback)(&mp->mStreamInfo.mItf, XA_STREAMCBEVENT_PROPERTYCHANGE /*eventId*/,
                    1 /*streamIndex, only one stream supported here, 0 is reserved*/,
                    NULL /*pEventData, always NULL in OpenMAX AL 1.0.1*/,
                    callbackPContext /*pContext*/);
#else
            SLresult res = EnqueueAsyncCallback_piipp(mp, callback,
                    /*p1*/ &mp->mStreamInfo.mItf,
                    /*i1*/ XA_STREAMCBEVENT_PROPERTYCHANGE /*eventId*/,
                    /*i2*/ 1 /*streamIndex, only one stream supported here, 0 is reserved*/,
                    /*p2*/ NULL /*pEventData, always NULL in OpenMAX AL 1.0.1*/,
                    /*p3*/ callbackPContext /*pContext*/);
            ALOGW_IF(SL_RESULT_SUCCESS != res,
                    "Callback %p(%p, XA_STREAMCBEVENT_PROPERTYCHANGE, 1, NULL, %p) dropped",
                    callback, &mp->mStreamInfo.mItf, callbackPContext);
#endif
        }
        break;
      }

      case android::GenericPlayer::kEventEndOfStream: {
        SL_LOGV("Received AVPlayer::kEventEndOfStream for CMediaPlayer %p", mp);

        object_lock_exclusive(&mp->mObject);
        // should be xaPlayCallback but we're sharing the itf between SL and AL
        slPlayCallback playCallback = NULL;
        void * playContext = NULL;
        // XAPlayItf callback or no callback?
        if (mp->mPlay.mEventFlags & XA_PLAYEVENT_HEADATEND) {
            playCallback = mp->mPlay.mCallback;
            playContext = mp->mPlay.mContext;
        }
        // playback stops at end of stream; reflect that in the play state
        mp->mPlay.mState = XA_PLAYSTATE_PAUSED;
        object_unlock_exclusive(&mp->mObject);

        // enqueue callback with no lock held
        if (NULL != playCallback) {
#ifndef USE_ASYNCHRONOUS_PLAY_CALLBACK
            (*playCallback)(&mp->mPlay.mItf, playContext, XA_PLAYEVENT_HEADATEND);
#else
            SLresult res = EnqueueAsyncCallback_ppi(mp, playCallback, &mp->mPlay.mItf, playContext,
                    XA_PLAYEVENT_HEADATEND);
            ALOGW_IF(SL_RESULT_SUCCESS != res,
                    "Callback %p(%p, %p, SL_PLAYEVENT_HEADATEND) dropped", playCallback,
                    &mp->mPlay.mItf, playContext);
#endif
        }
        break;
      }

      case android::GenericPlayer::kEventChannelCount: {
        SL_LOGV("kEventChannelCount channels = %d", data1);
        object_lock_exclusive(&mp->mObject);
        // latch the channel count only on the first valid report, then refresh volume
        // (which can depend on the channel count)
        if (UNKNOWN_NUMCHANNELS == mp->mNumChannels && UNKNOWN_NUMCHANNELS != data1) {
            mp->mNumChannels = data1;
            android_Player_volumeUpdate(mp);
        }
        object_unlock_exclusive(&mp->mObject);
      }
      break;

      case android::GenericPlayer::kEventPrefetchFillLevelUpdate: {
        SL_LOGV("kEventPrefetchFillLevelUpdate");
        if (!IsInterfaceInitialized(&mp->mObject, MPH_XAPREFETCHSTATUS)) {
            break;
        }
        slPrefetchCallback callback = NULL;
        void* callbackPContext = NULL;

        // SLPrefetchStatusItf callback or no callback?
        interface_lock_exclusive(&mp->mPrefetchStatus);
        if (mp->mPrefetchStatus.mCallbackEventsMask & SL_PREFETCHEVENT_FILLLEVELCHANGE) {
            callback = mp->mPrefetchStatus.mCallback;
            callbackPContext = mp->mPrefetchStatus.mContext;
        }
        mp->mPrefetchStatus.mLevel = (SLpermille)data1;
        interface_unlock_exclusive(&mp->mPrefetchStatus);

        // callback with no lock held
        if (NULL != callback) {
            (*callback)(&mp->mPrefetchStatus.mItf, callbackPContext,
                    SL_PREFETCHEVENT_FILLLEVELCHANGE);
        }
      }
      break;

      case android::GenericPlayer::kEventPrefetchStatusChange: {
        SL_LOGV("kEventPrefetchStatusChange");
        if (!IsInterfaceInitialized(&mp->mObject, MPH_XAPREFETCHSTATUS)) {
            break;
        }
        slPrefetchCallback callback = NULL;
        void* callbackPContext = NULL;

        // SLPrefetchStatusItf callback or no callback?
        object_lock_exclusive(&mp->mObject);
        if (mp->mPrefetchStatus.mCallbackEventsMask & SL_PREFETCHEVENT_STATUSCHANGE) {
            callback = mp->mPrefetchStatus.mCallback;
            callbackPContext = mp->mPrefetchStatus.mContext;
        }
        // map the internal cache status (data1) onto the two OpenMAX AL prefetch states
        if (data1 >= android::kStatusIntermediate) {
            mp->mPrefetchStatus.mStatus = SL_PREFETCHSTATUS_SUFFICIENTDATA;
        } else if (data1 < android::kStatusIntermediate) {
            mp->mPrefetchStatus.mStatus = SL_PREFETCHSTATUS_UNDERFLOW;
        }
        object_unlock_exclusive(&mp->mObject);

        // callback with no lock held
        if (NULL != callback) {
            (*callback)(&mp->mPrefetchStatus.mItf, callbackPContext, SL_PREFETCHEVENT_STATUSCHANGE);
        }
      }
      break;

      case android::GenericPlayer::kEventPlay: {
        SL_LOGV("kEventPlay");

        interface_lock_shared(&mp->mPlay);
        slPlayCallback callback = mp->mPlay.mCallback;
        void* callbackPContext = mp->mPlay.mContext;
        interface_unlock_shared(&mp->mPlay);

        if (NULL != callback) {
            (*callback)(&mp->mPlay.mItf, callbackPContext, (SLuint32) data1); // SL_PLAYEVENT_HEAD*
        }
      }
      break;

      case android::GenericPlayer::kEventErrorAfterPrepare: {
        SL_LOGV("kEventErrorAfterPrepare");

        // assume no callback
        slPrefetchCallback callback = NULL;
        void* callbackPContext = NULL;

        object_lock_exclusive(&mp->mObject);
        if (IsInterfaceInitialized(&mp->mObject, MPH_XAPREFETCHSTATUS)) {
            // same two-event error signaling convention as in kEventPrepared above
            mp->mPrefetchStatus.mLevel = 0;
            mp->mPrefetchStatus.mStatus = SL_PREFETCHSTATUS_UNDERFLOW;
            if (!(~mp->mPrefetchStatus.mCallbackEventsMask &
                    (SL_PREFETCHEVENT_FILLLEVELCHANGE | SL_PREFETCHEVENT_STATUSCHANGE))) {
                callback = mp->mPrefetchStatus.mCallback;
                callbackPContext = mp->mPrefetchStatus.mContext;
            }
        }
        object_unlock_exclusive(&mp->mObject);

        // FIXME there's interesting information in data1, but no API to convey it to client
        SL_LOGE("Error after prepare: %d", data1);

        // callback with no lock held
        if (NULL != callback) {
            (*callback)(&mp->mPrefetchStatus.mItf, callbackPContext,
                    SL_PREFETCHEVENT_FILLLEVELCHANGE | SL_PREFETCHEVENT_STATUSCHANGE);
        }

      }
      break;

      default: {
        SL_LOGE("Received unknown event %d, data %d from AVPlayer", event, data1);
      }
    }

    // release the callback protector taken at entry
    mp->mCallbackProtector->exitCb();
}
//----------------------------------------------------------------------------- // FIXME abstract out the diff between CMediaPlayer and CAudioPlayer XAresult android_Player_realize(CMediaPlayer *mp, SLboolean async) { SL_LOGV("android_Player_realize_l(%p)", mp); XAresult result = XA_RESULT_SUCCESS; const SLDataSource *pDataSrc = &mp->mDataSource.u.mSource; const SLuint32 sourceLocator = *(SLuint32 *)pDataSrc->pLocator; AudioPlayback_Parameters ap_params; ap_params.sessionId = mp->mSessionId; ap_params.streamType = mp->mStreamType; switch(mp->mAndroidObjType) { case AUDIOVIDEOPLAYER_FROM_TS_ANDROIDBUFFERQUEUE: { mp->mAVPlayer = new android::StreamPlayer(&ap_params, true /*hasVideo*/, &mp->mAndroidBufferQueue, mp->mCallbackProtector); mp->mAVPlayer->init(player_handleMediaPlayerEventNotifications, (void*)mp); } break; case AUDIOVIDEOPLAYER_FROM_URIFD: { mp->mAVPlayer = new android::LocAVPlayer(&ap_params, true /*hasVideo*/); mp->mAVPlayer->init(player_handleMediaPlayerEventNotifications, (void*)mp); switch (mp->mDataSource.mLocator.mLocatorType) { case XA_DATALOCATOR_URI: ((android::LocAVPlayer*)mp->mAVPlayer.get())->setDataSource( (const char*)mp->mDataSource.mLocator.mURI.URI); break; case XA_DATALOCATOR_ANDROIDFD: { int64_t offset = (int64_t)mp->mDataSource.mLocator.mFD.offset; ((android::LocAVPlayer*)mp->mAVPlayer.get())->setDataSource( (int)mp->mDataSource.mLocator.mFD.fd, offset == SL_DATALOCATOR_ANDROIDFD_USE_FILE_SIZE ? 
(int64_t)PLAYER_FD_FIND_FILE_SIZE : offset, (int64_t)mp->mDataSource.mLocator.mFD.length); } break; default: SL_LOGE("Invalid or unsupported data locator type %u for data source", mp->mDataSource.mLocator.mLocatorType); result = XA_RESULT_PARAMETER_INVALID; } } break; case INVALID_TYPE: // intended fall-through default: SL_LOGE("Unable to realize MediaPlayer, invalid internal Android object type"); result = XA_RESULT_PARAMETER_INVALID; break; } if (XA_RESULT_SUCCESS == result) { // if there is a video sink if (XA_DATALOCATOR_NATIVEDISPLAY == mp->mImageVideoSink.mLocator.mLocatorType) { ANativeWindow *nativeWindow = (ANativeWindow *) mp->mImageVideoSink.mLocator.mNativeDisplay.hWindow; // we already verified earlier that hWindow is non-NULL assert(nativeWindow != NULL); result = android_Player_setNativeWindow(mp, nativeWindow); } } return result; }