/**
 * Start asynchronous preparation of the player (caller must hold mp's lock,
 * per the `_l` suffix convention).
 *
 * Rejects the call unless the player is in a state from which prepareAsync
 * is legal (effectively only INITIALIZED / STOPPED, since every other state
 * returns early via MPST_RET_IF_EQ).
 *
 * On success: state becomes ASYNC_PREPARING, the message queue is started,
 * a reference is taken on `mp` for the message-loop thread, the message-loop
 * thread is spawned, and ffp_prepare_async_l() is kicked off.
 *
 * @return 0 on success; a state-check result from MPST_RET_IF_EQ, or the
 *         negative error from ffp_prepare_async_l() (state set to ERROR).
 */
static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
{
    assert(mp);

    // State guard: each macro returns from this function if mp_state matches.
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);

    assert(mp->data_source);

    ijkmp_change_state_l(mp, MP_STATE_ASYNC_PREPARING);

    msg_queue_start(&mp->ffplayer->msg_queue);

    // released in msg_loop
    ijkmp_inc_ref(mp);
    mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, mp->msg_loop, mp, "ff_msg_loop");
    // TODO: 9 release weak_thiz if pthread_create() failed;
    // NOTE(review): if SDL_CreateThreadEx failed, the reference taken above is
    // never released by msg_loop — leak path flagged by the TODO; confirm
    // SDL_CreateThreadEx failure semantics before changing.

    int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);
    if (retval < 0) {
        // Preparation failed: enter ERROR state. The msg thread (if created)
        // keeps running and owns its reference.
        ijkmp_change_state_l(mp, MP_STATE_ERROR);
        return retval;
    }

    return 0;
}
/**
 * Open the Android AudioTrack-backed audio output (native side).
 *
 * Creates an AudioTrack from `desired`, allocates the transfer buffer sized
 * by getMinBufferSize(), optionally reports the device-side spec through
 * `obtained`, and spawns the audio output thread (created paused:
 * pause_on = 1, abort_request = 0).
 *
 * @param env      valid JNIEnv for the calling thread
 * @param aout     aout whose opaque will own atrack/buffer/audio_tid
 * @param desired  requested audio spec (must be non-NULL)
 * @param obtained if non-NULL, filled with the AudioTrack's actual spec
 * @return 0 on success, -1 on failure (all partially-acquired resources
 *         are released before returning).
 */
static int aout_open_audio_n(JNIEnv *env, SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    assert(desired);
    SDL_Aout_Opaque *opaque = aout->opaque;

    opaque->spec = *desired;

    opaque->atrack = SDL_Android_AudioTrack_new_from_sdl_spec(env, desired);
    if (!opaque->atrack) {
        // fixed typo: "AudioTrcak" -> "AudioTrack"
        ALOGE("aout_open_audio_n: failed to new AudioTrack()");
        return -1;
    }

    opaque->buffer_size = SDL_Android_AudioTrack_get_min_buffer_size(opaque->atrack);
    if (opaque->buffer_size <= 0) {
        ALOGE("aout_open_audio_n: failed to getMinBufferSize()");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }

    opaque->buffer = malloc(opaque->buffer_size);
    if (!opaque->buffer) {
        ALOGE("aout_open_audio_n: failed to allocate buffer");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }

    if (obtained) {
        SDL_Android_AudioTrack_get_target_spec(opaque->atrack, obtained);
        SDLTRACE("audio target format fmt:0x%x, channel:0x%x", (int)obtained->format, (int)obtained->channels);
    }

    opaque->audio_session_id = SDL_Android_AudioTrack_getAudioSessionId(env, opaque->atrack);
    ALOGI("audio_session_id = %d\n", opaque->audio_session_id);

    // Thread starts in the paused state; playback is resumed explicitly later.
    opaque->pause_on = 1;
    opaque->abort_request = 0;
    opaque->audio_tid = SDL_CreateThreadEx(&opaque->_audio_tid, aout_thread, aout, "ff_aout_android");
    if (!opaque->audio_tid) {
        ALOGE("aout_open_audio_n: failed to create audio thread");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        // fix: previously leaked the transfer buffer on this failure path
        free(opaque->buffer);
        opaque->buffer = NULL;
        return -1;
    }

    return 0;
}
/**
 * MediaCodec synchronous decode loop (output/drain side).
 *
 * Falls back to the software decoder (ffp_video_thread) when no AMediaCodec
 * is available. Otherwise attaches a JNIEnv to this thread, spawns the input
 * (enqueue) thread, and drains decoder output until the packet queue is
 * aborted or drain_output_buffer() reports an error.
 *
 * Teardown order is deliberate: abort the fake picture queue first (unblocks
 * consumers), release any still-held output buffers, stop the codec, then
 * join the enqueue thread.
 *
 * @return 0 on normal exit, -1 on setup or drain failure.
 */
static int func_run_sync(IJKFF_Pipenode *node)
{
    JNIEnv                *env     = NULL;
    IJKFF_Pipenode_Opaque *opaque  = node->opaque;
    FFPlayer              *ffp     = opaque->ffp;
    VideoState            *is      = ffp->is;
    Decoder               *d       = &is->viddec;
    PacketQueue           *q       = d->queue;
    int                    ret     = 0;
    int                    dequeue_count = 0;

    // No hardware codec: fall back to ffplay's software video thread.
    if (!opaque->acodec) {
        return ffp_video_thread(ffp);
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s: SetupThreadEnv failed\n", __func__);
        return -1;
    }

    opaque->frame_width  = opaque->avctx->width;
    opaque->frame_height = opaque->avctx->height;

    // Input side runs on its own thread; this thread only drains output.
    opaque->enqueue_thread = SDL_CreateThreadEx(&opaque->_enqueue_thread, enqueue_thread_func, node, "amediacodec_input_thread");
    if (!opaque->enqueue_thread) {
        ALOGE("%s: SDL_CreateThreadEx failed\n", __func__);
        ret = -1;
        goto fail;
    }

    while (!q->abort_request) {
        // First dequeue is non-blocking so the waiter below is signalled ASAP.
        int64_t timeUs = opaque->acodec_first_dequeue_output_request ? 0 : AMC_OUTPUT_TIMEOUT_US;
        ret = drain_output_buffer(env, node, timeUs, &dequeue_count);
        if (opaque->acodec_first_dequeue_output_request) {
            // Wake whoever is waiting for the first output-dequeue attempt.
            SDL_LockMutex(opaque->acodec_first_dequeue_output_mutex);
            opaque->acodec_first_dequeue_output_request = false;
            SDL_CondSignal(opaque->acodec_first_dequeue_output_cond);
            SDL_UnlockMutex(opaque->acodec_first_dequeue_output_mutex);
        }
        if (ret != 0) {
            ret = -1;
            goto fail;
        }
    }

fail:
    ffp_packet_queue_abort(&opaque->fake_pictq);
    if (opaque->n_buf_out) {
        int i;
        if (opaque->acodec) {
            // Return every buffered (not-yet-rendered) output buffer to the codec.
            for (i = 0; i < opaque->n_buf_out; i++) {
                if (opaque->amc_buf_out[i].pts != AV_NOPTS_VALUE)
                    SDL_AMediaCodec_releaseOutputBuffer(opaque->acodec, opaque->amc_buf_out[i].port, false);
            }
        }
        free(opaque->amc_buf_out);
        opaque->n_buf_out = 0;
        opaque->amc_buf_out = NULL;
        opaque->off_buf_out = 0;
        opaque->last_queued_pts = AV_NOPTS_VALUE;
    }
    if (opaque->acodec)
        SDL_AMediaCodec_stop(opaque->acodec);
    // SDL_WaitThread is reached with enqueue_thread == NULL if thread
    // creation failed — assumed to be a no-op in that case (TODO confirm).
    SDL_WaitThread(opaque->enqueue_thread, NULL);
    ALOGI("MediaCodec: %s: exit: %d", __func__, ret);
    return ret;
#if 0
fallback_to_ffplay:
    ALOGW("fallback to ffplay decoder\n");
    return ffp_video_thread(opaque->ffp);
#endif
}
/**
 * MediaCodec synchronous decode loop (output/drain side) — variant that
 * (re)configures the codec against the current surface before starting.
 *
 * Falls back to ffp_video_thread() when no AMediaCodec is available.
 * Otherwise: attaches a JNIEnv, forces a surface reconfigure, runs
 * reconfigure_codec_l(), spawns the input (enqueue) thread, and drains
 * decoder output until VideoState aborts or drain_output_buffer() fails.
 *
 * Teardown: abort the fake picture queue, stop the codec, join the enqueue
 * thread, then drop this function's codec reference.
 *
 * @return 0 on normal exit, -1 on setup or drain failure.
 */
static int func_run_sync(IJKFF_Pipenode *node)
{
    JNIEnv                *env     = NULL;
    IJKFF_Pipenode_Opaque *opaque  = node->opaque;
    FFPlayer              *ffp     = opaque->ffp;
    VideoState            *is      = ffp->is;
    int                    ret     = 0;
    int                    dequeue_count = 0;

    // No hardware codec: fall back to ffplay's software video thread.
    if (!opaque->acodec) {
        return ffp_video_thread(ffp);
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s: SetupThreadEnv failed\n", __func__);
        return -1;
    }

    opaque->frame_width  = opaque->avctx->width;
    opaque->frame_height = opaque->avctx->height;

    // Force a configure pass so the codec binds to the current surface.
    ffpipeline_set_surface_need_reconfigure(opaque->pipeline, true);
    ret = reconfigure_codec_l(env, node);
    if (ret != 0) {
        ALOGE("%s: reconfigure_codec failed\n", __func__);
        goto fail;
    }

    // Input side runs on its own thread; this thread only drains output.
    opaque->enqueue_thread = SDL_CreateThreadEx(&opaque->_enqueue_thread, enqueue_thread_func, node, "amediacodec_input_thread");
    if (!opaque->enqueue_thread) {
        ALOGE("%s: SDL_CreateThreadEx failed\n", __func__);
        ret = -1;
        goto fail;
    }

    while (!is->abort_request) {
        // First dequeue is non-blocking so the waiter below is signalled ASAP.
        int64_t timeUs = opaque->acodec_first_dequeue_output_request ? 0 : AMC_OUTPUT_TIMEOUT_US;
        ret = drain_output_buffer(env, node, timeUs, &dequeue_count);
        if (opaque->acodec_first_dequeue_output_request) {
            // Wake whoever is waiting for the first output-dequeue attempt.
            SDL_LockMutex(opaque->acodec_first_dequeue_output_mutex);
            opaque->acodec_first_dequeue_output_request = false;
            SDL_CondSignal(opaque->acodec_first_dequeue_output_cond);
            SDL_UnlockMutex(opaque->acodec_first_dequeue_output_mutex);
        }
        if (ret != 0) {
            ret = -1;
            goto fail;
        }
    }

fail:
    ffp_packet_queue_abort(&opaque->fake_pictq);
    if (opaque->acodec)
        SDL_AMediaCodec_stop(opaque->acodec);
    // Join the input thread before releasing our codec reference.
    SDL_WaitThread(opaque->enqueue_thread, NULL);
    SDL_AMediaCodec_decreaseReferenceP(&opaque->acodec);
    ALOGI("MediaCodec: %s: exit: %d", __func__, ret);
    return ret;
#if 0
fallback_to_ffplay:
    ALOGW("fallback to ffplay decoder\n");
    return ffp_video_thread(opaque->ffp);
#endif
}
/**
 * Open the OpenSL ES audio output.
 *
 * Validates the desired spec (S16, 1-2 channels, 8-48 kHz), builds the PCM
 * format, creates and realizes the SL audio player, registers the buffer
 * queue callback, pre-enqueues OPENSLES_BUFFERS of silence, and spawns the
 * audio output thread (created paused).
 *
 * Bug fix: the pre-Lollipop "don't exceed native sample rate" workaround
 * previously ran BEFORE format_pcm->samplesPerSec was assigned, so it logged
 * a stale value and its assignment was then overwritten by
 * `desired->freq * 1000` — the workaround never took effect. The format is
 * now fully configured first, then the workaround may raise samplesPerSec.
 *
 * @return buffer capacity in bytes on success, -1 on failure
 *         (aout_close_audio() is invoked on the failure path).
 */
static int aout_open_audio(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    SDLTRACE("%s\n", __func__);
    assert(desired);
    SDLTRACE("aout_open_audio()\n");
    SDL_Aout_Opaque  *opaque     = aout->opaque;
    SLEngineItf       slEngine   = opaque->slEngine;
    SLDataFormat_PCM *format_pcm = &opaque->format_pcm;
    int               ret        = 0;

    opaque->spec = *desired;

    // config audio src
    SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {
        SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
        OPENSLES_BUFFERS
    };

    int native_sample_rate = audiotrack_get_native_output_sample_rate(NULL);
    ALOGI("OpenSL-ES: native sample rate %d Hz\n", native_sample_rate);

    CHECK_COND_ERROR((desired->format == AUDIO_S16SYS), "%s: not AUDIO_S16SYS", __func__);
    CHECK_COND_ERROR((desired->channels == 2 || desired->channels == 1), "%s: not 1,2 channel", __func__);
    CHECK_COND_ERROR((desired->freq >= 8000 && desired->freq <= 48000), "%s: unsupport freq %d Hz", __func__, desired->freq);

    format_pcm->formatType       = SL_DATAFORMAT_PCM;
    format_pcm->numChannels      = desired->channels;
    format_pcm->samplesPerSec    = desired->freq * 1000; // milli Hz
    // format_pcm->numChannels   = 2;
    // format_pcm->samplesPerSec = SL_SAMPLINGRATE_44_1;
    format_pcm->bitsPerSample    = SL_PCMSAMPLEFORMAT_FIXED_16;
    format_pcm->containerSize    = SL_PCMSAMPLEFORMAT_FIXED_16;
    switch (desired->channels) {
    case 2:
        format_pcm->channelMask  = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
        break;
    case 1:
        format_pcm->channelMask  = SL_SPEAKER_FRONT_CENTER;
        break;
    default:
        ALOGE("%s, invalid channel %d", __func__, desired->channels);
        goto fail;
    }
    format_pcm->endianness       = SL_BYTEORDER_LITTLEENDIAN;

    if (SDL_Android_GetApiLevel() < IJK_API_21_LOLLIPOP &&
        native_sample_rate > 0 &&
        desired->freq < native_sample_rate) {
        // Don't try to play back a sample rate higher than the native one,
        // since OpenSL ES will try to use the fast path, which AudioFlinger
        // will reject (fast path can't do resampling), and will end up with
        // too small buffers for the resampling. See http://b.android.com/59453
        // for details. This bug is still present in 4.4. If it is fixed later
        // this workaround could be made conditional.
        //
        // by VLC/android_opensles.c
        ALOGW("OpenSL-ES: force resample %lu to native sample rate %d\n",
              (unsigned long) format_pcm->samplesPerSec / 1000,
              (int) native_sample_rate);
        format_pcm->samplesPerSec = native_sample_rate * 1000;
    }

    SLDataSource audio_source = {&loc_bufq, format_pcm};

    // config audio sink
    SLDataLocator_OutputMix loc_outmix = {
        SL_DATALOCATOR_OUTPUTMIX,
        opaque->slOutputMixObject
    };
    SLDataSink audio_sink = {&loc_outmix, NULL};

    SLObjectItf slPlayerObject = NULL;
    const SLInterfaceID ids2[] = { SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_VOLUME, SL_IID_PLAY };
    static const SLboolean req2[] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
    ret = (*slEngine)->CreateAudioPlayer(slEngine, &slPlayerObject, &audio_source,
                                         &audio_sink, sizeof(ids2) / sizeof(*ids2),
                                         ids2, req2);
    CHECK_OPENSL_ERROR(ret, "%s: slEngine->CreateAudioPlayer() failed", __func__);
    opaque->slPlayerObject = slPlayerObject;

    ret = (*slPlayerObject)->Realize(slPlayerObject, SL_BOOLEAN_FALSE);
    CHECK_OPENSL_ERROR(ret, "%s: slPlayerObject->Realize() failed", __func__);

    ret = (*slPlayerObject)->GetInterface(slPlayerObject, SL_IID_PLAY, &opaque->slPlayItf);
    CHECK_OPENSL_ERROR(ret, "%s: slPlayerObject->GetInterface(SL_IID_PLAY) failed", __func__);

    ret = (*slPlayerObject)->GetInterface(slPlayerObject, SL_IID_VOLUME, &opaque->slVolumeItf);
    CHECK_OPENSL_ERROR(ret, "%s: slPlayerObject->GetInterface(SL_IID_VOLUME) failed", __func__);

    ret = (*slPlayerObject)->GetInterface(slPlayerObject, SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &opaque->slBufferQueueItf);
    CHECK_OPENSL_ERROR(ret, "%s: slPlayerObject->GetInterface(SL_IID_ANDROIDSIMPLEBUFFERQUEUE) failed", __func__);

    ret = (*opaque->slBufferQueueItf)->RegisterCallback(opaque->slBufferQueueItf, aout_opensles_callback, (void*)aout);
    CHECK_OPENSL_ERROR(ret, "%s: slBufferQueueItf->RegisterCallback() failed", __func__);

    // set the player's state to playing
    // ret = (*opaque->slPlayItf)->SetPlayState(opaque->slPlayItf, SL_PLAYSTATE_PLAYING);
    // CHECK_OPENSL_ERROR(ret, "%s: slBufferQueueItf->slPlayItf() failed", __func__);

    opaque->bytes_per_frame   = format_pcm->numChannels * format_pcm->bitsPerSample / 8;
    opaque->milli_per_buffer  = OPENSLES_BUFLEN;
    opaque->frames_per_buffer = opaque->milli_per_buffer * format_pcm->samplesPerSec / 1000000; // samplesPerSec is in milli
    opaque->bytes_per_buffer  = opaque->bytes_per_frame * opaque->frames_per_buffer;
    opaque->buffer_capacity   = OPENSLES_BUFFERS * opaque->bytes_per_buffer;
    ALOGI("OpenSL-ES: bytes_per_frame  = %d bytes\n",  (int)opaque->bytes_per_frame);
    ALOGI("OpenSL-ES: milli_per_buffer = %d ms\n",     (int)opaque->milli_per_buffer);
    ALOGI("OpenSL-ES: frame_per_buffer = %d frames\n", (int)opaque->frames_per_buffer);
    ALOGI("OpenSL-ES: bytes_per_buffer = %d bytes\n",  (int)opaque->bytes_per_buffer);
    ALOGI("OpenSL-ES: buffer_capacity  = %d bytes\n",  (int)opaque->buffer_capacity);
    opaque->buffer = malloc(opaque->buffer_capacity);
    CHECK_COND_ERROR(opaque->buffer, "%s: failed to alloc buffer %d\n", __func__, (int)opaque->buffer_capacity);

    // (*opaque->slPlayItf)->SetPositionUpdatePeriod(opaque->slPlayItf, 1000);

    // enqueue empty buffer to start play
    memset(opaque->buffer, 0, opaque->buffer_capacity);
    for(int i = 0; i < OPENSLES_BUFFERS; ++i) {
        ret = (*opaque->slBufferQueueItf)->Enqueue(opaque->slBufferQueueItf, opaque->buffer + i * opaque->bytes_per_buffer, opaque->bytes_per_buffer);
        CHECK_OPENSL_ERROR(ret, "%s: slBufferQueueItf->Enqueue(000...) failed", __func__);
    }

    // Audio thread starts paused; playback is resumed explicitly later.
    opaque->pause_on = 1;
    opaque->abort_request = 0;
    opaque->audio_tid = SDL_CreateThreadEx(&opaque->_audio_tid, aout_thread, aout, "ff_aout_opensles");
    CHECK_COND_ERROR(opaque->audio_tid, "%s: failed to SDL_CreateThreadEx", __func__);

    if (obtained) {
        *obtained      = *desired;
        obtained->size = opaque->buffer_capacity;
        // Reflect the (possibly native-forced) rate actually configured.
        obtained->freq = format_pcm->samplesPerSec / 1000;
    }

    return opaque->buffer_capacity;
fail:
    aout_close_audio(aout);
    return -1;
}
/**
 * MediaCodec synchronous decode loop (output/drain side) — frame-queue
 * variant: drained frames are pushed into ffplay's picture queue with
 * pts/duration derived from the stream time base and guessed frame rate.
 *
 * Falls back to ffp_video_thread() when no AMediaCodec is available.
 * Honors rotation: for 90/270 degrees, width and height are swapped.
 *
 * Teardown: free the scratch frame, abort the fake codec (unblocks waiters),
 * free the output-buffer bookkeeping array, invalidate all vout buffer
 * proxies, stop the codec under acodec_mutex, join the enqueue thread, and
 * drop this function's codec reference.
 *
 * @return 0 on normal exit, -1 on drain/queue failure.
 */
static int func_run_sync(IJKFF_Pipenode *node)
{
    JNIEnv                 *env      = NULL;
    IJKFF_Pipenode_Opaque  *opaque   = node->opaque;
    FFPlayer               *ffp      = opaque->ffp;
    VideoState             *is       = ffp->is;
    Decoder                *d        = &is->viddec;
    PacketQueue            *q        = d->queue;
    int                     ret      = 0;
    int                     dequeue_count = 0;
    AVFrame                *frame    = NULL;
    int                     got_frame = 0;
    AVRational              tb         = is->video_st->time_base;
    AVRational              frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL);
    double                  duration;
    double                  pts;

    // No hardware codec: fall back to ffplay's software video thread.
    if (!opaque->acodec) {
        return ffp_video_thread(ffp);
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s: SetupThreadEnv failed\n", __func__);
        return -1;
    }

    frame = av_frame_alloc();
    if (!frame)
        goto fail;
        // NOTE(review): ret is still 0 here, so allocation failure returns
        // success — looks unintended; confirm before changing.

    // Rotated streams swap the reported frame dimensions.
    if (opaque->frame_rotate_degrees == 90 || opaque->frame_rotate_degrees == 270) {
        opaque->frame_width  = opaque->avctx->height;
        opaque->frame_height = opaque->avctx->width;
    } else {
        opaque->frame_width  = opaque->avctx->width;
        opaque->frame_height = opaque->avctx->height;
    }

    // Input side runs on its own thread; this thread only drains output.
    opaque->enqueue_thread = SDL_CreateThreadEx(&opaque->_enqueue_thread, enqueue_thread_func, node, "amediacodec_input_thread");
    if (!opaque->enqueue_thread) {
        ALOGE("%s: SDL_CreateThreadEx failed\n", __func__);
        ret = -1;
        goto fail;
    }

    while (!q->abort_request) {
        // First dequeue is non-blocking so the waiter below is signalled ASAP.
        int64_t timeUs = opaque->acodec_first_dequeue_output_request ? 0 : AMC_OUTPUT_TIMEOUT_US;
        got_frame = 0;
        ret = drain_output_buffer(env, node, timeUs, &dequeue_count, frame, &got_frame);
        if (opaque->acodec_first_dequeue_output_request) {
            // Wake whoever is waiting for the first output-dequeue attempt.
            SDL_LockMutex(opaque->acodec_first_dequeue_output_mutex);
            opaque->acodec_first_dequeue_output_request = false;
            SDL_CondSignal(opaque->acodec_first_dequeue_output_cond);
            SDL_UnlockMutex(opaque->acodec_first_dequeue_output_mutex);
        }
        if (ret != 0) {
            ret = -1;
            // Drain failed after producing a frame: return its buffer proxy
            // to the codec (render=false) so it is not leaked.
            if (got_frame && frame->opaque)
                SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
            goto fail;
        }
        if (got_frame) {
            // duration = 1/fps when the frame rate is known, else 0.
            duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
            pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
            ret = ffp_queue_picture(ffp, frame, pts, duration, av_frame_get_pkt_pos(frame), is->viddec.pkt_serial);
            if (ret) {
                // Picture queue rejected the frame: release its buffer proxy.
                if (frame->opaque)
                    SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
            }
            av_frame_unref(frame);
        }
    }

fail:
    av_frame_free(&frame);
    SDL_AMediaCodecFake_abort(opaque->acodec);
    if (opaque->n_buf_out) {
        free(opaque->amc_buf_out);
        opaque->n_buf_out       = 0;
        opaque->amc_buf_out     = NULL;
        opaque->off_buf_out     = 0;
        opaque->last_queued_pts = AV_NOPTS_VALUE;
    }
    if (opaque->acodec) {
        // Invalidate outstanding buffer proxies before stopping the codec;
        // stop is serialized against other codec users via acodec_mutex.
        SDL_VoutAndroid_invalidateAllBuffers(opaque->weak_vout);
        SDL_LockMutex(opaque->acodec_mutex);
        SDL_AMediaCodec_stop(opaque->acodec);
        SDL_UnlockMutex(opaque->acodec_mutex);
    }
    SDL_WaitThread(opaque->enqueue_thread, NULL);
    SDL_AMediaCodec_decreaseReferenceP(&opaque->acodec);
    ALOGI("MediaCodec: %s: exit: %d", __func__, ret);
    return ret;
#if 0
fallback_to_ffplay:
    ALOGW("fallback to ffplay decoder\n");
    return ffp_video_thread(opaque->ffp);
#endif
}