/**
 * Cache global JNI references for android.media.AudioTrack.
 *
 * Looks up the AudioTrack class, promotes it to a JNI global reference,
 * and caches every method ID used by the AudioTrack-backed audio output.
 * Each lookup is guarded by IJK_CHECK_RET, which returns -1 from this
 * function when the result is NULL, so a failure aborts initialization
 * at the first missing symbol.
 *
 * @param env  valid JNIEnv for the calling thread (e.g. from JNI_OnLoad).
 * @return 0 on success, -1 if any class/method lookup fails.
 */
int SDL_Android_AudioTrack_global_init(JNIEnv *env)
{
    jclass clazz;
    jint sdk_int = SDL_Android_GetApiLevel();

    clazz = (*env)->FindClass(env, "android/media/AudioTrack");
    IJK_CHECK_RET(clazz, -1, "missing AudioTrack");

    // FindClass returns LocalReference
    g_clazz.clazz = (*env)->NewGlobalRef(env, clazz);
    IJK_CHECK_RET(g_clazz.clazz, -1, "AudioTrack NewGlobalRef failed");
    (*env)->DeleteLocalRef(env, clazz);

    // AudioTrack(int streamType, int sampleRateInHz, int channelConfig,
    //            int audioFormat, int bufferSizeInBytes, int mode)
    g_clazz.constructor = (*env)->GetMethodID(env, g_clazz.clazz, "<init>", "(IIIIII)V");
    IJK_CHECK_RET(g_clazz.constructor, -1, "missing AudioTrack.<init>");

    // ---- static query helpers ----
    g_clazz.getMinBufferSize = (*env)->GetStaticMethodID(env, g_clazz.clazz, "getMinBufferSize", "(III)I");
    IJK_CHECK_RET(g_clazz.getMinBufferSize, -1, "missing AudioTrack.getMinBufferSize");

    g_clazz.getMaxVolume = (*env)->GetStaticMethodID(env, g_clazz.clazz, "getMaxVolume", "()F");
    IJK_CHECK_RET(g_clazz.getMaxVolume, -1, "missing AudioTrack.getMaxVolume");

    g_clazz.getMinVolume = (*env)->GetStaticMethodID(env, g_clazz.clazz, "getMinVolume", "()F");
    IJK_CHECK_RET(g_clazz.getMinVolume, -1, "missing AudioTrack.getMinVolume");

    g_clazz.getNativeOutputSampleRate = (*env)->GetStaticMethodID(env, g_clazz.clazz, "getNativeOutputSampleRate", "(I)I");
    IJK_CHECK_RET(g_clazz.getNativeOutputSampleRate, -1, "missing AudioTrack.getNativeOutputSampleRate");

    // ---- playback control ----
    g_clazz.play = (*env)->GetMethodID(env, g_clazz.clazz, "play", "()V");
    IJK_CHECK_RET(g_clazz.play, -1, "missing AudioTrack.play");

    g_clazz.pause = (*env)->GetMethodID(env, g_clazz.clazz, "pause", "()V");
    IJK_CHECK_RET(g_clazz.pause, -1, "missing AudioTrack.pause");

    g_clazz.flush = (*env)->GetMethodID(env, g_clazz.clazz, "flush", "()V");
    IJK_CHECK_RET(g_clazz.flush, -1, "missing AudioTrack.flush");

    g_clazz.stop = (*env)->GetMethodID(env, g_clazz.clazz, "stop", "()V");
    IJK_CHECK_RET(g_clazz.stop, -1, "missing AudioTrack.stop");

    g_clazz.release = (*env)->GetMethodID(env, g_clazz.clazz, "release", "()V");
    IJK_CHECK_RET(g_clazz.release, -1, "missing AudioTrack.release");

    // ---- data path / volume ----
    g_clazz.write_byte = (*env)->GetMethodID(env, g_clazz.clazz, "write", "([BII)I");
    IJK_CHECK_RET(g_clazz.write_byte, -1, "missing AudioTrack.write(byte[], ...)");

    g_clazz.setStereoVolume = (*env)->GetMethodID(env, g_clazz.clazz, "setStereoVolume", "(FF)I");
    IJK_CHECK_RET(g_clazz.setStereoVolume, -1, "missing AudioTrack.setStereoVolume");

    g_clazz.getAudioSessionId = (*env)->GetMethodID(env, g_clazz.clazz, "getAudioSessionId", "()I");
    IJK_CHECK_RET(g_clazz.getAudioSessionId, -1, "missing AudioTrack.getAudioSessionId");

    // float-PCM write overload exists only since API 21 (Lollipop)
    if (sdk_int >= IJK_API_21_LOLLIPOP) {
        g_clazz.write_float = (*env)->GetMethodID(env, g_clazz.clazz, "write", "([FIII)I");
        IJK_CHECK_RET(g_clazz.write_float, -1, "missing AudioTrack.write(float[], ...)");
    }

    SDLTRACE("android.media.AudioTrack class loaded");
    return 0;
}
/**
 * Create a MediaCodec-backed video decoder pipenode (H.264 only).
 *
 * Requires API 16+. Rejects H.264 profiles MediaCodec cannot decode
 * (Hi10P, 4:2:2, 4:4:4 families), sets up the decoder's locks and the
 * fake picture queue, builds the input AMediaFormat (converting
 * avcC extradata to Annex-B csd-0 unless AMC_USE_AVBITSTREAM_FILTER),
 * and configures the codec via reconfigure_codec_l().
 *
 * @return the new pipenode, or NULL on any failure (node is freed via
 *         ffpipenode_free_p, which runs func_destroy for partial cleanup).
 */
IJKFF_Pipenode *ffpipenode_create_video_decoder_from_android_mediacodec(FFPlayer *ffp, IJKFF_Pipeline *pipeline, SDL_Vout *vout)
{
    ALOGD("ffpipenode_create_video_decoder_from_android_mediacodec()\n");
    if (SDL_Android_GetApiLevel() < IJK_API_16_JELLY_BEAN)
        return NULL;

    if (!ffp || !ffp->is)
        return NULL;

    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return node;

    VideoState            *is     = ffp->is;
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    JNIEnv                *env    = NULL;
    int                    ret    = 0;

    node->func_destroy  = func_destroy;
    node->func_run_sync = func_run_sync;
    node->func_flush    = func_flush;
    opaque->pipeline    = pipeline;
    opaque->ffp         = ffp;
    opaque->decoder     = &is->viddec;
    opaque->weak_vout   = vout;

    opaque->avctx = opaque->decoder->avctx;
    switch (opaque->avctx->profile) {
        // High-bit-depth / 4:2:2 / 4:4:4 profiles are not supported by
        // Android MediaCodec hardware decoders; fall back to software.
        case FF_PROFILE_H264_HIGH_10:
        case FF_PROFILE_H264_HIGH_10_INTRA:
        case FF_PROFILE_H264_HIGH_422:
        case FF_PROFILE_H264_HIGH_422_INTRA:
        case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
        case FF_PROFILE_H264_HIGH_444_INTRA:
        case FF_PROFILE_H264_CAVLC_444:
            goto fail;
    }
    switch (opaque->avctx->codec_id) {
    case AV_CODEC_ID_H264:
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_AVC);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    default:
        ALOGE("%s:create: not H264\n", __func__);
        goto fail;
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        goto fail;
    }

    opaque->acodec_mutex                      = SDL_CreateMutex();
    opaque->acodec_cond                       = SDL_CreateCond();
    opaque->acodec_first_dequeue_output_mutex = SDL_CreateMutex();
    opaque->acodec_first_dequeue_output_cond  = SDL_CreateCond();
    ffp_packet_queue_init(&opaque->fake_pictq);
    ffp_packet_queue_start(&opaque->fake_pictq);

    // FIX: original tested acodec_cond twice and never tested acodec_mutex,
    // so a failed SDL_CreateMutex() slipped through unnoticed.
    if (!opaque->acodec_mutex || !opaque->acodec_cond ||
        !opaque->acodec_first_dequeue_output_mutex ||
        !opaque->acodec_first_dequeue_output_cond) {
        ALOGE("%s:open_video_decoder: SDL_CreateCond() failed\n", __func__);
        goto fail;
    }

    ALOGI("AMediaFormat: %s, %dx%d\n",
          opaque->mcc.mime_type, opaque->avctx->width, opaque->avctx->height);
    opaque->input_aformat = SDL_AMediaFormatJava_createVideoFormat(env, opaque->mcc.mime_type, opaque->avctx->width, opaque->avctx->height);
    if (opaque->avctx->extradata && opaque->avctx->extradata_size > 0) {
        // extradata[0] == 1 means avcC (MP4-style) extradata that must be
        // converted to Annex-B before MediaCodec can consume it.
        if (opaque->avctx->codec_id == AV_CODEC_ID_H264 && opaque->avctx->extradata[0] == 1) {
#if AMC_USE_AVBITSTREAM_FILTER
            opaque->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
            if (!opaque->bsfc) {
                ALOGE("Cannot open the h264_mp4toannexb BSF!\n");
                goto fail;
            }

            // keep the original extradata so it can be restored on destroy
            opaque->orig_extradata_size = opaque->avctx->extradata_size;
            opaque->orig_extradata = (uint8_t*) av_mallocz(opaque->avctx->extradata_size +
                                                           FF_INPUT_BUFFER_PADDING_SIZE);
            if (!opaque->orig_extradata) {
                goto fail;
            }
            memcpy(opaque->orig_extradata, opaque->avctx->extradata, opaque->avctx->extradata_size);
            for (int i = 0; i < opaque->avctx->extradata_size; i += 4) {
                ALOGE("csd-0[%d]: %02x%02x%02x%02x\n", opaque->avctx->extradata_size,
                      (int)opaque->avctx->extradata[i+0],
                      (int)opaque->avctx->extradata[i+1],
                      (int)opaque->avctx->extradata[i+2],
                      (int)opaque->avctx->extradata[i+3]);
            }
            SDL_AMediaFormat_setBuffer(opaque->input_aformat, "csd-0", opaque->avctx->extradata, opaque->avctx->extradata_size);
#else
            size_t   sps_pps_size   = 0;
            size_t   convert_size   = opaque->avctx->extradata_size + 20;
            uint8_t *convert_buffer = (uint8_t *)calloc(1, convert_size);
            if (!convert_buffer) {
                ALOGE("%s:sps_pps_buffer: alloc failed\n", __func__);
                goto fail;
            }
            if (0 != convert_sps_pps(opaque->avctx->extradata, opaque->avctx->extradata_size,
                                     convert_buffer, convert_size,
                                     &sps_pps_size, &opaque->nal_size)) {
                ALOGE("%s:convert_sps_pps: failed\n", __func__);
                free(convert_buffer);  // FIX: was leaked on this error path
                goto fail;
            }
            SDL_AMediaFormat_setBuffer(opaque->input_aformat, "csd-0", convert_buffer, sps_pps_size);
            for (int i = 0; i < sps_pps_size; i += 4) {
                ALOGE("csd-0[%d]: %02x%02x%02x%02x\n", (int)sps_pps_size,
                      (int)convert_buffer[i+0],
                      (int)convert_buffer[i+1],
                      (int)convert_buffer[i+2],
                      (int)convert_buffer[i+3]);
            }
            free(convert_buffer);
#endif
        } else {
            // Codec specific data
            // SDL_AMediaFormat_setBuffer(opaque->aformat, "csd-0", opaque->avctx->extradata, opaque->avctx->extradata_size);
            ALOGE("csd-0: naked\n");
        }
    } else {
        ALOGE("no buffer(%d)\n", opaque->avctx->extradata_size);
    }

    ret = reconfigure_codec_l(env, node);
    if (ret != 0)
        goto fail;

    ffp_set_video_codec_info(ffp, MEDIACODEC_MODULE_NAME, opaque->mcc.codec_name);

    opaque->off_buf_out = 0;
    if (opaque->n_buf_out) {
        int i;
        opaque->amc_buf_out = calloc(opaque->n_buf_out, sizeof(*opaque->amc_buf_out));
        assert(opaque->amc_buf_out != NULL);
        for (i = 0; i < opaque->n_buf_out; i++)
            opaque->amc_buf_out[i].pts = AV_NOPTS_VALUE;
    }

    return node;
fail:
    ffpipenode_free_p(&node);
    return NULL;
}
/**
 * Open the OpenSL ES audio output with the desired PCM spec.
 *
 * Validates the spec (S16, 1-2 channels, 8-48 kHz), fills opaque->format_pcm,
 * creates and realizes the SL audio player, registers the buffer-queue
 * callback, primes OPENSLES_BUFFERS of silence, and starts the audio thread
 * (created paused; caller unpauses).
 *
 * @param aout     audio output; opaque->slEngine/slOutputMixObject must be set.
 * @param desired  requested spec (must be non-NULL).
 * @param obtained optional out: actual spec (freq may differ, see below).
 * @return total buffer capacity in bytes on success, -1 on failure
 *         (aout_close_audio() is called to roll back partial setup).
 */
static int aout_open_audio(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    SDLTRACE("%s\n", __func__);
    assert(desired);
    SDLTRACE("aout_open_audio()\n");
    SDL_Aout_Opaque  *opaque     = aout->opaque;
    SLEngineItf       slEngine   = opaque->slEngine;
    SLDataFormat_PCM *format_pcm = &opaque->format_pcm;
    int               ret        = 0;

    opaque->spec = *desired;

    // config audio src
    SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {
        SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
        OPENSLES_BUFFERS
    };

    int native_sample_rate = audiotrack_get_native_output_sample_rate(NULL);
    ALOGI("OpenSL-ES: native sample rate %d Hz\n", native_sample_rate);

    CHECK_COND_ERROR((desired->format == AUDIO_S16SYS), "%s: not AUDIO_S16SYS", __func__);
    CHECK_COND_ERROR((desired->channels == 2 || desired->channels == 1), "%s: not 1,2 channel", __func__);
    CHECK_COND_ERROR((desired->freq >= 8000 && desired->freq <= 48000), "%s: unsupport freq %d Hz", __func__, desired->freq);

    format_pcm->formatType    = SL_DATAFORMAT_PCM;
    format_pcm->numChannels   = desired->channels;
    format_pcm->samplesPerSec = desired->freq * 1000; // milli Hz

    // FIX: this workaround previously ran BEFORE samplesPerSec was assigned
    // and its result was then overwritten by the assignment above, making it
    // dead code (and the ALOGW read an uninitialized/stale samplesPerSec).
    // It must run after the assignment so the override actually sticks.
    if (SDL_Android_GetApiLevel() < IJK_API_21_LOLLIPOP &&
        native_sample_rate > 0 &&
        desired->freq < native_sample_rate) {
        // Don't try to play back a sample rate higher than the native one,
        // since OpenSL ES will try to use the fast path, which AudioFlinger
        // will reject (fast path can't do resampling), and will end up with
        // too small buffers for the resampling. See http://b.android.com/59453
        // for details. This bug is still present in 4.4. If it is fixed later
        // this workaround could be made conditional.
        //
        // by VLC/android_opensles.c
        ALOGW("OpenSL-ES: force resample %lu to native sample rate %d\n",
              (unsigned long) format_pcm->samplesPerSec / 1000,
              (int) native_sample_rate);
        format_pcm->samplesPerSec = native_sample_rate * 1000;
    }

    // format_pcm->numChannels   = 2;
    // format_pcm->samplesPerSec = SL_SAMPLINGRATE_44_1;
    format_pcm->bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
    format_pcm->containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
    switch (desired->channels) {
    case 2:
        format_pcm->channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
        break;
    case 1:
        format_pcm->channelMask = SL_SPEAKER_FRONT_CENTER;
        break;
    default:
        ALOGE("%s, invalid channel %d", __func__, desired->channels);
        goto fail;
    }
    format_pcm->endianness = SL_BYTEORDER_LITTLEENDIAN;

    SLDataSource audio_source = {&loc_bufq, format_pcm};

    // config audio sink
    SLDataLocator_OutputMix loc_outmix = {
        SL_DATALOCATOR_OUTPUTMIX,
        opaque->slOutputMixObject
    };
    SLDataSink audio_sink = {&loc_outmix, NULL};

    SLObjectItf slPlayerObject = NULL;
    const SLInterfaceID ids2[] = { SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_VOLUME, SL_IID_PLAY };
    static const SLboolean req2[] = { SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
    ret = (*slEngine)->CreateAudioPlayer(slEngine, &slPlayerObject, &audio_source,
                                         &audio_sink, sizeof(ids2) / sizeof(*ids2),
                                         ids2, req2);
    CHECK_OPENSL_ERROR(ret, "%s: slEngine->CreateAudioPlayer() failed", __func__);
    opaque->slPlayerObject = slPlayerObject;

    ret = (*slPlayerObject)->Realize(slPlayerObject, SL_BOOLEAN_FALSE);
    CHECK_OPENSL_ERROR(ret, "%s: slPlayerObject->Realize() failed", __func__);

    ret = (*slPlayerObject)->GetInterface(slPlayerObject, SL_IID_PLAY, &opaque->slPlayItf);
    CHECK_OPENSL_ERROR(ret, "%s: slPlayerObject->GetInterface(SL_IID_PLAY) failed", __func__);

    ret = (*slPlayerObject)->GetInterface(slPlayerObject, SL_IID_VOLUME, &opaque->slVolumeItf);
    CHECK_OPENSL_ERROR(ret, "%s: slPlayerObject->GetInterface(SL_IID_VOLUME) failed", __func__);

    ret = (*slPlayerObject)->GetInterface(slPlayerObject, SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &opaque->slBufferQueueItf);
    CHECK_OPENSL_ERROR(ret, "%s: slPlayerObject->GetInterface(SL_IID_ANDROIDSIMPLEBUFFERQUEUE) failed", __func__);

    ret = (*opaque->slBufferQueueItf)->RegisterCallback(opaque->slBufferQueueItf, aout_opensles_callback, (void*)aout);
    CHECK_OPENSL_ERROR(ret, "%s: slBufferQueueItf->RegisterCallback() failed", __func__);

    // set the player's state to playing
    // ret = (*opaque->slPlayItf)->SetPlayState(opaque->slPlayItf, SL_PLAYSTATE_PLAYING);
    // CHECK_OPENSL_ERROR(ret, "%s: slBufferQueueItf->slPlayItf() failed", __func__);

    opaque->bytes_per_frame   = format_pcm->numChannels * format_pcm->bitsPerSample / 8;
    opaque->milli_per_buffer  = OPENSLES_BUFLEN;
    opaque->frames_per_buffer = opaque->milli_per_buffer * format_pcm->samplesPerSec / 1000000; // samplesPerSec is in milli
    opaque->bytes_per_buffer  = opaque->bytes_per_frame * opaque->frames_per_buffer;
    opaque->buffer_capacity   = OPENSLES_BUFFERS * opaque->bytes_per_buffer;
    ALOGI("OpenSL-ES: bytes_per_frame  = %d bytes\n",  (int)opaque->bytes_per_frame);
    ALOGI("OpenSL-ES: milli_per_buffer = %d ms\n",     (int)opaque->milli_per_buffer);
    ALOGI("OpenSL-ES: frame_per_buffer = %d frames\n", (int)opaque->frames_per_buffer);
    ALOGI("OpenSL-ES: bytes_per_buffer = %d bytes\n",  (int)opaque->bytes_per_buffer);
    ALOGI("OpenSL-ES: buffer_capacity  = %d bytes\n",  (int)opaque->buffer_capacity);
    opaque->buffer = malloc(opaque->buffer_capacity);
    CHECK_COND_ERROR(opaque->buffer, "%s: failed to alloc buffer %d\n", __func__, (int)opaque->buffer_capacity);

    // (*opaque->slPlayItf)->SetPositionUpdatePeriod(opaque->slPlayItf, 1000);

    // enqueue empty buffer to start play
    memset(opaque->buffer, 0, opaque->buffer_capacity);
    for (int i = 0; i < OPENSLES_BUFFERS; ++i) {
        ret = (*opaque->slBufferQueueItf)->Enqueue(opaque->slBufferQueueItf,
                                                   opaque->buffer + i * opaque->bytes_per_buffer,
                                                   opaque->bytes_per_buffer);
        CHECK_OPENSL_ERROR(ret, "%s: slBufferQueueItf->Enqueue(000...) failed", __func__);
    }

    opaque->pause_on      = 1;
    opaque->abort_request = 0;
    opaque->audio_tid = SDL_CreateThreadEx(&opaque->_audio_tid, aout_thread, aout, "ff_aout_opensles");
    CHECK_COND_ERROR(opaque->audio_tid, "%s: failed to SDL_CreateThreadEx", __func__);

    if (obtained) {
        *obtained = *desired;
        obtained->size = opaque->buffer_capacity;
        // may differ from desired->freq on pre-Lollipop forced resample
        obtained->freq = format_pcm->samplesPerSec / 1000;
    }

    return opaque->buffer_capacity;
fail:
    aout_close_audio(aout);
    return -1;
}
/**
 * Create a MediaCodec-backed video decoder pipenode (H.264 / HEVC / MPEG-2).
 *
 * Requires API 16+. Honors the per-codec enable flags
 * (mediacodec_avc/hevc/mpeg2 or mediacodec_all_videos) and rejects H.264
 * profiles MediaCodec cannot decode. Builds the input AMediaFormat
 * (converting avcC/hvcC extradata to Annex-B csd-0 unless
 * AMC_USE_AVBITSTREAM_FILTER), applies decoder-side rotation on API 21+,
 * selects a concrete codec via the pipeline, and configures it against the
 * pipeline's surface.
 *
 * @return the new pipenode, or NULL on any failure (node is freed via
 *         ffpipenode_free_p, which runs func_destroy for partial cleanup).
 */
IJKFF_Pipenode *ffpipenode_create_video_decoder_from_android_mediacodec(FFPlayer *ffp, IJKFF_Pipeline *pipeline, SDL_Vout *vout)
{
    ALOGD("ffpipenode_create_video_decoder_from_android_mediacodec()\n");
    if (SDL_Android_GetApiLevel() < IJK_API_16_JELLY_BEAN)
        return NULL;

    if (!ffp || !ffp->is)
        return NULL;

    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return node;

    VideoState            *is             = ffp->is;
    IJKFF_Pipenode_Opaque *opaque         = node->opaque;
    JNIEnv                *env            = NULL;
    int                    ret            = 0;
    int                    rotate_degrees = 0;
    jobject                jsurface       = NULL;

    node->func_destroy  = func_destroy;
    node->func_run_sync = func_run_sync;
    node->func_flush    = func_flush;
    opaque->pipeline    = pipeline;
    opaque->ffp         = ffp;
    opaque->decoder     = &is->viddec;
    opaque->weak_vout   = vout;

    opaque->avctx = opaque->decoder->avctx;
    switch (opaque->avctx->codec_id) {
    case AV_CODEC_ID_H264:
        if (!ffp->mediacodec_avc && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec: AVC/H264 is disabled. codec_id:%d \n", __func__, opaque->avctx->codec_id);
            goto fail;
        }
        switch (opaque->avctx->profile) {
        case FF_PROFILE_H264_BASELINE:
            ALOGI("%s: MediaCodec: H264_BASELINE: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_CONSTRAINED_BASELINE:
            ALOGI("%s: MediaCodec: H264_CONSTRAINED_BASELINE: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_MAIN:
            ALOGI("%s: MediaCodec: H264_MAIN: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_EXTENDED:
            ALOGI("%s: MediaCodec: H264_EXTENDED: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_HIGH:
            ALOGI("%s: MediaCodec: H264_HIGH: enabled\n", __func__);
            break;
        // High-bit-depth / 4:2:2 / 4:4:4 profiles: unsupported by MediaCodec.
        case FF_PROFILE_H264_HIGH_10:
            ALOGW("%s: MediaCodec: H264_HIGH_10: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_10_INTRA:
            ALOGW("%s: MediaCodec: H264_HIGH_10_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_422:
            // FIX: log said "H264_HIGH_10_422" (copy-paste typo)
            ALOGW("%s: MediaCodec: H264_HIGH_422: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_422_INTRA:
            // FIX: log said "H264_HIGH_10_INTRA" (copy-paste typo)
            ALOGW("%s: MediaCodec: H264_HIGH_422_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444:
            // FIX: log said "H264_HIGH_10_444" (copy-paste typo)
            ALOGW("%s: MediaCodec: H264_HIGH_444: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
            ALOGW("%s: MediaCodec: H264_HIGH_444_PREDICTIVE: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444_INTRA:
            ALOGW("%s: MediaCodec: H264_HIGH_444_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_CAVLC_444:
            ALOGW("%s: MediaCodec: H264_CAVLC_444: disabled\n", __func__);
            goto fail;
        default:
            ALOGW("%s: MediaCodec: (%d) unknown profile: disabled\n", __func__, opaque->avctx->profile);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_AVC);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    case AV_CODEC_ID_HEVC:
        if (!ffp->mediacodec_hevc && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec/HEVC is disabled. codec_id:%d \n", __func__, opaque->avctx->codec_id);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_HEVC);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    case AV_CODEC_ID_MPEG2VIDEO:
        if (!ffp->mediacodec_mpeg2 && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec/MPEG2VIDEO is disabled. codec_id:%d \n", __func__, opaque->avctx->codec_id);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_MPEG2VIDEO);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    default:
        ALOGE("%s:create: not H264 or H265/HEVC, codec_id:%d \n", __func__, opaque->avctx->codec_id);
        goto fail;
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        goto fail;
    }

    opaque->acodec_mutex                      = SDL_CreateMutex();
    opaque->acodec_cond                       = SDL_CreateCond();
    opaque->acodec_first_dequeue_output_mutex = SDL_CreateMutex();
    opaque->acodec_first_dequeue_output_cond  = SDL_CreateCond();
    opaque->any_input_mutex                   = SDL_CreateMutex();
    opaque->any_input_cond                    = SDL_CreateCond();

    // FIX: original tested acodec_cond twice and never tested acodec_mutex,
    // any_input_mutex or any_input_cond, so allocation failures slipped through.
    if (!opaque->acodec_mutex || !opaque->acodec_cond ||
        !opaque->acodec_first_dequeue_output_mutex ||
        !opaque->acodec_first_dequeue_output_cond ||
        !opaque->any_input_mutex || !opaque->any_input_cond) {
        ALOGE("%s:open_video_decoder: SDL_CreateCond() failed\n", __func__);
        goto fail;
    }

    ALOGI("AMediaFormat: %s, %dx%d\n", opaque->mcc.mime_type, opaque->avctx->width, opaque->avctx->height);
    opaque->input_aformat = SDL_AMediaFormatJava_createVideoFormat(env, opaque->mcc.mime_type, opaque->avctx->width, opaque->avctx->height);
    if (opaque->avctx->extradata && opaque->avctx->extradata_size > 0) {
        // extradata[0] == 1 means avcC/hvcC (MP4-style) extradata that must
        // be converted to Annex-B before MediaCodec can consume it.
        if ((opaque->avctx->codec_id == AV_CODEC_ID_H264 ||
             opaque->avctx->codec_id == AV_CODEC_ID_HEVC) &&
            opaque->avctx->extradata[0] == 1) {
#if AMC_USE_AVBITSTREAM_FILTER
            if (opaque->avctx->codec_id == AV_CODEC_ID_H264) {
                opaque->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
                if (!opaque->bsfc) {
                    ALOGE("Cannot open the h264_mp4toannexb BSF!\n");
                    goto fail;
                }
            } else {
                opaque->bsfc = av_bitstream_filter_init("hevc_mp4toannexb");
                if (!opaque->bsfc) {
                    ALOGE("Cannot open the hevc_mp4toannexb BSF!\n");
                    goto fail;
                }
            }

            // keep the original extradata so it can be restored on destroy
            opaque->orig_extradata_size = opaque->avctx->extradata_size;
            opaque->orig_extradata = (uint8_t*) av_mallocz(opaque->avctx->extradata_size +
                                                           FF_INPUT_BUFFER_PADDING_SIZE);
            if (!opaque->orig_extradata) {
                goto fail;
            }
            memcpy(opaque->orig_extradata, opaque->avctx->extradata, opaque->avctx->extradata_size);
            for (int i = 0; i < opaque->avctx->extradata_size; i += 4) {
                ALOGE("csd-0[%d]: %02x%02x%02x%02x\n", opaque->avctx->extradata_size,
                      (int)opaque->avctx->extradata[i+0],
                      (int)opaque->avctx->extradata[i+1],
                      (int)opaque->avctx->extradata[i+2],
                      (int)opaque->avctx->extradata[i+3]);
            }
            SDL_AMediaFormat_setBuffer(opaque->input_aformat, "csd-0", opaque->avctx->extradata, opaque->avctx->extradata_size);
#else
            size_t   sps_pps_size   = 0;
            size_t   convert_size   = opaque->avctx->extradata_size + 20;
            uint8_t *convert_buffer = (uint8_t *)calloc(1, convert_size);
            if (!convert_buffer) {
                ALOGE("%s:sps_pps_buffer: alloc failed\n", __func__);
                goto fail;
            }
            if (opaque->avctx->codec_id == AV_CODEC_ID_H264) {
                if (0 != convert_sps_pps(opaque->avctx->extradata, opaque->avctx->extradata_size,
                                         convert_buffer, convert_size,
                                         &sps_pps_size, &opaque->nal_size)) {
                    ALOGE("%s:convert_sps_pps: failed\n", __func__);
                    free(convert_buffer);  // FIX: was leaked on this error path
                    goto fail;
                }
            } else {
                if (0 != convert_hevc_nal_units(opaque->avctx->extradata, opaque->avctx->extradata_size,
                                                convert_buffer, convert_size,
                                                &sps_pps_size, &opaque->nal_size)) {
                    ALOGE("%s:convert_hevc_nal_units: failed\n", __func__);
                    free(convert_buffer);  // FIX: was leaked on this error path
                    goto fail;
                }
            }
            SDL_AMediaFormat_setBuffer(opaque->input_aformat, "csd-0", convert_buffer, sps_pps_size);
            for (int i = 0; i < sps_pps_size; i += 4) {
                ALOGE("csd-0[%d]: %02x%02x%02x%02x\n", (int)sps_pps_size,
                      (int)convert_buffer[i+0],
                      (int)convert_buffer[i+1],
                      (int)convert_buffer[i+2],
                      (int)convert_buffer[i+3]);
            }
            free(convert_buffer);
#endif
        } else {
            // Codec specific data
            // SDL_AMediaFormat_setBuffer(opaque->aformat, "csd-0", opaque->avctx->extradata, opaque->avctx->extradata_size);
            ALOGE("csd-0: naked\n");
        }
    } else {
        ALOGE("no buffer(%d)\n", opaque->avctx->extradata_size);
    }

    rotate_degrees = ffp_get_video_rotate_degrees(ffp);
    if (ffp->mediacodec_auto_rotate &&
        rotate_degrees != 0 &&
        SDL_Android_GetApiLevel() >= IJK_API_21_LOLLIPOP) {
        // decoder-side rotation: frames come out upright, notify 0
        ALOGI("amc: rotate in decoder: %d\n", rotate_degrees);
        opaque->frame_rotate_degrees = rotate_degrees;
        SDL_AMediaFormat_setInt32(opaque->input_aformat, "rotation-degrees", rotate_degrees);
        ffp_notify_msg2(ffp, FFP_MSG_VIDEO_ROTATION_CHANGED, 0);
    } else {
        ALOGI("amc: rotate notify: %d\n", rotate_degrees);
        ffp_notify_msg2(ffp, FFP_MSG_VIDEO_ROTATION_CHANGED, rotate_degrees);
    }

    if (!ffpipeline_select_mediacodec_l(pipeline, &opaque->mcc) || !opaque->mcc.codec_name[0]) {
        ALOGE("amc: no suitable codec\n");
        goto fail;
    }

    jsurface = ffpipeline_get_surface_as_global_ref(env, pipeline);
    ret = reconfigure_codec_l(env, node, jsurface);
    J4A_DeleteGlobalRef__p(env, &jsurface);
    if (ret != 0)
        goto fail;

    ffp_set_video_codec_info(ffp, MEDIACODEC_MODULE_NAME, opaque->mcc.codec_name);

    opaque->off_buf_out = 0;
    if (opaque->n_buf_out) {
        int i;
        opaque->amc_buf_out = calloc(opaque->n_buf_out, sizeof(*opaque->amc_buf_out));
        assert(opaque->amc_buf_out != NULL);
        for (i = 0; i < opaque->n_buf_out; i++)
            opaque->amc_buf_out[i].pts = AV_NOPTS_VALUE;
    }

    SDL_SpeedSamplerReset(&opaque->sampler);
    ffp->stat.vdec_type = FFP_PROPV_DECODER_MEDIACODEC;
    return node;
fail:
    ffpipenode_free_p(&node);
    return NULL;
}
/**
 * Cache JNI references for android.media.MediaCodec and MediaCodec$BufferInfo.
 *
 * No-op (returns 0) below API 16, where MediaCodec does not exist.
 * Each IJK_FIND_JAVA_* macro resolves a class/method/field and (per the
 * macros' convention elsewhere in this project) aborts the function on a
 * failed lookup, so the first missing symbol stops initialization.
 *
 * @param env  valid JNIEnv for the calling thread.
 * @return 0 on success (or when MediaCodec is unavailable).
 */
int SDL_AMediaCodecJava__loadClass(JNIEnv *env)
{
    jint sdk_int = SDL_Android_GetApiLevel();
    ALOGI("MediaCodec: API-%d\n", sdk_int);
    if (sdk_int < IJK_API_16_JELLY_BEAN) {
        return 0;
    }

    //--------------------
    IJK_FIND_JAVA_CLASS(env, g_clazz.clazz, "android/media/MediaCodec");

    // static factories
    IJK_FIND_JAVA_STATIC_METHOD(env, g_clazz.jmid_createByCodecName, g_clazz.clazz,
        "createByCodecName", "(Ljava/lang/String;)Landroid/media/MediaCodec;");
    IJK_FIND_JAVA_STATIC_METHOD(env, g_clazz.jmid_createDecoderByType, g_clazz.clazz,
        "createDecoderByType", "(Ljava/lang/String;)Landroid/media/MediaCodec;");

    // codec lifecycle and buffer I/O
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_configure, g_clazz.clazz,
        "configure", "(Landroid/media/MediaFormat;Landroid/view/Surface;Landroid/media/MediaCrypto;I)V");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_dequeueInputBuffer, g_clazz.clazz,
        "dequeueInputBuffer", "(J)I");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_dequeueOutputBuffer, g_clazz.clazz,
        "dequeueOutputBuffer", "(Landroid/media/MediaCodec$BufferInfo;J)I");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_flush, g_clazz.clazz,
        "flush", "()V");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_getInputBuffers, g_clazz.clazz,
        "getInputBuffers", "()[Ljava/nio/ByteBuffer;");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_getOutputBuffers, g_clazz.clazz,
        "getOutputBuffers", "()[Ljava/nio/ByteBuffer;");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_getOutputFormat, g_clazz.clazz,
        "getOutputFormat", "()Landroid/media/MediaFormat;");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_queueInputBuffer, g_clazz.clazz,
        "queueInputBuffer", "(IIIJI)V");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_release, g_clazz.clazz,
        "release", "()V");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_releaseOutputBuffer, g_clazz.clazz,
        "releaseOutputBuffer", "(IZ)V");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_start, g_clazz.clazz,
        "start", "()V");
    IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_stop, g_clazz.clazz,
        "stop", "()V");

    /*-
    if (sdk_int >= IJK_API_18_JELLY_BEAN_MR2) {
        IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_getCodecInfo, g_clazz.clazz,
            "getCodecInfo", "(I)Landroid/media/MediaCodecInfo;");
        IJK_FIND_JAVA_METHOD(env, g_clazz.jmid_getName, g_clazz.clazz,
            "getName", "()Ljava/lang/String;");
    }
    */

    //--------------------
    IJK_FIND_JAVA_CLASS(env, g_clazz_BufferInfo.clazz, "android/media/MediaCodec$BufferInfo");

    IJK_FIND_JAVA_METHOD(env, g_clazz_BufferInfo.jmid__ctor, g_clazz_BufferInfo.clazz,
        "<init>", "()V");

    // BufferInfo fields read back after dequeueOutputBuffer
    IJK_FIND_JAVA_FIELD(env, g_clazz_BufferInfo.jfid_flags, g_clazz_BufferInfo.clazz,
        "flags", "I");
    IJK_FIND_JAVA_FIELD(env, g_clazz_BufferInfo.jfid_offset, g_clazz_BufferInfo.clazz,
        "offset", "I");
    IJK_FIND_JAVA_FIELD(env, g_clazz_BufferInfo.jfid_presentationTimeUs, g_clazz_BufferInfo.clazz,
        "presentationTimeUs", "J");
    IJK_FIND_JAVA_FIELD(env, g_clazz_BufferInfo.jfid_size, g_clazz_BufferInfo.clazz,
        "size", "I");
    SDLTRACE("android.media.MediaCodec$BufferInfo class loaded");

    SDLTRACE("android.media.MediaCodec class loaded");
    return 0;
}
/**
 * Create a native wrapper around a java android.media.AudioTrack.
 *
 * Validates the channel config (mono/stereo) and sample format (PCM 8/16-bit,
 * or PCM float on API 21+), clamps out-of-range sample rates to the device's
 * native output rate, queries the minimum buffer size, constructs the Java
 * AudioTrack, and initializes the volume to 1.0 clamped into
 * [getMinVolume(), getMaxVolume()].
 *
 * @param env   valid JNIEnv for the calling thread.
 * @param spec  requested track spec (must be non-NULL); copied into the track.
 * @return heap-allocated track (caller frees via the matching destroy path),
 *         or NULL on invalid spec / allocation failure / JNI exception.
 */
SDL_Android_AudioTrack *SDL_Android_AudioTrack_new_from_spec(JNIEnv *env, SDL_Android_AudioTrack_Spec *spec)
{
    assert(spec);

    jint sdk_int = SDL_Android_GetApiLevel();
    switch (spec->channel_config) {
    case CHANNEL_OUT_MONO:
        ALOGI("SDL_Android_AudioTrack: %s", "CHANNEL_OUT_MONO");
        break;
    case CHANNEL_OUT_STEREO:
        ALOGI("SDL_Android_AudioTrack: %s", "CHANNEL_OUT_STEREO");
        break;
    default:
        ALOGE("SDL_Android_AudioTrack_new_from_spec: invalid channel %d", spec->channel_config);
        return NULL;
    }

    switch (spec->audio_format) {
    case ENCODING_PCM_16BIT:
        ALOGI("SDL_Android_AudioTrack: %s", "ENCODING_PCM_16BIT");
        break;
    case ENCODING_PCM_8BIT:
        ALOGI("SDL_Android_AudioTrack: %s", "ENCODING_PCM_8BIT");
        break;
    case ENCODING_PCM_FLOAT:
        ALOGI("SDL_Android_AudioTrack: %s", "ENCODING_PCM_FLOAT");
        if (sdk_int < IJK_API_21_LOLLIPOP) {
            ALOGI("SDL_Android_AudioTrack: %s need API 21 or above", "ENCODING_PCM_FLOAT");
            return NULL;
        }
        break;
    default:
        ALOGE("SDL_Android_AudioTrack_new_from_spec: invalid format %d", spec->audio_format);
        return NULL;
    }

    SDL_Android_AudioTrack *atrack = (SDL_Android_AudioTrack*) mallocz(sizeof(SDL_Android_AudioTrack));
    if (!atrack) {
        // FIX: original dereferenced the NULL atrack here
        // ((*env)->CallVoidMethod(env, atrack->thiz, g_clazz.release)),
        // a guaranteed crash on the OOM path. Nothing to release yet.
        ALOGE("SDL_Android_AudioTrack_new_from_spec: mallocz failed");
        return NULL;
    }
    atrack->spec = *spec;

    // out-of-range rates: fall back to the device's native output rate
    if (atrack->spec.sample_rate_in_hz < 4000 || atrack->spec.sample_rate_in_hz > 48000) {
        int native_sample_rate_in_hz = audiotrack_get_native_output_sample_rate(env);
        if (native_sample_rate_in_hz > 0) {
            ALOGE("SDL_Android_AudioTrack_new: cast sample rate %d to %d:",
                  atrack->spec.sample_rate_in_hz,
                  native_sample_rate_in_hz);
            atrack->spec.sample_rate_in_hz = native_sample_rate_in_hz;
        }
    }

    int min_buffer_size = audiotrack_get_min_buffer_size(env, &atrack->spec);
    if (min_buffer_size <= 0) {
        ALOGE("SDL_Android_AudioTrack_new: SDL_Android_AudioTrack_get_min_buffer_size: return %d:", min_buffer_size);
        free(atrack);
        return NULL;
    }

    jobject thiz = (*env)->NewObject(env, g_clazz.clazz, g_clazz.constructor,
                                     (int) atrack->spec.stream_type,
                                     (int) atrack->spec.sample_rate_in_hz,
                                     (int) atrack->spec.channel_config,
                                     (int) atrack->spec.audio_format,
                                     (int) min_buffer_size,
                                     (int) atrack->spec.mode);
    if (!thiz || (*env)->ExceptionCheck(env)) {
        ALOGE("SDL_Android_AudioTrack_new: NewObject: Exception:");
        if ((*env)->ExceptionCheck(env)) {
            (*env)->ExceptionDescribe(env);
            (*env)->ExceptionClear(env);
        }
        free(atrack);
        return NULL;
    }

    atrack->min_buffer_size           = min_buffer_size;
    atrack->spec.buffer_size_in_bytes = min_buffer_size;
    atrack->max_volume                = audiotrack_get_max_volume(env);
    atrack->min_volume                = audiotrack_get_min_volume(env);

    atrack->thiz = (*env)->NewGlobalRef(env, thiz);
    (*env)->DeleteLocalRef(env, thiz);

    // extra init: start at full volume, clamped into the device's legal range
    float init_volume = 1.0f;
    init_volume = IJKMIN(init_volume, atrack->max_volume);
    init_volume = IJKMAX(init_volume, atrack->min_volume);
    ALOGI("SDL_Android_AudioTrack_new: init volume as %f/(%f,%f)", init_volume, atrack->min_volume, atrack->max_volume);
    audiotrack_set_stereo_volume(env, atrack, init_volume, init_volume);

    return atrack;
}