static int64_t jni_set_media_data_source(JNIEnv* env, jobject thiz, jobject media_data_source)
{
    int64_t nativeMediaDataSource = 0;

    pthread_mutex_lock(&g_clazz.mutex);

    jobject old = (jobject) (intptr_t) J4AC_IjkMediaPlayer__mNativeMediaDataSource__get__catchAll(env, thiz);
    if (old) {
        J4AC_IMediaDataSource__close__catchAll(env, old);
        J4A_DeleteGlobalRef__p(env, &old);
        J4AC_IjkMediaPlayer__mNativeMediaDataSource__set__catchAll(env, thiz, 0);
    }

    if (media_data_source) {
        jobject global_media_data_source = (*env)->NewGlobalRef(env, media_data_source);
        if (J4A_ExceptionCheck__catchAll(env) || !global_media_data_source)
            goto fail;

        nativeMediaDataSource = (int64_t) (intptr_t) global_media_data_source;
        J4AC_IjkMediaPlayer__mNativeMediaDataSource__set__catchAll(env, thiz, (jlong) nativeMediaDataSource);
    }

fail:
    pthread_mutex_unlock(&g_clazz.mutex);
    return nativeMediaDataSource;
}
static int64_t jni_set_ijkio_http(JNIEnv* env, jobject thiz, jobject ijk_io)
{
    int64_t nativeIjkIOHttp = 0;

    pthread_mutex_lock(&g_clazz.mutex);

    jobject old = (jobject) (intptr_t) J4AC_IjkMediaPlayer__mNativeIjkIOHttp__get__catchAll(env, thiz);
    if (old) {
        J4AC_IIjkIOHttp__close__catchAll(env, old);
        J4A_DeleteGlobalRef__p(env, &old);
        J4AC_IjkMediaPlayer__mNativeIjkIOHttp__set__catchAll(env, thiz, 0);
    }

    if (ijk_io) {
        jobject global_ijkio_http = (*env)->NewGlobalRef(env, ijk_io);
        if (J4A_ExceptionCheck__catchAll(env) || !global_ijkio_http)
            goto fail;

        nativeIjkIOHttp = (int64_t) (intptr_t) global_ijkio_http;
        J4AC_IjkMediaPlayer__mNativeIjkIOHttp__set__catchAll(env, thiz, (jlong) nativeIjkIOHttp);
    }

fail:
    pthread_mutex_unlock(&g_clazz.mutex);
    return nativeIjkIOHttp;
}
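/*
 * Both setters above follow the same pattern: close and release the previously
 * stored global reference, then (optionally) pin the new Java object with a
 * global reference and publish its pointer value through a Java long field,
 * all under g_clazz.mutex. Below is a minimal sketch of that pattern written
 * against plain JNI; the helper name and the getter/setter/closer callback
 * types are hypothetical and do not exist in ijkplayer, they only illustrate
 * the shared structure.
 */
#if 0   /* illustrative sketch only, not compiled */
typedef jlong (*native_field_get_fn)(JNIEnv *env, jobject thiz);
typedef void  (*native_field_set_fn)(JNIEnv *env, jobject thiz, jlong value);
typedef void  (*native_object_close_fn)(JNIEnv *env, jobject obj);

static int64_t jni_swap_global_ref_field(JNIEnv *env, jobject thiz, jobject new_obj,
                                         native_field_get_fn get,
                                         native_field_set_fn set,
                                         native_object_close_fn close)
{
    int64_t native_handle = 0;

    pthread_mutex_lock(&g_clazz.mutex);

    /* release whatever was stored before */
    jobject old = (jobject) (intptr_t) get(env, thiz);
    if (old) {
        close(env, old);
        (*env)->DeleteGlobalRef(env, old);
        set(env, thiz, 0);
    }

    /* pin the new object and store its address in the Java long field */
    if (new_obj) {
        jobject global_ref = (*env)->NewGlobalRef(env, new_obj);
        if (global_ref) {
            native_handle = (int64_t) (intptr_t) global_ref;
            set(env, thiz, (jlong) native_handle);
        }
    }

    pthread_mutex_unlock(&g_clazz.mutex);
    return native_handle;
}
#endif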
IJKFF_Pipenode *ffpipenode_create_video_decoder_from_android_mediacodec(FFPlayer *ffp, IJKFF_Pipeline *pipeline, SDL_Vout *vout)
{
    ALOGD("ffpipenode_create_video_decoder_from_android_mediacodec()\n");
    if (SDL_Android_GetApiLevel() < IJK_API_16_JELLY_BEAN)
        return NULL;

    if (!ffp || !ffp->is)
        return NULL;

    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return node;

    VideoState            *is     = ffp->is;
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    JNIEnv                *env    = NULL;
    int                    ret    = 0;
    int                    rotate_degrees = 0;
    jobject                jsurface = NULL;

    node->func_destroy  = func_destroy;
    node->func_run_sync = func_run_sync;
    node->func_flush    = func_flush;
    opaque->pipeline    = pipeline;
    opaque->ffp         = ffp;
    opaque->decoder     = &is->viddec;
    opaque->weak_vout   = vout;

    opaque->avctx = opaque->decoder->avctx;
    switch (opaque->avctx->codec_id) {
    case AV_CODEC_ID_H264:
        if (!ffp->mediacodec_avc && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec: AVC/H264 is disabled. codec_id:%d \n", __func__, opaque->avctx->codec_id);
            goto fail;
        }
        switch (opaque->avctx->profile) {
        case FF_PROFILE_H264_BASELINE:
            ALOGI("%s: MediaCodec: H264_BASELINE: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_CONSTRAINED_BASELINE:
            ALOGI("%s: MediaCodec: H264_CONSTRAINED_BASELINE: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_MAIN:
            ALOGI("%s: MediaCodec: H264_MAIN: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_EXTENDED:
            ALOGI("%s: MediaCodec: H264_EXTENDED: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_HIGH:
            ALOGI("%s: MediaCodec: H264_HIGH: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_HIGH_10:
            ALOGW("%s: MediaCodec: H264_HIGH_10: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_10_INTRA:
            ALOGW("%s: MediaCodec: H264_HIGH_10_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_422:
            ALOGW("%s: MediaCodec: H264_HIGH_422: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_422_INTRA:
            ALOGW("%s: MediaCodec: H264_HIGH_422_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444:
            ALOGW("%s: MediaCodec: H264_HIGH_444: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
            ALOGW("%s: MediaCodec: H264_HIGH_444_PREDICTIVE: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444_INTRA:
            ALOGW("%s: MediaCodec: H264_HIGH_444_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_CAVLC_444:
            ALOGW("%s: MediaCodec: H264_CAVLC_444: disabled\n", __func__);
            goto fail;
        default:
            ALOGW("%s: MediaCodec: (%d) unknown profile: disabled\n", __func__, opaque->avctx->profile);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_AVC);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    case AV_CODEC_ID_HEVC:
        if (!ffp->mediacodec_hevc && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec/HEVC is disabled. codec_id:%d \n", __func__, opaque->avctx->codec_id);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_HEVC);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    case AV_CODEC_ID_MPEG2VIDEO:
        if (!ffp->mediacodec_mpeg2 && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec/MPEG2VIDEO is disabled. codec_id:%d \n", __func__, opaque->avctx->codec_id);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_MPEG2VIDEO);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    default:
        ALOGE("%s:create: not H264, H265/HEVC or MPEG2, codec_id:%d \n", __func__, opaque->avctx->codec_id);
        goto fail;
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        goto fail;
    }

    opaque->acodec_mutex                      = SDL_CreateMutex();
    opaque->acodec_cond                       = SDL_CreateCond();
    opaque->acodec_first_dequeue_output_mutex = SDL_CreateMutex();
    opaque->acodec_first_dequeue_output_cond  = SDL_CreateCond();
    opaque->any_input_mutex                   = SDL_CreateMutex();
    opaque->any_input_cond                    = SDL_CreateCond();

    if (!opaque->acodec_mutex ||
        !opaque->acodec_cond ||
        !opaque->acodec_first_dequeue_output_mutex ||
        !opaque->acodec_first_dequeue_output_cond) {
        ALOGE("%s:open_video_decoder: SDL_CreateCond() failed\n", __func__);
        goto fail;
    }

    ALOGI("AMediaFormat: %s, %dx%d\n", opaque->mcc.mime_type, opaque->avctx->width, opaque->avctx->height);
    opaque->input_aformat = SDL_AMediaFormatJava_createVideoFormat(env, opaque->mcc.mime_type, opaque->avctx->width, opaque->avctx->height);
    if (opaque->avctx->extradata && opaque->avctx->extradata_size > 0) {
        if ((opaque->avctx->codec_id == AV_CODEC_ID_H264 || opaque->avctx->codec_id == AV_CODEC_ID_HEVC)
            && opaque->avctx->extradata[0] == 1) {
#if AMC_USE_AVBITSTREAM_FILTER
            if (opaque->avctx->codec_id == AV_CODEC_ID_H264) {
                opaque->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
                if (!opaque->bsfc) {
                    ALOGE("Cannot open the h264_mp4toannexb BSF!\n");
                    goto fail;
                }
            } else {
                opaque->bsfc = av_bitstream_filter_init("hevc_mp4toannexb");
                if (!opaque->bsfc) {
                    ALOGE("Cannot open the hevc_mp4toannexb BSF!\n");
                    goto fail;
                }
            }

            opaque->orig_extradata_size = opaque->avctx->extradata_size;
            opaque->orig_extradata = (uint8_t*) av_mallocz(opaque->avctx->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
            if (!opaque->orig_extradata) {
                goto fail;
            }
            memcpy(opaque->orig_extradata, opaque->avctx->extradata, opaque->avctx->extradata_size);
            for (int i = 0; i < opaque->avctx->extradata_size; i += 4) {
                ALOGE("csd-0[%d]: %02x%02x%02x%02x\n", opaque->avctx->extradata_size,
                      (int)opaque->avctx->extradata[i+0],
                      (int)opaque->avctx->extradata[i+1],
                      (int)opaque->avctx->extradata[i+2],
                      (int)opaque->avctx->extradata[i+3]);
            }
            SDL_AMediaFormat_setBuffer(opaque->input_aformat, "csd-0", opaque->avctx->extradata, opaque->avctx->extradata_size);
#else
            size_t   sps_pps_size   = 0;
            size_t   convert_size   = opaque->avctx->extradata_size + 20;
            uint8_t *convert_buffer = (uint8_t *)calloc(1, convert_size);
            if (!convert_buffer) {
                ALOGE("%s:sps_pps_buffer: alloc failed\n", __func__);
                goto fail;
            }
            if (opaque->avctx->codec_id == AV_CODEC_ID_H264) {
                if (0 != convert_sps_pps(opaque->avctx->extradata, opaque->avctx->extradata_size,
                                         convert_buffer, convert_size,
                                         &sps_pps_size, &opaque->nal_size)) {
                    ALOGE("%s:convert_sps_pps: failed\n", __func__);
                    goto fail;
                }
            } else {
                if (0 != convert_hevc_nal_units(opaque->avctx->extradata, opaque->avctx->extradata_size,
                                                convert_buffer, convert_size,
                                                &sps_pps_size, &opaque->nal_size)) {
                    ALOGE("%s:convert_hevc_nal_units: failed\n", __func__);
                    goto fail;
                }
            }
            SDL_AMediaFormat_setBuffer(opaque->input_aformat, "csd-0", convert_buffer, sps_pps_size);
            for (int i = 0; i < sps_pps_size; i += 4) {
                ALOGE("csd-0[%d]: %02x%02x%02x%02x\n", (int)sps_pps_size,
                      (int)convert_buffer[i+0],
                      (int)convert_buffer[i+1],
                      (int)convert_buffer[i+2],
                      (int)convert_buffer[i+3]);
            }
            free(convert_buffer);
#endif
        } else {
            // Codec specific data
            // SDL_AMediaFormat_setBuffer(opaque->aformat, "csd-0", opaque->avctx->extradata, opaque->avctx->extradata_size);
            ALOGE("csd-0: naked\n");
        }
    } else {
        ALOGE("no buffer(%d)\n", opaque->avctx->extradata_size);
    }

    rotate_degrees = ffp_get_video_rotate_degrees(ffp);
    if (ffp->mediacodec_auto_rotate &&
        rotate_degrees != 0 &&
        SDL_Android_GetApiLevel() >= IJK_API_21_LOLLIPOP) {
        ALOGI("amc: rotate in decoder: %d\n", rotate_degrees);
        opaque->frame_rotate_degrees = rotate_degrees;
        SDL_AMediaFormat_setInt32(opaque->input_aformat, "rotation-degrees", rotate_degrees);
        ffp_notify_msg2(ffp, FFP_MSG_VIDEO_ROTATION_CHANGED, 0);
    } else {
        ALOGI("amc: rotate notify: %d\n", rotate_degrees);
        ffp_notify_msg2(ffp, FFP_MSG_VIDEO_ROTATION_CHANGED, rotate_degrees);
    }

    if (!ffpipeline_select_mediacodec_l(pipeline, &opaque->mcc) || !opaque->mcc.codec_name[0]) {
        ALOGE("amc: no suitable codec\n");
        goto fail;
    }

    jsurface = ffpipeline_get_surface_as_global_ref(env, pipeline);
    ret = reconfigure_codec_l(env, node, jsurface);
    J4A_DeleteGlobalRef__p(env, &jsurface);
    if (ret != 0)
        goto fail;

    ffp_set_video_codec_info(ffp, MEDIACODEC_MODULE_NAME, opaque->mcc.codec_name);

    opaque->off_buf_out = 0;
    if (opaque->n_buf_out) {
        int i;

        opaque->amc_buf_out = calloc(opaque->n_buf_out, sizeof(*opaque->amc_buf_out));
        assert(opaque->amc_buf_out != NULL);
        for (i = 0; i < opaque->n_buf_out; i++)
            opaque->amc_buf_out[i].pts = AV_NOPTS_VALUE;
    }

    SDL_SpeedSamplerReset(&opaque->sampler);
    ffp->stat.vdec_type = FFP_PROPV_DECODER_MEDIACODEC;
    return node;

fail:
    ffpipenode_free_p(&node);
    return NULL;
}
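/*
 * For reference: the extradata branch above distinguishes "naked" Annex-B
 * codec-specific data from avcC/hvcC configuration records by inspecting the
 * first extradata byte (a configuration record starts with
 * configurationVersion == 1, while Annex-B data starts with a 00 00 00 01 /
 * 00 00 01 start code). A minimal sketch of that check, assuming the same
 * AVCodecContext fields; the helper name is hypothetical and not part of
 * ijkplayer.
 */
#if 0   /* illustrative sketch only, not compiled */
static bool amc_extradata_needs_annexb_conversion(const AVCodecContext *avctx)
{
    if (!avctx->extradata || avctx->extradata_size <= 0)
        return false;       /* nothing to convert */
    if (avctx->codec_id != AV_CODEC_ID_H264 && avctx->codec_id != AV_CODEC_ID_HEVC)
        return false;       /* only AVC/HEVC carry avcC/hvcC records here */
    return avctx->extradata[0] == 1;    /* configurationVersion == 1 => avcC/hvcC */
}
#endif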
static int feed_input_buffer(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *enqueue_count)
{
    IJKFF_Pipenode_Opaque *opaque   = node->opaque;
    FFPlayer              *ffp      = opaque->ffp;
    IJKFF_Pipeline        *pipeline = opaque->pipeline;
    VideoState            *is       = ffp->is;
    Decoder               *d        = &is->viddec;
    PacketQueue           *q        = d->queue;
    sdl_amedia_status_t    amc_ret  = 0;
    int                    ret      = 0;
    ssize_t  input_buffer_index = 0;
    ssize_t  copy_size          = 0;
    int64_t  time_stamp         = 0;
    uint32_t queue_flags        = 0;

    if (enqueue_count)
        *enqueue_count = 0;

    if (d->queue->abort_request) {
        ret = 0;
        goto fail;
    }

    opaque->avctx = opaque->decoder->avctx;
    if (!d->packet_pending || d->queue->serial != d->pkt_serial) {
#if AMC_USE_AVBITSTREAM_FILTER
#else
        H264ConvertState convert_state = {0, 0};
#endif
        AVPacket pkt;
        do {
            if (d->queue->nb_packets == 0)
                SDL_CondSignal(d->empty_queue_cond);
            if (ffp_packet_queue_get_or_buffering(ffp, d->queue, &pkt, &d->pkt_serial, &d->finished) < 0) {
                ret = -1;
                goto fail;
            }
            if (ffp_is_flush_packet(&pkt) || opaque->acodec_flush_request) {
                // request flush before lock, or never get mutex
                opaque->acodec_flush_request = true;
                SDL_LockMutex(opaque->acodec_mutex);
                if (SDL_AMediaCodec_isStarted(opaque->acodec)) {
                    if (opaque->input_packet_count > 0) {
                        // flushing an empty queue causes errors on OMX.SEC.AVC.Decoder (Nexus S)
                        SDL_VoutAndroid_invalidateAllBuffers(opaque->weak_vout);
                        SDL_AMediaCodec_flush(opaque->acodec);
                        opaque->input_packet_count = 0;
                    }
                    // If codec is configured in synchronous mode, codec will resume automatically
                    // SDL_AMediaCodec_start(opaque->acodec);
                }
                opaque->acodec_flush_request = false;
                SDL_CondSignal(opaque->acodec_cond);
                SDL_UnlockMutex(opaque->acodec_mutex);
                d->finished = 0;
                d->next_pts = d->start_pts;
                d->next_pts_tb = d->start_pts_tb;
            }
        } while (ffp_is_flush_packet(&pkt) || d->queue->serial != d->pkt_serial);

        av_free_packet(&d->pkt);
        d->pkt_temp = d->pkt = pkt;
        d->packet_pending = 1;
#if AMC_USE_AVBITSTREAM_FILTER
        // d->pkt_temp.data could be allocated by av_bitstream_filter_filter
        if (d->bfsc_ret > 0) {
            if (d->bfsc_data)
                av_freep(&d->bfsc_data);
            d->bfsc_ret = 0;
        }
        d->bfsc_ret =
            av_bitstream_filter_filter(opaque->bsfc, opaque->avctx, NULL,
                                       &d->pkt_temp.data, &d->pkt_temp.size,
                                       d->pkt.data, d->pkt.size,
                                       d->pkt.flags & AV_PKT_FLAG_KEY);
        if (d->bfsc_ret > 0) {
            d->bfsc_data = d->pkt_temp.data;
        } else if (d->bfsc_ret < 0) {
            ALOGE("%s: av_bitstream_filter_filter failed\n", __func__);
            ret = -1;
            goto fail;
        }
        if (d->pkt_temp.size == d->pkt.size + opaque->avctx->extradata_size) {
            d->pkt_temp.data += opaque->avctx->extradata_size;
            d->pkt_temp.size  = d->pkt.size;
        }
        AMCTRACE("bsfc->filter(%d): %p[%d] -> %p[%d]", d->bfsc_ret,
                 d->pkt.data, (int)d->pkt.size,
                 d->pkt_temp.data, (int)d->pkt_temp.size);
#else
#if 0
        AMCTRACE("raw [%d][%d] %02x%02x%02x%02x%02x%02x%02x%02x",
                 (int)d->pkt_temp.size,
                 (int)opaque->nal_size,
                 d->pkt_temp.data[0], d->pkt_temp.data[1],
                 d->pkt_temp.data[2], d->pkt_temp.data[3],
                 d->pkt_temp.data[4], d->pkt_temp.data[5],
                 d->pkt_temp.data[6], d->pkt_temp.data[7]);
#endif
        if (opaque->avctx->codec_id == AV_CODEC_ID_H264 || opaque->avctx->codec_id == AV_CODEC_ID_HEVC) {
            convert_h264_to_annexb(d->pkt_temp.data, d->pkt_temp.size, opaque->nal_size, &convert_state);
            int64_t time_stamp = d->pkt_temp.pts;
            if (!time_stamp && d->pkt_temp.dts)
                time_stamp = d->pkt_temp.dts;
            if (time_stamp > 0) {
                time_stamp = av_rescale_q(time_stamp, is->video_st->time_base, AV_TIME_BASE_Q);
            } else {
                time_stamp = 0;
            }
        }
#if 0
        AMCTRACE("input[%d][%d][%lld,%lld (%d, %d) -> %lld] %02x%02x%02x%02x%02x%02x%02x%02x",
                 (int)d->pkt_temp.size,
                 (int)opaque->nal_size,
                 (int64_t)d->pkt_temp.pts,
                 (int64_t)d->pkt_temp.dts,
                 (int)is->video_st->time_base.num,
                 (int)is->video_st->time_base.den,
                 (int64_t)time_stamp,
                 d->pkt_temp.data[0], d->pkt_temp.data[1],
                 d->pkt_temp.data[2], d->pkt_temp.data[3],
                 d->pkt_temp.data[4], d->pkt_temp.data[5],
                 d->pkt_temp.data[6], d->pkt_temp.data[7]);
#endif
#endif
    }

    if (d->pkt_temp.data) {
        // reconfigure surface if surface changed
        // NULL surface cause no display
        if (ffpipeline_is_surface_need_reconfigure_l(pipeline)) {
            jobject new_surface = NULL;

            // request reconfigure before lock, or never get mutex
            ffpipeline_lock_surface(pipeline);
            ffpipeline_set_surface_need_reconfigure_l(pipeline, false);
            new_surface = ffpipeline_get_surface_as_global_ref_l(env, pipeline);
            ffpipeline_unlock_surface(pipeline);

            if (opaque->jsurface == new_surface ||
                (opaque->jsurface && new_surface && (*env)->IsSameObject(env, new_surface, opaque->jsurface))) {
                ALOGI("%s: same surface, reuse previous surface\n", __func__);
                J4A_DeleteGlobalRef__p(env, &new_surface);
            } else {
                opaque->acodec_reconfigure_request = true;
                SDL_LockMutex(opaque->acodec_mutex);
                ret = reconfigure_codec_l(env, node, new_surface);
                opaque->acodec_reconfigure_request = false;
                SDL_CondSignal(opaque->acodec_cond);
                SDL_UnlockMutex(opaque->acodec_mutex);

                J4A_DeleteGlobalRef__p(env, &new_surface);

                if (ret != 0) {
                    ALOGE("%s: reconfigure_codec failed\n", __func__);
                    ret = 0;
                    goto fail;
                }

                SDL_LockMutex(opaque->acodec_first_dequeue_output_mutex);
                while (!q->abort_request &&
                       !opaque->acodec_reconfigure_request &&
                       !opaque->acodec_flush_request &&
                       opaque->acodec_first_dequeue_output_request) {
                    SDL_CondWaitTimeout(opaque->acodec_first_dequeue_output_cond, opaque->acodec_first_dequeue_output_mutex, 1000);
                }
                SDL_UnlockMutex(opaque->acodec_first_dequeue_output_mutex);

                if (q->abort_request || opaque->acodec_reconfigure_request || opaque->acodec_flush_request) {
                    ret = 0;
                    goto fail;
                }
            }
        }

#if 0
        // no need to decode without surface
        if (!opaque->jsurface) {
            ret = amc_decode_picture_fake(node, 1000);
            goto fail;
        }
#endif

        queue_flags = 0;
        input_buffer_index = SDL_AMediaCodec_dequeueInputBuffer(opaque->acodec, timeUs);
        if (input_buffer_index < 0) {
            if (SDL_AMediaCodec_isInputBuffersValid(opaque->acodec)) {
                // timeout
                ret = 0;
                goto fail;
            } else {
                // enqueue fake frame
                queue_flags |= AMEDIACODEC__BUFFER_FLAG_FAKE_FRAME;
                copy_size    = d->pkt_temp.size;
            }
        } else {
            SDL_AMediaCodecFake_flushFakeFrames(opaque->acodec);

            copy_size = SDL_AMediaCodec_writeInputData(opaque->acodec, input_buffer_index, d->pkt_temp.data, d->pkt_temp.size);
            if (!copy_size) {
                ALOGE("%s: SDL_AMediaCodec_writeInputData failed\n", __func__);
                ret = -1;
                goto fail;
            }
        }

        time_stamp = d->pkt_temp.pts;
        if (!time_stamp && d->pkt_temp.dts)
            time_stamp = d->pkt_temp.dts;
        if (time_stamp > 0) {
            time_stamp = av_rescale_q(time_stamp, is->video_st->time_base, AV_TIME_BASE_Q);
        } else {
            time_stamp = 0;
        }
        // ALOGE("queueInputBuffer, %lld\n", time_stamp);
        amc_ret = SDL_AMediaCodec_queueInputBuffer(opaque->acodec, input_buffer_index, 0, copy_size, time_stamp, queue_flags);
        if (amc_ret != SDL_AMEDIA_OK) {
            ALOGE("%s: SDL_AMediaCodec_queueInputBuffer failed\n", __func__);
            ret = -1;
            goto fail;
        }
        // ALOGE("%s: queue %d/%d", __func__, (int)copy_size, (int)input_buffer_size);
        opaque->input_packet_count++;
        if (enqueue_count)
            ++*enqueue_count;
    }

    if (copy_size < 0) {
        d->packet_pending = 0;
    } else {
        d->pkt_temp.dts =
        d->pkt_temp.pts = AV_NOPTS_VALUE;
        if (d->pkt_temp.data) {
            d->pkt_temp.data += copy_size;
            d->pkt_temp.size -= copy_size;
            if (d->pkt_temp.size <= 0)
                d->packet_pending = 0;
        } else {
            // FIXME: detect if decode finished
            // if (!got_frame) {
                d->packet_pending = 0;
                d->finished = d->pkt_serial;
            // }
        }
    }

fail:
    return ret;
}
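/*
 * For reference: feed_input_buffer() above computes the MediaCodec
 * presentation timestamp twice with the same logic (prefer pts, fall back to
 * dts when pts is zero, then rescale from the stream time base to
 * microseconds via av_rescale_q). A minimal sketch of that calculation as a
 * standalone helper; the function name is hypothetical and does not exist in
 * ijkplayer.
 */
#if 0   /* illustrative sketch only, not compiled */
static int64_t amc_pkt_time_stamp_us(const AVPacket *pkt, AVRational stream_time_base)
{
    int64_t time_stamp = pkt->pts;
    if (!time_stamp && pkt->dts)
        time_stamp = pkt->dts;      /* fall back to dts when pts is zero */
    if (time_stamp > 0)
        return av_rescale_q(time_stamp, stream_time_base, AV_TIME_BASE_Q);
    return 0;                       /* zero or negative timestamps collapse to 0 */
}
#endif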