/* Feed codec-specific data (CSD) extracted from the container extradata to
 * the decoder.  Only H264 and HEVC need the avcC/hvcC -> Annex-B conversion;
 * any other codec is accepted untouched and reported as success. */
static int ParseVideoExtra(decoder_t *p_dec, uint8_t *p_extra, int i_extra)
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    if (p_dec->fmt_in.i_codec != VLC_CODEC_H264 &&
        p_dec->fmt_in.i_codec != VLC_CODEC_HEVC)
        return VLC_SUCCESS;

    int i_converted_max = i_extra + 20;
    uint32_t i_converted = i_extra;
    void *p_converted = malloc(i_converted_max);
    if (!p_converted)
    {
        msg_Warn(p_dec, "extra buffer allocation failed");
        return VLC_EGENERIC;
    }

    if (p_dec->fmt_in.i_codec == VLC_CODEC_H264)
    {
        /* A leading 1 marks avcC layout; Annex-B extradata needs no work. */
        if (p_extra[0] == 1
         && convert_sps_pps(p_dec, p_extra, i_extra,
                            p_converted, i_converted_max, &i_converted,
                            &p_sys->u.video.i_nal_length_size) == VLC_SUCCESS)
            H264SetCSD(p_dec, p_converted, i_converted, NULL);
    }
    else if (convert_hevc_nal_units(p_dec, p_extra, i_extra,
                                    p_converted, i_converted_max, &i_converted,
                                    &p_sys->u.video.i_nal_length_size) == VLC_SUCCESS)
    {
        struct csd csd = { .p_buf = p_converted, .i_size = i_converted };
        CSDDup(p_dec, &csd, 1);
    }

    free(p_converted);
    return VLC_SUCCESS;
}
/* Parse the SPS/PPS Metadata to feed the decoder for avc1.
 * On success the converted buffer is kept in p_sys->p_sps_pps_buf;
 * on failure it is released and the pointer reset to NULL. */
static int crystal_insert_sps_pps( decoder_t *p_dec, uint8_t *p_buf,
                                   uint32_t i_buf_size )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    /* Twice the raw extradata size is assumed large enough for the
     * Annex-B converted output. */
    const int i_out_max = p_dec->fmt_in.i_extra * 2;

    p_sys->i_sps_pps_size = 0;
    p_sys->p_sps_pps_buf = malloc( i_out_max );
    if( p_sys->p_sps_pps_buf == NULL )
        return VLC_ENOMEM;

    int i_ret = convert_sps_pps( p_dec, p_buf, i_buf_size,
                                 p_sys->p_sps_pps_buf, i_out_max,
                                 &p_sys->i_sps_pps_size, &p_sys->i_nal_size );
    if( !i_ret )
        return i_ret;   /* success: keep the converted buffer */

    /* Conversion failed: release the temporary buffer. */
    free( p_sys->p_sps_pps_buf );
    p_sys->p_sps_pps_buf = NULL;
    return i_ret;
}
/*
 * Create a MediaCodec-backed video decoder pipeline node (H264 only in this
 * variant).  Returns a configured IJKFF_Pipenode, or NULL on failure
 * (unsupported API level, unsupported profile, non-H264 codec, JNI setup or
 * codec configuration failure).  All partially created resources are
 * released through ffpipenode_free_p() on the fail path.
 */
IJKFF_Pipenode *ffpipenode_create_video_decoder_from_android_mediacodec(FFPlayer *ffp, IJKFF_Pipeline *pipeline, SDL_Vout *vout)
{
    ALOGD("ffpipenode_create_video_decoder_from_android_mediacodec()\n");
    if (SDL_Android_GetApiLevel() < IJK_API_16_JELLY_BEAN)
        return NULL;

    if (!ffp || !ffp->is)
        return NULL;

    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return node;

    VideoState            *is     = ffp->is;
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    JNIEnv                *env    = NULL;
    int                    ret    = 0;

    node->func_destroy  = func_destroy;
    node->func_run_sync = func_run_sync;
    node->func_flush    = func_flush;
    opaque->pipeline    = pipeline;
    opaque->ffp         = ffp;
    opaque->decoder     = &is->viddec;
    opaque->weak_vout   = vout;
    opaque->avctx       = opaque->decoder->avctx;

    /* These H264 profiles are generally unsupported by MediaCodec decoders. */
    switch (opaque->avctx->profile) {
    case FF_PROFILE_H264_HIGH_10:
    case FF_PROFILE_H264_HIGH_10_INTRA:
    case FF_PROFILE_H264_HIGH_422:
    case FF_PROFILE_H264_HIGH_422_INTRA:
    case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
    case FF_PROFILE_H264_HIGH_444_INTRA:
    case FF_PROFILE_H264_CAVLC_444:
        goto fail;
    }

    switch (opaque->avctx->codec_id) {
    case AV_CODEC_ID_H264:
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_AVC);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    default:
        ALOGE("%s:create: not H264\n", __func__);
        goto fail;
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        goto fail;
    }

    opaque->acodec_mutex                      = SDL_CreateMutex();
    opaque->acodec_cond                       = SDL_CreateCond();
    opaque->acodec_first_dequeue_output_mutex = SDL_CreateMutex();
    opaque->acodec_first_dequeue_output_cond  = SDL_CreateCond();
    ffp_packet_queue_init(&opaque->fake_pictq);
    ffp_packet_queue_start(&opaque->fake_pictq);
    /* FIX: the original tested !opaque->acodec_cond twice and never checked
     * acodec_mutex, so a failed SDL_CreateMutex() slipped through. */
    if (!opaque->acodec_mutex || !opaque->acodec_cond ||
        !opaque->acodec_first_dequeue_output_mutex ||
        !opaque->acodec_first_dequeue_output_cond) {
        ALOGE("%s:open_video_decoder: SDL_CreateCond() failed\n", __func__);
        goto fail;
    }

    ALOGI("AMediaFormat: %s, %dx%d\n",
          opaque->mcc.mime_type, opaque->avctx->width, opaque->avctx->height);
    opaque->input_aformat = SDL_AMediaFormatJava_createVideoFormat(env, opaque->mcc.mime_type, opaque->avctx->width, opaque->avctx->height);
    if (opaque->avctx->extradata && opaque->avctx->extradata_size > 0) {
        /* extradata[0] == 1 marks avcC layout, which must be converted to
         * Annex-B before being handed to MediaCodec as csd-0. */
        if (opaque->avctx->codec_id == AV_CODEC_ID_H264 && opaque->avctx->extradata[0] == 1) {
#if AMC_USE_AVBITSTREAM_FILTER
            opaque->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
            if (!opaque->bsfc) {
                ALOGE("Cannot open the h264_mp4toannexb BSF!\n");
                goto fail;
            }

            /* Keep a pristine copy of the extradata: the BSF mutates it. */
            opaque->orig_extradata_size = opaque->avctx->extradata_size;
            opaque->orig_extradata = (uint8_t*) av_mallocz(opaque->avctx->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
            if (!opaque->orig_extradata) {
                goto fail;
            }
            memcpy(opaque->orig_extradata, opaque->avctx->extradata, opaque->avctx->extradata_size);
            for (int i = 0; i < opaque->avctx->extradata_size; i += 4) {
                ALOGE("csd-0[%d]: %02x%02x%02x%02x\n", opaque->avctx->extradata_size,
                      (int)opaque->avctx->extradata[i+0], (int)opaque->avctx->extradata[i+1],
                      (int)opaque->avctx->extradata[i+2], (int)opaque->avctx->extradata[i+3]);
            }
            SDL_AMediaFormat_setBuffer(opaque->input_aformat, "csd-0", opaque->avctx->extradata, opaque->avctx->extradata_size);
#else
            size_t   sps_pps_size   = 0;
            size_t   convert_size   = opaque->avctx->extradata_size + 20;
            uint8_t *convert_buffer = (uint8_t *)calloc(1, convert_size);
            if (!convert_buffer) {
                ALOGE("%s:sps_pps_buffer: alloc failed\n", __func__);
                goto fail;
            }
            if (0 != convert_sps_pps(opaque->avctx->extradata, opaque->avctx->extradata_size,
                                     convert_buffer, convert_size,
                                     &sps_pps_size, &opaque->nal_size)) {
                ALOGE("%s:convert_sps_pps: failed\n", __func__);
                /* FIX: the original leaked convert_buffer on this path. */
                free(convert_buffer);
                goto fail;
            }
            SDL_AMediaFormat_setBuffer(opaque->input_aformat, "csd-0", convert_buffer, sps_pps_size);
            for (int i = 0; i < sps_pps_size; i += 4) {
                ALOGE("csd-0[%d]: %02x%02x%02x%02x\n", (int)sps_pps_size,
                      (int)convert_buffer[i+0], (int)convert_buffer[i+1],
                      (int)convert_buffer[i+2], (int)convert_buffer[i+3]);
            }
            free(convert_buffer);
#endif
        } else {
            // Codec specific data
            // SDL_AMediaFormat_setBuffer(opaque->aformat, "csd-0", opaque->avctx->extradata, opaque->avctx->extradata_size);
            ALOGE("csd-0: naked\n");
        }
    } else {
        ALOGE("no buffer(%d)\n", opaque->avctx->extradata_size);
    }

    ret = reconfigure_codec_l(env, node);
    if (ret != 0)
        goto fail;

    ffp_set_video_codec_info(ffp, MEDIACODEC_MODULE_NAME, opaque->mcc.codec_name);

    opaque->off_buf_out = 0;
    if (opaque->n_buf_out) {
        int i;
        opaque->amc_buf_out = calloc(opaque->n_buf_out, sizeof(*opaque->amc_buf_out));
        assert(opaque->amc_buf_out != NULL);
        for (i = 0; i < opaque->n_buf_out; i++)
            opaque->amc_buf_out[i].pts = AV_NOPTS_VALUE;
    }

    return node;
fail:
    ffpipenode_free_p(&node);
    return NULL;
}
/*
 * Initialize the Media Foundation Transform decoder: detect asynchronous
 * mode, bind the single input/output stream pair, negotiate the media
 * types, and notify the MFT that streaming begins.
 *
 * Returns VLC_SUCCESS, or VLC_EGENERIC after tearing the MFT down with
 * DestroyMFT().
 */
static int InitializeMFT(decoder_t *p_dec)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    HRESULT hr;

    IMFAttributes *attributes = NULL;
    hr = IMFTransform_GetAttributes(p_sys->mft, &attributes);
    /* E_NOTIMPL merely means the MFT exposes no attribute store. */
    if (hr != E_NOTIMPL && FAILED(hr))
        goto error;
    if (SUCCEEDED(hr))
    {
        UINT32 is_async = false;
        hr = IMFAttributes_GetUINT32(attributes, &MF_TRANSFORM_ASYNC, &is_async);
        if (hr != MF_E_ATTRIBUTENOTFOUND && FAILED(hr))
            goto error;
        p_sys->is_async = is_async;
        if (p_sys->is_async)
        {
            /* Asynchronous MFTs must be explicitly unlocked before use. */
            hr = IMFAttributes_SetUINT32(attributes, &MF_TRANSFORM_ASYNC_UNLOCK, true);
            if (FAILED(hr))
                goto error;
            hr = IMFTransform_QueryInterface(p_sys->mft, &IID_IMFMediaEventGenerator,
                                             (void**)&p_sys->event_generator);
            if (FAILED(hr))
                goto error;
        }
    }

    DWORD input_streams_count;
    DWORD output_streams_count;
    hr = IMFTransform_GetStreamCount(p_sys->mft, &input_streams_count, &output_streams_count);
    if (FAILED(hr))
        goto error;
    if (input_streams_count != 1 || output_streams_count != 1)
    {
        msg_Err(p_dec, "MFT decoder should have 1 input stream and 1 output stream.");
        goto error;
    }

    hr = IMFTransform_GetStreamIDs(p_sys->mft, 1, &p_sys->input_stream_id, 1, &p_sys->output_stream_id);
    if (hr == E_NOTIMPL)
    {
        /*
         * This is not an error, it happens if:
         * - there is a fixed number of streams.
         * AND
         * - streams are numbered consecutively from 0 to N-1.
         */
        p_sys->input_stream_id = 0;
        p_sys->output_stream_id = 0;
    }
    else if (FAILED(hr))
        goto error;

    if (SetInputType(p_dec, p_sys->input_stream_id, &p_sys->input_type))
        goto error;

    if (SetOutputType(p_dec, p_sys->output_stream_id, &p_sys->output_type))
        goto error;

    /*
     * The input type was not set by the previous call to
     * SetInputType, try again after setting the output type.
     */
    if (!p_sys->input_type)
        if (SetInputType(p_dec, p_sys->input_stream_id, &p_sys->input_type) || !p_sys->input_type)
            goto error;

    /* This call can be a no-op for some MFT decoders, but it can potentially
     * reduce starting time. */
    hr = IMFTransform_ProcessMessage(p_sys->mft, MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, (ULONG_PTR)0);
    if (FAILED(hr))
        goto error;

    /* This event is required for asynchronous MFTs, optional otherwise. */
    hr = IMFTransform_ProcessMessage(p_sys->mft, MFT_MESSAGE_NOTIFY_START_OF_STREAM, (ULONG_PTR)0);
    if (FAILED(hr))
        goto error;

    if (p_dec->fmt_in.i_codec == VLC_CODEC_H264)
    {
        /* It's not an error if the following call fails. */
        /* FIX: attributes is NULL when GetAttributes returned E_NOTIMPL;
         * calling through a NULL interface pointer would crash. */
        if (attributes)
            IMFAttributes_SetUINT32(attributes, &CODECAPI_AVLowLatencyMode, true);

        if (p_dec->fmt_in.i_extra)
        {
            /* Run the avcC conversion only to learn the NAL length size;
             * the converted bytes themselves are discarded. */
            int buf_size = p_dec->fmt_in.i_extra + 20;
            uint32_t size = p_dec->fmt_in.i_extra;
            uint8_t *buf = malloc(buf_size);
            /* FIX: the original passed buf to convert_sps_pps() without
             * checking the allocation result. */
            if (buf && ((uint8_t*)p_dec->fmt_in.p_extra)[0] == 1)
            {
                convert_sps_pps(p_dec, p_dec->fmt_in.p_extra, p_dec->fmt_in.i_extra,
                                buf, buf_size, &size, &p_sys->nal_size);
            }
            free(buf);
        }
    }

    /* FIX: GetAttributes hands out a reference that was never released. */
    if (attributes)
        IMFAttributes_Release(attributes);
    return VLC_SUCCESS;

error:
    msg_Err(p_dec, "Error in InitializeMFT()");
    if (attributes)
        IMFAttributes_Release(attributes);
    DestroyMFT(p_dec);
    return VLC_EGENERIC;
}
/*
 * Create a MediaCodec-backed video decoder pipeline node.
 * Supports H264/AVC (whitelisted profiles only), HEVC and MPEG2, each gated
 * by its ffp->mediacodec_* option (or mediacodec_all_videos).  Handles
 * avcC/hvcC -> Annex-B extradata conversion, optional in-decoder rotation,
 * codec selection and configuration.  Returns NULL on any failure; partial
 * state is released through ffpipenode_free_p().
 */
IJKFF_Pipenode *ffpipenode_create_video_decoder_from_android_mediacodec(FFPlayer *ffp, IJKFF_Pipeline *pipeline, SDL_Vout *vout)
{
    ALOGD("ffpipenode_create_video_decoder_from_android_mediacodec()\n");
    if (SDL_Android_GetApiLevel() < IJK_API_16_JELLY_BEAN)
        return NULL;

    if (!ffp || !ffp->is)
        return NULL;

    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return node;

    VideoState            *is             = ffp->is;
    IJKFF_Pipenode_Opaque *opaque         = node->opaque;
    JNIEnv                *env            = NULL;
    int                    ret            = 0;
    int                    rotate_degrees = 0;
    jobject                jsurface       = NULL;

    node->func_destroy  = func_destroy;
    node->func_run_sync = func_run_sync;
    node->func_flush    = func_flush;
    opaque->pipeline    = pipeline;
    opaque->ffp         = ffp;
    opaque->decoder     = &is->viddec;
    opaque->weak_vout   = vout;
    opaque->avctx       = opaque->decoder->avctx;

    switch (opaque->avctx->codec_id) {
    case AV_CODEC_ID_H264:
        if (!ffp->mediacodec_avc && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec: AVC/H264 is disabled. codec_id:%d \n", __func__, opaque->avctx->codec_id);
            goto fail;
        }
        /* Only profiles MediaCodec decoders commonly support are allowed. */
        switch (opaque->avctx->profile) {
        case FF_PROFILE_H264_BASELINE:
            ALOGI("%s: MediaCodec: H264_BASELINE: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_CONSTRAINED_BASELINE:
            ALOGI("%s: MediaCodec: H264_CONSTRAINED_BASELINE: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_MAIN:
            ALOGI("%s: MediaCodec: H264_MAIN: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_EXTENDED:
            ALOGI("%s: MediaCodec: H264_EXTENDED: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_HIGH:
            ALOGI("%s: MediaCodec: H264_HIGH: enabled\n", __func__);
            break;
        case FF_PROFILE_H264_HIGH_10:
            ALOGW("%s: MediaCodec: H264_HIGH_10: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_10_INTRA:
            ALOGW("%s: MediaCodec: H264_HIGH_10_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_422:
            ALOGW("%s: MediaCodec: H264_HIGH_10_422: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_422_INTRA:
            ALOGW("%s: MediaCodec: H264_HIGH_10_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444:
            ALOGW("%s: MediaCodec: H264_HIGH_10_444: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
            ALOGW("%s: MediaCodec: H264_HIGH_444_PREDICTIVE: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_HIGH_444_INTRA:
            ALOGW("%s: MediaCodec: H264_HIGH_444_INTRA: disabled\n", __func__);
            goto fail;
        case FF_PROFILE_H264_CAVLC_444:
            ALOGW("%s: MediaCodec: H264_CAVLC_444: disabled\n", __func__);
            goto fail;
        default:
            ALOGW("%s: MediaCodec: (%d) unknown profile: disabled\n", __func__, opaque->avctx->profile);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_AVC);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    case AV_CODEC_ID_HEVC:
        if (!ffp->mediacodec_hevc && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec/HEVC is disabled. codec_id:%d \n", __func__, opaque->avctx->codec_id);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_HEVC);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    case AV_CODEC_ID_MPEG2VIDEO:
        if (!ffp->mediacodec_mpeg2 && !ffp->mediacodec_all_videos) {
            ALOGE("%s: MediaCodec/MPEG2VIDEO is disabled. codec_id:%d \n", __func__, opaque->avctx->codec_id);
            goto fail;
        }
        strcpy(opaque->mcc.mime_type, SDL_AMIME_VIDEO_MPEG2VIDEO);
        opaque->mcc.profile = opaque->avctx->profile;
        opaque->mcc.level   = opaque->avctx->level;
        break;
    default:
        ALOGE("%s:create: not H264 or H265/HEVC, codec_id:%d \n", __func__, opaque->avctx->codec_id);
        goto fail;
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        goto fail;
    }

    opaque->acodec_mutex                      = SDL_CreateMutex();
    opaque->acodec_cond                       = SDL_CreateCond();
    opaque->acodec_first_dequeue_output_mutex = SDL_CreateMutex();
    opaque->acodec_first_dequeue_output_cond  = SDL_CreateCond();
    opaque->any_input_mutex                   = SDL_CreateMutex();
    opaque->any_input_cond                    = SDL_CreateCond();
    /* FIX: the original tested !opaque->acodec_cond twice and never checked
     * acodec_mutex or the any_input pair, so failed SDL_CreateMutex()/
     * SDL_CreateCond() calls went undetected. */
    if (!opaque->acodec_mutex || !opaque->acodec_cond ||
        !opaque->acodec_first_dequeue_output_mutex ||
        !opaque->acodec_first_dequeue_output_cond ||
        !opaque->any_input_mutex || !opaque->any_input_cond) {
        ALOGE("%s:open_video_decoder: SDL_CreateCond() failed\n", __func__);
        goto fail;
    }

    ALOGI("AMediaFormat: %s, %dx%d\n",
          opaque->mcc.mime_type, opaque->avctx->width, opaque->avctx->height);
    opaque->input_aformat = SDL_AMediaFormatJava_createVideoFormat(env, opaque->mcc.mime_type, opaque->avctx->width, opaque->avctx->height);
    if (opaque->avctx->extradata && opaque->avctx->extradata_size > 0) {
        /* extradata[0] == 1 marks avcC/hvcC layout, which must be converted
         * to Annex-B before being handed to MediaCodec as csd-0. */
        if ((opaque->avctx->codec_id == AV_CODEC_ID_H264 ||
             opaque->avctx->codec_id == AV_CODEC_ID_HEVC) &&
            opaque->avctx->extradata[0] == 1) {
#if AMC_USE_AVBITSTREAM_FILTER
            if (opaque->avctx->codec_id == AV_CODEC_ID_H264) {
                opaque->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
                if (!opaque->bsfc) {
                    ALOGE("Cannot open the h264_mp4toannexb BSF!\n");
                    goto fail;
                }
            } else {
                opaque->bsfc = av_bitstream_filter_init("hevc_mp4toannexb");
                if (!opaque->bsfc) {
                    ALOGE("Cannot open the hevc_mp4toannexb BSF!\n");
                    goto fail;
                }
            }

            /* Keep a pristine copy of the extradata: the BSF mutates it. */
            opaque->orig_extradata_size = opaque->avctx->extradata_size;
            opaque->orig_extradata = (uint8_t*) av_mallocz(opaque->avctx->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
            if (!opaque->orig_extradata) {
                goto fail;
            }
            memcpy(opaque->orig_extradata, opaque->avctx->extradata, opaque->avctx->extradata_size);
            for (int i = 0; i < opaque->avctx->extradata_size; i += 4) {
                ALOGE("csd-0[%d]: %02x%02x%02x%02x\n", opaque->avctx->extradata_size,
                      (int)opaque->avctx->extradata[i+0], (int)opaque->avctx->extradata[i+1],
                      (int)opaque->avctx->extradata[i+2], (int)opaque->avctx->extradata[i+3]);
            }
            SDL_AMediaFormat_setBuffer(opaque->input_aformat, "csd-0", opaque->avctx->extradata, opaque->avctx->extradata_size);
#else
            size_t   sps_pps_size   = 0;
            size_t   convert_size   = opaque->avctx->extradata_size + 20;
            uint8_t *convert_buffer = (uint8_t *)calloc(1, convert_size);
            if (!convert_buffer) {
                ALOGE("%s:sps_pps_buffer: alloc failed\n", __func__);
                goto fail;
            }
            if (opaque->avctx->codec_id == AV_CODEC_ID_H264) {
                if (0 != convert_sps_pps(opaque->avctx->extradata, opaque->avctx->extradata_size,
                                         convert_buffer, convert_size,
                                         &sps_pps_size, &opaque->nal_size)) {
                    ALOGE("%s:convert_sps_pps: failed\n", __func__);
                    /* FIX: the original leaked convert_buffer on this path. */
                    free(convert_buffer);
                    goto fail;
                }
            } else {
                if (0 != convert_hevc_nal_units(opaque->avctx->extradata, opaque->avctx->extradata_size,
                                                convert_buffer, convert_size,
                                                &sps_pps_size, &opaque->nal_size)) {
                    ALOGE("%s:convert_hevc_nal_units: failed\n", __func__);
                    /* FIX: the original leaked convert_buffer on this path. */
                    free(convert_buffer);
                    goto fail;
                }
            }
            SDL_AMediaFormat_setBuffer(opaque->input_aformat, "csd-0", convert_buffer, sps_pps_size);
            for (int i = 0; i < sps_pps_size; i += 4) {
                ALOGE("csd-0[%d]: %02x%02x%02x%02x\n", (int)sps_pps_size,
                      (int)convert_buffer[i+0], (int)convert_buffer[i+1],
                      (int)convert_buffer[i+2], (int)convert_buffer[i+3]);
            }
            free(convert_buffer);
#endif
        } else {
            // Codec specific data
            // SDL_AMediaFormat_setBuffer(opaque->aformat, "csd-0", opaque->avctx->extradata, opaque->avctx->extradata_size);
            ALOGE("csd-0: naked\n");
        }
    } else {
        ALOGE("no buffer(%d)\n", opaque->avctx->extradata_size);
    }

    rotate_degrees = ffp_get_video_rotate_degrees(ffp);
    if (ffp->mediacodec_auto_rotate && rotate_degrees != 0 &&
        SDL_Android_GetApiLevel() >= IJK_API_21_LOLLIPOP) {
        /* Let MediaCodec rotate in the decoder; the UI then sees 0. */
        ALOGI("amc: rotate in decoder: %d\n", rotate_degrees);
        opaque->frame_rotate_degrees = rotate_degrees;
        SDL_AMediaFormat_setInt32(opaque->input_aformat, "rotation-degrees", rotate_degrees);
        ffp_notify_msg2(ffp, FFP_MSG_VIDEO_ROTATION_CHANGED, 0);
    } else {
        ALOGI("amc: rotate notify: %d\n", rotate_degrees);
        ffp_notify_msg2(ffp, FFP_MSG_VIDEO_ROTATION_CHANGED, rotate_degrees);
    }

    if (!ffpipeline_select_mediacodec_l(pipeline, &opaque->mcc) || !opaque->mcc.codec_name[0]) {
        ALOGE("amc: no suitable codec\n");
        goto fail;
    }

    jsurface = ffpipeline_get_surface_as_global_ref(env, pipeline);
    ret = reconfigure_codec_l(env, node, jsurface);
    J4A_DeleteGlobalRef__p(env, &jsurface);
    if (ret != 0)
        goto fail;

    ffp_set_video_codec_info(ffp, MEDIACODEC_MODULE_NAME, opaque->mcc.codec_name);

    opaque->off_buf_out = 0;
    if (opaque->n_buf_out) {
        int i;
        opaque->amc_buf_out = calloc(opaque->n_buf_out, sizeof(*opaque->amc_buf_out));
        assert(opaque->amc_buf_out != NULL);
        for (i = 0; i < opaque->n_buf_out; i++)
            opaque->amc_buf_out[i].pts = AV_NOPTS_VALUE;
    }

    SDL_SpeedSamplerReset(&opaque->sampler);
    ffp->stat.vdec_type = FFP_PROPV_DECODER_MEDIACODEC;
    return node;
fail:
    ffpipenode_free_p(&node);
    return NULL;
}