/**
 * Fetch one output item from MediaCodec and convert it to a VLC picture.
 *
 * Returns 1 when *pp_out_pic holds a new decoded picture, 0 when the call
 * produced no picture (format change handled, preroll frame dropped, or no
 * output available within i_timeout), and -1 on error.  On failure to get a
 * picture from the video output, *p_abort is set so the caller stops the
 * current Decode call.  pp_out_block must be NULL (video path only).
 */
static int Video_GetOutput(decoder_t *p_dec, picture_t **pp_out_pic,
                           block_t **pp_out_block, bool *p_abort,
                           mtime_t i_timeout)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    mc_api_out out;
    picture_t *p_pic = NULL;
    int i_ret;

    assert(pp_out_pic && !pp_out_block);

    /* FIXME: A new picture shouldn't be created each time. If
     * decoder_NewPicture fails because the decoder is flushing/exiting,
     * GetVideoOutput will either fail (or crash in function of devices), or
     * never return an output buffer. Indeed, if the Decoder is flushing,
     * MediaCodec can be stalled since the input is waiting for the output or
     * vice-versa. Therefore, call decoder_NewPicture before GetVideoOutput as
     * a safeguard. */
    if (p_sys->b_has_format)
    {
        if (p_sys->b_update_format)
        {
            p_sys->b_update_format = false;
            if (decoder_UpdateVideoFormat(p_dec) != 0)
            {
                msg_Err(p_dec, "decoder_UpdateVideoFormat failed");
                return -1;
            }
        }
        p_pic = decoder_NewPicture(p_dec);
        if (!p_pic)
        {
            msg_Warn(p_dec, "NewPicture failed");
            /* abort current Decode call */
            *p_abort = true;
            return 0;
        }
    }

    i_ret = p_sys->api->get_out(p_sys->api, &out, i_timeout);
    if (i_ret != 1)
        goto end; /* no output (0) or error (<0); p_pic released below */

    if (out.type == MC_OUT_TYPE_BUF)
    {
        /* If the oldest input block had no PTS, the timestamp of
         * the frame returned by MediaCodec might be wrong so we
         * overwrite it with the corresponding dts. Call FifoGet
         * first in order to avoid a gap if buffers are released
         * due to an invalid format or a preroll */
        int64_t forced_ts = timestamp_FifoGet(p_sys->u.video.timestamp_fifo);

        /* p_pic is only allocated once b_has_format is set, so bail out
         * before dereferencing it if buffers arrive too early. */
        if (!p_sys->b_has_format)
        {
            msg_Warn(p_dec, "Buffers returned before output format is set, dropping frame");
            i_ret = p_sys->api->release_out(p_sys->api, out.u.buf.i_index, false);
            goto end;
        }

        /* Drop preroll frames (decoded only to prime the codec). */
        if (out.u.buf.i_ts <= p_sys->i_preroll_end)
        {
            i_ret = p_sys->api->release_out(p_sys->api, out.u.buf.i_index, false);
            goto end;
        }

        if (forced_ts == VLC_TS_INVALID)
            p_pic->date = out.u.buf.i_ts;
        else
            p_pic->date = forced_ts;

        if (p_sys->api->b_direct_rendering)
        {
            /* Zero-copy: hand the MediaCodec output buffer index to the
             * opaque picture; it is released when the picture is rendered. */
            picture_sys_t *p_picsys = p_pic->p_sys;
            p_picsys->pf_lock_pic = NULL;
            p_picsys->pf_unlock_pic = UnlockPicture;
            p_picsys->priv.hw.p_dec = p_dec;
            p_picsys->priv.hw.i_index = out.u.buf.i_index;
            p_picsys->priv.hw.b_valid = true;

            vlc_mutex_lock(get_android_opaque_mutex());
            InsertInflightPicture(p_dec, p_pic, out.u.buf.i_index);
            vlc_mutex_unlock(get_android_opaque_mutex());
        }
        else
        {
            /* Software path: copy the codec buffer into the VLC picture,
             * then release the codec buffer immediately. */
            unsigned int chroma_div;
            GetVlcChromaSizes(p_dec->fmt_out.i_codec,
                              p_dec->fmt_out.video.i_width,
                              p_dec->fmt_out.video.i_height,
                              NULL, NULL, &chroma_div);
            CopyOmxPicture(p_sys->u.video.i_pixel_format, p_pic,
                           p_sys->u.video.i_slice_height,
                           p_sys->u.video.i_stride,
                           (uint8_t *)out.u.buf.p_ptr, chroma_div,
                           &p_sys->u.video.ascd);

            /* BUGFIX: the error code was previously clobbered by an
             * unconditional "i_ret = 1" right after this check, silently
             * ignoring a failed release.  Propagate the error (p_pic is
             * released at end: since i_ret != 1), matching
             * Video_ProcessOutput's behavior. */
            if (p_sys->api->release_out(p_sys->api, out.u.buf.i_index, false))
            {
                i_ret = -1;
                goto end;
            }
        }
        i_ret = 1;
    }
    else
    {
        assert(out.type == MC_OUT_TYPE_CONF);
        p_sys->u.video.i_pixel_format = out.u.conf.video.pixel_format;
        ArchitectureSpecificCopyHooksDestroy(p_sys->u.video.i_pixel_format,
                                             &p_sys->u.video.ascd);

        const char *name = "unknown";
        if (p_sys->api->b_direct_rendering)
            p_dec->fmt_out.i_codec = VLC_CODEC_ANDROID_OPAQUE;
        else
        {
            if (!GetVlcChromaFormat(p_sys->u.video.i_pixel_format,
                                    &p_dec->fmt_out.i_codec, &name))
            {
                msg_Err(p_dec, "color-format not recognized");
                i_ret = -1;
                goto end;
            }
        }

        /* NOTE(review): informational message logged at error level —
         * probably intended to be msg_Dbg; confirm before changing. */
        msg_Err(p_dec, "output: %d %s, %dx%d stride %d %d, crop %d %d %d %d",
                p_sys->u.video.i_pixel_format, name,
                out.u.conf.video.width, out.u.conf.video.height,
                out.u.conf.video.stride, out.u.conf.video.slice_height,
                out.u.conf.video.crop_left, out.u.conf.video.crop_top,
                out.u.conf.video.crop_right, out.u.conf.video.crop_bottom);

        /* Prefer the crop rectangle; fall back to the full buffer size when
         * the crop is degenerate (<= 1 pixel in either dimension). */
        p_dec->fmt_out.video.i_width =
            out.u.conf.video.crop_right + 1 - out.u.conf.video.crop_left;
        p_dec->fmt_out.video.i_height =
            out.u.conf.video.crop_bottom + 1 - out.u.conf.video.crop_top;
        if (p_dec->fmt_out.video.i_width <= 1
         || p_dec->fmt_out.video.i_height <= 1)
        {
            p_dec->fmt_out.video.i_width = out.u.conf.video.width;
            p_dec->fmt_out.video.i_height = out.u.conf.video.height;
        }
        p_dec->fmt_out.video.i_visible_width = p_dec->fmt_out.video.i_width;
        p_dec->fmt_out.video.i_visible_height = p_dec->fmt_out.video.i_height;

        p_sys->u.video.i_stride = out.u.conf.video.stride;
        p_sys->u.video.i_slice_height = out.u.conf.video.slice_height;
        if (p_sys->u.video.i_stride <= 0)
            p_sys->u.video.i_stride = out.u.conf.video.width;
        if (p_sys->u.video.i_slice_height <= 0)
            p_sys->u.video.i_slice_height = out.u.conf.video.height;

        ArchitectureSpecificCopyHooks(p_dec, out.u.conf.video.pixel_format,
                                      out.u.conf.video.slice_height,
                                      p_sys->u.video.i_stride,
                                      &p_sys->u.video.ascd);
        /* Device quirk: TI packed semi-planar reports slice height
         * including the top crop. */
        if (p_sys->u.video.i_pixel_format == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar)
            p_sys->u.video.i_slice_height -= out.u.conf.video.crop_top/2;
        if (IgnoreOmxDecoderPadding(p_sys->psz_name))
        {
            p_sys->u.video.i_slice_height = 0;
            p_sys->u.video.i_stride = p_dec->fmt_out.video.i_width;
        }

        p_sys->b_update_format = true;
        p_sys->b_has_format = true;
        i_ret = 0;
    }

end:
    /* The pre-allocated picture is returned only on success (i_ret == 1);
     * otherwise it is released here. */
    if (p_pic)
    {
        if (i_ret == 1)
            *pp_out_pic = p_pic;
        else
            picture_Release(p_pic);
    }
    return i_ret;
}
/*****************************************************************************
 * PrintOmx: print component summary
 *****************************************************************************/
/**
 * Dump a summary of an OMX component's ports to the debug log.
 *
 * \param p_dec       decoder used for logging
 * \param omx_handle  handle of the OMX component to inspect
 * \param i_port      specific port index to print, or OMX_ALL for every port
 */
void PrintOmx(decoder_t *p_dec, OMX_HANDLETYPE omx_handle, OMX_U32 i_port)
{
    OMX_PARAM_PORTDEFINITIONTYPE definition;
    OMX_PORT_PARAM_TYPE param;
    OMX_ERRORTYPE omx_error;
    unsigned int i, j;

    /* Find the input / output ports */
    OMX_INIT_STRUCTURE(param);
    OMX_INIT_STRUCTURE(definition);

    /* Iterate the three port-init domains: audio (i==0), image (i==1),
     * video (i==2) — the OMX_IndexParam*Init indices are consecutive. */
    for(i = 0; i < 3; i++)
    {
        /* BUGFIX: "&param" had been mangled into the mojibake "¶m"
         * (HTML-entity corruption of "&para;m"), which is not valid C. */
        omx_error = OMX_GetParameter(omx_handle, OMX_IndexParamAudioInit + i,
                                     &param);
        if(omx_error != OMX_ErrorNone)
            continue;

        if(i_port == OMX_ALL)
            msg_Dbg( p_dec, "found %i %s ports", (int)param.nPorts,
                     i == 0 ? "audio" : i == 1 ? "image" : "video" );

        for(j = 0; j < param.nPorts; j++)
        {
            unsigned int i_samplerate, i_bitrate;
            unsigned int i_bitspersample, i_blockalign;
            uint8_t i_channels;
            OmxFormatParam format_param;
            vlc_fourcc_t i_fourcc;
            const char *psz_name;
            OMX_CONFIG_RECTTYPE crop_rect;

            if(i_port != OMX_ALL && i_port != param.nStartPortNumber + j)
                continue;

            /* Get port definition */
            definition.nPortIndex = param.nStartPortNumber + j;
            omx_error = OMX_GetParameter(omx_handle,
                                         OMX_IndexParamPortDefinition,
                                         &definition);
            if(omx_error != OMX_ErrorNone)
                continue;

            OMX_PARAM_U32TYPE u32param;
            OMX_INIT_STRUCTURE(u32param);
            u32param.nPortIndex = param.nStartPortNumber + j;
            /* Best effort: failure leaves u32param zero-initialized. */
            omx_error = OMX_GetParameter(omx_handle,
                                         OMX_IndexParamNumAvailableStreams,
                                         (OMX_PTR)&u32param);

            msg_Dbg( p_dec, "-> %s %i (%i streams) (%i:%i:%i buffers) (%i,%i) %s",
                     definition.eDir == OMX_DirOutput ? "output" : "input",
                     (int)definition.nPortIndex,
                     (int)u32param.nU32,
                     (int)definition.nBufferCountActual,
                     (int)definition.nBufferCountMin,
                     (int)definition.nBufferSize,
                     (int)definition.bBuffersContiguous,
                     (int)definition.nBufferAlignment,
                     definition.bEnabled ? "enabled" : "disabled" );

            switch(definition.eDomain)
            {
            case OMX_PortDomainVideo:
                if(definition.format.video.eCompressionFormat)
                    GetVlcVideoFormat( definition.format.video.eCompressionFormat,
                                       &i_fourcc, &psz_name );
                else
                    GetVlcChromaFormat( definition.format.video.eColorFormat,
                                        &i_fourcc, &psz_name );

                OMX_INIT_STRUCTURE(crop_rect);
                crop_rect.nPortIndex = definition.nPortIndex;
                omx_error = OMX_GetConfig(omx_handle,
                                          OMX_IndexConfigCommonOutputCrop,
                                          &crop_rect);
                if (omx_error != OMX_ErrorNone)
                {
                    /* No crop config: assume the full frame. */
                    crop_rect.nLeft = crop_rect.nTop = 0;
                    crop_rect.nWidth  = definition.format.video.nFrameWidth;
                    crop_rect.nHeight = definition.format.video.nFrameHeight;
                }

                msg_Dbg( p_dec, "  -> video %s %ix%i@%.2f (%i,%i) (%i,%i) (%i,%i,%i,%i)",
                         psz_name,
                         (int)definition.format.video.nFrameWidth,
                         (int)definition.format.video.nFrameHeight,
                         (float)definition.format.video.xFramerate/(float)(1<<16),
                         (int)definition.format.video.eCompressionFormat,
                         (int)definition.format.video.eColorFormat,
                         (int)definition.format.video.nStride,
                         (int)definition.format.video.nSliceHeight,
                         (int)crop_rect.nLeft, (int)crop_rect.nTop,
                         (int)crop_rect.nWidth, (int)crop_rect.nHeight);
                break;

            case OMX_PortDomainAudio:
                OmxToVlcAudioFormat( definition.format.audio.eEncoding,
                                     &i_fourcc, &psz_name );

                GetAudioParameters(omx_handle, &format_param,
                                   definition.nPortIndex,
                                   definition.format.audio.eEncoding,
                                   &i_channels, &i_samplerate, &i_bitrate,
                                   &i_bitspersample, &i_blockalign);

                msg_Dbg( p_dec, "  -> audio %s (%i) %i,%i,%i,%i,%i", psz_name,
                         (int)definition.format.audio.eEncoding,
                         i_channels, i_samplerate, i_bitrate, i_bitspersample,
                         i_blockalign);
                break;

            default:
                break;
            }
        }
    }
}
/**
 * Convert one MediaCodec output item (already fetched by the caller) into a
 * VLC picture or an output-format update.
 *
 * Returns 1 when *pp_out_pic holds a new decoded picture, 0 when no picture
 * is produced (format config handled, frame dropped, or release_out
 * succeeded on a dropped buffer), and -1 on error.  pp_out_block must be
 * NULL (video path only).
 */
static int Video_ProcessOutput(decoder_t *p_dec, mc_api_out *p_out,
                               picture_t **pp_out_pic, block_t **pp_out_block)
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    assert(pp_out_pic && !pp_out_block);

    if (p_out->type == MC_OUT_TYPE_BUF)
    {
        picture_t *p_pic = NULL;

        /* Use the aspect ratio provided by the input (ie read from packetizer).
         * Don't check the current value of the aspect ratio in fmt_out, since we
         * want to allow changes in it to propagate. */
        if (p_dec->fmt_in.video.i_sar_num != 0
         && p_dec->fmt_in.video.i_sar_den != 0
         && (p_dec->fmt_out.video.i_sar_num != p_dec->fmt_in.video.i_sar_num ||
             p_dec->fmt_out.video.i_sar_den != p_dec->fmt_in.video.i_sar_den))
        {
            p_dec->fmt_out.video.i_sar_num = p_dec->fmt_in.video.i_sar_num;
            p_dec->fmt_out.video.i_sar_den = p_dec->fmt_in.video.i_sar_den;
            p_sys->b_update_format = true;
        }

        if (p_sys->b_update_format)
        {
            p_sys->b_update_format = false;
            if (decoder_UpdateVideoFormat(p_dec) != 0)
            {
                msg_Err(p_dec, "decoder_UpdateVideoFormat failed");
                /* Give the buffer back to the codec before failing. */
                p_sys->api->release_out(p_sys->api, p_out->u.buf.i_index, false);
                return -1;
            }
        }

        /* If the oldest input block had no PTS, the timestamp of
         * the frame returned by MediaCodec might be wrong so we
         * overwrite it with the corresponding dts. Call FifoGet
         * first in order to avoid a gap if buffers are released
         * due to an invalid format or a preroll */
        int64_t forced_ts = timestamp_FifoGet(p_sys->u.video.timestamp_fifo);

        if (!p_sys->b_has_format)
        {
            msg_Warn(p_dec, "Buffers returned before output format is set, dropping frame");
            /* Drop: release_out's return value becomes this call's result. */
            return p_sys->api->release_out(p_sys->api, p_out->u.buf.i_index, false);
        }

        /* Drop preroll frames (decoded only to prime the codec). */
        if (p_out->u.buf.i_ts <= p_sys->i_preroll_end)
            return p_sys->api->release_out(p_sys->api, p_out->u.buf.i_index, false);

        p_pic = decoder_NewPicture(p_dec);
        if (!p_pic)
        {
            msg_Warn(p_dec, "NewPicture failed");
            return p_sys->api->release_out(p_sys->api, p_out->u.buf.i_index, false);
        }

        if (forced_ts == VLC_TS_INVALID)
            p_pic->date = p_out->u.buf.i_ts;
        else
            p_pic->date = forced_ts;

        if (p_sys->api->b_direct_rendering)
        {
            /* Zero-copy: hand the MediaCodec output buffer index to the
             * opaque picture; it is released when the picture is rendered. */
            picture_sys_t *p_picsys = p_pic->p_sys;
            p_picsys->pf_lock_pic = NULL;
            p_picsys->pf_unlock_pic = UnlockPicture;
            p_picsys->priv.hw.p_dec = p_dec;
            p_picsys->priv.hw.i_index = p_out->u.buf.i_index;
            p_picsys->priv.hw.b_valid = true;

            vlc_mutex_lock(get_android_opaque_mutex());
            InsertInflightPicture(p_dec, p_pic, p_out->u.buf.i_index);
            vlc_mutex_unlock(get_android_opaque_mutex());
        }
        else
        {
            /* Software path: copy the codec buffer into the VLC picture,
             * then release the codec buffer immediately. */
            unsigned int chroma_div;
            GetVlcChromaSizes(p_dec->fmt_out.i_codec,
                              p_dec->fmt_out.video.i_width,
                              p_dec->fmt_out.video.i_height,
                              NULL, NULL, &chroma_div);
            CopyOmxPicture(p_sys->u.video.i_pixel_format, p_pic,
                           p_sys->u.video.i_slice_height,
                           p_sys->u.video.i_stride,
                           (uint8_t *)p_out->u.buf.p_ptr, chroma_div,
                           &p_sys->u.video.ascd);

            if (p_sys->api->release_out(p_sys->api, p_out->u.buf.i_index, false))
            {
                picture_Release(p_pic);
                return -1;
            }
        }
        assert(!(*pp_out_pic));
        *pp_out_pic = p_pic;
        return 1;
    }
    else
    {
        assert(p_out->type == MC_OUT_TYPE_CONF);
        p_sys->u.video.i_pixel_format = p_out->u.conf.video.pixel_format;
        ArchitectureSpecificCopyHooksDestroy(p_sys->u.video.i_pixel_format,
                                             &p_sys->u.video.ascd);

        const char *name = "unknown";
        if (p_sys->api->b_direct_rendering)
            p_dec->fmt_out.i_codec = VLC_CODEC_ANDROID_OPAQUE;
        else
        {
            if (!GetVlcChromaFormat(p_sys->u.video.i_pixel_format,
                                    &p_dec->fmt_out.i_codec, &name))
            {
                msg_Err(p_dec, "color-format not recognized");
                return -1;
            }
        }

        /* NOTE(review): informational message logged at error level —
         * probably intended to be msg_Dbg; confirm before changing. */
        msg_Err(p_dec, "output: %d %s, %dx%d stride %d %d, crop %d %d %d %d",
                p_sys->u.video.i_pixel_format, name,
                p_out->u.conf.video.width, p_out->u.conf.video.height,
                p_out->u.conf.video.stride, p_out->u.conf.video.slice_height,
                p_out->u.conf.video.crop_left, p_out->u.conf.video.crop_top,
                p_out->u.conf.video.crop_right, p_out->u.conf.video.crop_bottom);

        /* Prefer the crop rectangle; fall back to the full buffer size when
         * the crop is degenerate (<= 1 pixel in either dimension). */
        p_dec->fmt_out.video.i_width =
            p_out->u.conf.video.crop_right + 1 - p_out->u.conf.video.crop_left;
        p_dec->fmt_out.video.i_height =
            p_out->u.conf.video.crop_bottom + 1 - p_out->u.conf.video.crop_top;
        if (p_dec->fmt_out.video.i_width <= 1
         || p_dec->fmt_out.video.i_height <= 1)
        {
            p_dec->fmt_out.video.i_width = p_out->u.conf.video.width;
            p_dec->fmt_out.video.i_height = p_out->u.conf.video.height;
        }
        p_dec->fmt_out.video.i_visible_width = p_dec->fmt_out.video.i_width;
        p_dec->fmt_out.video.i_visible_height = p_dec->fmt_out.video.i_height;

        p_sys->u.video.i_stride = p_out->u.conf.video.stride;
        p_sys->u.video.i_slice_height = p_out->u.conf.video.slice_height;
        if (p_sys->u.video.i_stride <= 0)
            p_sys->u.video.i_stride = p_out->u.conf.video.width;
        if (p_sys->u.video.i_slice_height <= 0)
            p_sys->u.video.i_slice_height = p_out->u.conf.video.height;

        ArchitectureSpecificCopyHooks(p_dec, p_out->u.conf.video.pixel_format,
                                      p_out->u.conf.video.slice_height,
                                      p_sys->u.video.i_stride,
                                      &p_sys->u.video.ascd);
        /* Device quirk: TI packed semi-planar reports slice height
         * including the top crop. */
        if (p_sys->u.video.i_pixel_format == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar)
            p_sys->u.video.i_slice_height -= p_out->u.conf.video.crop_top/2;
        if ((p_sys->i_quirks & OMXCODEC_VIDEO_QUIRKS_IGNORE_PADDING))
        {
            p_sys->u.video.i_slice_height = 0;
            p_sys->u.video.i_stride = p_dec->fmt_out.video.i_width;
        }

        p_sys->b_update_format = true;
        p_sys->b_has_format = true;
        return 0;
    }
}