/**
 * Parse the NUV (MythTV) file header and create the video/audio streams.
 *
 * @param s  format context; priv_data must be a NUVContext
 * @param ap unused
 * @return 0 on success, a negative AVERROR code on failure
 */
static int nuv_header(AVFormatContext *s, AVFormatParameters *ap)
{
    NUVContext *ctx = (NUVContext *)s->priv_data;
    ByteIOContext *pb = &s->pb;
    char id_string[12], version_string[5];
    double aspect, fps;
    int is_mythtv, width, height, v_packs, a_packs;
    int stream_nr = 0;
    AVStream *vst = NULL, *ast = NULL;

    get_buffer(pb, id_string, 12);
    is_mythtv = !memcmp(id_string, "MythTVVideo", 12);
    get_buffer(pb, version_string, 5);
    url_fskip(pb, 3); // padding

    width  = get_le32(pb);
    height = get_le32(pb);
    get_le32(pb); // unused, "desiredwidth"
    get_le32(pb); // unused, "desiredheight"
    get_byte(pb); // 'P' == progressive, 'I' == interlaced
    url_fskip(pb, 3); // padding
    aspect = av_int2dbl(get_le64(pb));
    fps    = av_int2dbl(get_le64(pb));

    // number of packets per stream type, -1 means unknown, e.g. streaming
    v_packs = get_le32(pb);
    a_packs = get_le32(pb);
    get_le32(pb); // text
    get_le32(pb); // keyframe distance (?)

    if (v_packs) {
        ctx->v_id = stream_nr++;
        vst = av_new_stream(s, ctx->v_id);
        if (!vst) // fix: av_new_stream can fail on OOM; was dereferenced unchecked
            return AVERROR(ENOMEM);
        vst->codec->codec_type = CODEC_TYPE_VIDEO;
        vst->codec->codec_id   = CODEC_ID_NUV;
        vst->codec->codec_tag  = MKTAG('R', 'J', 'P', 'G');
        vst->codec->width  = width;
        vst->codec->height = height;
        vst->codec->bits_per_sample = 10;
        vst->codec->sample_aspect_ratio = av_d2q(aspect, 10000);
        // NOTE(review): a zero fps from the file still yields a degenerate
        // frame rate here — confirm upstream demuxers guard against this
        vst->r_frame_rate = av_d2q(1.0 / fps, 10000);
        av_set_pts_info(vst, 32, 1, 1000);
    } else
        ctx->v_id = -1;

    if (a_packs) {
        ctx->a_id = stream_nr++;
        ast = av_new_stream(s, ctx->a_id);
        if (!ast) // fix: same OOM check as for the video stream
            return AVERROR(ENOMEM);
        ast->codec->codec_type  = CODEC_TYPE_AUDIO;
        ast->codec->codec_id    = CODEC_ID_PCM_S16LE;
        ast->codec->channels    = 2;
        ast->codec->sample_rate = 44100;
        ast->codec->bit_rate    = 2 * 2 * 44100 * 8;
        ast->codec->block_align = 2 * 2; // stereo s16: 2 channels * 2 bytes
        ast->codec->bits_per_sample = 16;
        av_set_pts_info(ast, 32, 1, 1000);
    } else
        ctx->a_id = -1;

    get_codec_data(pb, vst, ast, is_mythtv);
    return 0;
}
/**
 * Initialize the aspect filter from its argument string.
 *
 * Accepts "num:den" (reduced by gcd) or a single floating point ratio
 * (approximated with av_d2q). Returns -1 when the string parses as
 * neither. A zero denominator is normalized to the 0/1 "unset" value.
 */
static av_cold int init(AVFilterContext *ctx, const char *args, void *opaque)
{
    AspectContext *aspect = ctx->priv;

    if (args) {
        if (sscanf(args, "%d:%d", &aspect->aspect.num, &aspect->aspect.den) == 2) {
            /* fraction form: bring it into lowest terms */
            int64_t common = av_gcd(FFABS(aspect->aspect.num),
                                    FFABS(aspect->aspect.den));
            if (common) {
                aspect->aspect.num /= common;
                aspect->aspect.den /= common;
            }
        } else {
            double dval;
            if (sscanf(args, "%lf", &dval) != 1)
                return -1;
            aspect->aspect = av_d2q(dval, 100);
        }
    }

    if (!aspect->aspect.den)
        aspect->aspect = (AVRational) {0, 1};
    return 0;
}
/**
 * Read the MicroDVD header: probe the first few lines for an FPS
 * declaration ("{1}{}23.976" style) and a "{DEFAULT}{}" style line that
 * becomes the subtitle codec extradata.
 *
 * @return 0 on success, a negative AVERROR code on failure
 */
static int microdvd_read_header(AVFormatContext *s)
{
    AVRational pts_info = (AVRational){ 2997, 125 }; /* default: 23.976 fps */
    MicroDVDContext *microdvd = s->priv_data;
    AVStream *st = avformat_new_stream(s, NULL);
    int i, frame;
    double fps;
    char c;

    if (!st)
        return AVERROR(ENOMEM);
    for (i = 0; i < FF_ARRAY_ELEMS(microdvd->lines); i++) {
        microdvd->pos[i] = avio_tell(s->pb);
        ff_get_line(s->pb, microdvd->lines[i], sizeof(microdvd->lines[i]));
        /* "{frame}{}fps" or "{frame}{frame}fps" on one of the first lines
         * overrides the default timebase */
        if ((sscanf(microdvd->lines[i], "{%d}{}%6lf",    &frame, &fps) == 2 ||
             sscanf(microdvd->lines[i], "{%d}{%*d}%6lf", &frame, &fps) == 2) &&
            frame <= 1 && fps > 3 && fps < 100)
            pts_info = av_d2q(fps, 100000);
        if (sscanf(microdvd->lines[i], "{DEFAULT}{}%c", &c) == 1) {
            av_free(st->codec->extradata); // fix: don't leak a previous {DEFAULT} line
            st->codec->extradata = av_strdup(microdvd->lines[i] + 11);
            if (!st->codec->extradata) // fix: av_strdup can fail on OOM
                return AVERROR(ENOMEM);
            st->codec->extradata_size = strlen(st->codec->extradata);
            i--; // a style line does not consume one of the probed slots
        }
    }
    avpriv_set_pts_info(st, 64, pts_info.den, pts_info.num);
    st->codec->codec_type = AVMEDIA_TYPE_SUBTITLE;
    st->codec->codec_id   = CODEC_ID_MICRODVD;
    return 0;
}
/**
 * Parse the 80-byte fixed header, validate it, and create the single
 * video stream (frame rate stored as a big-endian IEEE float).
 *
 * @return 0 on success, a negative AVERROR code on malformed input
 */
static int read_header(AVFormatContext *s)
{
    AVIOContext *pb = s->pb;
    AVStream *st;
    AVRational fps;
    uint32_t chunk_size;

    avio_skip(pb, 4);

    chunk_size = avio_rb32(pb);
    if (chunk_size != 80)
        return AVERROR(EIO);
    avio_skip(pb, 20);

    st = avformat_new_stream(s, 0);
    if (!st)
        return AVERROR(ENOMEM);

    st->need_parsing = AVSTREAM_PARSE_HEADERS;
    st->start_time = 0;
    st->nb_frames  =
    st->duration   = avio_rb32(pb);
    fps = av_d2q(av_int2float(avio_rb32(pb)), INT_MAX);
    /* fix: the float from the file may be 0, negative, NaN or inf, which
     * would produce an invalid (non-positive) timebase below */
    if (fps.num <= 0 || fps.den <= 0)
        return AVERROR_INVALIDDATA;
    st->codec->width  = avio_rb32(pb);
    st->codec->height = avio_rb32(pb);
    avio_skip(pb, 12);
    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_tag  = avio_rb32(pb);
    st->codec->codec_id   = ff_codec_get_id(ff_codec_bmp_tags,
                                            st->codec->codec_tag);
    avpriv_set_pts_info(st, 64, fps.den, fps.num);
    avio_skip(pb, 20);

    return 0;
}
/**
 * Set the numeric option @p name on @p obj to the value num * intnum / den.
 *
 * The split representation lets callers pass exact integers (num = 1,
 * den = 1, value in intnum) as well as doubles and rationals.
 *
 * @return the option that was set, or NULL if the option does not exist,
 *         is not settable (offset <= 0), the value is out of the option's
 *         [min, max] range, or the option type is not numeric.
 */
static const AVOption *av_set_number(void *obj, const char *name, double num, int den, int64_t intnum){
    const AVOption *o= av_find_opt(obj, name, NULL, 0, 0);
    void *dst;
    if(!o || o->offset<=0)
        return NULL;
    /* range check is done on the product so exact int64 values survive */
    if(o->max*den < num*intnum || o->min*den > num*intnum) {
        av_log(NULL, AV_LOG_ERROR, "Value %lf for parameter '%s' out of range.\n", num, name);
        return NULL;
    }

    dst= ((uint8_t*)obj) + o->offset;

    switch(o->type){
    case FF_OPT_TYPE_FLAGS:
    /* integer types: round num/den, then scale by the exact intnum */
    case FF_OPT_TYPE_INT:   *(int       *)dst= llrint(num/den)*intnum; break;
    case FF_OPT_TYPE_INT64: *(int64_t   *)dst= llrint(num/den)*intnum; break;
    case FF_OPT_TYPE_FLOAT: *(float     *)dst= num*intnum/den;         break;
    case FF_OPT_TYPE_DOUBLE:*(double    *)dst= num*intnum/den;         break;
    case FF_OPT_TYPE_RATIONAL:
        /* an integral num can be stored exactly; otherwise approximate */
        if((int)num == num) *(AVRational*)dst= (AVRational){num*intnum, den};
        else                *(AVRational*)dst= av_d2q(num*intnum/den, 1<<24);
        break;
    default:
        return NULL;
    }
    return o;
}
/**
 * Configure the output link: evaluate the user-supplied timebase
 * expression and apply it together with the configured frame size.
 *
 * @return 0 on success, a negative AVERROR code if the expression is
 *         invalid or evaluates to a non-positive timebase.
 */
static int config_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    NullContext *s = ctx->priv;
    double res;
    AVRational tb;
    int ret;

    /* constants available to the timebase expression */
    s->var_values[VAR_E]    = M_E;
    s->var_values[VAR_PHI]  = M_PHI;
    s->var_values[VAR_PI]   = M_PI;
    s->var_values[VAR_AVTB] = av_q2d(AV_TIME_BASE_Q);

    ret = av_expr_parse_and_eval(&res, s->tb_expr, var_names, s->var_values,
                                 NULL, NULL, NULL, NULL, NULL, 0, NULL);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid expression '%s' for timebase.\n", s->tb_expr);
        return ret;
    }

    tb = av_d2q(res, INT_MAX);
    if (tb.num <= 0 || tb.den <= 0) {
        av_log(ctx, AV_LOG_ERROR,
               "Invalid non-positive value for the timebase %d/%d.\n",
               tb.num, tb.den);
        return AVERROR(EINVAL);
    }

    outlink->w         = s->w;
    outlink->h         = s->h;
    outlink->time_base = tb;

    av_log(outlink->src, AV_LOG_VERBOSE, "w:%d h:%d tb:%d/%d\n",
           s->w, s->h, tb.num, tb.den);

    return 0;
}
/**
 * Parse @p str as a ratio: either a literal "num:den" pair or any
 * numeric expression understood by av_expr_parse_and_eval(), which is
 * then approximated by a rational with denominator bounded by @p max.
 * The result is reduced by its gcd before returning.
 *
 * @return 0 on success, a negative AVERROR code from expression parsing
 */
int av_parse_ratio(AVRational *q, const char *str, int max,
                   int log_offset, void *log_ctx)
{
    char sink;
    int64_t f;

    if (sscanf(str, "%d:%d%c", &q->num, &q->den, &sink) != 2) {
        /* not a plain num:den pair — evaluate it as an expression */
        double val;
        int err = av_expr_parse_and_eval(&val, str, NULL, NULL, NULL, NULL,
                                         NULL, NULL, NULL, log_offset, log_ctx);
        if (err < 0)
            return err;
        *q = av_d2q(val, max);
    }

    f = av_gcd(FFABS(q->num), FFABS(q->den));
    if (f) {
        q->num /= f;
        q->den /= f;
    }
    return 0;
}
/**
 * Evaluate the configured aspect ratio expression against the input
 * link's geometry (w, h, a, sar, dar, hsub, vsub) and store the result
 * as a rational in @p aspect_ratio.
 *
 * @return 0 on success, a negative AVERROR code if evaluation fails
 */
static int get_aspect_ratio(AVFilterLink *inlink, AVRational *aspect_ratio)
{
    AVFilterContext *ctx = inlink->dst;
    AspectContext *s = inlink->dst->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    double vars[VARS_NB], result;
    int err;

    /* constants and input geometry exposed to the expression */
    vars[VAR_PI]  = M_PI;
    vars[VAR_PHI] = M_PHI;
    vars[VAR_E]   = M_E;
    vars[VAR_W]   = inlink->w;
    vars[VAR_H]   = inlink->h;
    vars[VAR_A]   = (double) inlink->w / inlink->h;
    vars[VAR_SAR] = inlink->sample_aspect_ratio.num ?
        (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
    vars[VAR_DAR]  = vars[VAR_A] * vars[VAR_SAR];
    vars[VAR_HSUB] = 1 << desc->log2_chroma_w;
    vars[VAR_VSUB] = 1 << desc->log2_chroma_h;

    /* evaluate new aspect ratio*/
    err = av_expr_parse_and_eval(&result, s->ratio_expr, var_names, vars,
                                 NULL, NULL, NULL, NULL, NULL, 0, ctx);
    if (err < 0) {
        av_log(NULL, AV_LOG_ERROR,
               "Error when evaluating the expression '%s'\n", s->ratio_expr);
        return err;
    }

    *aspect_ratio = av_d2q(result, INT_MAX);
    return 0;
}
/**
 * Parse @p arg as a video frame rate: an abbreviation from
 * video_rate_abbrs, a "num/den" or "num:den" fraction, or a plain
 * floating point number (approximated with denominator <= 1001000).
 *
 * @return 0 on success, AVERROR(EINVAL) if the result is not a
 *         strictly positive rational.
 */
int av_parse_video_rate(AVRational *rate, const char *arg)
{
    int i;
    int n = FF_ARRAY_ELEMS(video_rate_abbrs);
    char *cp;

    /* First, we check our abbreviation table */
    for (i = 0; i < n; ++i)
        if (!strcmp(video_rate_abbrs[i].abbr, arg)) {
            *rate = video_rate_abbrs[i].rate;
            return 0;
        }

    /* Then, we try to parse it as fraction */
    cp = strchr(arg, '/');
    if (!cp)
        cp = strchr(arg, ':');
    if (cp) {
        char *cpp;
        rate->num = strtol(arg, &cpp, 10);
        /* NOTE(review): the denominator is parsed when digits were
         * consumed (cpp != arg) or when the separator is the very first
         * character (cpp == cp, numerator left as 0) — confirm the
         * second case is intended and not just tolerated */
        if (cpp != arg || cpp == cp)
            rate->den = strtol(cp+1, &cpp, 10);
        else
            rate->num = 0; /* garbage before the separator: force failure below */
    } else {
        /* Finally we give up and parse it as double */
        *rate = av_d2q(strtod(arg, 0), 1001000);
    }
    if (rate->num <= 0 || rate->den <= 0)
        return AVERROR(EINVAL);
    return 0;
}
/**
 * Configure the output link of the settb filter: propagate the input
 * frame size and set the output timebase from the user expression,
 * which may reference AVTB and the input timebase (intb).
 *
 * @return 0 on success, a negative AVERROR code if the expression is
 *         invalid or yields a non-positive timebase.
 */
static int config_output_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    SetTBContext *settb = ctx->priv;
    AVFilterLink *inlink = ctx->inputs[0];
    double res;
    AVRational new_tb;
    int ret;

    /* constants the user expression may reference */
    settb->var_values[VAR_AVTB] = av_q2d(AV_TIME_BASE_Q);
    settb->var_values[VAR_INTB] = av_q2d(inlink->time_base);

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    ret = av_expr_parse_and_eval(&res, settb->tb_expr, var_names, settb->var_values,
                                 NULL, NULL, NULL, NULL, NULL, 0, NULL);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid expression '%s' for timebase.\n", settb->tb_expr);
        return ret;
    }

    new_tb = av_d2q(res, INT_MAX);
    if (new_tb.num <= 0 || new_tb.den <= 0) {
        av_log(ctx, AV_LOG_ERROR,
               "Invalid non-positive values for the timebase num:%d or den:%d.\n",
               new_tb.num, new_tb.den);
        return AVERROR(EINVAL);
    }

    outlink->time_base = new_tb;
    av_log(outlink->src, AV_LOG_INFO, "tb:%d/%d -> tb:%d/%d\n",
           inlink->time_base.num, inlink->time_base.den,
           outlink->time_base.num, outlink->time_base.den);

    return 0;
}
/**
 * Initialize the aspect filter from its argument string.
 *
 * Accepts "num:den" or a single floating point number; any trailing
 * garbage or a non-positive ratio is rejected with AVERROR(EINVAL).
 * A zero denominator is normalized to the 0/1 "unset" value.
 */
static av_cold int init(AVFilterContext *ctx, const char *args)
{
    AspectContext *aspect = ctx->priv;
    char trailing = 0;

    if (args) {
        double dval;
        int64_t g;

        /* try "num:den" first; fall back to a plain number */
        if (sscanf(args, "%d:%d%c", &aspect->aspect.num, &aspect->aspect.den,
                   &trailing) != 2 &&
            sscanf(args, "%lf%c", &dval, &trailing) == 1)
            aspect->aspect = av_d2q(dval, 100);

        /* trailing is set when extra characters followed either form */
        if (trailing || aspect->aspect.num <= 0 || aspect->aspect.den <= 0) {
            av_log(ctx, AV_LOG_ERROR,
                   "Invalid string '%s' for aspect ratio.\n", args);
            return AVERROR(EINVAL);
        }

        g = av_gcd(FFABS(aspect->aspect.num), FFABS(aspect->aspect.den));
        if (g) {
            aspect->aspect.num /= g;
            aspect->aspect.den /= g;
        }
    }

    if (!aspect->aspect.den)
        aspect->aspect = (AVRational) {0, 1};
    av_log(ctx, AV_LOG_VERBOSE, "a:%d/%d\n", aspect->aspect.num, aspect->aspect.den);
    return 0;
}
/**
 * Set the numeric option @p name on @p obj to the value num * intnum / den.
 *
 * @return the option that was set, or NULL if the option does not exist,
 *         is not settable (offset <= 0), the value is out of range, or
 *         the option type is not numeric.
 */
static AVOption *av_set_number(void *obj, const char *name, double num, int den, int64_t intnum)
{
    AVOption *o = find_opt(obj, name, NULL);
    void *dst;

    if (!o || o->offset <= 0)
        return NULL;
    /* reject values outside the option's declared [min, max] range */
    if (o->max * den < num * intnum || o->min * den > num * intnum)
        return NULL;

    dst = ((uint8_t *)obj) + o->offset;

    switch (o->type) {
    case FF_OPT_TYPE_FLAGS:
    case FF_OPT_TYPE_INT:    *(int *)dst     = lrintf(num / den) * intnum; break;
    case FF_OPT_TYPE_INT64:  *(int64_t *)dst = lrintf(num / den) * intnum; break;
    case FF_OPT_TYPE_FLOAT:  *(float *)dst   = num * intnum / den;         break;
    case FF_OPT_TYPE_DOUBLE: *(double *)dst  = num * intnum / den;         break;
    case FF_OPT_TYPE_RATIONAL:
        /* an integral num is stored exactly; otherwise approximate */
        if ((int)num == num)
            *(AVRational *)dst = (AVRational){num * intnum, den};
        else
            *(AVRational *)dst = av_d2q(num * intnum / den, 1 << 24);
        break; /* fix: was missing, fell through to default and returned
                * NULL even though the rational had been set successfully */
    default:
        return NULL;
    }
    return o;
}
/** Set the values of the AVCodecContext or AVFormatContext structure.
 * They are set to the defaults specified in the according AVOption options
 * array default_val field.
 *
 * Only numeric option types are handled: default_val is stored as a
 * double, so strings cannot carry a default here.
 *
 * @param s AVCodecContext or AVFormatContext for which the defaults will be set
 */
void av_opt_set_defaults(void *s)
{
    AVOption *opt = NULL;

    /* walk every option registered on the context and apply its default */
    while ((opt = av_next_option(s, opt)) != NULL) {
        switch (opt->type) {
        case FF_OPT_TYPE_CONST:
            /* Nothing to be done here */
            break;
        case FF_OPT_TYPE_FLAGS:
        case FF_OPT_TYPE_INT: {
            int val;
            val = opt->default_val;
            av_set_int(s, opt->name, val);
        }
        break;
        case FF_OPT_TYPE_FLOAT: {
            double val;
            val = opt->default_val;
            av_set_double(s, opt->name, val);
        }
        break;
        case FF_OPT_TYPE_RATIONAL: {
            AVRational val;
            /* default_val is a double; approximate it as a rational */
            val = av_d2q(opt->default_val, INT_MAX);
            av_set_q(s, opt->name, val);
        }
        break;
        case FF_OPT_TYPE_STRING:
            /* Cannot set default for string as default_val is of type
             * double */
            break;
        default:
            av_log(s, AV_LOG_DEBUG, "AVOption type %d of option %s not implemented yet\n", opt->type, opt->name);
        }
    }
}
/**
 * Configure and open the audio encoder context stored in @p dst.
 *
 * @param dst        codec entry whose storage.audio context/codec are
 *                   already allocated and selected
 * @param channels   must be 2 (asserted)
 * @param samplerate must be in (0, 48000] (asserted)
 * @param abr        <= 10: quality index mapped to a bitrate;
 *                   > 10: taken verbatim as the bitrate in bit/s
 * @return true when the codec was opened; on failure the context is
 *         closed, both context and codec pointers are cleared, and
 *         false is returned.
 */
static bool default_acodec_setup(struct codec_ent* dst,
    unsigned channels, unsigned samplerate, unsigned abr)
{
    AVCodecContext* ctx = dst->storage.audio.context;
    AVCodec* codec = dst->storage.audio.codec;

    assert(channels == 2);
    assert(samplerate > 0 && samplerate <= 48000);
    assert(codec);

    ctx->channels       = channels;
    ctx->channel_layout = av_get_default_channel_layout(channels);
    ctx->sample_rate    = samplerate;
    /* one tick per sample; av_d2q approximates 1/samplerate */
    ctx->time_base      = av_d2q(1.0 / (double) samplerate, 1000000);
    /* take the codec's first advertised sample format */
    ctx->sample_fmt     = codec->sample_fmts[0];

/* kept for documentation, now we assume the swr_convert just
 * handles all sample formats for us,
 * if we prefer or require a certain on in the future, the following code
 * shows how:
    for (int i = 0; codec->sample_fmts[i] != -1 &&
        ctx->sample_fmt == AV_SAMPLE_FMT_NONE; i++){
        if (codec->sample_fmts[i] == AV_SAMPLE_FMT_S16 ||
            codec->sample_fmts[i] == AV_SAMPLE_FMT_FLTP)
            ctx->sample_fmt = codec->sample_fmts[i];
    }

    if (ctx->sample_fmt == AV_SAMPLE_FMT_NONE){
        LOG("(encoder) Couldn't find supported matching sample format for codec, giving up.\n");
        return false;
    }
*/

    /* rough quality estimate */
    /* abr 0..10 maps linearly to roughly 80..300 kbit/s */
    if (abr <= 10)
        ctx->bit_rate = 1024 * ( 320 - 240 * ((float)(11.0 - abr) / 11.0) );
    else
        ctx->bit_rate = abr;

    /* AAC may need this */
    ctx->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;

    LOG("(encode) audio encoder setup @ %d hz, requested quality: %d, "
        "got %d kbit/s using %s\n", samplerate, abr, ctx->bit_rate / 1000,
        codec->name);

    if (avcodec_open2(dst->storage.audio.context,
        dst->storage.audio.codec, NULL) != 0){
        /* opening failed: tear everything down so the caller sees a
         * consistent "no audio encoder" state */
        avcodec_close(dst->storage.audio.context);
        dst->storage.audio.context = NULL;
        dst->storage.audio.codec   = NULL;
        return false;
    }

    return true;
}
/**
 * Reconcile the decoder-reported image parameters with the container
 * aspect ratio and user overrides, and store the result in
 * d_video->fixed_format (the raw decoder values go to last_format).
 */
static void fix_image_params(struct dec_video *d_video,
                             struct mp_image_params *params)
{
    struct MPOpts *opts = d_video->opts;
    struct mp_image_params p = *params;
    struct mp_codec_params *c = d_video->header->codec;

    MP_VERBOSE(d_video, "Decoder format: %s\n", mp_image_params_to_str(params));

    // While mp_image_params normally always have to have d_w/d_h set, the
    // decoder signals unknown bitstream aspect ratio with both set to 0.
    float dec_aspect = p.p_w > 0 && p.p_h > 0 ? p.p_w / (float)p.p_h : 0;
    if (d_video->initial_decoder_aspect == 0)
        d_video->initial_decoder_aspect = dec_aspect;

    bool use_container = true;
    switch (opts->aspect_method) {
    case 0:
        // We normally prefer the container aspect, unless the decoder aspect
        // changes at least once.
        if (dec_aspect > 0 && d_video->initial_decoder_aspect != dec_aspect) {
            MP_VERBOSE(d_video, "Using bitstream aspect ratio.\n");
            // Even if the aspect switches back, don't use container aspect again.
            d_video->initial_decoder_aspect = -1;
            use_container = false;
        }
        break;
    case 1:
        use_container = false;
        break;
    }

    if (use_container && c->par_w > 0 && c->par_h) {
        MP_VERBOSE(d_video, "Using container aspect ratio.\n");
        p.p_w = c->par_w;
        p.p_h = c->par_h;
    }

    // User override: 0 forces square pixels, > 0 forces a display aspect.
    if (opts->movie_aspect >= 0) {
        MP_VERBOSE(d_video, "Forcing user-set aspect ratio.\n");
        if (opts->movie_aspect == 0) {
            p.p_w = p.p_h = 1;
        } else {
            AVRational a = av_d2q(opts->movie_aspect, INT_MAX);
            mp_image_params_set_dsize(&p, a.num, a.den);
        }
    }

    // Assume square pixels if no aspect ratio is set at all.
    if (p.p_w <= 0 || p.p_h <= 0)
        p.p_w = p.p_h = 1;

    // Detect colorspace from resolution.
    mp_image_params_guess_csp(&p);

    d_video->last_format  = *params;
    d_video->fixed_format = p;
}
/**
 * Read a string-encoded floating point field of @p size bytes and
 * convert it to a rational. Returns 0/0 when the string cannot be read.
 */
static AVRational var_read_float(AVIOContext *pb, int size)
{
    AVRational ret = { 0, 0 };
    char *str = var_read_string(pb, size);

    if (str) {
        ret = av_d2q(av_strtod(str, NULL), INT_MAX);
        av_free(str);
    }
    return ret;
}
/**
 * Fetch the option @p name from @p obj as a rational.
 *
 * An exact integer value (num == 1 and intnum fits in int) is returned
 * directly as intnum/den; anything else is approximated with av_d2q.
 */
AVRational av_get_q(void *obj, const char *name, AVOption **o_out)
{
    double num = 1;
    int den = 1;
    int64_t intnum = 1;

    av_get_number(obj, name, o_out, &num, &den, &intnum);
    if (num == 1.0 && (int)intnum == intnum)
        return (AVRational) { intnum, den };
    return av_d2q(num * intnum / den, 1 << 24);
}
/**
 * Initialize the aspect filter from the deprecated num/den option pair,
 * converting it into the sar/dar rational.
 */
static av_cold int init(AVFilterContext *ctx)
{
    AspectContext *s = ctx->priv;

    if (s->aspect_num > 0 && s->aspect_den > 0) {
        av_log(ctx, AV_LOG_WARNING, "This syntax is deprecated, use "
               "dar=<number> or dar=num/den.\n");
        /* fix: cast before dividing — without it, integer option fields
         * make this an integer division (e.g. 16/9 truncates to 1);
         * the cast is a no-op if the fields are already floating point */
        s->sar = s->dar = av_d2q((double)s->aspect_num / s->aspect_den, INT_MAX);
    }
    return 0;
}
/**
 * Read a MicroDVD subtitle file: the first three lines are probed for
 * an FPS declaration and a {DEFAULT} style line (stored as extradata),
 * then every line becomes one queued subtitle event.
 *
 * @return 0 on success, a negative AVERROR code on failure
 */
static int microdvd_read_header(AVFormatContext *s)
{
    AVRational pts_info = (AVRational){ 2997, 125 }; /* default: 23.976 fps */
    MicroDVDContext *microdvd = s->priv_data;
    AVStream *st = avformat_new_stream(s, NULL);
    int i = 0;
    char line[MAX_LINESIZE];

    if (!st)
        return AVERROR(ENOMEM);
    while (!url_feof(s->pb)) {
        AVPacket *sub;
        int64_t pos = avio_tell(s->pb);
        int len = ff_get_line(s->pb, line, sizeof(line));

        if (!len)
            break;
        /* only the first 3 lines are inspected for metadata */
        if (i < 3) {
            int frame;
            double fps;
            char c;

            i++;
            /* "{frame}{}fps" or "{frame}{frame}fps" sets the timebase */
            if ((sscanf(line, "{%d}{}%6lf",    &frame, &fps) == 2 ||
                 sscanf(line, "{%d}{%*d}%6lf", &frame, &fps) == 2) &&
                frame <= 1 && fps > 3 && fps < 100)
                pts_info = av_d2q(fps, 100000);
            if (!st->codec->extradata && sscanf(line, "{DEFAULT}{}%c", &c) == 1) {
                /* the default style line becomes the codec extradata,
                 * including the trailing NUL (hence the +1 below) */
                st->codec->extradata = av_strdup(line + 11);
                if (!st->codec->extradata)
                    return AVERROR(ENOMEM);
                st->codec->extradata_size = strlen(st->codec->extradata) + 1;
                continue;
            }
        }
        /* NOTE(review): "µdvd" below looks like a mojibake of
         * "&microdvd" (HTML entity &micro;) introduced by extraction —
         * verify against the original source */
        sub = ff_subtitles_queue_insert(µdvd->q, line, len, 0);
        if (!sub)
            return AVERROR(ENOMEM);
        sub->pos = pos;
        sub->pts = get_pts(sub->data);
        sub->duration = get_duration(sub->data);
    }
    ff_subtitles_queue_finalize(µdvd->q);
    avpriv_set_pts_info(st, 64, pts_info.den, pts_info.num);
    st->codec->codec_type = AVMEDIA_TYPE_SUBTITLE;
    st->codec->codec_id   = AV_CODEC_ID_MICRODVD;
    return 0;
}
/**
 * Open the encoder. If no codec name is configured, the context copied
 * from the muxer is opened as-is; otherwise a fresh context is
 * allocated for the named encoder and configured from the stored
 * width/height/frame_rate/bit_rate before opening.
 *
 * @return true on success, false if the encoder cannot be found or opened
 */
bool VideoEncoderFFmpegPrivate::open()
{
    if (codec_name.isEmpty()) {
        // copy ctx from muxer
        AVCodec *codec = avcodec_find_decoder(avctx->codec_id);
        AV_ENSURE_OK(avcodec_open2(avctx, codec, &dict), false);
        return true;
    }
    AVCodec *codec = avcodec_find_encoder_by_name(codec_name.toUtf8().constData());
    if (!codec) {
        qWarning() << "Can not find encoder for codec " << codec_name;
        return false;
    }
    // drop any previously allocated context before building a new one
    if (avctx) {
        avcodec_free_context(&avctx);
        avctx = 0;
    }
    avctx = avcodec_alloc_context3(codec);
    qDebug("tbc: %f", av_q2d(avctx->time_base));
    avctx->width = width; // coded_width works, why?
    avctx->height = height;
    avctx->pix_fmt = QTAV_PIX_FMT_C(YUV420P);
    // approximate 1/frame_rate; the bound allows NTSC-style x*1001 denominators
    avctx->time_base = av_d2q(1.0/frame_rate, frame_rate*1001.0+2);
    //avctx->max_b_frames = 3;//h264
    qDebug("2 tbc: %f=%d/%d", av_q2d(avctx->time_base), avctx->time_base.num, avctx->time_base.den);
    avctx->bit_rate = bit_rate;
    // Set Option
    // NOTE(review): 'param' is allocated but never passed to
    // avcodec_open2 — presumably superseded by applyOptionsForContext();
    // confirm it is not leaked ("¶m" below is likely mojibake of "&param")
    AVDictionary *param = 0;
#if 0
    //H.264
    if(avctx->codec_id == QTAV_CODEC_ID(H264)) {
        av_dict_set(¶m, "preset", "slow", 0);
        av_dict_set(¶m, "tune", "zerolatency", 0);
        //av_dict_set(¶m, "profile", "main", 0);
    }
    //H.265
    if(avctx->codec_id == AV_CODEC_ID_H265){
        av_dict_set(¶m, "preset", "ultrafast", 0);
        av_dict_set(¶m, "tune", "zero-latency", 0);
    }
#endif
    applyOptionsForContext();
    AV_ENSURE_OK(avcodec_open2(avctx, codec, &dict), false);
    // scratch buffer sized for a worst-case encoded frame
    const int buffer_size = qMax<int>(qMax<int>(width*height*6+200, FF_MIN_BUFFER_SIZE), sizeof(AVPicture));//??
    buffer.resize(buffer_size);
    return true;
}
/**
 * Set up the FLAC audio encoder for recording: allocate and open the
 * codec context and the per-frame sample and output buffers.
 *
 * @return true on success; false if the codec, its opening, or any
 *         buffer allocation fails.
 */
static bool ffemu_init_audio(struct ff_audio_info *audio, struct ffemu_params *param)
{
    AVCodec *codec = avcodec_find_encoder_by_name("flac");
    if (!codec)
        return false;

    audio->encoder = codec;

    // FFmpeg just loves to deprecate stuff :)
#ifdef HAVE_FFMPEG_ALLOC_CONTEXT3
    audio->codec = avcodec_alloc_context3(codec);
#else
    audio->codec = avcodec_alloc_context();
    avcodec_get_context_defaults(audio->codec);
#endif

    audio->codec->sample_rate = (int)roundf(param->samplerate);
    /* one tick per sample: av_d2q approximates 1/samplerate */
    audio->codec->time_base = av_d2q(1.0 / param->samplerate, 1000000);
    audio->codec->channels = param->channels;
    audio->codec->sample_fmt = AV_SAMPLE_FMT_S16;

#ifdef HAVE_FFMPEG_AVCODEC_OPEN2
    if (avcodec_open2(audio->codec, codec, NULL) != 0)
#else
    if (avcodec_open(audio->codec, codec) != 0)
#endif
    {
        /* NOTE(review): the allocated context is not freed on this path
         * (nor on the allocation failures below) — confirm the caller
         * performs the cleanup */
        return false;
    }

    /* interleaved s16 staging buffer for one encoder frame */
    audio->buffer = (int16_t*)av_malloc(
        audio->codec->frame_size *
        audio->codec->channels *
        sizeof(int16_t));

    if (!audio->buffer)
        return false;

    audio->outbuf_size = FF_MIN_BUFFER_SIZE;
    audio->outbuf = (uint8_t*)av_malloc(audio->outbuf_size);
    if (!audio->outbuf)
        return false;

    return true;
}
/**
 * Push the source/destination rectangles (or, failing that, the pixel
 * aspect ratio) to the OMX video renderer's display region config.
 *
 * A no-op until settings have been applied and while neither a valid
 * destination rectangle nor a non-zero pixel aspect is available.
 */
void COMXVideo::SetVideoRect(const CRect& SrcRect, const CRect& DestRect)
{
    if ( !m_settings_changed ||
         !((DestRect.x2 > DestRect.x1 && DestRect.y2 > DestRect.y1) || m_pixel_aspect != 0.0f) )
        return;

    OMX_ERRORTYPE omx_err;
    OMX_CONFIG_DISPLAYREGIONTYPE configDisplay;
    OMX_INIT_STRUCTURE(configDisplay);
    configDisplay.nPortIndex = m_omx_render.GetInputPort();

    // configured dest_rect takes precedence
    if (DestRect.x2 > DestRect.x1 && DestRect.y2 > DestRect.y1)
    {
        configDisplay.fullscreen = OMX_FALSE;
        configDisplay.noaspect   = OMX_TRUE;

        configDisplay.set = (OMX_DISPLAYSETTYPE)(OMX_DISPLAY_SET_DEST_RECT|OMX_DISPLAY_SET_SRC_RECT|OMX_DISPLAY_SET_FULLSCREEN|OMX_DISPLAY_SET_NOASPECT);
        // +0.5f rounds the float rectangle coordinates to the nearest pixel
        configDisplay.dest_rect.x_offset  = (int)(DestRect.x1+0.5f);
        configDisplay.dest_rect.y_offset  = (int)(DestRect.y1+0.5f);
        configDisplay.dest_rect.width     = (int)(DestRect.Width()+0.5f);
        configDisplay.dest_rect.height    = (int)(DestRect.Height()+0.5f);

        configDisplay.src_rect.x_offset   = (int)(SrcRect.x1+0.5f);
        configDisplay.src_rect.y_offset   = (int)(SrcRect.y1+0.5f);
        configDisplay.src_rect.width      = (int)(SrcRect.Width()+0.5f);
        configDisplay.src_rect.height     = (int)(SrcRect.Height()+0.5f);
    }
    else /* if (m_pixel_aspect != 0.0f) */
    {
        // no explicit rectangle: hand the renderer the pixel aspect as a
        // small rational (denominator bounded by 100)
        AVRational aspect = av_d2q(m_pixel_aspect, 100);
        configDisplay.set      = OMX_DISPLAY_SET_PIXEL;
        configDisplay.pixel_x  = aspect.num;
        configDisplay.pixel_y  = aspect.den;
    }

    omx_err = m_omx_render.SetConfig(OMX_IndexConfigDisplayRegion, &configDisplay);
    if(omx_err != OMX_ErrorNone)
    {
        CLog::Log(LOGERROR, "COMXVideo::Open error OMX_IndexConfigDisplayRegion omx_err(0x%08x)\n", omx_err);
    }
    printf("dest_rect.x_offset %d dest_rect.y_offset %d dest_rect.width %d dest_rect.height %d, pixel_aspect %.2f\n",
        configDisplay.dest_rect.x_offset, configDisplay.dest_rect.y_offset,
        configDisplay.dest_rect.width, configDisplay.dest_rect.height, m_pixel_aspect);
}
/** Set the values of the AVCodecContext or AVFormatContext structure.
 * They are set to the defaults specified in the according AVOption options
 * array default_val field.
 *
 * Only options whose flags match @p flags under @p mask are touched;
 * string/binary options carry no usable default (default_val is a double).
 *
 * @param s AVCodecContext or AVFormatContext for which the defaults will be set
 */
void av_opt_set_defaults2(void *s, int mask, int flags)
{
    const AVOption *opt = NULL;
    while ((opt = av_next_option(s, opt)) != NULL) {
        /* skip options not selected by the mask/flags filter */
        if ((opt->flags & mask) != flags)
            continue;
        switch (opt->type) {
        case FF_OPT_TYPE_CONST:
            /* Nothing to be done here */
            break;
        case FF_OPT_TYPE_FLAGS:
        case FF_OPT_TYPE_INT: {
            int val;
            val = opt->default_val;
            av_set_int(s, opt->name, val);
        }
        break;
        case FF_OPT_TYPE_INT64:
            /* warn when the int64 default is too large to round-trip
             * through the double default_val (+0.6 would be absorbed) */
            if ((double)(opt->default_val+0.6) == opt->default_val)
                av_log(s, AV_LOG_DEBUG, "loss of precission in default of %s\n", opt->name);
            av_set_int(s, opt->name, opt->default_val);
            break;
        case FF_OPT_TYPE_FLOAT: {
            double val;
            val = opt->default_val;
            av_set_double(s, opt->name, val);
        }
        break;
        case FF_OPT_TYPE_RATIONAL: {
            AVRational val;
            /* default_val is a double; approximate it as a rational */
            val = av_d2q(opt->default_val, INT_MAX);
            av_set_q(s, opt->name, val);
        }
        break;
        case FF_OPT_TYPE_STRING:
        case FF_OPT_TYPE_BINARY:
            /* Cannot set default for string as default_val is of type
             * double */
            break;
        default:
            av_log(s, AV_LOG_DEBUG, "AVOption type %d of option %s not implemented yet\n", opt->type, opt->name);
        }
    }
}
/**
 * Parse @p str as a ratio: a literal "num:den" pair (reduced via
 * av_reduce with @p max) or any expression accepted by
 * av_expr_parse_and_eval(), approximated by av_d2q with @p max.
 *
 * @return 0 on success, a negative AVERROR code from expression parsing
 */
int av_parse_ratio(AVRational *q, const char *str, int max,
                   int log_offset, void *log_ctx)
{
    char sink;

    if (sscanf(str, "%d:%d%c", &q->num, &q->den, &sink) == 2) {
        /* plain num:den — just bring it into reduced, bounded form */
        av_reduce(&q->num, &q->den, q->num, q->den, max);
    } else {
        double val;
        int err = av_expr_parse_and_eval(&val, str, NULL, NULL, NULL, NULL,
                                         NULL, NULL, NULL, log_offset, log_ctx);
        if (err < 0)
            return err;
        *q = av_d2q(val, max);
    }
    return 0;
}
static int reconfig(struct vf_instance *vf, struct mp_image_params *in, struct mp_image_params *out) { struct vf_priv_s *p = vf->priv; *out = *in; if (p->outfmt) out->imgfmt = p->outfmt; if (p->colormatrix) out->colorspace = p->colormatrix; if (p->colorlevels) out->colorlevels = p->colorlevels; if (p->primaries) out->primaries = p->primaries; if (p->gamma) out->gamma = p->gamma; if (p->chroma_location) out->chroma_location = p->chroma_location; if (p->stereo_in) out->stereo_in = p->stereo_in; if (p->stereo_out) out->stereo_out = p->stereo_out; if (p->rotate >= 0) out->rotate = p->rotate; AVRational dsize; mp_image_params_get_dsize(out, &dsize.num, &dsize.den); if (p->dw > 0) dsize.num = p->dw; if (p->dh > 0) dsize.den = p->dh; if (p->dar > 0) dsize = av_d2q(p->dar, INT_MAX); mp_image_params_set_dsize(out, dsize.num, dsize.den); // Make sure the user-overrides are consistent (no RGB csp for YUV, etc.). mp_image_params_guess_csp(out); return 0; }
/**
 * Parse @p arg as a video frame rate: an abbreviation from
 * video_rate_abbrs, or any numeric expression understood by
 * av_expr_parse_and_eval() (approximated with denominator <= 1001000).
 *
 * @return 0 on success, a negative AVERROR code on parse failure or a
 *         non-positive result.
 */
int av_parse_video_rate(AVRational *rate, const char *arg)
{
    const int n = FF_ARRAY_ELEMS(video_rate_abbrs);
    double value;
    int i, err;

    /* abbreviation table lookup first */
    for (i = 0; i < n; i++) {
        if (!strcmp(video_rate_abbrs[i].abbr, arg)) {
            *rate = video_rate_abbrs[i].rate;
            return 0;
        }
    }

    /* otherwise evaluate the argument as a numeric expression */
    err = av_expr_parse_and_eval(&value, arg, NULL, NULL, NULL, NULL,
                                 NULL, NULL, NULL, 0, NULL);
    if (err < 0)
        return err;
    *rate = av_d2q(value, 1001000);

    return rate->num > 0 && rate->den > 0 ? 0 : AVERROR(EINVAL);
}
/**
 * Evaluate the configured aspect ratio for the input link.
 *
 * The ratio expression may reference the input geometry (w, h, a, sar,
 * dar, hsub, vsub). If expression evaluation fails, the same string is
 * re-tried as a plain ratio via av_parse_ratio() before giving up.
 *
 * @param inlink       input link providing geometry and pixel format
 * @param aspect_ratio on success, the evaluated ratio (bounded by s->max)
 * @return 0 on success, a negative AVERROR code if neither parse
 *         succeeds or the result is not a valid aspect ratio.
 */
static int get_aspect_ratio(AVFilterLink *inlink, AVRational *aspect_ratio)
{
    AVFilterContext *ctx = inlink->dst;
    AspectContext *s = inlink->dst->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    double var_values[VARS_NB], res;
    int ret;

    /* input geometry exposed to the expression */
    var_values[VAR_W]     = inlink->w;
    var_values[VAR_H]     = inlink->h;
    var_values[VAR_A]     = (double) inlink->w / inlink->h;
    var_values[VAR_SAR]   = inlink->sample_aspect_ratio.num ?
        (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
    var_values[VAR_DAR]   = var_values[VAR_A] * var_values[VAR_SAR];
    var_values[VAR_HSUB]  = 1 << desc->log2_chroma_w;
    var_values[VAR_VSUB]  = 1 << desc->log2_chroma_h;

    /* evaluate new aspect ratio*/
    ret = av_expr_parse_and_eval(&res, s->ratio_expr,
                                 var_names, var_values,
                                 NULL, NULL, NULL, NULL, NULL, 0, ctx);
    if (ret < 0) {
        /* fallback: the string may be a plain "num:den" ratio */
        ret = av_parse_ratio(aspect_ratio, s->ratio_expr, s->max, 0, ctx);
    } else
        *aspect_ratio = av_d2q(res, s->max);

    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR,
               "Error when evaluating the expression '%s'\n", s->ratio_expr);
        return ret;
    }
    /* a zero numerator is allowed (means "unset"); denominator must be > 0 */
    if (aspect_ratio->num < 0 || aspect_ratio->den <= 0) {
        av_log(ctx, AV_LOG_ERROR,
               "Invalid string '%s' for aspect ratio\n", s->ratio_expr);
        return AVERROR(EINVAL);
    }
    return 0;
}
// Pick a "good" timebase, which will be used to convert double timestamps // back to fractions for passing them through libavcodec. AVRational mp_get_codec_timebase(struct mp_codec_params *c) { AVRational tb = {c->native_tb_num, c->native_tb_den}; if (tb.num < 1 || tb.den < 1) { if (c->reliable_fps) tb = av_inv_q(av_d2q(c->fps, 1000000)); if (tb.num < 1 || tb.den < 1) tb = AV_TIME_BASE_Q; } // If the timebase is too coarse, raise its precision, or small adjustments // to timestamps done between decoder and demuxer could be lost. if (av_q2d(tb) > 0.001) { AVRational r = av_div_q(tb, (AVRational){1, 1000}); tb.den *= (r.num + r.den - 1) / r.den; } av_reduce(&tb.num, &tb.den, tb.num, tb.den, INT_MAX); if (tb.num < 1 || tb.den < 1) tb = AV_TIME_BASE_Q; return tb; }
/**
 * Parse the THP file header and create a stream for each component
 * listed in it (type 0 = video, type 1 = audio; at most one of each).
 *
 * @return 0 on success, AVERROR(ENOMEM) if stream creation fails
 */
static int thp_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    ThpDemuxContext *thp = s->priv_data;
    AVStream *st;
    AVIOContext *pb = s->pb;
    int i;

    /* Read the file header.  */
    avio_rb32(pb); /* Skip Magic.  */
    thp->version = avio_rb32(pb);

    avio_rb32(pb); /* Max buf size.  */
    avio_rb32(pb); /* Max samples.  */

    /* fps stored as a big-endian IEEE float, kept as a rational */
    thp->fps = av_d2q(av_int2flt(avio_rb32(pb)), INT_MAX);
    thp->framecnt = avio_rb32(pb);
    thp->first_framesz = avio_rb32(pb);
    avio_rb32(pb); /* Data size.  */

    thp->compoff = avio_rb32(pb);
    avio_rb32(pb); /* offsetDataOffset.  */
    thp->first_frame = avio_rb32(pb);
    thp->last_frame = avio_rb32(pb);

    thp->next_framesz = thp->first_framesz;
    thp->next_frame = thp->first_frame;

    /* Read the component structure.  */
    avio_seek (pb, thp->compoff, SEEK_SET);
    thp->compcount = avio_rb32(pb);

    /* Read the list of component types.  */
    avio_read(pb, thp->components, 16);

    for (i = 0; i < thp->compcount; i++) {
        if (thp->components[i] == 0) {
            /* stop scanning once a second video component shows up */
            if (thp->vst != 0)
                break;

            /* Video component.  */
            st = avformat_new_stream(s, NULL);
            if (!st)
                return AVERROR(ENOMEM);

            /* The denominator and numerator are switched because 1/fps
               is required.  */
            av_set_pts_info(st, 64, thp->fps.den, thp->fps.num);
            st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
            st->codec->codec_id = CODEC_ID_THP;
            st->codec->codec_tag = 0;  /* no fourcc */
            st->codec->width = avio_rb32(pb);
            st->codec->height = avio_rb32(pb);
            /* NOTE(review): storing fps into sample_rate looks like it
             * merely reuses the field as scratch — confirm nothing
             * downstream treats it as an audio rate */
            st->codec->sample_rate = av_q2d(thp->fps);
            thp->vst = st;
            thp->video_stream_index = st->index;

            if (thp->version == 0x11000)
                avio_rb32(pb); /* Unknown.  */
        } else if (thp->components[i] == 1) {
            /* only the first audio component is used */
            if (thp->has_audio != 0)
                break;

            /* Audio component.  */
            st = avformat_new_stream(s, NULL);
            if (!st)
                return AVERROR(ENOMEM);

            st->codec->codec_type = AVMEDIA_TYPE_AUDIO;
            st->codec->codec_id = CODEC_ID_ADPCM_THP;
            st->codec->codec_tag = 0;  /* no fourcc */
            st->codec->channels    = avio_rb32(pb); /* numChannels.  */
            st->codec->sample_rate = avio_rb32(pb); /* Frequency.  */

            av_set_pts_info(st, 64, 1, st->codec->sample_rate);

            thp->audio_stream_index = st->index;
            thp->has_audio = 1;
        }
    }

    return 0;
}
bool COMXVideo::Open(COMXStreamInfo &hints, OMXClock *clock, float display_aspect, bool deinterlace, bool hdmi_clock_sync, void* boblight_instance, int boblight_sizedown, int boblight_margin, int boblight_timeout) { OMX_ERRORTYPE omx_err = OMX_ErrorNone; std::string decoder_name; m_video_codec_name = ""; m_codingType = OMX_VIDEO_CodingUnused; m_decoded_width = hints.width; m_decoded_height = hints.height; m_hdmi_clock_sync = hdmi_clock_sync; //copy boblight parameter m_boblight_sizedown = boblight_sizedown; m_boblight_margin = boblight_margin; COMXVideo::m_boblight = boblight_instance; COMXVideo::m_boblight_timeout = boblight_timeout; if(!m_decoded_width || !m_decoded_height) return false; m_converter = new CBitstreamConverter(); m_video_convert = m_converter->Open(hints.codec, (uint8_t *)hints.extradata, hints.extrasize, false); if(m_video_convert) { if(m_converter->GetExtraData() != NULL && m_converter->GetExtraSize() > 0) { m_extrasize = m_converter->GetExtraSize(); m_extradata = (uint8_t *)malloc(m_extrasize); memcpy(m_extradata, m_converter->GetExtraData(), m_converter->GetExtraSize()); } } else { if(hints.extrasize > 0 && hints.extradata != NULL) { m_extrasize = hints.extrasize; m_extradata = (uint8_t *)malloc(m_extrasize); memcpy(m_extradata, hints.extradata, hints.extrasize); } } switch (hints.codec) { case CODEC_ID_H264: { switch(hints.profile) { case FF_PROFILE_H264_BASELINE: // (role name) video_decoder.avc // H.264 Baseline profile decoder_name = OMX_H264BASE_DECODER; m_codingType = OMX_VIDEO_CodingAVC; m_video_codec_name = "omx-h264"; break; case FF_PROFILE_H264_MAIN: // (role name) video_decoder.avc // H.264 Main profile decoder_name = OMX_H264MAIN_DECODER; m_codingType = OMX_VIDEO_CodingAVC; m_video_codec_name = "omx-h264"; break; case FF_PROFILE_H264_HIGH: // (role name) video_decoder.avc // H.264 Main profile decoder_name = OMX_H264HIGH_DECODER; m_codingType = OMX_VIDEO_CodingAVC; m_video_codec_name = "omx-h264"; break; case FF_PROFILE_UNKNOWN: 
decoder_name = OMX_H264HIGH_DECODER; m_codingType = OMX_VIDEO_CodingAVC; m_video_codec_name = "omx-h264"; break; default: decoder_name = OMX_H264HIGH_DECODER; m_codingType = OMX_VIDEO_CodingAVC; m_video_codec_name = "omx-h264"; break; } } break; case CODEC_ID_MPEG4: // (role name) video_decoder.mpeg4 // MPEG-4, DivX 4/5 and Xvid compatible decoder_name = OMX_MPEG4_DECODER; m_codingType = OMX_VIDEO_CodingMPEG4; m_video_codec_name = "omx-mpeg4"; break; case CODEC_ID_MPEG1VIDEO: case CODEC_ID_MPEG2VIDEO: // (role name) video_decoder.mpeg2 // MPEG-2 decoder_name = OMX_MPEG2V_DECODER; m_codingType = OMX_VIDEO_CodingMPEG2; m_video_codec_name = "omx-mpeg2"; break; case CODEC_ID_H263: // (role name) video_decoder.mpeg4 // MPEG-4, DivX 4/5 and Xvid compatible decoder_name = OMX_MPEG4_DECODER; m_codingType = OMX_VIDEO_CodingMPEG4; m_video_codec_name = "omx-h263"; break; case CODEC_ID_VP8: // (role name) video_decoder.vp8 // VP8 decoder_name = OMX_VP8_DECODER; m_codingType = OMX_VIDEO_CodingVP8; m_video_codec_name = "omx-vp8"; break; case CODEC_ID_VC1: case CODEC_ID_WMV3: // (role name) video_decoder.vc1 // VC-1, WMV9 decoder_name = OMX_VC1_DECODER; m_codingType = OMX_VIDEO_CodingWMV; m_video_codec_name = "omx-vc1"; break; default: printf("Vcodec id unknown: %x\n", hints.codec); return false; break; } if(deinterlace) { printf("enable deinterlace\n"); m_deinterlace = true; } else { m_deinterlace = false; } std::string componentName = ""; componentName = decoder_name; if(!m_omx_decoder.Initialize(componentName, OMX_IndexParamVideoInit)) return false; componentName = "OMX.broadcom.video_render"; if(!m_omx_render.Initialize(componentName, OMX_IndexParamVideoInit)) return false; componentName = "OMX.broadcom.video_scheduler"; if(!m_omx_sched.Initialize(componentName, OMX_IndexParamVideoInit)) return false; if(COMXVideo::m_boblight){ componentName = "OMX.broadcom.video_splitter"; if(!m_omx_split.Initialize(componentName, OMX_IndexParamVideoInit)) return false; componentName = 
"OMX.broadcom.resize"; if(!m_omx_resize.Initialize(componentName, OMX_IndexParamImageInit)) return false; } if(m_deinterlace) { componentName = "OMX.broadcom.image_fx"; if(!m_omx_image_fx.Initialize(componentName, OMX_IndexParamImageInit)) return false; } componentName = "OMX.broadcom.text_scheduler"; if(!m_omx_text.Initialize(componentName, OMX_IndexParamOtherInit)) return false; if(clock == NULL) return false; m_av_clock = clock; m_omx_clock = m_av_clock->GetOMXClock(); if(m_omx_clock->GetComponent() == NULL) { m_av_clock = NULL; m_omx_clock = NULL; return false; } if(m_deinterlace) { m_omx_tunnel_decoder.Initialize(&m_omx_decoder, m_omx_decoder.GetOutputPort(), &m_omx_image_fx, m_omx_image_fx.GetInputPort()); m_omx_tunnel_image_fx.Initialize(&m_omx_image_fx, m_omx_image_fx.GetOutputPort(), &m_omx_sched, m_omx_sched.GetInputPort()); } else { m_omx_tunnel_decoder.Initialize(&m_omx_decoder, m_omx_decoder.GetOutputPort(), &m_omx_sched, m_omx_sched.GetInputPort()); } if(COMXVideo::m_boblight){ m_omx_tunnel_sched.Initialize(&m_omx_sched, m_omx_sched.GetOutputPort(), &m_omx_split, m_omx_split.GetInputPort()); m_omx_tunnel_split.Initialize(&m_omx_split, m_omx_split.GetOutputPort()+1, &m_omx_render, m_omx_render.GetInputPort()); m_omx_tunnel_resize.Initialize(&m_omx_split, m_omx_split.GetOutputPort(), &m_omx_resize, m_omx_resize.GetInputPort()); } else { m_omx_tunnel_sched.Initialize(&m_omx_sched, m_omx_sched.GetOutputPort(), &m_omx_render, m_omx_render.GetInputPort()); } m_omx_tunnel_clock.Initialize(m_omx_clock, m_omx_clock->GetInputPort() + 1, &m_omx_sched, m_omx_sched.GetOutputPort() + 1); m_omx_tunnel_text.Initialize(m_omx_clock, m_omx_clock->GetInputPort() + 2, &m_omx_text, m_omx_text.GetInputPort() + 2); omx_err = m_omx_tunnel_clock.Establish(false); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open m_omx_tunnel_clock.Establish\n"); return false; } omx_err = m_omx_decoder.SetStateForComponent(OMX_StateIdle); if (omx_err != OMX_ErrorNone) { 
CLog::Log(LOGERROR, "COMXVideo::Open m_omx_decoder.SetStateForComponent\n"); return false; } OMX_VIDEO_PARAM_PORTFORMATTYPE formatType; OMX_INIT_STRUCTURE(formatType); formatType.nPortIndex = m_omx_decoder.GetInputPort(); formatType.eCompressionFormat = m_codingType; if (hints.fpsscale > 0 && hints.fpsrate > 0) { formatType.xFramerate = (long long)(1<<16)*hints.fpsrate / hints.fpsscale; } else { formatType.xFramerate = 25 * (1<<16); } omx_err = m_omx_decoder.SetParameter(OMX_IndexParamVideoPortFormat, &formatType); if(omx_err != OMX_ErrorNone) return false; OMX_PARAM_PORTDEFINITIONTYPE portParam; OMX_INIT_STRUCTURE(portParam); portParam.nPortIndex = m_omx_decoder.GetInputPort(); omx_err = m_omx_decoder.GetParameter(OMX_IndexParamPortDefinition, &portParam); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error OMX_IndexParamPortDefinition omx_err(0x%08x)\n", omx_err); return false; } portParam.nPortIndex = m_omx_decoder.GetInputPort(); portParam.nBufferCountActual = VIDEO_BUFFERS; portParam.format.video.nFrameWidth = m_decoded_width; portParam.format.video.nFrameHeight = m_decoded_height; omx_err = m_omx_decoder.SetParameter(OMX_IndexParamPortDefinition, &portParam); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error OMX_IndexParamPortDefinition omx_err(0x%08x)\n", omx_err); return false; } OMX_PARAM_BRCMVIDEODECODEERRORCONCEALMENTTYPE concanParam; OMX_INIT_STRUCTURE(concanParam); concanParam.bStartWithValidFrame = OMX_FALSE; omx_err = m_omx_decoder.SetParameter(OMX_IndexParamBrcmVideoDecodeErrorConcealment, &concanParam); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error OMX_IndexParamBrcmVideoDecodeErrorConcealment omx_err(0x%08x)\n", omx_err); return false; } if(m_hdmi_clock_sync) { OMX_CONFIG_LATENCYTARGETTYPE latencyTarget; OMX_INIT_STRUCTURE(latencyTarget); latencyTarget.nPortIndex = m_omx_render.GetInputPort(); latencyTarget.bEnabled = OMX_TRUE; latencyTarget.nFilter = 2; 
latencyTarget.nTarget = 4000; latencyTarget.nShift = 3; latencyTarget.nSpeedFactor = -135; latencyTarget.nInterFactor = 500; latencyTarget.nAdjCap = 20; omx_err = m_omx_render.SetConfig(OMX_IndexConfigLatencyTarget, &latencyTarget); if (omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open OMX_IndexConfigLatencyTarget error (0%08x)\n", omx_err); return false; } } // Alloc buffers for the omx intput port. omx_err = m_omx_decoder.AllocInputBuffers(); if (omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open AllocOMXInputBuffers error (0%08x)\n", omx_err); return false; } OMX_INIT_STRUCTURE(portParam); portParam.nPortIndex = m_omx_text.GetInputPort(); omx_err = m_omx_text.GetParameter(OMX_IndexParamPortDefinition, &portParam); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error OMX_IndexParamPortDefinition omx_err(0x%08x)\n", omx_err); return false; } portParam.nBufferCountActual = 100; portParam.nBufferSize = MAX_TEXT_LENGTH; omx_err = m_omx_text.SetParameter(OMX_IndexParamPortDefinition, &portParam); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error OMX_IndexParamPortDefinition omx_err(0x%08x)\n", omx_err); return false; } omx_err = m_omx_text.AllocInputBuffers(); if (omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open AllocOMXInputBuffers\n"); return false; } OMX_INIT_STRUCTURE(portParam); portParam.nPortIndex = m_omx_text.GetOutputPort(); omx_err = m_omx_text.GetParameter(OMX_IndexParamPortDefinition, &portParam); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error OMX_IndexParamPortDefinition omx_err(0x%08x)\n", omx_err); return false; } portParam.eDir = OMX_DirOutput; portParam.format.other.eFormat = OMX_OTHER_FormatText; portParam.format.other.eFormat = OMX_OTHER_FormatText; portParam.nBufferCountActual = 1; portParam.nBufferSize = MAX_TEXT_LENGTH; omx_err = m_omx_text.SetParameter(OMX_IndexParamPortDefinition, &portParam); if(omx_err != OMX_ErrorNone) { 
CLog::Log(LOGERROR, "COMXVideo::Open error OMX_IndexParamPortDefinition omx_err(0x%08x)\n", omx_err); return false; } omx_err = m_omx_text.AllocOutputBuffers(); if (omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open AllocOutputBuffers\n"); return false; } omx_err = m_omx_tunnel_decoder.Establish(false); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open m_omx_tunnel_decoder.Establish\n"); return false; } omx_err = m_omx_decoder.SetStateForComponent(OMX_StateExecuting); if (omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error m_omx_decoder.SetStateForComponent\n"); return false; } if(m_deinterlace) { OMX_CONFIG_IMAGEFILTERPARAMSTYPE image_filter; OMX_INIT_STRUCTURE(image_filter); image_filter.nPortIndex = m_omx_image_fx.GetOutputPort(); image_filter.nNumParams = 1; image_filter.nParams[0] = 3; image_filter.eImageFilter = OMX_ImageFilterDeInterlaceAdvanced; omx_err = m_omx_image_fx.SetConfig(OMX_IndexConfigCommonImageFilterParameters, &image_filter); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error OMX_IndexConfigCommonImageFilterParameters omx_err(0x%08x)\n", omx_err); return false; } omx_err = m_omx_tunnel_image_fx.Establish(false); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open m_omx_tunnel_image_fx.Establish\n"); return false; } omx_err = m_omx_image_fx.SetStateForComponent(OMX_StateExecuting); if (omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error m_omx_image_fx.SetStateForComponent\n"); return false; } m_omx_image_fx.DisablePort(m_omx_image_fx.GetInputPort(), false); m_omx_image_fx.DisablePort(m_omx_image_fx.GetOutputPort(), false); } omx_err = m_omx_tunnel_sched.Establish(false); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open m_omx_tunnel_sched.Establish\n"); return false; } /** omx_err = m_omx_tunnel_write.Establish(false); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open m_omx_tunnel_write.Establish\n"); 
return false; } **/ omx_err = m_omx_text.SetStateForComponent(OMX_StateExecuting); if (omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error m_omx_text.SetStateForComponent\n"); return false; } omx_err = m_omx_tunnel_text.Establish(false); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open m_omx_tunnel_text.Establish\n"); return false; } OMX_BUFFERHEADERTYPE *omx_buffer = m_omx_text.GetOutputBuffer(); if(!omx_buffer) return false; omx_err = m_omx_text.FillThisBuffer(omx_buffer); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open FillThisBuffer\n"); return false; } omx_buffer = NULL; omx_err = m_omx_sched.SetStateForComponent(OMX_StateExecuting); if (omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error m_omx_sched.SetStateForComponent\n"); return false; } if(COMXVideo::m_boblight){ omx_err = m_omx_split.SetStateForComponent(OMX_StateExecuting); if (omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error m_omx_split.SetStateForComponent\n"); return false; } //set up the resizer //make sure output of the splitter and input of the resize match OMX_PARAM_PORTDEFINITIONTYPE port_def; OMX_INIT_STRUCTURE(port_def); port_def.nPortIndex = m_omx_split.GetOutputPort(); m_omx_split.GetParameter(OMX_IndexParamPortDefinition, &port_def); port_def.nPortIndex = m_omx_resize.GetInputPort(); m_omx_resize.SetParameter(OMX_IndexParamPortDefinition, &port_def); omx_err = m_omx_tunnel_resize.Establish(false); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "%s::%s m_omx_tunnel_resize.Establish\n", CLASSNAME, __func__); return false; } omx_err = m_omx_resize.WaitForEvent(OMX_EventPortSettingsChanged); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "%s::%s m_omx_resize.WaitForEvent=%x\n", CLASSNAME, __func__, omx_err); return false; } port_def.nPortIndex = m_omx_resize.GetOutputPort(); m_omx_resize.GetParameter(OMX_IndexParamPortDefinition, &port_def); port_def.nPortIndex = 
m_omx_resize.GetOutputPort(); port_def.format.image.eCompressionFormat = OMX_IMAGE_CodingUnused; port_def.format.image.eColorFormat = OMX_COLOR_Format32bitARGB8888; //calculate the size of the sized-down image if(m_boblight_sizedown%2==1)m_boblight_sizedown--; //make sure we have even dimensions, since resize component requires it float factor; if(m_decoded_width>m_decoded_height){ factor = (float)m_boblight_sizedown / m_decoded_width; }else{ factor = (float)m_boblight_sizedown / m_decoded_height; } port_def.format.image.nFrameWidth = round(factor * m_decoded_width); port_def.format.image.nFrameHeight = round(factor * m_decoded_height); port_def.format.image.nStride = 0; port_def.format.image.nSliceHeight = 0; port_def.format.image.bFlagErrorConcealment = OMX_FALSE; omx_err = m_omx_resize.SetParameter(OMX_IndexParamPortDefinition, &port_def); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "%s::%s m_omx_resize.SetParameter result(0x%x)\n", CLASSNAME, __func__, omx_err); return false; } COMXVideo::m_boblight_width = (int)round(factor * m_decoded_width); COMXVideo::m_boblight_height = (int)round(factor * m_decoded_height); //calculate margins of processed pixels on the outer border of the image COMXVideo::m_boblight_margin_t = (int)round(m_boblight_margin*m_boblight_height/100); COMXVideo::m_boblight_margin_b = m_boblight_height - m_boblight_margin_t; COMXVideo::m_boblight_margin_l = (int)round(m_boblight_margin*m_boblight_width/100); COMXVideo::m_boblight_margin_r = m_boblight_width - m_boblight_margin_l; CLog::Log(LOGDEBUG, "Setting boblight scanrange to %ix%i, scan margin is %i percent\n", COMXVideo::m_boblight_width, COMXVideo::m_boblight_height, m_boblight_margin); boblight_setscanrange(COMXVideo::m_boblight, COMXVideo::m_boblight_width, COMXVideo::m_boblight_height); OMX_PARAM_PORTDEFINITIONTYPE m_decoded_format; OMX_INIT_STRUCTURE(m_decoded_format); m_decoded_format.nPortIndex = m_omx_resize.GetOutputPort(); omx_err = 
m_omx_resize.GetParameter(OMX_IndexParamPortDefinition, &m_decoded_format); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "%s::%s m_omx_resize.GetParameter result(0x%x)\n", CLASSNAME, __func__, omx_err); return false; } assert(m_decoded_format.nBufferCountActual == 1); omx_err = m_omx_resize.AllocOutputBuffers(); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "%s::%s m_omx_resize.AllocOutputBuffers result(0x%x)\n", CLASSNAME, __func__, omx_err); return false; } omx_err = m_omx_resize.SetStateForComponent(OMX_StateExecuting); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "%s::%s m_omx_resize.SetStateForComponent result(0x%x)\n", CLASSNAME, __func__, omx_err); return false; } omx_err = m_omx_tunnel_split.Establish(false); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open m_omx_tunnel_split.Establish\n"); return false; } omx_err = m_omx_tunnel_resize.Establish(false); if(omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open m_omx_tunnel_resize.Establish\n"); return false; } //setting the custom callback to broadcast a signal COMXVideo::Thread is waiting for m_omx_resize.SetCustomDecoderFillBufferDoneHandler(&COMXVideo::BufferDoneHandler); //prepare boblight client thread and start it pthread_cond_init(&COMXVideo::m_boblight_bufferdone_cond, NULL); pthread_mutex_init(&COMXVideo::m_boblight_bufferdone_mutex, NULL); COMXCoreComponent* args[2]; args[0] = &m_omx_split; args[1] = &m_omx_resize; pthread_create(&COMXVideo::m_boblight_clientthread, NULL, &COMXVideo::BoblightClientThread, (void*)&args); } omx_err = m_omx_render.SetStateForComponent(OMX_StateExecuting); if (omx_err != OMX_ErrorNone) { CLog::Log(LOGERROR, "COMXVideo::Open error m_omx_render.SetStateForComponent\n"); return false; } if(!SendDecoderConfig()) return false; m_is_open = true; m_drop_state = false; m_setStartTime = true; m_setStartTimeText = true; // only set aspect when we have a aspect and display doesn't match the aspect if(display_aspect != 0.0f && 
(hints.aspect != display_aspect)) { OMX_CONFIG_DISPLAYREGIONTYPE configDisplay; OMX_INIT_STRUCTURE(configDisplay); configDisplay.nPortIndex = m_omx_render.GetInputPort(); AVRational aspect; float fAspect = (float)hints.aspect / (float)m_decoded_width * (float)m_decoded_height; aspect = av_d2q(fAspect, 100); printf("Aspect : num %d den %d aspect %f display aspect %f\n", aspect.num, aspect.den, hints.aspect, display_aspect); configDisplay.set = OMX_DISPLAY_SET_PIXEL; configDisplay.pixel_x = aspect.num; configDisplay.pixel_y = aspect.den; omx_err = m_omx_render.SetConfig(OMX_IndexConfigDisplayRegion, &configDisplay); if(omx_err != OMX_ErrorNone) return false; } /* configDisplay.set = OMX_DISPLAY_SET_LAYER; configDisplay.layer = 2; omx_err = m_omx_render.SetConfig(OMX_IndexConfigDisplayRegion, &configDisplay); if(omx_err != OMX_ErrorNone) return false; configDisplay.set = OMX_DISPLAY_SET_DEST_RECT; configDisplay.dest_rect.x_offset = 100; configDisplay.dest_rect.y_offset = 100; configDisplay.dest_rect.width = 640; configDisplay.dest_rect.height = 480; omx_err = m_omx_render.SetConfig(OMX_IndexConfigDisplayRegion, &configDisplay); if(omx_err != OMX_ErrorNone) return false; configDisplay.set = OMX_DISPLAY_SET_TRANSFORM; configDisplay.transform = OMX_DISPLAY_ROT180; omx_err = m_omx_render.SetConfig(OMX_IndexConfigDisplayRegion, &configDisplay); if(omx_err != OMX_ErrorNone) return false; configDisplay.set = OMX_DISPLAY_SET_FULLSCREEN; configDisplay.fullscreen = OMX_FALSE; omx_err = m_omx_render.SetConfig(OMX_IndexConfigDisplayRegion, &configDisplay); if(omx_err != OMX_ErrorNone) return false; configDisplay.set = OMX_DISPLAY_SET_MODE; configDisplay.mode = OMX_DISPLAY_MODE_FILL; //OMX_DISPLAY_MODE_LETTERBOX; omx_err = m_omx_render.SetConfig(OMX_IndexConfigDisplayRegion, &configDisplay); if(omx_err != OMX_ErrorNone) return false; configDisplay.set = OMX_DISPLAY_SET_LAYER; configDisplay.layer = 1; omx_err = m_omx_render.SetConfig(OMX_IndexConfigDisplayRegion, &configDisplay); 
if(omx_err != OMX_ErrorNone) return false; configDisplay.set = OMX_DISPLAY_SET_ALPHA; configDisplay.alpha = OMX_FALSE; omx_err = m_omx_render.SetConfig(OMX_IndexConfigDisplayRegion, &configDisplay); if(omx_err != OMX_ErrorNone) return false; */ CLog::Log(LOGDEBUG, "%s::%s - decoder_component(0x%p), input_port(0x%x), output_port(0x%x) deinterlace %d hdmiclocksync %d\n", CLASSNAME, __func__, m_omx_decoder.GetComponent(), m_omx_decoder.GetInputPort(), m_omx_decoder.GetOutputPort(), m_deinterlace, m_hdmi_clock_sync); m_first_frame = true; m_first_text = true; return true; }