Example #1
double get_rotation(AVStream *st)
{
    AVDictionaryEntry *rotate_tag = av_dict_get(st->metadata, "rotate", NULL, 0);
    uint8_t* displaymatrix = av_stream_get_side_data(st,
                                                     AV_PKT_DATA_DISPLAYMATRIX, NULL);
    double theta = 0;

    if (rotate_tag && *rotate_tag->value && strcmp(rotate_tag->value, "0")) {
        char *tail;
        theta = av_strtod(rotate_tag->value, &tail);
        if (*tail)
            theta = 0;
    }
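    /* av_display_rotation_get() returns the rotation in degrees counterclockwise;
     * negate it so theta follows the clockwise convention of the rotate tag. */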
    if (displaymatrix && !theta)
        theta = -av_display_rotation_get((int32_t*) displaymatrix);

    /* Wrap theta into a single turn (roughly [0, 360)); the 0.9/360 bias folds
     * angles within 0.9 degrees below a full turn back to ~0. */
    theta -= 360*floor(theta/360 + 0.9/360);

    if (fabs(theta - 90*round(theta/90)) > 2)
        av_log(NULL, AV_LOG_WARNING, "Odd rotation angle.\n"
               "If you want to help, upload a sample "
               "of this file to ftp://upload.ffmpeg.org/incoming/ "
               "and contact the ffmpeg-devel mailing list. ([email protected])");

    return theta;
}
Example #2
File: dump.c  Project: mf0324/Code_Learn
static void dump_sidedata(void *ctx, AVStream *st, const char *indent)
{
    int i;

    if (st->nb_side_data)
        av_log(ctx, AV_LOG_INFO, "%sSide data:\n", indent);

    for (i = 0; i < st->nb_side_data; i++) {
        AVPacketSideData sd = st->side_data[i];
        av_log(ctx, AV_LOG_INFO, "%s  ", indent);

        switch (sd.type) {
        case AV_PKT_DATA_PALETTE:
            av_log(ctx, AV_LOG_INFO, "palette");
            break;
        case AV_PKT_DATA_NEW_EXTRADATA:
            av_log(ctx, AV_LOG_INFO, "new extradata");
            break;
        case AV_PKT_DATA_PARAM_CHANGE:
            av_log(ctx, AV_LOG_INFO, "paramchange: ");
            dump_paramchange(ctx, &sd);
            break;
        case AV_PKT_DATA_H263_MB_INFO:
            av_log(ctx, AV_LOG_INFO, "h263 macroblock info");
            break;
        case AV_PKT_DATA_REPLAYGAIN:
            av_log(ctx, AV_LOG_INFO, "replaygain: ");
            dump_replaygain(ctx, &sd);
            break;
        case AV_PKT_DATA_DISPLAYMATRIX:
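            /* The side data carries a 3x3 display matrix (nine int32 values);
             * only the rotation it encodes is printed here. */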
            av_log(ctx, AV_LOG_INFO, "displaymatrix: rotation of %.2f degrees",
                   av_display_rotation_get((int32_t *)sd.data));
            break;
        case AV_PKT_DATA_STEREO3D:
            av_log(ctx, AV_LOG_INFO, "stereo3d: ");
            dump_stereo3d(ctx, &sd);
            break;
        case AV_PKT_DATA_AUDIO_SERVICE_TYPE:
            av_log(ctx, AV_LOG_INFO, "audio service type: ");
            dump_audioservicetype(ctx, &sd);
            break;
        case AV_PKT_DATA_QUALITY_STATS:
            av_log(ctx, AV_LOG_INFO, "quality factor: %d, pict_type: %c", AV_RL32(sd.data), av_get_picture_type_char(sd.data[4]));
            break;
        default:
            av_log(ctx, AV_LOG_WARNING,
                   "unknown side data type %d (%d bytes)", sd.type, sd.size);
            break;
        }

        av_log(ctx, AV_LOG_INFO, "\n");
    }
}
Example #3
double sxpi_demuxing_probe_rotation(const struct demuxing_ctx *ctx)
{
    AVStream *st = (AVStream *)ctx->stream; // XXX: Fix FFmpeg.
    AVDictionaryEntry *rotate_tag = av_dict_get(st->metadata, "rotate", NULL, 0);
    const uint8_t *displaymatrix = av_stream_get_side_data(st, AV_PKT_DATA_DISPLAYMATRIX, NULL);
    double theta = 0;

    if (rotate_tag && *rotate_tag->value && strcmp(rotate_tag->value, "0")) {
        char *tail;
        theta = av_strtod(rotate_tag->value, &tail);
        if (*tail)
            theta = 0;
    }
    if (displaymatrix && !theta)
        theta = -av_display_rotation_get((const int32_t *)displaymatrix);
    theta -= 360*floor(theta/360 + 0.9/360);
    return theta;
}
Example #4
int32_t MovieDecoder::getStreamRotation()
{
    int32_t* matrix = reinterpret_cast<int32_t*>(av_stream_get_side_data(m_pVideoStream, AV_PKT_DATA_DISPLAYMATRIX, nullptr));
    if (matrix)
    {
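        // av_display_rotation_get() yields degrees counterclockwise: a value near
        // +90 returns 2, near -90 returns 1, anything else falls through to -1.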
        auto angle = lround(av_display_rotation_get(matrix));
        if (angle > 45 && angle < 135)
        {
            return 2;
        }
        else if (angle < -45 && angle > -135)
        {
            return 1;
        }
    }

    return -1;
}
Example #5
static void dump_sidedata(void *ctx, AVStream *st, const char *indent)
{
    int i;

    if (st->nb_side_data)
        av_log(ctx, AV_LOG_INFO, "%sSide data:\n", indent);

    for (i = 0; i < st->nb_side_data; i++) {
        AVPacketSideData sd = st->side_data[i];
        av_log(ctx, AV_LOG_INFO, "%s  ", indent);

        switch (sd.type) {
        case AV_PKT_DATA_PALETTE:
            av_log(ctx, AV_LOG_INFO, "palette");
            break;
        case AV_PKT_DATA_NEW_EXTRADATA:
            av_log(ctx, AV_LOG_INFO, "new extradata");
            break;
        case AV_PKT_DATA_PARAM_CHANGE:
            av_log(ctx, AV_LOG_INFO, "paramchange: ");
            dump_paramchange(ctx, &sd);
            break;
        case AV_PKT_DATA_H263_MB_INFO:
            av_log(ctx, AV_LOG_INFO, "h263 macroblock info");
            break;
        case AV_PKT_DATA_REPLAYGAIN:
            av_log(ctx, AV_LOG_INFO, "replaygain: ");
            dump_replaygain(ctx, &sd);
            break;
        case AV_PKT_DATA_DISPLAYMATRIX:
            av_log(ctx, AV_LOG_INFO, "displaymatrix: rotation of %.2f degrees",
                   av_display_rotation_get((int32_t *)sd.data));
            break;
        default:
            av_log(ctx, AV_LOG_WARNING,
                   "unknown side data type %d (%d bytes)", sd.type, sd.size);
            break;
        }

        av_log(ctx, AV_LOG_INFO, "\n");
    }
}
Example #6
File: demux.c  Project: mstorsjo/vlc
static void get_rotation(es_format_t *fmt, AVStream *s)
{
    char const *kRotateKey = "rotate";
    AVDictionaryEntry *rotation = av_dict_get(s->metadata, kRotateKey, NULL, 0);
    long angle = 0;

    if( rotation )
    {
        angle = strtol(rotation->value, NULL, 10);

        if (angle > 45 && angle < 135)
            fmt->video.orientation = ORIENT_ROTATED_90;

        else if (angle > 135 && angle < 225)
            fmt->video.orientation = ORIENT_ROTATED_180;

        else if (angle > 225 && angle < 315)
            fmt->video.orientation = ORIENT_ROTATED_270;

        else
            fmt->video.orientation = ORIENT_NORMAL;
    }
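    /* Display matrix side data, when present, overrides the rotate tag. The
     * matrix angle is counterclockwise, so ~+90 maps to ORIENT_ROTATED_270. */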
    int32_t *matrix = (int32_t *)av_stream_get_side_data(s, AV_PKT_DATA_DISPLAYMATRIX, NULL);
    if( matrix ) {
        angle = lround(av_display_rotation_get(matrix));

        if (angle > 45 && angle < 135)
            fmt->video.orientation = ORIENT_ROTATED_270;

        else if (angle > 135 || angle < -135)
            fmt->video.orientation = ORIENT_ROTATED_180;

        else if (angle < -45 && angle > -135)
            fmt->video.orientation = ORIENT_ROTATED_90;

        else
            fmt->video.orientation = ORIENT_NORMAL;
    }
}
Example #7
static int configure_input_video_filter(FilterGraph *fg, InputFilter *ifilter,
                                        AVFilterInOut *in)
{
    AVFilterContext *last_filter;
    const AVFilter *buffer_filt = avfilter_get_by_name("buffer");
    InputStream *ist = ifilter->ist;
    InputFile     *f = input_files[ist->file_index];
    AVRational tb = ist->framerate.num ? av_inv_q(ist->framerate) :
                                         ist->st->time_base;
    AVRational sar;
    char args[255], name[255];
    int ret, pad_idx = 0;

    sar = ist->st->sample_aspect_ratio.num ?
          ist->st->sample_aspect_ratio :
          ist->dec_ctx->sample_aspect_ratio;
    snprintf(args, sizeof(args),
             "width=%d:height=%d:pix_fmt=%d:time_base=%d/%d:sar=%d/%d",
             ist->dec_ctx->width, ist->dec_ctx->height,
             ist->hwaccel_retrieve_data ? ist->hwaccel_retrieved_pix_fmt : ist->dec_ctx->pix_fmt,
             tb.num, tb.den, sar.num, sar.den);
    snprintf(name, sizeof(name), "graph %d input from stream %d:%d", fg->index,
             ist->file_index, ist->st->index);

    if ((ret = avfilter_graph_create_filter(&ifilter->filter, buffer_filt, name,
                                            args, NULL, fg->graph)) < 0)
        return ret;
    last_filter = ifilter->filter;

    if (ist->autorotate) {
        uint8_t* displaymatrix = av_stream_get_side_data(ist->st,
                                                         AV_PKT_DATA_DISPLAYMATRIX, NULL);
        if (displaymatrix) {
            double rot = av_display_rotation_get((int32_t*) displaymatrix);
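            /* Undo the stored counterclockwise rotation with filters: beyond
             * +/-135 degrees use vflip+hflip (a 180 degree turn), rot < -45
             * transposes clockwise, rot > 45 transposes counterclockwise. */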
            if (rot < -135 || rot > 135) {
                ret = insert_filter(&last_filter, &pad_idx, "vflip", NULL);
                if (ret < 0)
                    return ret;
                ret = insert_filter(&last_filter, &pad_idx, "hflip", NULL);
            } else if (rot < -45) {
                ret = insert_filter(&last_filter, &pad_idx, "transpose", "dir=clock");
            } else if (rot > 45) {
                ret = insert_filter(&last_filter, &pad_idx, "transpose", "dir=cclock");
            }
            if (ret < 0)
                return ret;
        }
    }

    if (ist->framerate.num) {
        AVFilterContext *setpts;

        snprintf(name, sizeof(name), "force CFR for input from stream %d:%d",
                 ist->file_index, ist->st->index);
        if ((ret = avfilter_graph_create_filter(&setpts,
                                                avfilter_get_by_name("setpts"),
                                                name, "N", NULL,
                                                fg->graph)) < 0)
            return ret;

        if ((ret = avfilter_link(last_filter, 0, setpts, 0)) < 0)
            return ret;

        last_filter = setpts;
    }

    snprintf(name, sizeof(name), "trim for input stream %d:%d",
             ist->file_index, ist->st->index);
    ret = insert_trim(((f->start_time == AV_NOPTS_VALUE) || !f->accurate_seek) ?
                      AV_NOPTS_VALUE : 0, f->recording_time, &last_filter, &pad_idx, name);
    if (ret < 0)
        return ret;

    if ((ret = avfilter_link(last_filter, 0, in->filter_ctx, in->pad_idx)) < 0)
        return ret;
    return 0;
}
Example #8
File: avprobe.c  Project: changbiao/libav
static void show_stream(AVFormatContext *fmt_ctx, int stream_idx)
{
    AVStream *stream = fmt_ctx->streams[stream_idx];
    AVCodecContext *dec_ctx;
    const AVCodec *dec;
    const char *profile;
    char val_str[128];
    AVRational display_aspect_ratio, *sar = NULL;
    const AVPixFmtDescriptor *desc;

    probe_object_header("stream");

    probe_int("index", stream->index);

    if ((dec_ctx = stream->codec)) {
        if ((dec = dec_ctx->codec)) {
            probe_str("codec_name", dec->name);
            probe_str("codec_long_name", dec->long_name);
        } else {
            probe_str("codec_name", "unknown");
        }

        probe_str("codec_type", media_type_string(dec_ctx->codec_type));
        probe_str("codec_time_base",
                  rational_string(val_str, sizeof(val_str),
                                  "/", &dec_ctx->time_base));

        /* print AVI/FourCC tag */
        av_get_codec_tag_string(val_str, sizeof(val_str), dec_ctx->codec_tag);
        probe_str("codec_tag_string", val_str);
        probe_str("codec_tag", tag_string(val_str, sizeof(val_str),
                                          dec_ctx->codec_tag));

        /* print profile, if there is one */
        if (dec && (profile = av_get_profile_name(dec, dec_ctx->profile)))
            probe_str("profile", profile);

        switch (dec_ctx->codec_type) {
        case AVMEDIA_TYPE_VIDEO:
            probe_int("width", dec_ctx->width);
            probe_int("height", dec_ctx->height);
            probe_int("coded_width", dec_ctx->coded_width);
            probe_int("coded_height", dec_ctx->coded_height);
            probe_int("has_b_frames", dec_ctx->has_b_frames);
            if (dec_ctx->sample_aspect_ratio.num)
                sar = &dec_ctx->sample_aspect_ratio;
            else if (stream->sample_aspect_ratio.num)
                sar = &stream->sample_aspect_ratio;

            if (sar) {
                probe_str("sample_aspect_ratio",
                          rational_string(val_str, sizeof(val_str), ":", sar));
                av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den,
                          dec_ctx->width  * sar->num, dec_ctx->height * sar->den,
                          1024*1024);
                probe_str("display_aspect_ratio",
                          rational_string(val_str, sizeof(val_str), ":",
                          &display_aspect_ratio));
            }
            desc = av_pix_fmt_desc_get(dec_ctx->pix_fmt);
            probe_str("pix_fmt", desc ? desc->name : "unknown");
            probe_int("level", dec_ctx->level);

            probe_str("color_range", av_color_range_name(dec_ctx->color_range));
            probe_str("color_space", av_color_space_name(dec_ctx->colorspace));
            probe_str("color_trc", av_color_transfer_name(dec_ctx->color_trc));
            probe_str("color_pri", av_color_primaries_name(dec_ctx->color_primaries));
            probe_str("chroma_loc", av_chroma_location_name(dec_ctx->chroma_sample_location));
            break;

        case AVMEDIA_TYPE_AUDIO:
            probe_str("sample_rate",
                      value_string(val_str, sizeof(val_str),
                                   dec_ctx->sample_rate,
                                   unit_hertz_str));
            probe_int("channels", dec_ctx->channels);
            probe_int("bits_per_sample",
                      av_get_bits_per_sample(dec_ctx->codec_id));
            break;
        }
    } else {
        probe_str("codec_type", "unknown");
    }

    if (fmt_ctx->iformat->flags & AVFMT_SHOW_IDS)
        probe_int("id", stream->id);
    probe_str("avg_frame_rate",
              rational_string(val_str, sizeof(val_str), "/",
              &stream->avg_frame_rate));
    if (dec_ctx->bit_rate)
        probe_str("bit_rate",
                  value_string(val_str, sizeof(val_str),
                               dec_ctx->bit_rate, unit_bit_per_second_str));
    probe_str("time_base",
              rational_string(val_str, sizeof(val_str), "/",
              &stream->time_base));
    probe_str("start_time",
              time_value_string(val_str, sizeof(val_str),
                                stream->start_time, &stream->time_base));
    probe_str("duration",
              time_value_string(val_str, sizeof(val_str),
                                stream->duration, &stream->time_base));
    if (stream->nb_frames)
        probe_int("nb_frames", stream->nb_frames);

    probe_dict(stream->metadata, "tags");

    if (stream->nb_side_data) {
        int i, j;
        probe_object_header("sidedata");
        for (i = 0; i < stream->nb_side_data; i++) {
            const AVPacketSideData* sd = &stream->side_data[i];
            switch (sd->type) {
            case AV_PKT_DATA_DISPLAYMATRIX:
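                /* Dump the nine matrix coefficients, then the rotation derived
                 * from them by av_display_rotation_get(). */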
                probe_object_header("displaymatrix");
                probe_array_header("matrix", 1);
                for (j = 0; j < 9; j++)
                    probe_int(NULL, ((int32_t *)sd->data)[j]);
                probe_array_footer("matrix", 1);
                probe_int("rotation",
                          av_display_rotation_get((int32_t *)sd->data));
                probe_object_footer("displaymatrix");
                break;
            }
        }
        probe_object_footer("sidedata");
    }

    probe_object_footer("stream");
}
Example #9
static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
{
    AVFilterContext *ctx = inlink->dst;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    uint32_t plane_checksum[4] = {0}, checksum = 0;
    int64_t sum[4] = {0}, sum2[4] = {0};
    int32_t pixelcount[4] = {0};
    int i, plane, vsub = desc->log2_chroma_h;
#ifdef IDE_COMPILE
	char tmp1[32] = {0};
	char tmp2[32] = {0};
#endif

    for (plane = 0; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++) {
        int64_t linesize = av_image_get_linesize(frame->format, frame->width, plane);
        uint8_t *data = frame->data[plane];
        int h = plane == 1 || plane == 2 ? FF_CEIL_RSHIFT(inlink->h, vsub) : inlink->h;

        if (linesize < 0)
            return linesize;

        for (i = 0; i < h; i++) {
            plane_checksum[plane] = av_adler32_update(plane_checksum[plane], data, linesize);
            checksum = av_adler32_update(checksum, data, linesize);

            update_sample_stats(data, linesize, sum+plane, sum2+plane);
            pixelcount[plane] += linesize;
            data += frame->linesize[plane];
        }
    }

#ifdef IDE_COMPILE
	av_log(ctx, AV_LOG_INFO,
           "n:%"PRId64" pts:%s pts_time:%s pos:%"PRId64" "
           "fmt:%s sar:%d/%d s:%dx%d i:%c iskey:%d type:%c "
           "checksum:%08"PRIX32" plane_checksum:[%08"PRIX32,
           inlink->frame_count,
           av_ts_make_string(tmp1, frame->pts), av_ts_make_time_string(tmp2, frame->pts, &inlink->time_base), av_frame_get_pkt_pos(frame),
           desc->name,
           frame->sample_aspect_ratio.num, frame->sample_aspect_ratio.den,
           frame->width, frame->height,
           !frame->interlaced_frame ? 'P' :         /* Progressive  */
           frame->top_field_first   ? 'T' : 'B',    /* Top / Bottom */
           frame->key_frame,
           av_get_picture_type_char(frame->pict_type),
           checksum, plane_checksum[0]);
#else
	av_log(ctx, AV_LOG_INFO,
           "n:%"PRId64" pts:%s pts_time:%s pos:%"PRId64" "
           "fmt:%s sar:%d/%d s:%dx%d i:%c iskey:%d type:%c "
           "checksum:%08"PRIX32" plane_checksum:[%08"PRIX32,
           inlink->frame_count,
           av_ts2str(frame->pts), av_ts2timestr(frame->pts, &inlink->time_base), av_frame_get_pkt_pos(frame),
           desc->name,
           frame->sample_aspect_ratio.num, frame->sample_aspect_ratio.den,
           frame->width, frame->height,
           !frame->interlaced_frame ? 'P' :         /* Progressive  */
           frame->top_field_first   ? 'T' : 'B',    /* Top / Bottom */
           frame->key_frame,
           av_get_picture_type_char(frame->pict_type),
           checksum, plane_checksum[0]);
#endif

    for (plane = 1; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++)
        av_log(ctx, AV_LOG_INFO, " %08"PRIX32, plane_checksum[plane]);
    av_log(ctx, AV_LOG_INFO, "] mean:[");
    for (plane = 0; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++)
        av_log(ctx, AV_LOG_INFO, "%"PRId64" ", (sum[plane] + pixelcount[plane]/2) / pixelcount[plane]);
    av_log(ctx, AV_LOG_INFO, "\b] stdev:[");
    for (plane = 0; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++)
        av_log(ctx, AV_LOG_INFO, "%3.1f ",
               sqrt((sum2[plane] - sum[plane]*(double)sum[plane]/pixelcount[plane])/pixelcount[plane]));
    av_log(ctx, AV_LOG_INFO, "\b]\n");

    for (i = 0; i < frame->nb_side_data; i++) {
        AVFrameSideData *sd = frame->side_data[i];

        av_log(ctx, AV_LOG_INFO, "  side data - ");
        switch (sd->type) {
        case AV_FRAME_DATA_PANSCAN:
            av_log(ctx, AV_LOG_INFO, "pan/scan");
            break;
        case AV_FRAME_DATA_A53_CC:
            av_log(ctx, AV_LOG_INFO, "A/53 closed captions (%d bytes)", sd->size);
            break;
        case AV_FRAME_DATA_STEREO3D:
            dump_stereo3d(ctx, sd);
            break;
        case AV_FRAME_DATA_DISPLAYMATRIX:
            av_log(ctx, AV_LOG_INFO, "displaymatrix: rotation of %.2f degrees",
                   av_display_rotation_get((int32_t *)sd->data));
            break;
        case AV_FRAME_DATA_AFD:
            av_log(ctx, AV_LOG_INFO, "afd: value of %"PRIu8, sd->data[0]);
            break;
        default:
            av_log(ctx, AV_LOG_WARNING, "unknown side data type %d (%d bytes)",
                   sd->type, sd->size);
            break;
        }

        av_log(ctx, AV_LOG_INFO, "\n");
    }

    return ff_filter_frame(inlink->dst->outputs[0], frame);
}
Example #10
static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
{
    AVFilterContext *ctx = inlink->dst;
    ShowInfoContext *showinfo = ctx->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    uint32_t plane_checksum[4] = {0}, checksum = 0;
    int i, plane, vsub = desc->log2_chroma_h;

    for (plane = 0; frame->data[plane] && plane < 4; plane++) {
        size_t linesize = av_image_get_linesize(frame->format, frame->width, plane);
        uint8_t *data = frame->data[plane];
        int h = plane == 1 || plane == 2 ? inlink->h >> vsub : inlink->h;

        for (i = 0; i < h; i++) {
            plane_checksum[plane] = av_adler32_update(plane_checksum[plane], data, linesize);
            checksum = av_adler32_update(checksum, data, linesize);
            data += frame->linesize[plane];
        }
    }

    av_log(ctx, AV_LOG_INFO,
           "n:%d pts:%"PRId64" pts_time:%f "
           "fmt:%s sar:%d/%d s:%dx%d i:%c iskey:%d type:%c "
           "checksum:%"PRIu32" plane_checksum:[%"PRIu32" %"PRIu32" %"PRIu32" %"PRIu32"]\n",
           showinfo->frame,
           frame->pts, frame->pts * av_q2d(inlink->time_base),
           desc->name,
           frame->sample_aspect_ratio.num, frame->sample_aspect_ratio.den,
           frame->width, frame->height,
           !frame->interlaced_frame ? 'P' :         /* Progressive  */
           frame->top_field_first   ? 'T' : 'B',    /* Top / Bottom */
           frame->key_frame,
           av_get_picture_type_char(frame->pict_type),
           checksum, plane_checksum[0], plane_checksum[1], plane_checksum[2], plane_checksum[3]);

    for (i = 0; i < frame->nb_side_data; i++) {
        AVFrameSideData *sd = frame->side_data[i];

        av_log(ctx, AV_LOG_INFO, "  side data - ");
        switch (sd->type) {
        case AV_FRAME_DATA_PANSCAN:
            av_log(ctx, AV_LOG_INFO, "pan/scan");
            break;
        case AV_FRAME_DATA_A53_CC:
            av_log(ctx, AV_LOG_INFO, "A/53 closed captions (%d bytes)", sd->size);
            break;
        case AV_FRAME_DATA_STEREO3D:
            dump_stereo3d(ctx, sd);
            break;
        case AV_FRAME_DATA_DISPLAYMATRIX:
            av_log(ctx, AV_LOG_INFO, "displaymatrix: rotation of %.2f degrees",
                   av_display_rotation_get((int32_t *)sd->data));
            break;
        case AV_FRAME_DATA_AFD:
            av_log(ctx, AV_LOG_INFO, "afd: value of %"PRIu8, sd->data[0]);
            break;
        default:
            av_log(ctx, AV_LOG_WARNING, "unknown side data type %d (%d bytes)",
                   sd->type, sd->size);
            break;
        }

        av_log(ctx, AV_LOG_INFO, "\n");
    }

    showinfo->frame++;
    return ff_filter_frame(inlink->dst->outputs[0], frame);
}
Example #11
static void dump_sidedata(void *ctx, AVStream *st, const char *indent)
{
    int i;

    if (st->nb_side_data)
        av_log(ctx, AV_LOG_INFO, "%sSide data:\n", indent);

    for (i = 0; i < st->nb_side_data; i++) {
        AVPacketSideData sd = st->side_data[i];
        av_log(ctx, AV_LOG_INFO, "%s  ", indent);

        switch (sd.type) {
        case AV_PKT_DATA_PALETTE:
            av_log(ctx, AV_LOG_INFO, "palette");
            break;
        case AV_PKT_DATA_NEW_EXTRADATA:
            av_log(ctx, AV_LOG_INFO, "new extradata");
            break;
        case AV_PKT_DATA_PARAM_CHANGE:
            av_log(ctx, AV_LOG_INFO, "paramchange: ");
            dump_paramchange(ctx, &sd);
            break;
        case AV_PKT_DATA_H263_MB_INFO:
            av_log(ctx, AV_LOG_INFO, "H.263 macroblock info");
            break;
        case AV_PKT_DATA_REPLAYGAIN:
            av_log(ctx, AV_LOG_INFO, "replaygain: ");
            dump_replaygain(ctx, &sd);
            break;
        case AV_PKT_DATA_DISPLAYMATRIX:
            av_log(ctx, AV_LOG_INFO, "displaymatrix: rotation of %.2f degrees",
                   av_display_rotation_get((int32_t *)sd.data));
            break;
        case AV_PKT_DATA_STEREO3D:
            av_log(ctx, AV_LOG_INFO, "stereo3d: ");
            dump_stereo3d(ctx, &sd);
            break;
        case AV_PKT_DATA_AUDIO_SERVICE_TYPE:
            av_log(ctx, AV_LOG_INFO, "audio service type: ");
            dump_audioservicetype(ctx, &sd);
            break;
        case AV_PKT_DATA_QUALITY_STATS:
            av_log(ctx, AV_LOG_INFO, "quality factor: %"PRId32", pict_type: %c",
                   AV_RL32(sd.data), av_get_picture_type_char(sd.data[4]));
            break;
        case AV_PKT_DATA_CPB_PROPERTIES:
            av_log(ctx, AV_LOG_INFO, "cpb: ");
            dump_cpb(ctx, &sd);
            break;
        case AV_PKT_DATA_MASTERING_DISPLAY_METADATA:
            dump_mastering_display_metadata(ctx, &sd);
            break;
        case AV_PKT_DATA_SPHERICAL:
            av_log(ctx, AV_LOG_INFO, "spherical: ");
            dump_spherical(ctx, st->codecpar, &sd);
            break;
        case AV_PKT_DATA_CONTENT_LIGHT_LEVEL:
            dump_content_light_metadata(ctx, &sd);
            break;
        default:
            av_log(ctx, AV_LOG_INFO,
                   "unknown side data type %d (%d bytes)", sd.type, sd.size);
            break;
        }

        av_log(ctx, AV_LOG_INFO, "\n");
    }
}
Example #12
static int h264_metadata_filter(AVBSFContext *bsf, AVPacket *out)
{
    H264MetadataContext *ctx = bsf->priv_data;
    AVPacket *in = NULL;
    CodedBitstreamFragment *au = &ctx->access_unit;
    int err, i, j, has_sps;
    H264RawAUD aud;
    uint8_t *displaymatrix_side_data = NULL;
    size_t displaymatrix_side_data_size = 0;

    err = ff_bsf_get_packet(bsf, &in);
    if (err < 0)
        return err;

    err = ff_cbs_read_packet(ctx->cbc, au, in);
    if (err < 0) {
        av_log(bsf, AV_LOG_ERROR, "Failed to read packet.\n");
        goto fail;
    }

    if (au->nb_units == 0) {
        av_log(bsf, AV_LOG_ERROR, "No NAL units in packet.\n");
        err = AVERROR_INVALIDDATA;
        goto fail;
    }

    // If an AUD is present, it must be the first NAL unit.
    if (au->units[0].type == H264_NAL_AUD) {
        if (ctx->aud == REMOVE)
            ff_cbs_delete_unit(ctx->cbc, au, 0);
    } else {
        if (ctx->aud == INSERT) {
            static const int primary_pic_type_table[] = {
                0x084, // 2, 7
                0x0a5, // 0, 2, 5, 7
                0x0e7, // 0, 1, 2, 5, 6, 7
                0x210, // 4, 9
                0x318, // 3, 4, 8, 9
                0x294, // 2, 4, 7, 9
                0x3bd, // 0, 2, 3, 4, 5, 7, 8, 9
                0x3ff, // 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
            };
            int primary_pic_type_mask = 0xff;

            for (i = 0; i < au->nb_units; i++) {
                if (au->units[i].type == H264_NAL_SLICE ||
                    au->units[i].type == H264_NAL_IDR_SLICE) {
                    H264RawSlice *slice = au->units[i].content;
                    for (j = 0; j < FF_ARRAY_ELEMS(primary_pic_type_table); j++) {
                         if (!(primary_pic_type_table[j] &
                               (1 << slice->header.slice_type)))
                             primary_pic_type_mask &= ~(1 << j);
                    }
                }
            }
            for (j = 0; j < FF_ARRAY_ELEMS(primary_pic_type_table); j++)
                if (primary_pic_type_mask & (1 << j))
                    break;
            if (j >= FF_ARRAY_ELEMS(primary_pic_type_table)) {
                av_log(bsf, AV_LOG_ERROR, "No usable primary_pic_type: "
                       "invalid slice types?\n");
                err = AVERROR_INVALIDDATA;
                goto fail;
            }

            aud = (H264RawAUD) {
                .nal_unit_header.nal_unit_type = H264_NAL_AUD,
                .primary_pic_type = j,
            };

            err = ff_cbs_insert_unit_content(ctx->cbc, au,
                                             0, H264_NAL_AUD, &aud, NULL);
            if (err < 0) {
                av_log(bsf, AV_LOG_ERROR, "Failed to insert AUD.\n");
                goto fail;
            }
        }
    }

    has_sps = 0;
    for (i = 0; i < au->nb_units; i++) {
        if (au->units[i].type == H264_NAL_SPS) {
            err = h264_metadata_update_sps(bsf, au->units[i].content);
            if (err < 0)
                goto fail;
            has_sps = 1;
        }
    }

    // Only insert the SEI in access units containing SPSs, and also
    // unconditionally in the first access unit we ever see.
    if (ctx->sei_user_data && (has_sps || !ctx->done_first_au)) {
        H264RawSEIPayload payload = {
            .payload_type = H264_SEI_TYPE_USER_DATA_UNREGISTERED,
        };
        H264RawSEIUserDataUnregistered *udu =
            &payload.payload.user_data_unregistered;

        for (i = j = 0; j < 32 && ctx->sei_user_data[i]; i++) {
            int c, v;
            c = ctx->sei_user_data[i];
            if (c == '-') {
                continue;
            } else if (av_isxdigit(c)) {
                c = av_tolower(c);
                v = (c <= '9' ? c - '0' : c - 'a' + 10);
            } else {
                goto invalid_user_data;
            }
            if (i & 1)
                udu->uuid_iso_iec_11578[j / 2] |= v;
            else
                udu->uuid_iso_iec_11578[j / 2] = v << 4;
            ++j;
        }
        if (j == 32 && ctx->sei_user_data[i] == '+') {
            size_t len = strlen(ctx->sei_user_data + i + 1);

            udu->data_ref = av_buffer_alloc(len + 1);
            if (!udu->data_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

            udu->data        = udu->data_ref->data;
            udu->data_length = len + 1;
            memcpy(udu->data, ctx->sei_user_data + i + 1, len + 1);

            err = ff_cbs_h264_add_sei_message(ctx->cbc, au, &payload);
            if (err < 0) {
                av_log(bsf, AV_LOG_ERROR, "Failed to add user data SEI "
                       "message to access unit.\n");
                goto fail;
            }

        } else {
        invalid_user_data:
            av_log(bsf, AV_LOG_ERROR, "Invalid user data: "
                   "must be \"UUID+string\".\n");
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (ctx->delete_filler) {
        for (i = 0; i < au->nb_units; i++) {
            if (au->units[i].type == H264_NAL_FILLER_DATA) {
                // Filler NAL units.
                err = ff_cbs_delete_unit(ctx->cbc, au, i);
                if (err < 0) {
                    av_log(bsf, AV_LOG_ERROR, "Failed to delete "
                           "filler NAL.\n");
                    goto fail;
                }
                --i;
                continue;
            }

            if (au->units[i].type == H264_NAL_SEI) {
                // Filler SEI messages.
                H264RawSEI *sei = au->units[i].content;

                for (j = 0; j < sei->payload_count; j++) {
                    if (sei->payload[j].payload_type ==
                        H264_SEI_TYPE_FILLER_PAYLOAD) {
                        err = ff_cbs_h264_delete_sei_message(ctx->cbc, au,
                                                             &au->units[i], j);
                        if (err < 0) {
                            av_log(bsf, AV_LOG_ERROR, "Failed to delete "
                                   "filler SEI message.\n");
                            goto fail;
                        }
                        // Renumbering might have happened, start again at
                        // the same NAL unit position.
                        --i;
                        break;
                    }
                }
            }
        }
    }

    if (ctx->display_orientation != PASS) {
        for (i = 0; i < au->nb_units; i++) {
            H264RawSEI *sei;
            if (au->units[i].type != H264_NAL_SEI)
                continue;
            sei = au->units[i].content;

            for (j = 0; j < sei->payload_count; j++) {
                H264RawSEIDisplayOrientation *disp;
                int32_t *matrix;

                if (sei->payload[j].payload_type !=
                    H264_SEI_TYPE_DISPLAY_ORIENTATION)
                    continue;
                disp = &sei->payload[j].payload.display_orientation;

                if (ctx->display_orientation == REMOVE ||
                    ctx->display_orientation == INSERT) {
                    err = ff_cbs_h264_delete_sei_message(ctx->cbc, au,
                                                         &au->units[i], j);
                    if (err < 0) {
                        av_log(bsf, AV_LOG_ERROR, "Failed to delete "
                               "display orientation SEI message.\n");
                        goto fail;
                    }
                    --i;
                    break;
                }

                matrix = av_mallocz(9 * sizeof(int32_t));
                if (!matrix) {
                    err = AVERROR(ENOMEM);
                    goto fail;
                }

                av_display_rotation_set(matrix,
                                        disp->anticlockwise_rotation *
                                        180.0 / 65536.0);
                av_display_matrix_flip(matrix, disp->hor_flip, disp->ver_flip);

                // If there are multiple display orientation messages in an
                // access unit then ignore all but the last one.
                av_freep(&displaymatrix_side_data);

                displaymatrix_side_data      = (uint8_t*)matrix;
                displaymatrix_side_data_size = 9 * sizeof(int32_t);
            }
        }
    }
    if (ctx->display_orientation == INSERT) {
        H264RawSEIPayload payload = {
            .payload_type = H264_SEI_TYPE_DISPLAY_ORIENTATION,
        };
        H264RawSEIDisplayOrientation *disp =
            &payload.payload.display_orientation;
        uint8_t *data;
        int size;
        int write = 0;

        data = av_packet_get_side_data(in, AV_PKT_DATA_DISPLAYMATRIX, &size);
        if (data && size >= 9 * sizeof(int32_t)) {
            int32_t matrix[9];
            int hflip, vflip;
            double angle;

            memcpy(matrix, data, sizeof(matrix));

            /* Infer a flip from the signs of the matrix diagonal, undo it with
             * av_display_matrix_flip(), then read the remaining pure rotation. */
            hflip = vflip = 0;
            if (matrix[0] < 0 && matrix[4] > 0)
                hflip = 1;
            else if (matrix[0] > 0 && matrix[4] < 0)
                vflip = 1;
            av_display_matrix_flip(matrix, hflip, vflip);

            angle = av_display_rotation_get(matrix);

            if (!(angle >= -180.0 && angle <= 180.0 /* also excludes NaN */) ||
                matrix[2] != 0 || matrix[5] != 0 ||
                matrix[6] != 0 || matrix[7] != 0) {
                av_log(bsf, AV_LOG_WARNING, "Input display matrix is not "
                       "representable in H.264 parameters.\n");
            } else {
                disp->hor_flip = hflip;
                disp->ver_flip = vflip;
                disp->anticlockwise_rotation =
                    (uint16_t)rint((angle >= 0.0 ? angle
                                                 : angle + 360.0) *
                                   65536.0 / 360.0);
                write = 1;
            }
        }

        if (has_sps || !ctx->done_first_au) {
            if (!isnan(ctx->rotate)) {
                disp->anticlockwise_rotation =
                    (uint16_t)rint((ctx->rotate >= 0.0 ? ctx->rotate
                                                       : ctx->rotate + 360.0) *
                                   65536.0 / 360.0);
                write = 1;
            }
            if (ctx->flip) {
                disp->hor_flip = !!(ctx->flip & FLIP_HORIZONTAL);
                disp->ver_flip = !!(ctx->flip & FLIP_VERTICAL);
                write = 1;
            }
        }

        if (write) {
            disp->display_orientation_repetition_period = 1;

            err = ff_cbs_h264_add_sei_message(ctx->cbc, au, &payload);
            if (err < 0) {
                av_log(bsf, AV_LOG_ERROR, "Failed to add display orientation "
                       "SEI message to access unit.\n");
                goto fail;
            }
        }
    }