Example No. 1
static int extract_extradata_h2645(AVBSFContext *ctx, AVPacket *pkt,
                                   uint8_t **data, int *size)
{
    static const int extradata_nal_types_hevc[] = {
        HEVC_NAL_VPS, HEVC_NAL_SPS, HEVC_NAL_PPS,
    };
    static const int extradata_nal_types_h264[] = {
        H264_NAL_SPS, H264_NAL_PPS,
    };

    ExtractExtradataContext *s = ctx->priv_data;

    int extradata_size = 0, filtered_size = 0;
    const int *extradata_nal_types;
    int nb_extradata_nal_types;
    int i, has_sps = 0, has_vps = 0, ret = 0;

    if (ctx->par_in->codec_id == AV_CODEC_ID_HEVC) {
        extradata_nal_types    = extradata_nal_types_hevc;
        nb_extradata_nal_types = FF_ARRAY_ELEMS(extradata_nal_types_hevc);
    } else {
        extradata_nal_types    = extradata_nal_types_h264;
        nb_extradata_nal_types = FF_ARRAY_ELEMS(extradata_nal_types_h264);
    }

    ret = ff_h2645_packet_split(&s->h2645_pkt, pkt->data, pkt->size,
                                ctx, 0, 0, ctx->par_in->codec_id);
    if (ret < 0)
        return ret;

    for (i = 0; i < s->h2645_pkt.nb_nals; i++) {
        H2645NAL *nal = &s->h2645_pkt.nals[i];
        if (val_in_array(extradata_nal_types, nb_extradata_nal_types, nal->type)) {
            extradata_size += nal->raw_size + 3;
            if (ctx->par_in->codec_id == AV_CODEC_ID_HEVC) {
                if (nal->type == HEVC_NAL_SPS) has_sps = 1;
                if (nal->type == HEVC_NAL_VPS) has_vps = 1;
            } else {
                if (nal->type == H264_NAL_SPS) has_sps = 1;
            }
        } else if (s->remove) {
            filtered_size += nal->raw_size + 3;
        }
    }

    if (extradata_size &&
        ((ctx->par_in->codec_id == AV_CODEC_ID_HEVC && has_sps && has_vps) ||
         (ctx->par_in->codec_id == AV_CODEC_ID_H264 && has_sps))) {
        // Initialize to NULL so the error path below can unref it safely
        // even when s->remove is not set.
        AVBufferRef *filtered_buf = NULL;
        uint8_t *extradata, *filtered_data;

        if (s->remove) {
            filtered_buf = av_buffer_alloc(filtered_size + AV_INPUT_BUFFER_PADDING_SIZE);
            if (!filtered_buf) {
                return AVERROR(ENOMEM);
            }
            memset(filtered_buf->data + filtered_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

            filtered_data = filtered_buf->data;
        }

        extradata = av_malloc(extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
        if (!extradata) {
            av_buffer_unref(&filtered_buf);
            return AVERROR(ENOMEM);
        }
        memset(extradata + extradata_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

        *data = extradata;
        *size = extradata_size;

        for (i = 0; i < s->h2645_pkt.nb_nals; i++) {
            H2645NAL *nal = &s->h2645_pkt.nals[i];
            if (val_in_array(extradata_nal_types, nb_extradata_nal_types,
                             nal->type)) {
                AV_WB24(extradata, 1); // startcode
                memcpy(extradata + 3, nal->raw_data, nal->raw_size);
                extradata += 3 + nal->raw_size;
            } else if (s->remove) {
                AV_WB24(filtered_data, 1); // startcode
                memcpy(filtered_data + 3, nal->raw_data, nal->raw_size);
                filtered_data += 3 + nal->raw_size;
            }
        }

        if (s->remove) {
            av_buffer_unref(&pkt->buf);
            pkt->buf  = filtered_buf;
            pkt->data = filtered_buf->data;
            pkt->size = filtered_size;
        }
    }

    return 0;
}
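Each selected parameter-set NAL unit is written back out in Annex B form: a three-byte 0x000001 start code followed by the raw NAL bytes, which is why the size accounting above adds raw_size + 3 per unit. A minimal standalone sketch of that prefixing step (SketchNAL and append_nal_annexb are hypothetical names, not FFmpeg API):

/* Illustrative sketch only: shows the Annex B prefixing pattern used above. */
#include <stdint.h>
#include <string.h>

/* Hypothetical minimal NAL descriptor for the sketch. */
typedef struct SketchNAL {
    const uint8_t *raw_data;
    size_t         raw_size;
} SketchNAL;

/* Append one NAL with a start code; returns the new write position. */
static uint8_t *append_nal_annexb(uint8_t *dst, const SketchNAL *nal)
{
    dst[0] = 0x00;   /* AV_WB24(dst, 1) writes exactly these three bytes */
    dst[1] = 0x00;
    dst[2] = 0x01;
    memcpy(dst + 3, nal->raw_data, nal->raw_size);
    return dst + 3 + nal->raw_size;
}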
Example No. 2
// Split packets and add them to the waiting_buffers list. We don't queue them
// immediately, because it can happen that the decoder is temporarily blocked
// (due to us not reading/returning enough output buffers) and won't accept
// new input. (This wouldn't be an issue if MMAL input buffers always were
// complete frames - then the input buffer just would have to be big enough.)
// If is_extradata is set, send it as MMAL_BUFFER_HEADER_FLAG_CONFIG.
static int ffmmal_add_packet(AVCodecContext *avctx, AVPacket *avpkt,
                             int is_extradata)
{
    MMALDecodeContext *ctx = avctx->priv_data;
    AVBufferRef *buf = NULL;
    int size = 0;
    uint8_t *data = (uint8_t *)"";
    uint8_t *start;
    int ret = 0;

    if (avpkt->size) {
        if (avpkt->buf) {
            buf = av_buffer_ref(avpkt->buf);
            size = avpkt->size;
            data = avpkt->data;
        } else {
            buf = av_buffer_alloc(avpkt->size);
            if (buf) {
                memcpy(buf->data, avpkt->data, avpkt->size);
                size = buf->size;
                data = buf->data;
            }
        }
        if (!buf) {
            ret = AVERROR(ENOMEM);
            goto done;
        }
        if (!is_extradata)
            ctx->packets_sent++;
    } else {
        if (ctx->eos_sent)
            goto done;
        if (!ctx->packets_sent) {
            // Short-cut the flush logic to avoid upsetting MMAL.
            ctx->eos_sent = 1;
            ctx->eos_received = 1;
            goto done;
        }
    }

    start = data;

    do {
        FFBufferEntry *buffer = av_mallocz(sizeof(*buffer));
        if (!buffer) {
            ret = AVERROR(ENOMEM);
            goto done;
        }

        buffer->data = data;
        buffer->length = FFMIN(size, ctx->decoder->input[0]->buffer_size);

        if (is_extradata)
            buffer->flags |= MMAL_BUFFER_HEADER_FLAG_CONFIG;

        if (data == start)
            buffer->flags |= MMAL_BUFFER_HEADER_FLAG_FRAME_START;

        data += buffer->length;
        size -= buffer->length;

        buffer->pts = avpkt->pts == AV_NOPTS_VALUE ? MMAL_TIME_UNKNOWN : avpkt->pts;
        buffer->dts = avpkt->dts == AV_NOPTS_VALUE ? MMAL_TIME_UNKNOWN : avpkt->dts;

        if (!size) {
            buffer->flags |= MMAL_BUFFER_HEADER_FLAG_FRAME_END;
            avpriv_atomic_int_add_and_fetch(&ctx->packets_buffered, 1);
        }

        if (!buffer->length) {
            buffer->flags |= MMAL_BUFFER_HEADER_FLAG_EOS;
            ctx->eos_sent = 1;
        }

        if (buf) {
            buffer->ref = av_buffer_ref(buf);
            if (!buffer->ref) {
                av_free(buffer);
                ret = AVERROR(ENOMEM);
                goto done;
            }
        }

        // Insert at end of the list
        if (!ctx->waiting_buffers)
            ctx->waiting_buffers = buffer;
        if (ctx->waiting_buffers_tail)
            ctx->waiting_buffers_tail->next = buffer;
        ctx->waiting_buffers_tail = buffer;
    } while (size);

done:
    av_buffer_unref(&buf);
    return ret;
}
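The do/while loop above splits one AVPacket into pieces no larger than the input port's buffer_size and marks the first and last piece of the frame. A minimal standalone sketch of the same chunking idea, with hypothetical names and flags instead of the MMAL API:

/* Illustrative sketch only, not the MMAL API. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define CHUNK_FLAG_START 0x1
#define CHUNK_FLAG_END   0x2

static void split_into_chunks(const uint8_t *data, size_t size, size_t max_chunk)
{
    const uint8_t *start = data;

    do {
        size_t   len   = size < max_chunk ? size : max_chunk;
        unsigned flags = 0;

        if (data == start)
            flags |= CHUNK_FLAG_START;   /* first piece of the packet */
        if (len == size)
            flags |= CHUNK_FLAG_END;     /* last piece of the packet */

        printf("chunk: %zu bytes, flags 0x%x\n", len, flags);

        data += len;
        size -= len;
    } while (size);
}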
Example No. 3
static int extract_extradata_av1(AVBSFContext *ctx, AVPacket *pkt,
                                 uint8_t **data, int *size)
{
    static const int extradata_obu_types[] = {
        AV1_OBU_SEQUENCE_HEADER, AV1_OBU_METADATA,
    };
    ExtractExtradataContext *s = ctx->priv_data;

    int extradata_size = 0, filtered_size = 0;
    int nb_extradata_obu_types = FF_ARRAY_ELEMS(extradata_obu_types);
    int i, has_seq = 0, ret = 0;

    ret = ff_av1_packet_split(&s->av1_pkt, pkt->data, pkt->size, ctx);
    if (ret < 0)
        return ret;

    for (i = 0; i < s->av1_pkt.nb_obus; i++) {
        AV1OBU *obu = &s->av1_pkt.obus[i];
        if (val_in_array(extradata_obu_types, nb_extradata_obu_types, obu->type)) {
            extradata_size += obu->raw_size;
            if (obu->type == AV1_OBU_SEQUENCE_HEADER)
                has_seq = 1;
        } else if (s->remove) {
            filtered_size += obu->raw_size;
        }
    }

    if (extradata_size && has_seq) {
        // Initialize to NULL so the error path below can unref it safely
        // even when s->remove is not set.
        AVBufferRef *filtered_buf = NULL;
        uint8_t *extradata, *filtered_data;

        if (s->remove) {
            filtered_buf = av_buffer_alloc(filtered_size + AV_INPUT_BUFFER_PADDING_SIZE);
            if (!filtered_buf) {
                return AVERROR(ENOMEM);
            }
            memset(filtered_buf->data + filtered_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

            filtered_data = filtered_buf->data;
        }

        extradata = av_malloc(extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
        if (!extradata) {
            av_buffer_unref(&filtered_buf);
            return AVERROR(ENOMEM);
        }
        memset(extradata + extradata_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

        *data = extradata;
        *size = extradata_size;

        for (i = 0; i < s->av1_pkt.nb_obus; i++) {
            AV1OBU *obu = &s->av1_pkt.obus[i];
            if (val_in_array(extradata_obu_types, nb_extradata_obu_types,
                             obu->type)) {
                memcpy(extradata, obu->raw_data, obu->raw_size);
                extradata += obu->raw_size;
            } else if (s->remove) {
                memcpy(filtered_data, obu->raw_data, obu->raw_size);
                filtered_data += obu->raw_size;
            }
        }

        if (s->remove) {
            av_buffer_unref(&pkt->buf);
            pkt->buf  = filtered_buf;
            pkt->data = filtered_buf->data;
            pkt->size = filtered_size;
        }
    }

    return 0;
}
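Both extractors allocate the returned extradata with AV_INPUT_BUFFER_PADDING_SIZE extra bytes and zero that tail, as libavcodec expects for buffers its parsers may over-read. A minimal sketch of that allocation pattern, assuming the FFmpeg development headers are available; alloc_padded is a hypothetical helper:

#include <errno.h>
#include <stdint.h>
#include <string.h>
#include <libavcodec/avcodec.h>   /* AV_INPUT_BUFFER_PADDING_SIZE */
#include <libavutil/error.h>      /* AVERROR() */
#include <libavutil/mem.h>        /* av_malloc() */

/* Allocate a payload buffer with zeroed padding; returns 0 or AVERROR(ENOMEM). */
static int alloc_padded(uint8_t **out, size_t payload_size)
{
    uint8_t *p = av_malloc(payload_size + AV_INPUT_BUFFER_PADDING_SIZE);
    if (!p)
        return AVERROR(ENOMEM);
    memset(p + payload_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
    *out = p;
    return 0;
}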
Example No. 4
static int cbs_jpeg_assemble_fragment(CodedBitstreamContext *ctx,
                                       CodedBitstreamFragment *frag)
{
    const CodedBitstreamUnit *unit;
    uint8_t *data;
    size_t size, dp, sp;
    int i;

    size = 4; // SOI + EOI.
    for (i = 0; i < frag->nb_units; i++) {
        unit = &frag->units[i];
        size += 2 + unit->data_size;
        if (unit->type == JPEG_MARKER_SOS) {
            for (sp = 0; sp < unit->data_size; sp++) {
                if (unit->data[sp] == 0xff)
                    ++size;
            }
        }
    }

    frag->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE);
    if (!frag->data_ref)
        return AVERROR(ENOMEM);
    data = frag->data_ref->data;

    dp = 0;

    data[dp++] = 0xff;
    data[dp++] = JPEG_MARKER_SOI;

    for (i = 0; i < frag->nb_units; i++) {
        unit = &frag->units[i];

        data[dp++] = 0xff;
        data[dp++] = unit->type;

        if (unit->type != JPEG_MARKER_SOS) {
            memcpy(data + dp, unit->data, unit->data_size);
            dp += unit->data_size;
        } else {
            sp = AV_RB16(unit->data);
            av_assert0(sp <= unit->data_size);
            memcpy(data + dp, unit->data, sp);
            dp += sp;

            for (; sp < unit->data_size; sp++) {
                if (unit->data[sp] == 0xff) {
                    data[dp++] = 0xff;
                    data[dp++] = 0x00;
                } else {
                    data[dp++] = unit->data[sp];
                }
            }
        }
    }

    data[dp++] = 0xff;
    data[dp++] = JPEG_MARKER_EOI;

    av_assert0(dp == size);

    memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
    frag->data      = data;
    frag->data_size = size;

    return 0;
}
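In the SOS case above, the first AV_RB16(unit->data) bytes are the scan header and are copied verbatim; the entropy-coded data that follows is byte-stuffed so that a raw 0xFF can never be read as the start of a marker. A minimal standalone sketch of that stuffing step (hypothetical name, not part of CBS):

#include <stddef.h>
#include <stdint.h>

/* Returns the number of bytes written to dst (dst must allow for the
 * worst case of 2 * size). */
static size_t jpeg_stuff_bytes(uint8_t *dst, const uint8_t *src, size_t size)
{
    size_t dp = 0, sp;

    for (sp = 0; sp < size; sp++) {
        dst[dp++] = src[sp];
        if (src[sp] == 0xff)
            dst[dp++] = 0x00;   /* escape 0xFF so it cannot start a marker */
    }
    return dp;
}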
Example No. 5
static int h264_metadata_filter(AVBSFContext *bsf, AVPacket *out)
{
    H264MetadataContext *ctx = bsf->priv_data;
    AVPacket *in = NULL;
    CodedBitstreamFragment *au = &ctx->access_unit;
    int err, i, j, has_sps;
    H264RawAUD aud;
    uint8_t *displaymatrix_side_data = NULL;
    size_t displaymatrix_side_data_size = 0;

    err = ff_bsf_get_packet(bsf, &in);
    if (err < 0)
        return err;

    err = ff_cbs_read_packet(ctx->cbc, au, in);
    if (err < 0) {
        av_log(bsf, AV_LOG_ERROR, "Failed to read packet.\n");
        goto fail;
    }

    if (au->nb_units == 0) {
        av_log(bsf, AV_LOG_ERROR, "No NAL units in packet.\n");
        err = AVERROR_INVALIDDATA;
        goto fail;
    }

    // If an AUD is present, it must be the first NAL unit.
    if (au->units[0].type == H264_NAL_AUD) {
        if (ctx->aud == REMOVE)
            ff_cbs_delete_unit(ctx->cbc, au, 0);
    } else {
        if (ctx->aud == INSERT) {
            static const int primary_pic_type_table[] = {
                0x084, // 2, 7
                0x0a5, // 0, 2, 5, 7
                0x0e7, // 0, 1, 2, 5, 6, 7
                0x210, // 4, 9
                0x318, // 3, 4, 8, 9
                0x294, // 2, 4, 7, 9
                0x3bd, // 0, 2, 3, 4, 5, 7, 8, 9
                0x3ff, // 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
            };
            int primary_pic_type_mask = 0xff;

            for (i = 0; i < au->nb_units; i++) {
                if (au->units[i].type == H264_NAL_SLICE ||
                    au->units[i].type == H264_NAL_IDR_SLICE) {
                    H264RawSlice *slice = au->units[i].content;
                    for (j = 0; j < FF_ARRAY_ELEMS(primary_pic_type_table); j++) {
                         if (!(primary_pic_type_table[j] &
                               (1 << slice->header.slice_type)))
                             primary_pic_type_mask &= ~(1 << j);
                    }
                }
            }
            for (j = 0; j < FF_ARRAY_ELEMS(primary_pic_type_table); j++)
                if (primary_pic_type_mask & (1 << j))
                    break;
            if (j >= FF_ARRAY_ELEMS(primary_pic_type_table)) {
                av_log(bsf, AV_LOG_ERROR, "No usable primary_pic_type: "
                       "invalid slice types?\n");
                err = AVERROR_INVALIDDATA;
                goto fail;
            }

            aud = (H264RawAUD) {
                .nal_unit_header.nal_unit_type = H264_NAL_AUD,
                .primary_pic_type = j,
            };

            err = ff_cbs_insert_unit_content(ctx->cbc, au,
                                             0, H264_NAL_AUD, &aud, NULL);
            if (err < 0) {
                av_log(bsf, AV_LOG_ERROR, "Failed to insert AUD.\n");
                goto fail;
            }
        }
    }

    has_sps = 0;
    for (i = 0; i < au->nb_units; i++) {
        if (au->units[i].type == H264_NAL_SPS) {
            err = h264_metadata_update_sps(bsf, au->units[i].content);
            if (err < 0)
                goto fail;
            has_sps = 1;
        }
    }

    // Only insert the SEI in access units containing SPSs, and also
    // unconditionally in the first access unit we ever see.
    if (ctx->sei_user_data && (has_sps || !ctx->done_first_au)) {
        H264RawSEIPayload payload = {
            .payload_type = H264_SEI_TYPE_USER_DATA_UNREGISTERED,
        };
        H264RawSEIUserDataUnregistered *udu =
            &payload.payload.user_data_unregistered;

        for (i = j = 0; j < 32 && ctx->sei_user_data[i]; i++) {
            int c, v;
            c = ctx->sei_user_data[i];
            if (c == '-') {
                continue;
            } else if (av_isxdigit(c)) {
                c = av_tolower(c);
                v = (c <= '9' ? c - '0' : c - 'a' + 10);
            } else {
                goto invalid_user_data;
            }
            if (i & 1)
                udu->uuid_iso_iec_11578[j / 2] |= v;
            else
                udu->uuid_iso_iec_11578[j / 2] = v << 4;
            ++j;
        }
        if (j == 32 && ctx->sei_user_data[i] == '+') {
            size_t len = strlen(ctx->sei_user_data + i + 1);

            udu->data_ref = av_buffer_alloc(len + 1);
            if (!udu->data_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

            udu->data        = udu->data_ref->data;
            udu->data_length = len + 1;
            memcpy(udu->data, ctx->sei_user_data + i + 1, len + 1);

            err = ff_cbs_h264_add_sei_message(ctx->cbc, au, &payload);
            if (err < 0) {
                av_log(bsf, AV_LOG_ERROR, "Failed to add user data SEI "
                       "message to access unit.\n");
                goto fail;
            }

        } else {
        invalid_user_data:
            av_log(bsf, AV_LOG_ERROR, "Invalid user data: "
                   "must be \"UUID+string\".\n");
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (ctx->delete_filler) {
        for (i = 0; i < au->nb_units; i++) {
            if (au->units[i].type == H264_NAL_FILLER_DATA) {
                // Filler NAL units.
                err = ff_cbs_delete_unit(ctx->cbc, au, i);
                if (err < 0) {
                    av_log(bsf, AV_LOG_ERROR, "Failed to delete "
                           "filler NAL.\n");
                    goto fail;
                }
                --i;
                continue;
            }

            if (au->units[i].type == H264_NAL_SEI) {
                // Filler SEI messages.
                H264RawSEI *sei = au->units[i].content;

                for (j = 0; j < sei->payload_count; j++) {
                    if (sei->payload[j].payload_type ==
                        H264_SEI_TYPE_FILLER_PAYLOAD) {
                        err = ff_cbs_h264_delete_sei_message(ctx->cbc, au,
                                                             &au->units[i], j);
                        if (err < 0) {
                            av_log(bsf, AV_LOG_ERROR, "Failed to delete "
                                   "filler SEI message.\n");
                            goto fail;
                        }
                        // Renumbering might have happened, start again at
                        // the same NAL unit position.
                        --i;
                        break;
                    }
                }
            }
        }
    }

    if (ctx->display_orientation != PASS) {
        for (i = 0; i < au->nb_units; i++) {
            H264RawSEI *sei;
            if (au->units[i].type != H264_NAL_SEI)
                continue;
            sei = au->units[i].content;

            for (j = 0; j < sei->payload_count; j++) {
                H264RawSEIDisplayOrientation *disp;
                int32_t *matrix;

                if (sei->payload[j].payload_type !=
                    H264_SEI_TYPE_DISPLAY_ORIENTATION)
                    continue;
                disp = &sei->payload[j].payload.display_orientation;

                if (ctx->display_orientation == REMOVE ||
                    ctx->display_orientation == INSERT) {
                    err = ff_cbs_h264_delete_sei_message(ctx->cbc, au,
                                                         &au->units[i], j);
                    if (err < 0) {
                        av_log(bsf, AV_LOG_ERROR, "Failed to delete "
                               "display orientation SEI message.\n");
                        goto fail;
                    }
                    --i;
                    break;
                }

                matrix = av_mallocz(9 * sizeof(int32_t));
                if (!matrix) {
                    err = AVERROR(ENOMEM);
                    goto fail;
                }

                av_display_rotation_set(matrix,
                                        disp->anticlockwise_rotation *
                                        180.0 / 65536.0);
                av_display_matrix_flip(matrix, disp->hor_flip, disp->ver_flip);

                // If there are multiple display orientation messages in an
                // access unit then ignore all but the last one.
                av_freep(&displaymatrix_side_data);

                displaymatrix_side_data      = (uint8_t*)matrix;
                displaymatrix_side_data_size = 9 * sizeof(int32_t);
            }
        }
    }
    if (ctx->display_orientation == INSERT) {
        H264RawSEIPayload payload = {
            .payload_type = H264_SEI_TYPE_DISPLAY_ORIENTATION,
        };
        H264RawSEIDisplayOrientation *disp =
            &payload.payload.display_orientation;
        uint8_t *data;
        int size;
        int write = 0;

        data = av_packet_get_side_data(in, AV_PKT_DATA_DISPLAYMATRIX, &size);
        if (data && size >= 9 * sizeof(int32_t)) {
            int32_t matrix[9];
            int hflip, vflip;
            double angle;

            memcpy(matrix, data, sizeof(matrix));

            hflip = vflip = 0;
            if (matrix[0] < 0 && matrix[4] > 0)
                hflip = 1;
            else if (matrix[0] > 0 && matrix[4] < 0)
                vflip = 1;
            av_display_matrix_flip(matrix, hflip, vflip);

            angle = av_display_rotation_get(matrix);

            if (!(angle >= -180.0 && angle <= 180.0 /* also excludes NaN */) ||
                matrix[2] != 0 || matrix[5] != 0 ||
                matrix[6] != 0 || matrix[7] != 0) {
                av_log(bsf, AV_LOG_WARNING, "Input display matrix is not "
                       "representable in H.264 parameters.\n");
            } else {
                disp->hor_flip = hflip;
                disp->ver_flip = vflip;
                disp->anticlockwise_rotation =
                    (uint16_t)rint((angle >= 0.0 ? angle
                                                 : angle + 360.0) *
                                   65536.0 / 360.0);
                write = 1;
            }
        }

        if (has_sps || !ctx->done_first_au) {
            if (!isnan(ctx->rotate)) {
                disp->anticlockwise_rotation =
                    (uint16_t)rint((ctx->rotate >= 0.0 ? ctx->rotate
                                                       : ctx->rotate + 360.0) *
                                   65536.0 / 360.0);
                write = 1;
            }
            if (ctx->flip) {
                disp->hor_flip = !!(ctx->flip & FLIP_HORIZONTAL);
                disp->ver_flip = !!(ctx->flip & FLIP_VERTICAL);
                write = 1;
            }
        }

        if (write) {
            disp->display_orientation_repetition_period = 1;

            err = ff_cbs_h264_add_sei_message(ctx->cbc, au, &payload);
            if (err < 0) {
                av_log(bsf, AV_LOG_ERROR, "Failed to add display orientation "
                       "SEI message to access unit.\n");
                goto fail;
            }
        }
    }
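The INSERT path above stores the rotation angle in anticlockwise_rotation as a 16-bit value in units of 360/65536 degrees, so 90 degrees becomes rint(90 * 65536 / 360) = 16384. A minimal standalone sketch of that conversion and its inverse (hypothetical helper names, not part of the bitstream filter):

#include <math.h>
#include <stdint.h>

static uint16_t angle_to_h264_rotation(double angle_deg)
{
    if (angle_deg < 0.0)            /* map [-180, 0) onto [180, 360) */
        angle_deg += 360.0;
    return (uint16_t)rint(angle_deg * 65536.0 / 360.0);
}

static double h264_rotation_to_angle(uint16_t rotation)
{
    return rotation * 360.0 / 65536.0;
}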
Example No. 6
int ff_flac_parse_picture(AVFormatContext *s, uint8_t *buf, int buf_size)
{
    const CodecMime *mime = ff_id3v2_mime_tags;
    enum  AVCodecID      id = AV_CODEC_ID_NONE;
    AVBufferRef *data = NULL;
    uint8_t mimetype[64], *desc = NULL;
    AVIOContext *pb = NULL;
    AVStream *st;
    int type, width, height;
    int len, ret = 0;

    pb = avio_alloc_context(buf, buf_size, 0, NULL, NULL, NULL, NULL);
    if (!pb)
        return AVERROR(ENOMEM);

    /* read the picture type */
    type      = avio_rb32(pb);
    if (type >= FF_ARRAY_ELEMS(ff_id3v2_picture_types) || type < 0) {
        av_log(s, AV_LOG_ERROR, "Invalid picture type: %d.\n", type);
        if (s->error_recognition & AV_EF_EXPLODE) {
            RETURN_ERROR(AVERROR_INVALIDDATA);
        }
        type = 0;
    }

    /* picture mimetype */
    len  = avio_rb32(pb);
    if (len <= 0 ||
        avio_read(pb, mimetype, FFMIN(len, sizeof(mimetype) - 1)) != len) {
        av_log(s, AV_LOG_ERROR, "Could not read mimetype from an attached "
               "picture.\n");
        if (s->error_recognition & AV_EF_EXPLODE)
            ret = AVERROR_INVALIDDATA;
        goto fail;
    }
    av_assert0(len < sizeof(mimetype));
    mimetype[len] = 0;

    while (mime->id != AV_CODEC_ID_NONE) {
        if (!strncmp(mime->str, mimetype, sizeof(mimetype))) {
            id = mime->id;
            break;
        }
        mime++;
    }
    if (id == AV_CODEC_ID_NONE) {
        av_log(s, AV_LOG_ERROR, "Unknown attached picture mimetype: %s.\n",
               mimetype);
        if (s->error_recognition & AV_EF_EXPLODE)
            ret = AVERROR_INVALIDDATA;
        goto fail;
    }

    /* picture description */
    len = avio_rb32(pb);
    if (len > 0) {
        if (!(desc = av_malloc(len + 1))) {
            RETURN_ERROR(AVERROR(ENOMEM));
        }

        if (avio_read(pb, desc, len) != len) {
            av_log(s, AV_LOG_ERROR, "Error reading attached picture description.\n");
            if (s->error_recognition & AV_EF_EXPLODE)
                ret = AVERROR(EIO);
            goto fail;
        }
        desc[len] = 0;
    }

    /* picture metadata */
    width  = avio_rb32(pb);
    height = avio_rb32(pb);
    avio_skip(pb, 8);

    /* picture data */
    len = avio_rb32(pb);
    if (len <= 0) {
        av_log(s, AV_LOG_ERROR, "Invalid attached picture size: %d.\n", len);
        if (s->error_recognition & AV_EF_EXPLODE)
            ret = AVERROR_INVALIDDATA;
        goto fail;
    }
    if (!(data = av_buffer_alloc(len))) {
        RETURN_ERROR(AVERROR(ENOMEM));
    }
    if (avio_read(pb, data->data, len) != len) {
        av_log(s, AV_LOG_ERROR, "Error reading attached picture data.\n");
        if (s->error_recognition & AV_EF_EXPLODE)
            ret = AVERROR(EIO);
        goto fail;
    }

    st = avformat_new_stream(s, NULL);
    if (!st) {
        RETURN_ERROR(AVERROR(ENOMEM));
    }

    av_init_packet(&st->attached_pic);
    st->attached_pic.buf          = data;
    st->attached_pic.data         = data->data;
    st->attached_pic.size         = len;
    st->attached_pic.stream_index = st->index;
    st->attached_pic.flags       |= AV_PKT_FLAG_KEY;

    st->disposition      |= AV_DISPOSITION_ATTACHED_PIC;
    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id   = id;
    st->codec->width      = width;
    st->codec->height     = height;
    av_dict_set(&st->metadata, "comment", ff_id3v2_picture_types[type], 0);
    if (desc)
        av_dict_set(&st->metadata, "title",   desc, AV_DICT_DONT_STRDUP_VAL);

    av_freep(&pb);

    return 0;

fail:
    av_buffer_unref(&data);
    av_freep(&desc);
    av_freep(&pb);
    return ret;
}
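The parser above repeatedly reads a 32-bit big-endian length and then that many bytes of payload (mimetype, description, picture data), rejecting fields that do not fit the destination. A minimal standalone sketch of that length-prefixed pattern on a plain memory buffer, with hypothetical helper names instead of the AVIOContext API:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

static uint32_t read_be32(const uint8_t *p)
{
    return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
           ((uint32_t)p[2] <<  8) |  (uint32_t)p[3];
}

/* Copy one length-prefixed field into a bounded, NUL-terminated buffer.
 * Returns the number of input bytes consumed, or 0 on error. */
static size_t read_length_prefixed(const uint8_t *buf, size_t buf_size,
                                   char *out, size_t out_size)
{
    uint32_t len;

    if (buf_size < 4)
        return 0;
    len = read_be32(buf);
    if (len > buf_size - 4 || len >= out_size)
        return 0;                    /* truncated input or field too long */
    memcpy(out, buf + 4, len);
    out[len] = '\0';
    return 4 + (size_t)len;
}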
Example No. 7
// Split packets and add them to the waiting_buffers list. We don't queue them
// immediately, because it can happen that the decoder is temporarily blocked
// (due to us not reading/returning enough output buffers) and won't accept
// new input. (This wouldn't be an issue if MMAL input buffers were always
// complete frames; then the input buffer would just have to be big enough.)
static int ffmmal_add_packet(AVCodecContext *avctx, AVPacket *avpkt)
{
    MMALDecodeContext *ctx = avctx->priv_data;
    AVBufferRef *buf = NULL;
    int size = 0;
    uint8_t *data = (uint8_t *)"";
    uint8_t *start;
    int ret = 0;

    if (avpkt->size) {
        if (ctx->bsfc) {
            uint8_t *tmp_data;
            int tmp_size;
            if ((ret = av_bitstream_filter_filter(ctx->bsfc, avctx, NULL,
                                                  &tmp_data, &tmp_size,
                                                  avpkt->data, avpkt->size,
                                                  avpkt->flags & AV_PKT_FLAG_KEY)) < 0)
                goto done;
            buf = av_buffer_create(tmp_data, tmp_size, NULL, NULL, 0);
        } else {
            if (avpkt->buf) {
                buf = av_buffer_ref(avpkt->buf);
            } else {
                buf = av_buffer_alloc(avpkt->size);
                if (buf)
                    memcpy(buf->data, avpkt->data, avpkt->size);
            }
        }
        if (!buf) {
            ret = AVERROR(ENOMEM);
            goto done;
        }
        size = buf->size;
        data = buf->data;
        ctx->packets_sent++;
    } else {
        if (!ctx->packets_sent) {
            // Short-cut the flush logic to avoid upsetting MMAL.
            ctx->eos_sent = 1;
            ctx->eos_received = 1;
            goto done;
        }
    }

    start = data;

    do {
        FFBufferEntry *buffer = av_mallocz(sizeof(*buffer));
        if (!buffer) {
            ret = AVERROR(ENOMEM);
            goto done;
        }

        buffer->data = data;
        buffer->length = FFMIN(size, ctx->decoder->input[0]->buffer_size);

        if (data == start)
            buffer->flags |= MMAL_BUFFER_HEADER_FLAG_FRAME_START;

        data += buffer->length;
        size -= buffer->length;

        buffer->pts = avpkt->pts == AV_NOPTS_VALUE ? MMAL_TIME_UNKNOWN : avpkt->pts;
        buffer->dts = avpkt->dts == AV_NOPTS_VALUE ? MMAL_TIME_UNKNOWN : avpkt->dts;

        if (!size)
            buffer->flags |= MMAL_BUFFER_HEADER_FLAG_FRAME_END;

        if (!buffer->length) {
            buffer->flags |= MMAL_BUFFER_HEADER_FLAG_EOS;
            ctx->eos_sent = 1;
        }

        if (buf) {
            buffer->ref = av_buffer_ref(buf);
            if (!buffer->ref) {
                av_free(buffer);
                ret = AVERROR(ENOMEM);
                goto done;
            }
        }

        // Insert at end of the list
        if (!ctx->waiting_buffers)
            ctx->waiting_buffers = buffer;
        if (ctx->waiting_buffers_tail)
            ctx->waiting_buffers_tail->next = buffer;
        ctx->waiting_buffers_tail = buffer;
    } while (size);

done:
    av_buffer_unref(&buf);
    return ret;
}
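Both versions of ffmmal_add_packet append each FFBufferEntry at the tail of a singly linked list using separate head and tail pointers; av_mallocz() guarantees the new node's next pointer starts out NULL. A minimal standalone sketch of that tail-insertion pattern with a hypothetical node type:

#include <stdlib.h>

/* Hypothetical node and list types for the sketch. */
typedef struct SketchNode {
    int                value;
    struct SketchNode *next;
} SketchNode;

typedef struct SketchList {
    SketchNode *head;
    SketchNode *tail;
} SketchList;

/* Append at the end of the list; returns 0 on success, -1 on allocation
 * failure. calloc() leaves node->next NULL, like av_mallocz() above. */
static int list_append(SketchList *list, int value)
{
    SketchNode *node = calloc(1, sizeof(*node));
    if (!node)
        return -1;
    node->value = value;

    if (!list->head)
        list->head = node;          /* first element becomes the head */
    if (list->tail)
        list->tail->next = node;    /* link behind the current tail */
    list->tail = node;
    return 0;
}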