Example #1
int ff_scale_image(uint8_t *dst_data[4], int dst_linesize[4],
                   int dst_w, int dst_h, enum AVPixelFormat dst_pix_fmt,
                   uint8_t * const src_data[4], int src_linesize[4],
                   int src_w, int src_h, enum AVPixelFormat src_pix_fmt,
                   void *log_ctx)
{
  int ret;
  struct SwsContext *sws_ctx = sws_getContext(src_w, src_h, src_pix_fmt,
                                              dst_w, dst_h, dst_pix_fmt,
                                              SWS_BILINEAR, NULL, NULL, NULL);
  if (!sws_ctx) {
    av_log(log_ctx, AV_LOG_ERROR,
           "Impossible to create scale context for the conversion "
           "fmt:%s s:%dx%d -> fmt:%s s:%dx%d\n",
           av_get_pix_fmt_name(src_pix_fmt), src_w, src_h,
           av_get_pix_fmt_name(dst_pix_fmt), dst_w, dst_h);
    ret = AVERROR(EINVAL);
    goto end;
  }

  if ((ret = av_image_alloc(dst_data, dst_linesize, dst_w, dst_h, dst_pix_fmt, 16)) < 0)
    goto end;
  ret = 0;
  sws_scale(sws_ctx, (const uint8_t * const*)src_data, src_linesize, 0, src_h, dst_data, dst_linesize);

  end:
  sws_freeContext(sws_ctx);
  return ret;
}
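A minimal sketch of how this helper might be driven. ff_scale_image() is an internal libavfilter helper, so the `main` wrapper, sizes, and formats below are illustrative assumptions, not part of the original:

#include <libavutil/imgutils.h>
#include <libavutil/mem.h>

int main(void)
{
    uint8_t *src_data[4], *dst_data[4];
    int src_linesize[4], dst_linesize[4];
    int ret;

    /* allocate a blank 640x480 YUV420P source picture */
    if (av_image_alloc(src_data, src_linesize, 640, 480,
                       AV_PIX_FMT_YUV420P, 16) < 0)
        return 1;

    /* convert and downscale to 320x240 RGB24; dst_data is allocated inside */
    ret = ff_scale_image(dst_data, dst_linesize, 320, 240, AV_PIX_FMT_RGB24,
                         src_data, src_linesize, 640, 480, AV_PIX_FMT_YUV420P,
                         NULL);
    if (ret >= 0)
        av_freep(&dst_data[0]);
    av_freep(&src_data[0]);
    return ret < 0;
}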
Example #2
enum AVPixelFormat CDVDVideoCodecFFmpeg::GetFormat(struct AVCodecContext * avctx, const AVPixelFormat * fmt)
{
  ICallbackHWAccel *cb = static_cast<ICallbackHWAccel*>(avctx->opaque);
  CDVDVideoCodecFFmpeg* ctx  = dynamic_cast<CDVDVideoCodecFFmpeg*>(cb);

  const char* pixFmtName = av_get_pix_fmt_name(*fmt);

  ctx->m_processInfo.SetVideoDimensions(avctx->coded_width, avctx->coded_height);

  // if frame threading is enabled, hw accel is not allowed
  // the 2nd condition works around an ffmpeg issue: it first calls us
  // with an invalid profile and only afterwards with a valid one
  if(ctx->m_decoderState != STATE_HW_SINGLE ||
     (avctx->codec_id == AV_CODEC_ID_VC1 && avctx->profile == FF_PROFILE_UNKNOWN))
  {
    AVPixelFormat defaultFmt = avcodec_default_get_format(avctx, fmt);
    pixFmtName = av_get_pix_fmt_name(defaultFmt);
    ctx->m_processInfo.SetVideoPixelFormat(pixFmtName ? pixFmtName : "");
    ctx->m_processInfo.SetSwDeinterlacingMethods();
    return defaultFmt;
  }

  // hardware decoder de-selected, restore standard ffmpeg
  if (ctx->HasHardware())
  {
    ctx->SetHardware(nullptr);
    avctx->get_buffer2 = avcodec_default_get_buffer2;
    avctx->slice_flags = 0;
    av_buffer_unref(&avctx->hw_frames_ctx);
  }

  const AVPixelFormat * cur = fmt;
  while (*cur != AV_PIX_FMT_NONE)
  {
    pixFmtName = av_get_pix_fmt_name(*cur);

    auto hwaccels = CDVDFactoryCodec::GetHWAccels();
    for (auto &hwaccel : hwaccels)
    {
      IHardwareDecoder *pDecoder(CDVDFactoryCodec::CreateVideoCodecHWAccel(hwaccel, ctx->m_hints,
                                                                           ctx->m_processInfo, *cur));
      if (pDecoder)
      {
        if (pDecoder->Open(avctx, ctx->m_pCodecContext, *cur))
        {
          ctx->m_processInfo.SetVideoPixelFormat(pixFmtName ? pixFmtName : "");
          ctx->SetHardware(pDecoder);
          return *cur;
        }
      }
      SAFE_RELEASE(pDecoder);
    }
    cur++;
  }

  ctx->m_processInfo.SetVideoPixelFormat(pixFmtName ? pixFmtName : "");
  ctx->m_decoderState = STATE_HW_FAILED;
  return avcodec_default_get_format(avctx, fmt);
}
Example #3
static char *choose_pix_fmts(OutputStream *ost)
{
    AVDictionaryEntry *strict_dict = av_dict_get(ost->encoder_opts, "strict", NULL, 0);
    if (strict_dict)
        // used by choose_pixel_fmt() and below
        av_opt_set(ost->enc_ctx, "strict", strict_dict->value, 0);

     if (ost->keep_pix_fmt) {
        if (ost->filter)
            avfilter_graph_set_auto_convert(ost->filter->graph->graph,
                                            AVFILTER_AUTO_CONVERT_NONE);
        if (ost->enc_ctx->pix_fmt == AV_PIX_FMT_NONE)
            return NULL;
        return av_strdup(av_get_pix_fmt_name(ost->enc_ctx->pix_fmt));
    }
    if (ost->enc_ctx->pix_fmt != AV_PIX_FMT_NONE) {
        return av_strdup(av_get_pix_fmt_name(choose_pixel_fmt(ost->st, ost->enc_ctx, ost->enc, ost->enc_ctx->pix_fmt)));
    } else if (ost->enc && ost->enc->pix_fmts) {
        const enum AVPixelFormat *p;
        AVIOContext *s = NULL;
        uint8_t *ret;
        int len;

        if (avio_open_dyn_buf(&s) < 0)
            exit_program(1);

        p = ost->enc->pix_fmts;
        if (ost->enc_ctx->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
            if (ost->enc_ctx->codec_id == AV_CODEC_ID_MJPEG) {
                p = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_NONE };
            } else if (ost->enc_ctx->codec_id == AV_CODEC_ID_LJPEG) {
                p = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUV420P,
                                                    AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_BGRA, AV_PIX_FMT_NONE };
            }
        }

        for (; *p != AV_PIX_FMT_NONE; p++) {
            const char *name = av_get_pix_fmt_name(*p);
            avio_printf(s, "%s|", name);
        }
        len = avio_close_dyn_buf(s, &ret);
        ret[len - 1] = 0;
        return ret;
    } else
        return NULL;
}
Example #4
enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodec *codec, enum AVPixelFormat target)
{
    if (codec && codec->pix_fmts) {
        const enum AVPixelFormat *p = codec->pix_fmts;
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(target);
        int has_alpha = desc ? desc->nb_components % 2 == 0 : 0;
        enum AVPixelFormat best= AV_PIX_FMT_NONE;
        const enum AVPixelFormat mjpeg_formats[] = { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_NONE };
        const enum AVPixelFormat ljpeg_formats[] = { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUV420P,
                                                     AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_BGRA, AV_PIX_FMT_NONE };

        if (st->codec->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
            if (st->codec->codec_id == AV_CODEC_ID_MJPEG) {
                p = mjpeg_formats;
            } else if (st->codec->codec_id == AV_CODEC_ID_LJPEG) {
                p = ljpeg_formats;
            }
        }
        for (; *p != AV_PIX_FMT_NONE; p++) {
            best= avcodec_find_best_pix_fmt_of_2(best, *p, target, has_alpha, NULL);
            if (*p == target)
                break;
        }
        if (*p == AV_PIX_FMT_NONE) {
            if (target != AV_PIX_FMT_NONE)
                LOGW(
                       "Incompatible pixel format '%s' for codec '%s', auto-selecting format '%s'",
                       av_get_pix_fmt_name(target),
                       codec->name,
                       av_get_pix_fmt_name(best));
            return best;
        }
    }
    return target;
}
Example #5
enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx, AVCodec *codec, enum AVPixelFormat target)
{
    if (codec && codec->pix_fmts) {
        const enum AVPixelFormat *p = codec->pix_fmts;
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(target);
        int has_alpha = desc ? desc->nb_components % 2 == 0 : 0;
        enum AVPixelFormat best= AV_PIX_FMT_NONE;

        if (enc_ctx->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
            p = get_compliance_unofficial_pix_fmts(enc_ctx->codec_id, p);
        }
        for (; *p != AV_PIX_FMT_NONE; p++) {
            best= avcodec_find_best_pix_fmt_of_2(best, *p, target, has_alpha, NULL);
            if (*p == target)
                break;
        }
        if (*p == AV_PIX_FMT_NONE) {
            if (target != AV_PIX_FMT_NONE)
                av_log(NULL, AV_LOG_WARNING,
                       "Incompatible pixel format '%s' for codec '%s', auto-selecting format '%s'\n",
                       av_get_pix_fmt_name(target),
                       codec->name,
                       av_get_pix_fmt_name(best));
            return best;
        }
    }
    return target;
}
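The selection loop above relies on avcodec_find_best_pix_fmt_of_2(); a standalone sketch of that call (the format choices are illustrative assumptions):

#include <stdio.h>
#include <libavcodec/avcodec.h>
#include <libavutil/pixdesc.h>

int main(void)
{
    /* Which of two candidate encoder formats preserves more of a
     * YUVA420P source? Loss flags are returned via the last argument. */
    int loss = 0;
    enum AVPixelFormat best =
        avcodec_find_best_pix_fmt_of_2(AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUV420P,
                                       AV_PIX_FMT_YUVA420P, 1 /* has_alpha */,
                                       &loss);
    printf("best: %s loss: %#x\n", av_get_pix_fmt_name(best), loss);
    return 0;
}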
Example #6
bool
HardwareAccel::extractData(VideoFrame& input)
{
    try {
        auto inFrame = input.pointer();

        if (inFrame->format != format_) {
            std::stringstream buf;
            buf << "Frame format mismatch: expected " << av_get_pix_fmt_name(format_);
            buf << ", got " << av_get_pix_fmt_name((AVPixelFormat)inFrame->format);
            throw std::runtime_error(buf.str());
        }

        // FFmpeg requires a second frame into which the data is transferred
        // from the GPU buffer to main memory
        auto output = std::unique_ptr<VideoFrame>(new VideoFrame());
        auto outFrame = output->pointer();
        outFrame->format = AV_PIX_FMT_YUV420P;

        extractData(input, *output);

        // move outFrame into inFrame so the caller receives extracted image data
        // but we have to delete inFrame first
        av_frame_unref(inFrame);
        av_frame_move_ref(inFrame, outFrame);
    } catch (const std::runtime_error& e) {
        fail(false);
        RING_ERR("%s", e.what());
        return false;
    }

    succeed();
    return true;
}
Example #7
static int hwmap_filter_frame(AVFilterLink *link, AVFrame *input)
{
    AVFilterContext *avctx = link->dst;
    AVFilterLink  *outlink = avctx->outputs[0];
    HWMapContext      *ctx = avctx->priv;
    AVFrame *map = NULL;
    int err;

    av_log(ctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input->format),
           input->width, input->height, input->pts);

    map = av_frame_alloc();
    if (!map) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    map->format = outlink->format;
    map->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!map->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->reverse && !input->hw_frames_ctx) {
        // If we mapped backwards from hardware to software, we need
        // to attach the hardware frame context to the input frame to
        // make the mapping visible to av_hwframe_map().
        input->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
        if (!input->hw_frames_ctx) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }

    err = av_hwframe_map(map, input, ctx->mode);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to map frame: %d.\n", err);
        goto fail;
    }

    err = av_frame_copy_props(map, input);
    if (err < 0)
        goto fail;

    av_frame_free(&input);

    av_log(ctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(map->format),
           map->width, map->height, map->pts);

    return ff_filter_frame(outlink, map);

fail:
    av_frame_free(&input);
    av_frame_free(&map);
    return err;
}
Example #8
static int pick_format(AVFilterLink *link, AVFilterLink *ref)
{
    if (!link || !link->in_formats)
        return 0;

    if (link->type == AVMEDIA_TYPE_VIDEO) {
        if(ref && ref->type == AVMEDIA_TYPE_VIDEO){
            int has_alpha= av_pix_fmt_desc_get(ref->format)->nb_components % 2 == 0;
            enum AVPixelFormat best= AV_PIX_FMT_NONE;
            int i;
            for (i=0; i<link->in_formats->format_count; i++) {
                enum AVPixelFormat p = link->in_formats->formats[i];
                best= avcodec_find_best_pix_fmt_of_2(best, p, ref->format, has_alpha, NULL);
            }
            av_log(link->src,AV_LOG_DEBUG, "picking %s out of %d ref:%s alpha:%d\n",
                   av_get_pix_fmt_name(best), link->in_formats->format_count,
                   av_get_pix_fmt_name(ref->format), has_alpha);
            link->in_formats->formats[0] = best;
        }
    }

    link->in_formats->format_count = 1;
    link->format = link->in_formats->formats[0];

    if (link->type == AVMEDIA_TYPE_AUDIO) {
        if (!link->in_samplerates->format_count) {
            av_log(link->src, AV_LOG_ERROR, "Cannot select sample rate for"
                   " the link between filters %s and %s.\n", link->src->name,
                   link->dst->name);
            return AVERROR(EINVAL);
        }
        link->in_samplerates->format_count = 1;
        link->sample_rate = link->in_samplerates->formats[0];

        if (link->in_channel_layouts->all_layouts) {
            av_log(link->src, AV_LOG_ERROR, "Cannot select channel layout for"
                   " the link between filters %s and %s.\n", link->src->name,
                   link->dst->name);
            return AVERROR(EINVAL);
        }
        link->in_channel_layouts->nb_channel_layouts = 1;
        link->channel_layout = link->in_channel_layouts->channel_layouts[0];
        if ((link->channels = FF_LAYOUT2COUNT(link->channel_layout)))
            link->channel_layout = 0;
        else
            link->channels = av_get_channel_layout_nb_channels(link->channel_layout);
    }

    ff_formats_unref(&link->in_formats);
    ff_formats_unref(&link->out_formats);
    ff_formats_unref(&link->in_samplerates);
    ff_formats_unref(&link->out_samplerates);
    ff_channel_layouts_unref(&link->in_channel_layouts);
    ff_channel_layouts_unref(&link->out_channel_layouts);

    return 0;
}
Example #9
static char *choose_pix_fmts(OutputStream *ost)
{
    AVDictionaryEntry *strict_dict = av_dict_get(ost->encoder_opts, "strict", NULL, 0);
    if(strict_dict)
    {
        // used by choose_pixel_fmt() and below
        av_opt_set(ost->enc_ctx, "strict", strict_dict->value, 0);
    }
    
    if(ost->keep_pix_fmt)
    {
        if(ost->filter)
            avfilter_graph_set_auto_convert(ost->filter->graph->graph,
                                            AVFILTER_AUTO_CONVERT_NONE);
        if(ost->enc_ctx->pix_fmt == AV_PIX_FMT_NONE)
        {
            return NULL;
        }
        return av_strdup(av_get_pix_fmt_name(ost->enc_ctx->pix_fmt));
    }
    if(ost->enc_ctx->pix_fmt != AV_PIX_FMT_NONE)
    {
        return av_strdup(av_get_pix_fmt_name(choose_pixel_fmt(ost->st, ost->enc_ctx, ost->enc, ost->enc_ctx->pix_fmt)));
    }
    else if(ost->enc && ost->enc->pix_fmts)
    {
        const enum AVPixelFormat *p;
        AVIOContext *s = NULL;
        uint8_t *ret;
        int len;
        
        if(avio_open_dyn_buf(&s) < 0)
        {
            exit_program(1);
        }
        
        p = ost->enc->pix_fmts;
        if(ost->enc_ctx->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL)
        {
            p = get_compliance_unofficial_pix_fmts(ost->enc_ctx->codec_id, p);
        }
        
        for(; *p != AV_PIX_FMT_NONE; p++)
        {
            const char *name = av_get_pix_fmt_name(*p);
            avio_printf(s, "%s|", name);
        }
        len = avio_close_dyn_buf(s, &ret);
        ret[len - 1] = 0;
        return ret;
    }
    else
    {
        return NULL;
    }
}
Example #10
static int decode_packet(int *got_frame, int cached)
{
    int ret = 0;
    int decoded = pkt.size;

    *got_frame = 0;

    if (pkt.stream_index == video_stream_idx) {
        /* decode video frame */
        ret = avcodec_decode_video2(video_dec_ctx, frame, got_frame, &pkt);
        if (ret < 0) {
            fprintf(stderr, "Error decoding video frame (%s)\n", av_err2str(ret));
            return ret;
        }

        if (*got_frame) {

            if (frame->width != width || frame->height != height ||
                frame->format != pix_fmt) {
                /* To handle this change, one could call av_image_alloc again and
                 * decode the following frames into another rawvideo file. */
                fprintf(stderr, "Error: Width, height and pixel format have to be "
                        "constant in a rawvideo file, but the width, height or "
                        "pixel format of the input video changed:\n"
                        "old: width = %d, height = %d, format = %s\n"
                        "new: width = %d, height = %d, format = %s\n",
                        width, height, av_get_pix_fmt_name(pix_fmt),
                        frame->width, frame->height,
                        av_get_pix_fmt_name(frame->format));
                return -1;
            }

            printf("video_frame%s n:%d coded_n:%d pts:%s\n",
                   cached ? "(cached)" : "",
                   video_frame_count++, frame->coded_picture_number,
                   av_ts2timestr(frame->pts, &video_dec_ctx->time_base));

            /* copy decoded frame to destination buffer:
             * this is required since rawvideo expects non-aligned data */
            av_image_copy(video_dst_data, video_dst_linesize,
                          (const uint8_t **)(frame->data), frame->linesize,
                          pix_fmt, width, height);

            /* write to rawvideo file */
            fwrite(video_dst_data[0], 1, video_dst_bufsize, video_dst_file);
        }
    }

    // /* If we use frame reference counting, we own the data and need
    //  * to de-reference it when we don't use it anymore */
    // // if (*got_frame && refcount)
    // if (*got_frame)
    //     av_frame_unref(frame);

    return decoded;
}
Example #11
/* {RGB,BGR}{15,16,24,32,32_1} -> {RGB,BGR}{15,16,24,32} */
static int rgbToRgbWrapper(SwsContext *c, const uint8_t *src[], int srcStride[],
                           int srcSliceY, int srcSliceH, uint8_t *dst[],
                           int dstStride[])

{
    const enum AVPixelFormat srcFormat = c->srcFormat;
    const enum AVPixelFormat dstFormat = c->dstFormat;
    const AVPixFmtDescriptor *desc_src = av_pix_fmt_desc_get(c->srcFormat);
    const AVPixFmtDescriptor *desc_dst = av_pix_fmt_desc_get(c->dstFormat);
    const int srcBpp = (c->srcFormatBpp + 7) >> 3;
    const int dstBpp = (c->dstFormatBpp + 7) >> 3;
    rgbConvFn conv = findRgbConvFn(c);

    if (!conv) {
        av_log(c, AV_LOG_ERROR, "internal error %s -> %s converter\n",
               av_get_pix_fmt_name(srcFormat), av_get_pix_fmt_name(dstFormat));
    } else {
        const uint8_t *srcPtr = src[0];
              uint8_t *dstPtr = dst[0];
        int src_bswap = IS_NOT_NE(c->srcFormatBpp, desc_src);
        int dst_bswap = IS_NOT_NE(c->dstFormatBpp, desc_dst);

        if ((srcFormat == AV_PIX_FMT_RGB32_1 || srcFormat == AV_PIX_FMT_BGR32_1) &&
            !isRGBA32(dstFormat))
            srcPtr += ALT32_CORR;

        if ((dstFormat == AV_PIX_FMT_RGB32_1 || dstFormat == AV_PIX_FMT_BGR32_1) &&
            !isRGBA32(srcFormat))
            dstPtr += ALT32_CORR;

        if (dstStride[0] * srcBpp == srcStride[0] * dstBpp && srcStride[0] > 0 &&
            !(srcStride[0] % srcBpp) && !dst_bswap && !src_bswap)
            conv(srcPtr, dstPtr + dstStride[0] * srcSliceY,
                 srcSliceH * srcStride[0]);
        else {
            int i, j;
            dstPtr += dstStride[0] * srcSliceY;

            for (i = 0; i < srcSliceH; i++) {
                if(src_bswap) {
                    for(j=0; j<c->srcW; j++)
                        ((uint16_t*)c->formatConvBuffer)[j] = av_bswap16(((uint16_t*)srcPtr)[j]);
                    conv(c->formatConvBuffer, dstPtr, c->srcW * srcBpp);
                } else
                    conv(srcPtr, dstPtr, c->srcW * srcBpp);
                if(dst_bswap)
                    for(j=0; j<c->srcW; j++)
                        ((uint16_t*)dstPtr)[j] = av_bswap16(((uint16_t*)dstPtr)[j]);
                srcPtr += srcStride[0];
                dstPtr += dstStride[0];
            }
        }
    }
    return srcSliceH;
}
Example #12
static int planarRgbToRgbWrapper(SwsContext *c, const uint8_t *src[],
                                 int srcStride[], int srcSliceY, int srcSliceH,
                                 uint8_t *dst[], int dstStride[])
{
    int alpha_first = 0;
    const uint8_t *src102[] = { src[1], src[0], src[2] };
    const uint8_t *src201[] = { src[2], src[0], src[1] };
    int stride102[] = { srcStride[1], srcStride[0], srcStride[2] };
    int stride201[] = { srcStride[2], srcStride[0], srcStride[1] };

    if (c->srcFormat != AV_PIX_FMT_GBRP) {
        av_log(c, AV_LOG_ERROR, "unsupported planar RGB conversion %s -> %s\n",
               av_get_pix_fmt_name(c->srcFormat),
               av_get_pix_fmt_name(c->dstFormat));
        return srcSliceH;
    }

    switch (c->dstFormat) {
    case AV_PIX_FMT_BGR24:
        gbr24ptopacked24(src102, stride102,
                         dst[0] + srcSliceY * dstStride[0], dstStride[0],
                         srcSliceH, c->srcW);
        break;

    case AV_PIX_FMT_RGB24:
        gbr24ptopacked24(src201, stride201,
                         dst[0] + srcSliceY * dstStride[0], dstStride[0],
                         srcSliceH, c->srcW);
        break;

    case AV_PIX_FMT_ARGB:
        alpha_first = 1;
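        /* fall through */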
    case AV_PIX_FMT_RGBA:
        gbr24ptopacked32(src201, stride201,
                         dst[0] + srcSliceY * dstStride[0], dstStride[0],
                         srcSliceH, alpha_first, c->srcW);
        break;

    case AV_PIX_FMT_ABGR:
        alpha_first = 1;
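        /* fall through */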
    case AV_PIX_FMT_BGRA:
        gbr24ptopacked32(src102, stride102,
                         dst[0] + srcSliceY * dstStride[0], dstStride[0],
                         srcSliceH, alpha_first, c->srcW);
        break;

    default:
        av_log(c, AV_LOG_ERROR,
               "unsupported planar RGB conversion %s -> %s\n",
               av_get_pix_fmt_name(c->srcFormat),
               av_get_pix_fmt_name(c->dstFormat));
    }

    return srcSliceH;
}
Example #13
static av_cold int init_processing_chain(AVFilterContext *ctx, int in_width, int in_height,
                                         int out_width, int out_height)
{
    CUDAScaleContext *s = ctx->priv;

    AVHWFramesContext *in_frames_ctx;

    enum AVPixelFormat in_format;
    enum AVPixelFormat out_format;
    int ret;

    /* check that we have a hw context */
    if (!ctx->inputs[0]->hw_frames_ctx) {
        av_log(ctx, AV_LOG_ERROR, "No hw context provided on input\n");
        return AVERROR(EINVAL);
    }
    in_frames_ctx = (AVHWFramesContext*)ctx->inputs[0]->hw_frames_ctx->data;
    in_format     = in_frames_ctx->sw_format;
    out_format    = (s->format == AV_PIX_FMT_NONE) ? in_format : s->format;

    if (!format_is_supported(in_format)) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported input format: %s\n",
               av_get_pix_fmt_name(in_format));
        return AVERROR(ENOSYS);
    }
    if (!format_is_supported(out_format)) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported output format: %s\n",
               av_get_pix_fmt_name(out_format));
        return AVERROR(ENOSYS);
    }

    if (in_width == out_width && in_height == out_height)
        s->passthrough = 1;

    s->in_fmt = in_format;
    s->out_fmt = out_format;

    s->planes_in[0].width   = in_width;
    s->planes_in[0].height  = in_height;
    s->planes_out[0].width  = out_width;
    s->planes_out[0].height = out_height;

    ret = init_stage(s, in_frames_ctx->device_ref);
    if (ret < 0)
        return ret;

    ctx->outputs[0]->hw_frames_ctx = av_buffer_ref(s->frames_ctx);
    if (!ctx->outputs[0]->hw_frames_ctx)
        return AVERROR(ENOMEM);

    return 0;
}
Example #14
bool ImageConverterFF::check() const
{
    if (!ImageConverter::check())
        return false;
    DPTR_D(const ImageConverterFF);
    if (sws_isSupportedInput((AVPixelFormat)d.fmt_in) <= 0) {
        qWarning("Input pixel format not supported (%s)", av_get_pix_fmt_name((AVPixelFormat)d.fmt_in));
        return false;
    }
    if (sws_isSupportedOutput((AVPixelFormat)d.fmt_out) <= 0) {
        qWarning("Output pixel format not supported (%s)", av_get_pix_fmt_name((AVPixelFormat)d.fmt_out));
        return false;
    }
    return true;
}
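The same swscale support probes can be exercised standalone; a small sketch (format choices are illustrative):

#include <stdio.h>
#include <libavutil/pixdesc.h>
#include <libswscale/swscale.h>

static void report(enum AVPixelFormat fmt)
{
    printf("%-10s input:%s output:%s\n", av_get_pix_fmt_name(fmt),
           sws_isSupportedInput(fmt)  > 0 ? "yes" : "no",
           sws_isSupportedOutput(fmt) > 0 ? "yes" : "no");
}

int main(void)
{
    report(AV_PIX_FMT_YUV420P);
    report(AV_PIX_FMT_RGB24);
    return 0;
}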
Example #15
static av_cold int dvvideo_encode_init(AVCodecContext *avctx)
{
    DVVideoContext *s = avctx->priv_data;
    int ret;

    s->sys = avpriv_dv_codec_profile(avctx);
    if (!s->sys) {
        av_log(avctx, AV_LOG_ERROR, "Found no DV profile for %ix%i %s video. "
               "Valid DV profiles are:\n",
               avctx->width, avctx->height, av_get_pix_fmt_name(avctx->pix_fmt));
        ff_dv_print_profiles(avctx, AV_LOG_ERROR);
        return AVERROR(EINVAL);
    }
    if (avctx->height > 576) {
        av_log(avctx, AV_LOG_ERROR, "DVCPRO HD encoding is not supported.\n");
        return AVERROR_PATCHWELCOME;
    }
    ret = ff_dv_init_dynamic_tables(s, s->sys);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing work tables.\n");
        return ret;
    }

    avctx->coded_frame = av_frame_alloc();
    if (!avctx->coded_frame)
        return AVERROR(ENOMEM);

    dv_vlc_map_tableinit();

    return ff_dvvideo_init(avctx);
}
Example #16
static int print_link_prop(AVBPrint *buf, AVFilterLink *link)
{
    char *format;
    char layout[64];

    if (!buf)
        buf = &(AVBPrint){ 0 }; /* dummy buffer */
    switch (link->type) {
        case AVMEDIA_TYPE_VIDEO:
            format = av_x_if_null(av_get_pix_fmt_name(link->format), "?");
            av_bprintf(buf, "[%dx%d %d:%d %s]", link->w, link->h,
                    link->sample_aspect_ratio.num,
                    link->sample_aspect_ratio.den,
                    format);
            break;

        case AVMEDIA_TYPE_AUDIO:
            av_get_channel_layout_string(layout, sizeof(layout),
                                         -1, link->channel_layout);
            format = av_x_if_null(av_get_sample_fmt_name(link->format), "?");
            av_bprintf(buf, "[%dHz %s:%s:%s]",
                    (int)link->sample_rate, format, layout,
                    link->planar ? "planar" : "packed");
            break;

        default:
            av_bprintf(buf, "?");
            break;
    }
    return buf->len;
}
Example #17
static av_cold int init_video(AVFilterContext *ctx)
{
    BufferSourceContext *c = ctx->priv;

    if (!c->pix_fmt_str || !c->w || !c->h || av_q2d(c->time_base) <= 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid parameters provided.\n");
        return AVERROR(EINVAL);
    }

    if ((c->pix_fmt = av_get_pix_fmt(c->pix_fmt_str)) == AV_PIX_FMT_NONE) {
        char *tail;
        c->pix_fmt = strtol(c->pix_fmt_str, &tail, 10);
        if (*tail || c->pix_fmt < 0 || !av_pix_fmt_desc_get(c->pix_fmt)) {
            av_log(ctx, AV_LOG_ERROR, "Invalid pixel format string '%s'\n", c->pix_fmt_str);
            return AVERROR(EINVAL);
        }
    }

    if (!(c->fifo = av_fifo_alloc(sizeof(AVFrame*))))
        return AVERROR(ENOMEM);

    av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d pixfmt:%s tb:%d/%d sar:%d/%d\n",
           c->w, c->h, av_get_pix_fmt_name(c->pix_fmt),
           c->time_base.num, c->time_base.den,
           c->pixel_aspect.num, c->pixel_aspect.den);
    return 0;
}
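A hypothetical standalone version of the name-or-number fallback used in init_video() above (the helper name is an assumption):

#include <stdlib.h>
#include <libavutil/pixdesc.h>

/* Accept either a pixel format name ("yuv420p") or a numeric id ("0"). */
static enum AVPixelFormat parse_pix_fmt(const char *str)
{
    enum AVPixelFormat fmt = av_get_pix_fmt(str);
    if (fmt == AV_PIX_FMT_NONE) {
        char *tail;
        long v = strtol(str, &tail, 10);
        if (!*tail && v >= 0 && av_pix_fmt_desc_get((enum AVPixelFormat)v))
            fmt = (enum AVPixelFormat)v;
    }
    return fmt;
}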
Example #18
av_cold SwsFunc ff_yuv2rgb_init_bfin(SwsContext *c)
{
    SwsFunc f;

    switch (c->dstFormat) {
    case AV_PIX_FMT_RGB555:
        f = bfin_yuv420_rgb555;
        break;
    case AV_PIX_FMT_BGR555:
        f = bfin_yuv420_bgr555;
        break;
    case AV_PIX_FMT_RGB565:
        f = bfin_yuv420_rgb565;
        break;
    case AV_PIX_FMT_BGR565:
        f = bfin_yuv420_bgr565;
        break;
    case AV_PIX_FMT_RGB24:
        f = bfin_yuv420_rgb24;
        break;
    case AV_PIX_FMT_BGR24:
        f = bfin_yuv420_bgr24;
        break;
    default:
        return 0;
    }

    av_log(c, AV_LOG_INFO, "BlackFin accelerated color space converter %s\n",
           av_get_pix_fmt_name(c->dstFormat));

    return f;
}
Example #19
int AVYUVAdaptor::AVPixelFormatToYUVChroma(AVPixelFormat pix_fmt) throw (AVException*)
{

    switch (pix_fmt)
	{
#if LIBAVFORMAT_VERSION_MAJOR  < 57	
		case PIX_FMT_YUV420P: return Y4M_CHROMA_420MPEG2; break;
		case PIX_FMT_YUVJ422P:
		case PIX_FMT_YUV422P: return Y4M_CHROMA_422; break;
		case PIX_FMT_YUVJ444P:
		case PIX_FMT_YUV444P: return Y4M_CHROMA_444; break;
		case PIX_FMT_YUV411P: return Y4M_CHROMA_411; break;
		case PIX_FMT_YUVJ420P: return Y4M_CHROMA_420JPEG; break;
#else
		case AV_PIX_FMT_YUV420P: return Y4M_CHROMA_420MPEG2; break;
		case AV_PIX_FMT_YUVJ422P:
		case AV_PIX_FMT_YUV422P: return Y4M_CHROMA_422; break;
		case AV_PIX_FMT_YUVJ444P:
		case AV_PIX_FMT_YUV444P: return Y4M_CHROMA_444; break;
		case AV_PIX_FMT_YUV411P: return Y4M_CHROMA_411; break;
		case AV_PIX_FMT_YUVJ420P: return Y4M_CHROMA_420JPEG; break;
#endif
		default:
            
			throw new AVException("unsupported y4m chroma: " + std::string(av_get_pix_fmt_name(pix_fmt)),UNSUPPORTED_CHROMA);
			//throw new AVException("unsupported y4m chroma." ,UNSUPPORTED_CHROMA);
			break;
	}
}
Example #20
const char*
pixfmt_to_string(int pixFormat)
{
	const char* name = av_get_pix_fmt_name((enum AVPixelFormat)pixFormat);
	if (name == NULL)
		return "(unknown)";
	return name;
}
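A hypothetical call site for this wrapper:

printf("decoded as %s\n", pixfmt_to_string(AV_PIX_FMT_NV12));  /* "nv12" */
printf("decoded as %s\n", pixfmt_to_string(-1));               /* "(unknown)" */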
Example #21
static int palToRgbWrapper(SwsContext *c, const uint8_t *src[], int srcStride[],
                           int srcSliceY, int srcSliceH, uint8_t *dst[],
                           int dstStride[])
{
    const enum AVPixelFormat srcFormat = c->srcFormat;
    const enum AVPixelFormat dstFormat = c->dstFormat;
    void (*conv)(const uint8_t *src, uint8_t *dst, int num_pixels,
                 const uint8_t *palette) = NULL;
    int i;
    uint8_t *dstPtr = dst[0] + dstStride[0] * srcSliceY;
    const uint8_t *srcPtr = src[0];

    if (srcFormat == AV_PIX_FMT_GRAY8A) {
        switch (dstFormat) {
        case AV_PIX_FMT_RGB32  : conv = gray8aToPacked32; break;
        case AV_PIX_FMT_BGR32  : conv = gray8aToPacked32; break;
        case AV_PIX_FMT_BGR32_1: conv = gray8aToPacked32_1; break;
        case AV_PIX_FMT_RGB32_1: conv = gray8aToPacked32_1; break;
        case AV_PIX_FMT_RGB24  : conv = gray8aToPacked24; break;
        case AV_PIX_FMT_BGR24  : conv = gray8aToPacked24; break;
        }
    } else if (usePal(srcFormat)) {
        switch (dstFormat) {
        case AV_PIX_FMT_RGB32  : conv = sws_convertPalette8ToPacked32; break;
        case AV_PIX_FMT_BGR32  : conv = sws_convertPalette8ToPacked32; break;
        case AV_PIX_FMT_BGR32_1: conv = sws_convertPalette8ToPacked32; break;
        case AV_PIX_FMT_RGB32_1: conv = sws_convertPalette8ToPacked32; break;
        case AV_PIX_FMT_RGB24  : conv = sws_convertPalette8ToPacked24; break;
        case AV_PIX_FMT_BGR24  : conv = sws_convertPalette8ToPacked24; break;
        }
    }

    if (!conv)
        av_log(c, AV_LOG_ERROR, "internal error %s -> %s converter\n",
               av_get_pix_fmt_name(srcFormat), av_get_pix_fmt_name(dstFormat));
    else {
        for (i = 0; i < srcSliceH; i++) {
            conv(srcPtr, dstPtr, c->srcW, (uint8_t *) c->pal_rgb);
            srcPtr += srcStride[0];
            dstPtr += dstStride[0];
        }
    }

    return srcSliceH;
}
Example #22
void VideoBuffers::SetFormat(AVPixelFormat Format, int Width, int Height)
{
    LOG(VB_GENERAL, LOG_INFO, QString("Video buffer format: %1 %2x%3")
        .arg(av_get_pix_fmt_name(Format)).arg(Width).arg(Height));

    m_currentFormat = Format;
    m_currentWidth  = Width;
    m_currentHeight = Height;
}
Example #23
static av_cold int write_header(AVFormatContext *s1)
{
    int res = 0, flags = O_RDWR;
    struct v4l2_format fmt = {
        .type = V4L2_BUF_TYPE_VIDEO_OUTPUT
    };
    V4L2Context *s = s1->priv_data;
    AVCodecContext *enc_ctx;
    uint32_t v4l2_pixfmt;

    if (s1->flags & AVFMT_FLAG_NONBLOCK)
        flags |= O_NONBLOCK;

    s->fd = open(s1->filename, flags);
    if (s->fd < 0) {
        res = AVERROR(errno);
        av_log(s1, AV_LOG_ERROR, "Unable to open V4L2 device '%s'\n", s1->filename);
        return res;
    }

    if (s1->nb_streams != 1 ||
        s1->streams[0]->codec->codec_type != AVMEDIA_TYPE_VIDEO ||
        s1->streams[0]->codec->codec_id   != AV_CODEC_ID_RAWVIDEO) {
        av_log(s1, AV_LOG_ERROR,
               "V4L2 output device supports only a single raw video stream\n");
        return AVERROR(EINVAL);
    }

    enc_ctx = s1->streams[0]->codec;

    v4l2_pixfmt = avpriv_fmt_ff2v4l(enc_ctx->pix_fmt, AV_CODEC_ID_RAWVIDEO);
    if (!v4l2_pixfmt) { // XXX: try to force them one by one?
        av_log(s1, AV_LOG_ERROR, "Unknown V4L2 pixel format equivalent for %s\n",
               av_get_pix_fmt_name(enc_ctx->pix_fmt));
        return AVERROR(EINVAL);
    }

    if (ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
        res = AVERROR(errno);
        av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n", av_err2str(res));
        return res;
    }

    fmt.fmt.pix.width       = enc_ctx->width;
    fmt.fmt.pix.height      = enc_ctx->height;
    fmt.fmt.pix.pixelformat = v4l2_pixfmt;
    fmt.fmt.pix.sizeimage   = av_image_get_buffer_size(enc_ctx->pix_fmt, enc_ctx->width, enc_ctx->height, 1);

    if (ioctl(s->fd, VIDIOC_S_FMT, &fmt) < 0) {
        res = AVERROR(errno);
        av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_FMT): %s\n", av_err2str(res));
        return res;
    }

    return res;
}
Example #24
void ff_dv_print_profiles(void *logctx, int loglevel)
{
    int i;
    for (i = 0; i < FF_ARRAY_ELEMS(dv_profiles); i++) {
        const DVprofile *p = &dv_profiles[i];
        av_log(logctx, loglevel, "Frame size: %dx%d; pixel format: %s, "
               "framerate: %d/%d\n", p->width, p->height, av_get_pix_fmt_name(p->pix_fmt),
               p->time_base.den, p->time_base.num);
    }
}
Example #25
static int vaapi_device_init(AVHWDeviceContext *hwdev)
{
    VAAPIDeviceContext *ctx = hwdev->internal->priv;
    AVVAAPIDeviceContext *hwctx = hwdev->hwctx;
    VAImageFormat *image_list = NULL;
    VAStatus vas;
    int err, i, image_count;
    enum AVPixelFormat pix_fmt;
    unsigned int fourcc;

    image_count = vaMaxNumImageFormats(hwctx->display);
    if (image_count <= 0) {
        err = AVERROR(EIO);
        goto fail;
    }
    image_list = av_malloc(image_count * sizeof(*image_list));
    if (!image_list) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    vas = vaQueryImageFormats(hwctx->display, image_list, &image_count);
    if (vas != VA_STATUS_SUCCESS) {
        err = AVERROR(EIO);
        goto fail;
    }

    ctx->formats  = av_malloc(image_count * sizeof(*ctx->formats));
    if (!ctx->formats) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->nb_formats = 0;
    for (i = 0; i < image_count; i++) {
        fourcc  = image_list[i].fourcc;
        pix_fmt = vaapi_pix_fmt_from_fourcc(fourcc);
        if (pix_fmt == AV_PIX_FMT_NONE) {
            av_log(hwdev, AV_LOG_DEBUG, "Format %#x -> unknown.\n",
                   fourcc);
        } else {
            av_log(hwdev, AV_LOG_DEBUG, "Format %#x -> %s.\n",
                   fourcc, av_get_pix_fmt_name(pix_fmt));
            ctx->formats[ctx->nb_formats].pix_fmt      = pix_fmt;
            ctx->formats[ctx->nb_formats].image_format = image_list[i];
            ++ctx->nb_formats;
        }
    }

    av_free(image_list);
    return 0;
fail:
    av_freep(&ctx->formats);
    av_free(image_list);
    return err;
}
Example #26
std::string PixelProperties::getPixelFormatName() const
{
	if( ! _pixelFormat )
		throw std::runtime_error( "unable to find pixel format." ); 

	const char* formatName = av_get_pix_fmt_name( _pixelFormat );
	if( ! formatName )
		throw std::runtime_error( "unknown pixel format" );

	return std::string( formatName );
}
Example #27
File: video.c Project: Kubink/vlc
/*****************************************************************************
 * ffmpeg_CopyPicture: copy a picture from ffmpeg internal buffers to a
 *                     picture_t structure (when not in direct rendering mode).
 *****************************************************************************/
static void ffmpeg_CopyPicture( decoder_t *p_dec,
                                picture_t *p_pic, AVFrame *p_ff_pic )
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    if( p_sys->p_va )
    {
        vlc_va_Extract( p_sys->p_va, p_pic, p_ff_pic->opaque,
                        p_ff_pic->data[3] );
    }
    else if( FindVlcChroma( p_sys->p_context->pix_fmt ) )
    {
        int i_plane, i_size, i_line;
        uint8_t *p_dst, *p_src;
        int i_src_stride, i_dst_stride;

        if( p_sys->p_context->pix_fmt == PIX_FMT_PAL8 )
        {
            if( !p_pic->format.p_palette )
                p_pic->format.p_palette = calloc( 1, sizeof(video_palette_t) );

            if( p_pic->format.p_palette )
            {
                p_pic->format.p_palette->i_entries = AVPALETTE_COUNT;
                memcpy( p_pic->format.p_palette->palette, p_ff_pic->data[1], AVPALETTE_SIZE );
            }
        }

        for( i_plane = 0; i_plane < p_pic->i_planes; i_plane++ )
        {
            p_src  = p_ff_pic->data[i_plane];
            p_dst = p_pic->p[i_plane].p_pixels;
            i_src_stride = p_ff_pic->linesize[i_plane];
            i_dst_stride = p_pic->p[i_plane].i_pitch;

            i_size = __MIN( i_src_stride, i_dst_stride );
            for( i_line = 0; i_line < p_pic->p[i_plane].i_visible_lines;
                 i_line++ )
            {
                memcpy( p_dst, p_src, i_size );
                p_src += i_src_stride;
                p_dst += i_dst_stride;
            }
        }
    }
    else
    {
        const char *name = av_get_pix_fmt_name( p_sys->p_context->pix_fmt );
        msg_Err( p_dec, "Unsupported decoded output format %d (%s)",
                 p_sys->p_context->pix_fmt, name ? name : "unknown" );
        p_dec->b_error = 1;
    }
}
Example #28
static int config_output(AVFilterLink *outlink)
{
    AVFilterLink *inlink = outlink->src->inputs[0];

    outlink->w = inlink->w*2;
    outlink->h = inlink->h*2;

    av_log(inlink->dst, AV_LOG_VERBOSE, "fmt:%s size:%dx%d -> size:%dx%d\n",
           av_get_pix_fmt_name(inlink->format),
           inlink->w, inlink->h, outlink->w, outlink->h);

    return 0;
}
Example #29
static BCSType GuessCSType(AVPixelFormat p) {
	// guessing the colorspace type from the name is kinda hackish but libav doesn't export this kind of metadata
	if (av_pix_fmt_desc_get(p)->flags & FFMS_PIX_FMT_FLAG(HWACCEL))
		return cUNUSABLE;
	const char *n = av_get_pix_fmt_name(p);
	if (strstr(n, "gray") || strstr(n, "mono") || strstr(n, "y400a"))
		return cGRAY;
	if (strstr(n, "rgb") || strstr(n, "bgr") || strstr(n, "gbr") || strstr(n, "pal8"))
		return cRGB;
	if (strstr(n, "yuv") || strstr(n, "yv") || strstr(n, "nv12") || strstr(n, "nv21"))
		return cYUV;
	return cUNUSABLE; // should never be reached
}
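Illustrative calls, assuming the snippet's enum values are in scope:

BCSType t = GuessCSType(AV_PIX_FMT_GBRP);     /* name "gbrp" matches "gbr" -> cRGB  */
BCSType u = GuessCSType(AV_PIX_FMT_GRAY16LE); /* name contains "gray"      -> cGRAY */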
Example #30
int av_hwframe_ctx_init(AVBufferRef *ref)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)ref->data;
    const enum AVPixelFormat *pix_fmt;
    int ret;

    if (ctx->internal->source_frames) {
        /* A derived frame context is already initialised. */
        return 0;
    }

    /* validate the pixel format */
    for (pix_fmt = ctx->internal->hw_type->pix_fmts; *pix_fmt != AV_PIX_FMT_NONE; pix_fmt++) {
        if (*pix_fmt == ctx->format)
            break;
    }
    if (*pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "The hardware pixel format '%s' is not supported by the device type '%s'\n",
               av_get_pix_fmt_name(ctx->format), ctx->internal->hw_type->name);
        return AVERROR(ENOSYS);
    }

    /* validate the dimensions */
    ret = av_image_check_size(ctx->width, ctx->height, 0, ctx);
    if (ret < 0)
        return ret;

    /* format-specific init */
    if (ctx->internal->hw_type->frames_init) {
        ret = ctx->internal->hw_type->frames_init(ctx);
        if (ret < 0)
            goto fail;
    }

    if (ctx->internal->pool_internal && !ctx->pool)
        ctx->pool = ctx->internal->pool_internal;

    /* preallocate the frames in the pool, if requested */
    if (ctx->initial_pool_size > 0) {
        ret = hwframe_pool_prealloc(ref);
        if (ret < 0)
            goto fail;
    }

    return 0;
fail:
    if (ctx->internal->hw_type->frames_uninit)
        ctx->internal->hw_type->frames_uninit(ctx);
    return ret;
}