/**
 * Initialize the raw video decoder: derive the pixel format from the
 * container codec tag / bits-per-coded-sample, allocate a palette buffer
 * for palettized formats, and record per-stream quirks (2/4 bpp packing,
 * vertical flip, Apple "yuv2" signedness).
 *
 * @return 0 on success, AVERROR(EINVAL) for an unknown pixel format,
 *         AVERROR(ENOMEM) if the palette cannot be allocated.
 */
static av_cold int raw_init_decoder(AVCodecContext *avctx)
{
    RawVideoContext *context = avctx->priv_data;
    const AVPixFmtDescriptor *desc;

    /* Pick the pixel format: QuickTime 'raw ' and AVI 'WRAW' map via
     * bits-per-coded-sample tables; any other tag maps via the shared
     * raw fourcc table; with no tag at all, fall back to the AVI bps
     * table when the caller did not preset a pix_fmt. */
    if (avctx->codec_tag == MKTAG('r', 'a', 'w', ' '))
        avctx->pix_fmt = find_pix_fmt(pix_fmt_bps_mov,
                                      avctx->bits_per_coded_sample);
    else if (avctx->codec_tag == MKTAG('W', 'R', 'A', 'W'))
        avctx->pix_fmt = find_pix_fmt(pix_fmt_bps_avi,
                                      avctx->bits_per_coded_sample);
    else if (avctx->codec_tag)
        avctx->pix_fmt = find_pix_fmt(ff_raw_pix_fmt_tags, avctx->codec_tag);
    else if (avctx->pix_fmt == AV_PIX_FMT_NONE && avctx->bits_per_coded_sample)
        avctx->pix_fmt = find_pix_fmt(pix_fmt_bps_avi,
                                      avctx->bits_per_coded_sample);

    desc = av_pix_fmt_desc_get(avctx->pix_fmt);
    if (!desc) {
        av_log(avctx, AV_LOG_ERROR, "Invalid pixel format.\n");
        return AVERROR(EINVAL);
    }

    /* BUGFIX: the original wrote "desc->flags & (PIX_FMT_PAL ||
     * PIX_FMT_PSEUDOPAL)". Logical OR collapses the parenthesized
     * expression to 1, so the test was really "flags & 1" and palette
     * allocation depended on an unrelated flag bit. A bitwise OR builds
     * the intended mask of the two palette flags. */
    if (desc->flags & (PIX_FMT_PAL | PIX_FMT_PSEUDOPAL)) {
        context->palette = av_buffer_alloc(AVPALETTE_SIZE);
        if (!context->palette)
            return AVERROR(ENOMEM);
        if (desc->flags & PIX_FMT_PSEUDOPAL)
            /* Pseudo-palettized formats get a deterministic systematic
             * palette; true PAL8 palettes arrive with the packets. */
            avpriv_set_systematic_pal2((uint32_t*)context->palette->data,
                                       avctx->pix_fmt);
        else
            memset(context->palette->data, 0, AVPALETTE_SIZE);
    }

    context->frame_size = avpicture_get_size(avctx->pix_fmt, avctx->width,
                                             avctx->height);

    /* 2 and 4 bpp palettized QuickTime raw needs unpacking to 8 bpp. */
    if ((avctx->bits_per_coded_sample == 4 || avctx->bits_per_coded_sample == 2) &&
        avctx->pix_fmt == AV_PIX_FMT_PAL8 &&
        (!avctx->codec_tag || avctx->codec_tag == MKTAG('r','a','w',' ')))
        context->is_2_4_bpp = 1;

    /* Bottom-up storage: signaled either by a trailing "BottomUp" marker
     * in the extradata or by the BI_BITFIELDS(3)/'WRAW' tags. */
    if ((avctx->extradata_size >= 9 &&
         !memcmp(avctx->extradata + avctx->extradata_size - 9, "BottomUp", 9)) ||
        avctx->codec_tag == MKTAG(3, 0, 0, 0) ||
        avctx->codec_tag == MKTAG('W','R','A','W'))
        context->flip = 1;

    if (avctx->codec_tag == AV_RL32("yuv2") &&
        avctx->pix_fmt   == AV_PIX_FMT_YUYV422)
        context->is_yuv2 = 1;

    return 0;
}
/**
 * Draw a frame from the video frame pool.
 *
 * Allocates an AVFrame shell, attaches one pooled buffer per active
 * plane, and fills in a systematic palette for (pseudo-)palettized
 * formats. On any failure the partially built frame is freed.
 *
 * @param pool the frame pool to draw buffers from
 * @return a ready-to-use frame, or NULL on error
 */
AVFrame *ff_video_frame_pool_get(FFVideoFramePool *pool)
{
    const AVPixFmtDescriptor *desc;
    AVFrame *frame = av_frame_alloc();
    int plane;

    if (!frame)
        return NULL;

    desc = av_pix_fmt_desc_get(pool->format);
    if (!desc)
        goto fail;

    frame->width  = pool->width;
    frame->height = pool->height;
    frame->format = pool->format;

    for (plane = 0; plane < 4; plane++) {
        /* linesize is copied even for the first plane past the last
         * pooled one, matching the pool's layout description. */
        frame->linesize[plane] = pool->linesize[plane];
        if (!pool->pools[plane])
            break;
        frame->buf[plane] = av_buffer_pool_get(pool->pools[plane]);
        if (!frame->buf[plane])
            goto fail;
        frame->data[plane] = frame->buf[plane]->data;
    }

    if (desc->flags & (AV_PIX_FMT_FLAG_PAL | AV_PIX_FMT_FLAG_PSEUDOPAL)) {
        /* PAL8 carries no inherent palette; seed it with the BGR8
         * systematic one so the frame is displayable immediately. */
        enum AVPixelFormat pal_fmt =
            pool->format == AV_PIX_FMT_PAL8 ? AV_PIX_FMT_BGR8 : pool->format;

        av_assert0(frame->data[1] != NULL);
        if (avpriv_set_systematic_pal2((uint32_t *)frame->data[1], pal_fmt) < 0)
            goto fail;
    }

    frame->extended_data = frame->data;
    return frame;

fail:
    av_frame_free(&frame);
    return NULL;
}
/**
 * Allocate the pixel planes (and optional palette) for an mp_image.
 *
 * Computes plane pointers and strides from the image's format flags:
 * planar YUV layouts get their chroma planes carved out of one big
 * allocation; packed formats get a single plane plus, for palettized
 * RGB, a separately allocated 256-entry palette.
 *
 * NOTE(review): the av_malloc() results are not checked before use; a
 * failed allocation would be dereferenced by the caller (or by the
 * memcpy below). The void return gives no error path — presumably the
 * surrounding code treats OOM as fatal; confirm before relying on this.
 */
void ff_mp_image_alloc_planes(mp_image_t *mpi) {
    uint32_t temp[256];
    /* Build a systematic palette up front; success marks the image as
     * carrying an RGB palette, which triggers the palette copy below. */
    if (avpriv_set_systematic_pal2(temp, ff_mp2ff_pix_fmt(mpi->imgfmt)) >= 0)
        mpi->flags |= MP_IMGFLAG_RGB_PALETTE;

    // IF09 - allocate space for 4. plane delta info - unused
    if (mpi->imgfmt == IMGFMT_IF09) {
        mpi->planes[0]=av_malloc(mpi->bpp*mpi->width*(mpi->height+2)/8+
                                 mpi->chroma_width*mpi->chroma_height);
    } else
        /* bpp here is bits per *pixel over all planes*; +2 rows of slack. */
        mpi->planes[0]=av_malloc(mpi->bpp*mpi->width*(mpi->height+2)/8);

    if (mpi->flags&MP_IMGFLAG_PLANAR) {
        /* 16-bit-per-component planar YUV uses 2 bytes per sample. */
        int bpp = IMGFMT_IS_YUVP16(mpi->imgfmt)? 2 : 1;
        // YV12/I420/YVU9/IF09. feel free to add other planar formats here...
        mpi->stride[0]=mpi->stride[3]=bpp*mpi->width;
        if(mpi->num_planes > 2){
            mpi->stride[1]=mpi->stride[2]=bpp*mpi->chroma_width;
            if(mpi->flags&MP_IMGFLAG_SWAPPED){
                // I420/IYUV  (Y,U,V) — U precedes V in memory
                mpi->planes[1]=mpi->planes[0]+mpi->stride[0]*mpi->height;
                mpi->planes[2]=mpi->planes[1]+mpi->stride[1]*mpi->chroma_height;
                if (mpi->num_planes > 3)
                    mpi->planes[3]=mpi->planes[2]+mpi->stride[2]*mpi->chroma_height;
            } else {
                // YV12,YVU9,IF09  (Y,V,U) — V precedes U in memory
                mpi->planes[2]=mpi->planes[0]+mpi->stride[0]*mpi->height;
                mpi->planes[1]=mpi->planes[2]+mpi->stride[1]*mpi->chroma_height;
                if (mpi->num_planes > 3)
                    mpi->planes[3]=mpi->planes[1]+mpi->stride[1]*mpi->chroma_height;
            }
        } else {
            // NV12/NV21 — single interleaved chroma plane after luma
            mpi->stride[1]=mpi->chroma_width;
            mpi->planes[1]=mpi->planes[0]+mpi->stride[0]*mpi->height;
        }
    } else {
        /* Packed format: one plane, stride in bytes. */
        mpi->stride[0]=mpi->width*mpi->bpp/8;
        if (mpi->flags & MP_IMGFLAG_RGB_PALETTE) {
            /* 256 entries * 4 bytes — copy of the systematic palette. */
            mpi->planes[1] = av_malloc(1024);
            memcpy(mpi->planes[1], temp, 1024);
        }
    }
    mpi->flags|=MP_IMGFLAG_ALLOCATED;
}
/**
 * Default get_buffer implementation for video: hand out one pooled
 * buffer per plane configured in the codec's internal frame pool.
 *
 * @param s   codec context holding the frame pool
 * @param pic frame to populate; data[0] must be NULL on entry
 * @return 0 on success, -1 on misuse, AVERROR(ENOMEM) when the pool
 *         cannot supply a buffer
 */
static int video_get_buffer(AVCodecContext *s, AVFrame *pic)
{
    FramePool *pool = s->internal->pool;
    int plane = 0;

    if (pic->data[0]) {
        av_log(s, AV_LOG_ERROR, "pic->data[0]!=NULL in avcodec_default_get_buffer\n");
        return -1;
    }

    memset(pic->data, 0, sizeof(pic->data));
    pic->extended_data = pic->data;

    /* Attach a pooled buffer for every configured plane (up to 4). */
    while (plane < 4 && pool->pools[plane]) {
        pic->linesize[plane] = pool->linesize[plane];
        pic->buf[plane]      = av_buffer_pool_get(pool->pools[plane]);
        if (!pic->buf[plane])
            goto fail;
        pic->data[plane] = pic->buf[plane]->data;
        plane++;
    }

    /* Clear the remaining data pointers / linesizes. */
    for (; plane < AV_NUM_DATA_POINTERS; plane++) {
        pic->data[plane]     = NULL;
        pic->linesize[plane] = 0;
    }

    /* A lone second plane means a palette slot: seed it systematically. */
    if (pic->data[1] && !pic->data[2])
        avpriv_set_systematic_pal2((uint32_t *)pic->data[1], s->pix_fmt);

    if (s->debug & FF_DEBUG_BUFFERS)
        av_log(s, AV_LOG_DEBUG, "default_get_buffer called on pic %p\n", pic);

    return 0;

fail:
    av_frame_unref(pic);
    return AVERROR(ENOMEM);
}
/**
 * Encode one frame as a Windows BMP file.
 *
 * Emits BITMAPFILEHEADER + BITMAPINFOHEADER, an optional palette or
 * bitfield-mask table, then the image rows bottom-to-top with each row
 * padded to a 4-byte boundary, as the BMP format requires. The exact
 * order and width of the bytestream_put_* calls below define the file
 * layout — do not reorder them.
 *
 * @return 0 on success, a negative AVERROR on packet allocation failure
 */
static int bmp_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                            const AVFrame *pict, int *got_packet)
{
    const AVFrame * const p = pict;
    int n_bytes_image, n_bytes_per_row, n_bytes, i, n, hsize, ret;
    const uint32_t *pal = NULL;
    uint32_t palette256[256];
    int pad_bytes_per_row, pal_entries = 0, compression = BMP_RGB;
    int bit_count = avctx->bits_per_coded_sample;
    uint8_t *ptr, *buf;

    avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
    avctx->coded_frame->key_frame = 1;

    /* Select compression mode and palette / channel-mask table. */
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGB444:
        compression = BMP_BITFIELDS;
        pal = rgb444_masks; // abuse pal to hold color masks
        pal_entries = 3;
        break;
    case AV_PIX_FMT_RGB565:
        compression = BMP_BITFIELDS;
        pal = rgb565_masks; // abuse pal to hold color masks
        pal_entries = 3;
        break;
    case AV_PIX_FMT_RGB8:
    case AV_PIX_FMT_BGR8:
    case AV_PIX_FMT_RGB4_BYTE:
    case AV_PIX_FMT_BGR4_BYTE:
    case AV_PIX_FMT_GRAY8:
        /* These formats have no per-frame palette; synthesize one. */
        av_assert1(bit_count == 8);
        avpriv_set_systematic_pal2(palette256, avctx->pix_fmt);
        pal = palette256;
        break;
    case AV_PIX_FMT_PAL8:
        pal = (uint32_t *)p->data[1];
        break;
    case AV_PIX_FMT_MONOBLACK:
        pal = monoblack_pal;
        break;
    }
    /* Palettized (not bitfield) formats: 2^bit_count entries. */
    if (pal && !pal_entries)
        pal_entries = 1 << bit_count;
    /* 64-bit intermediate avoids overflow for large width * bpp. */
    n_bytes_per_row = ((int64_t)avctx->width * (int64_t)bit_count + 7LL) >> 3LL;
    pad_bytes_per_row = (4 - n_bytes_per_row) & 3; // rows are DWORD-aligned
    n_bytes_image = avctx->height * (n_bytes_per_row + pad_bytes_per_row);

    // STRUCTURE.field refer to the MSVC documentation for BITMAPFILEHEADER
    // and related pages.
#define SIZE_BITMAPFILEHEADER 14
#define SIZE_BITMAPINFOHEADER 40
    hsize = SIZE_BITMAPFILEHEADER + SIZE_BITMAPINFOHEADER + (pal_entries << 2);
    n_bytes = n_bytes_image + hsize;
    if ((ret = ff_alloc_packet2(avctx, pkt, n_bytes)) < 0)
        return ret;
    buf = pkt->data;
    bytestream_put_byte(&buf, 'B');                   // BITMAPFILEHEADER.bfType
    bytestream_put_byte(&buf, 'M');                   // do.
    bytestream_put_le32(&buf, n_bytes);               // BITMAPFILEHEADER.bfSize
    bytestream_put_le16(&buf, 0);                     // BITMAPFILEHEADER.bfReserved1
    bytestream_put_le16(&buf, 0);                     // BITMAPFILEHEADER.bfReserved2
    bytestream_put_le32(&buf, hsize);                 // BITMAPFILEHEADER.bfOffBits
    bytestream_put_le32(&buf, SIZE_BITMAPINFOHEADER); // BITMAPINFOHEADER.biSize
    bytestream_put_le32(&buf, avctx->width);          // BITMAPINFOHEADER.biWidth
    bytestream_put_le32(&buf, avctx->height);         // BITMAPINFOHEADER.biHeight
    bytestream_put_le16(&buf, 1);                     // BITMAPINFOHEADER.biPlanes
    bytestream_put_le16(&buf, bit_count);             // BITMAPINFOHEADER.biBitCount
    bytestream_put_le32(&buf, compression);           // BITMAPINFOHEADER.biCompression
    bytestream_put_le32(&buf, n_bytes_image);         // BITMAPINFOHEADER.biSizeImage
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biXPelsPerMeter
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biYPelsPerMeter
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biClrUsed
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biClrImportant
    /* Palette entries are stored as 0x00BBGGRR; drop the alpha byte. */
    for (i = 0; i < pal_entries; i++)
        bytestream_put_le32(&buf, pal[i] & 0xFFFFFF);
    // BMP files are bottom-to-top so we start from the end...
    ptr = p->data[0] + (avctx->height - 1) * p->linesize[0];
    buf = pkt->data + hsize;
    for(i = 0; i < avctx->height; i++) {
        if (bit_count == 16) {
            /* 16-bit pixels must be written little-endian regardless of
             * host byte order. */
            const uint16_t *src = (const uint16_t *) ptr;
            uint16_t *dst = (uint16_t *) buf;
            for(n = 0; n < avctx->width; n++)
                AV_WL16(dst + n, src[n]);
        } else {
            memcpy(buf, ptr, n_bytes_per_row);
        }
        buf += n_bytes_per_row;
        memset(buf, 0, pad_bytes_per_row);
        buf += pad_bytes_per_row;
        ptr -= p->linesize[0]; // ... and go back
    }

    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;
    return 0;
}
/**
 * Encode one frame as a ZSoft PCX image.
 *
 * Writes the fixed 128-byte PCX header, RLE-compresses each scanline,
 * and for 8-bpp single-plane images appends the 256-entry VGA palette
 * trailer (a 0x0C marker followed by 256 RGB triplets). The header byte
 * layout is fixed by the PCX specification — do not reorder the
 * bytestream_put_* calls.
 *
 * @return 0 on success, -1 on oversized dimensions / unsupported format
 *         / undersized buffer, or a negative AVERROR from allocation
 */
static int pcx_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                            const AVFrame *frame, int *got_packet)
{
    const uint8_t *buf_end;
    uint8_t *buf;
    int bpp, nplanes, i, y, line_bytes, written, ret, max_pkt_size, sw, sh;
    const uint32_t *pal = NULL;
    uint32_t palette256[256];
    const uint8_t *src;

    /* PCX stores dimensions in 16-bit header fields. */
    if (avctx->width > 65535 || avctx->height > 65535) {
        av_log(avctx, AV_LOG_ERROR, "image dimensions do not fit in 16 bits\n");
        return -1;
    }

    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGB24:
        bpp = 8;
        nplanes = 3;         // one 8-bit plane each for R, G, B
        break;
    case AV_PIX_FMT_RGB8:
    case AV_PIX_FMT_BGR8:
    case AV_PIX_FMT_RGB4_BYTE:
    case AV_PIX_FMT_BGR4_BYTE:
    case AV_PIX_FMT_GRAY8:
        bpp = 8;
        nplanes = 1;
        /* No per-frame palette for these formats; synthesize one. */
        avpriv_set_systematic_pal2(palette256, avctx->pix_fmt);
        pal = palette256;
        break;
    case AV_PIX_FMT_PAL8:
        bpp = 8;
        nplanes = 1;
        pal = (uint32_t *)frame->data[1];
        break;
    case AV_PIX_FMT_MONOBLACK:
        bpp = 1;
        nplanes = 1;
        pal = monoblack_pal;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "unsupported pixfmt\n");
        return -1;
    }

    line_bytes = (avctx->width * bpp + 7) >> 3;
    line_bytes = (line_bytes + 1) & ~1;  // PCX scanlines are even-sized

    /* Worst case: RLE can double the data; add header and palette room. */
    max_pkt_size = 128 + avctx->height * 2 * line_bytes * nplanes + (pal ? 256*3 + 1 : 0);
    if ((ret = ff_alloc_packet2(avctx, pkt, max_pkt_size)) < 0)
        return ret;
    buf = pkt->data;
    buf_end = pkt->data + pkt->size;

    sw = avctx->sample_aspect_ratio.num;
    sh = avctx->sample_aspect_ratio.den;
    /* DPI fields are 16-bit; reduce the aspect ratio to fit. */
    if (sw > 0xFFFFu || sh > 0xFFFFu)
        av_reduce(&sw, &sh, sw, sh, 0xFFFFu);

    bytestream_put_byte(&buf, 10);                  // manufacturer
    bytestream_put_byte(&buf, 5);                   // version
    bytestream_put_byte(&buf, 1);                   // encoding
    bytestream_put_byte(&buf, bpp);                 // bits per pixel per plane
    bytestream_put_le16(&buf, 0);                   // x min
    bytestream_put_le16(&buf, 0);                   // y min
    bytestream_put_le16(&buf, avctx->width - 1);    // x max
    bytestream_put_le16(&buf, avctx->height - 1);   // y max
    bytestream_put_le16(&buf, sw);                  // horizontal DPI
    bytestream_put_le16(&buf, sh);                  // vertical DPI
    for (i = 0; i < 16; i++)
        bytestream_put_be24(&buf, pal ? pal[i] : 0);// palette (<= 16 color only)
    bytestream_put_byte(&buf, 0);                   // reserved
    bytestream_put_byte(&buf, nplanes);             // number of planes
    bytestream_put_le16(&buf, line_bytes);          // scanline plane size in bytes

    /* Zero-fill the rest of the fixed 128-byte header. */
    while (buf - pkt->data < 128)
        *buf++= 0;

    src = frame->data[0];
    for (y = 0; y < avctx->height; y++) {
        if ((written = pcx_rle_encode(buf, buf_end - buf,
                                      src, line_bytes, nplanes)) < 0) {
            av_log(avctx, AV_LOG_ERROR, "buffer too small\n");
            return -1;
        }
        buf += written;
        src += frame->linesize[0];
    }

    /* 8-bpp single-plane images carry a 256-color palette trailer. */
    if (nplanes == 1 && bpp == 8) {
        if (buf_end - buf < 257) {
            av_log(avctx, AV_LOG_ERROR, "buffer too small\n");
            return -1;
        }
        bytestream_put_byte(&buf, 12);  // 0x0C marks the palette trailer
        for (i = 0; i < 256; i++) {
            bytestream_put_be24(&buf, pal[i]);
        }
    }

    pkt->size   = buf - pkt->data;
    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;

    return 0;
}