int ff_rtmp_packet_write(URLContext *h, RTMPPacket *pkt,
                         int chunk_size, RTMPPacket *prev_pkt)
{
    uint8_t pkt_hdr[16], *p = pkt_hdr;
    int mode = RTMP_PS_TWELVEBYTES;
    int off = 0;

    //TODO: header compression
    bytestream_put_byte(&p, pkt->channel_id | (mode << 6));
    if (mode != RTMP_PS_ONEBYTE) {
        bytestream_put_be24(&p, pkt->timestamp);
        if (mode != RTMP_PS_FOURBYTES) {
            bytestream_put_be24(&p, pkt->data_size);
            bytestream_put_byte(&p, pkt->type);
            if (mode == RTMP_PS_TWELVEBYTES)
                bytestream_put_le32(&p, pkt->extra);
        }
    }
    url_write(h, pkt_hdr, p - pkt_hdr);
    while (off < pkt->data_size) {
        int towrite = FFMIN(chunk_size, pkt->data_size - off);
        url_write(h, pkt->data + off, towrite);
        off += towrite;
        if (off < pkt->data_size) {
            uint8_t marker = 0xC0 | pkt->channel_id;
            url_write(h, &marker, 1);
        }
    }
    return 0;
}
static int ogg_build_flac_headers(const uint8_t *extradata, int extradata_size,
                                  OGGStreamContext *oggstream, int bitexact)
{
    const char *vendor = bitexact ? "ffmpeg" : LIBAVFORMAT_IDENT;
    uint8_t *p;

    if (extradata_size != 34)
        return -1;

    /* first packet: STREAMINFO */
    oggstream->header_len[0] = 51;
    oggstream->header[0] = av_mallocz(51); // per ogg flac specs
    p = oggstream->header[0];
    if (!p)
        return AVERROR(ENOMEM);
    bytestream_put_byte(&p, 0x7F);
    bytestream_put_buffer(&p, "FLAC", 4);
    bytestream_put_byte(&p, 1); // major version
    bytestream_put_byte(&p, 0); // minor version
    bytestream_put_be16(&p, 1); // header packets, not counting this one
    bytestream_put_buffer(&p, "fLaC", 4);
    bytestream_put_byte(&p, 0x00); // streaminfo
    bytestream_put_be24(&p, 34);
    bytestream_put_buffer(&p, extradata, 34);

    /* second packet: VorbisComment */
    oggstream->header_len[1] = 1 + 3 + 4 + strlen(vendor) + 4;
    oggstream->header[1] = av_mallocz(oggstream->header_len[1]);
    p = oggstream->header[1];
    if (!p)
        return AVERROR(ENOMEM);
    bytestream_put_byte(&p, 0x84); // last metadata block and vorbis comment
    bytestream_put_be24(&p, oggstream->header_len[1] - 4);
    bytestream_put_le32(&p, strlen(vendor));
    bytestream_put_buffer(&p, vendor, strlen(vendor));
    bytestream_put_le32(&p, 0); // user comment list length

    return 0;
}
static int flac_write_block_comment(ByteIOContext *pb, AVMetadata **m,
                                    int last_block, int bitexact)
{
    const char *vendor = bitexact ? "ffmpeg" : LIBAVFORMAT_IDENT;
    unsigned int len, count;
    uint8_t *p, *p0;

    ff_metadata_conv(m, ff_vorbiscomment_metadata_conv, NULL);

    len = ff_vorbiscomment_length(*m, vendor, &count);
    p0 = av_malloc(len + 4);
    if (!p0)
        return AVERROR(ENOMEM);
    p = p0;

    bytestream_put_byte(&p, last_block ? 0x84 : 0x04);
    bytestream_put_be24(&p, len);
    ff_vorbiscomment_write(&p, m, vendor, count);

    put_buffer(pb, p0, len + 4);
    av_freep(&p0);
    p = NULL;

    return 0;
}
void ff_amf_write_object_end(uint8_t **dst)
{
    /* first two bytes are field name length = 0,
     * AMF object should end with it and end marker
     */
    bytestream_put_be24(dst, AMF_DATA_TYPE_OBJECT_END);
}
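Since AMF_DATA_TYPE_OBJECT_END is the AMF0 object-end marker 0x09, the single 24-bit big-endian write above produces exactly the two zero bytes of the empty field name followed by the marker byte. A minimal standalone illustration (the buffer and names below are hypothetical, not part of the original source):

    uint8_t end_marker[4], *q = end_marker;
    ff_amf_write_object_end(&q);
    /* q - end_marker == 3, and end_marker[0..2] == { 0x00, 0x00, 0x09 } */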
static int flac_write_block_comment(AVIOContext *pb, AVDictionary **m,
                                    int last_block, int bitexact)
{
    const char *vendor = bitexact ? "ffmpeg" : LIBAVFORMAT_IDENT;
    int64_t len;
    uint8_t *p, *p0;

    ff_metadata_conv(m, ff_vorbiscomment_metadata_conv, NULL);

    len = ff_vorbiscomment_length(*m, vendor);
    if (len >= ((1 << 24) - 4))
        return AVERROR(EINVAL);
    p0 = av_malloc(len + 4);
    if (!p0)
        return AVERROR(ENOMEM);
    p = p0;

    bytestream_put_byte(&p, last_block ? 0x84 : 0x04);
    bytestream_put_be24(&p, len);
    ff_vorbiscomment_write(&p, m, vendor);

    avio_write(pb, p0, len + 4);
    av_freep(&p0);
    p = NULL;

    return 0;
}
static int ogg_build_flac_headers(AVCodecContext *avctx,
                                  OGGStreamContext *oggstream, int bitexact)
{
    enum FLACExtradataFormat format;
    uint8_t *streaminfo;
    uint8_t *p;

    if (!ff_flac_is_extradata_valid(avctx, &format, &streaminfo))
        return -1;

    // first packet: STREAMINFO
    oggstream->header_len[0] = 51;
    oggstream->header[0] = av_mallocz(51); // per ogg flac specs
    p = oggstream->header[0];
    if (!p)
        return AVERROR_NOMEM;
    bytestream_put_byte(&p, 0x7F);
    bytestream_put_buffer(&p, "FLAC", 4);
    bytestream_put_byte(&p, 1); // major version
    bytestream_put_byte(&p, 0); // minor version
    bytestream_put_be16(&p, 1); // header packets, not counting this one
    bytestream_put_buffer(&p, "fLaC", 4);
    bytestream_put_byte(&p, 0x00); // streaminfo
    bytestream_put_be24(&p, 34);
    bytestream_put_buffer(&p, streaminfo, FLAC_STREAMINFO_SIZE);

    // second packet: VorbisComment
    p = ogg_write_vorbiscomment(4, bitexact, &oggstream->header_len[1]);
    if (!p)
        return AVERROR_NOMEM;
    oggstream->header[1] = p;
    bytestream_put_byte(&p, 0x84); // last metadata block and vorbis comment
    bytestream_put_be24(&p, oggstream->header_len[1] - 4);

    return 0;
}
/* GIF header */
static int gif_image_write_header(AVCodecContext *avctx, uint8_t **bytestream,
                                  uint32_t *palette)
{
    int i;
    unsigned int v;

    bytestream_put_buffer(bytestream, "GIF", 3);
    bytestream_put_buffer(bytestream, "89a", 3);
    bytestream_put_le16(bytestream, avctx->width);
    bytestream_put_le16(bytestream, avctx->height);

    bytestream_put_byte(bytestream, 0xf7); /* flags: global clut, 256 entries */
    bytestream_put_byte(bytestream, 0x1f); /* background color index */
    bytestream_put_byte(bytestream, 0);    /* aspect ratio */

    /* the global palette */
    for (i = 0; i < 256; i++) {
        v = palette[i];
        bytestream_put_be24(bytestream, v);
    }

    return 0;
}
static int imx_dump_header(AVBitStreamFilterContext *bsfc, AVCodecContext *avctx,
                           const char *args, uint8_t **poutbuf, int *poutbuf_size,
                           const uint8_t *buf, int buf_size, int keyframe)
{
    /* MXF essence element key */
    static const uint8_t imx_header[16] = { 0x06, 0x0e, 0x2b, 0x34, 0x01, 0x02,
                                            0x01, 0x01, 0x0d, 0x01, 0x03, 0x01,
                                            0x05, 0x01, 0x01, 0x00 };
    uint8_t *poutbufp;

    if (avctx->codec_id != AV_CODEC_ID_MPEG2VIDEO) {
        av_log(avctx, AV_LOG_ERROR,
               "imx bitstream filter only applies to mpeg2video codec\n");
        return 0;
    }

    *poutbuf = av_malloc(buf_size + 20 + FF_INPUT_BUFFER_PADDING_SIZE);
    if (!*poutbuf)
        return AVERROR(ENOMEM);
    poutbufp = *poutbuf;
    bytestream_put_buffer(&poutbufp, imx_header, 16);
    bytestream_put_byte(&poutbufp, 0x83); /* KLV BER long form */
    bytestream_put_be24(&poutbufp, buf_size);
    bytestream_put_buffer(&poutbufp, buf, buf_size);
    *poutbuf_size = poutbufp - *poutbuf;
    return 1;
}
void ff_rtp_send_jpeg(AVFormatContext *s1, const uint8_t *buf, int size)
{
    RTPMuxContext *s = s1->priv_data;
    const uint8_t *qtables[4] = { NULL };
    int nb_qtables = 0;
    uint8_t type;
    uint8_t w, h;
    uint8_t *p;
    int off = 0; /* fragment offset of the current JPEG frame */
    int len;
    int i;
    int default_huffman_tables = 0;

    s->buf_ptr   = s->buf;
    s->timestamp = s->cur_timestamp;

    /* convert video pixel dimensions from pixels to blocks */
    w = AV_CEIL_RSHIFT(s1->streams[0]->codecpar->width,  3);
    h = AV_CEIL_RSHIFT(s1->streams[0]->codecpar->height, 3);

    /* get the pixel format type or fail */
    if (s1->streams[0]->codecpar->format == AV_PIX_FMT_YUVJ422P ||
        (s1->streams[0]->codecpar->color_range == AVCOL_RANGE_JPEG &&
         s1->streams[0]->codecpar->format == AV_PIX_FMT_YUV422P)) {
        type = 0;
    } else if (s1->streams[0]->codecpar->format == AV_PIX_FMT_YUVJ420P ||
               (s1->streams[0]->codecpar->color_range == AVCOL_RANGE_JPEG &&
                s1->streams[0]->codecpar->format == AV_PIX_FMT_YUV420P)) {
        type = 1;
    } else {
        av_log(s1, AV_LOG_ERROR, "Unsupported pixel format\n");
        return;
    }

    /* preparse the header for getting some infos */
    for (i = 0; i < size; i++) {
        if (buf[i] != 0xff)
            continue;

        if (buf[i + 1] == DQT) {
            int tables, j;
            if (buf[i + 4] & 0xF0)
                av_log(s1, AV_LOG_WARNING,
                       "Only 8-bit precision is supported.\n");

            /* a quantization table is 64 bytes long */
            tables = AV_RB16(&buf[i + 2]) / 65;
            if (i + 5 + tables * 65 > size) {
                av_log(s1, AV_LOG_ERROR, "Too short JPEG header. Aborted!\n");
                return;
            }
            if (nb_qtables + tables > 4) {
                av_log(s1, AV_LOG_ERROR, "Invalid number of quantisation tables\n");
                return;
            }

            for (j = 0; j < tables; j++)
                qtables[nb_qtables + j] = buf + i + 5 + j * 65;
            nb_qtables += tables;
        } else if (buf[i + 1] == SOF0) {
            if (buf[i + 14] != 17 || buf[i + 17] != 17) {
                av_log(s1, AV_LOG_ERROR,
                       "Only 1x1 chroma blocks are supported. Aborted!\n");
                return;
            }
        } else if (buf[i + 1] == DHT) {
            int dht_size = AV_RB16(&buf[i + 2]);
            default_huffman_tables |= 1 << 4;
            i += 3;
            dht_size -= 2;
            if (i + dht_size >= size)
                continue;
            while (dht_size > 0)
                switch (buf[i + 1]) {
                case 0x00:
                    if (   dht_size >= 29
                        && !memcmp(buf + i +  2, avpriv_mjpeg_bits_dc_luminance + 1, 16)
                        && !memcmp(buf + i + 18, avpriv_mjpeg_val_dc, 12)) {
                        default_huffman_tables |= 1;
                        i += 29;
                        dht_size -= 29;
                    } else {
                        i += dht_size;
                        dht_size = 0;
                    }
                    break;
                case 0x01:
                    if (   dht_size >= 29
                        && !memcmp(buf + i +  2, avpriv_mjpeg_bits_dc_chrominance + 1, 16)
                        && !memcmp(buf + i + 18, avpriv_mjpeg_val_dc, 12)) {
                        default_huffman_tables |= 1 << 1;
                        i += 29;
                        dht_size -= 29;
                    } else {
                        i += dht_size;
                        dht_size = 0;
                    }
                    break;
                case 0x10:
                    if (   dht_size >= 179
                        && !memcmp(buf + i +  2, avpriv_mjpeg_bits_ac_luminance + 1, 16)
                        && !memcmp(buf + i + 18, avpriv_mjpeg_val_ac_luminance, 162)) {
                        default_huffman_tables |= 1 << 2;
                        i += 179;
                        dht_size -= 179;
                    } else {
                        i += dht_size;
                        dht_size = 0;
                    }
                    break;
                case 0x11:
                    if (   dht_size >= 179
                        && !memcmp(buf + i +  2, avpriv_mjpeg_bits_ac_chrominance + 1, 16)
                        && !memcmp(buf + i + 18, avpriv_mjpeg_val_ac_chrominance, 162)) {
                        default_huffman_tables |= 1 << 3;
                        i += 179;
                        dht_size -= 179;
                    } else {
                        i += dht_size;
                        dht_size = 0;
                    }
                    break;
                default:
                    i += dht_size;
                    dht_size = 0;
                    continue;
                }
        } else if (buf[i + 1] == SOS) {
            /* SOS is last marker in the header */
            i += AV_RB16(&buf[i + 2]) + 2;
            if (i > size) {
                av_log(s1, AV_LOG_ERROR,
                       "Insufficient data. Aborted!\n");
                return;
            }
            break;
        }
    }

    if (default_huffman_tables && default_huffman_tables != 31) {
        av_log(s1, AV_LOG_ERROR,
               "RFC 2435 requires standard Huffman tables for jpeg\n");
        return;
    }
    if (nb_qtables && nb_qtables != 2)
        av_log(s1, AV_LOG_WARNING,
               "RFC 2435 suggests two quantization tables, %d provided\n",
               nb_qtables);

    /* skip JPEG header */
    buf  += i;
    size -= i;

    for (i = size - 2; i >= 0; i--) {
        if (buf[i] == 0xff && buf[i + 1] == EOI) {
            /* Remove the EOI marker */
            size = i;
            break;
        }
    }

    p = s->buf_ptr;
    while (size > 0) {
        int hdr_size = 8;

        if (off == 0 && nb_qtables)
            hdr_size += 4 + 64 * nb_qtables;

        /* payload max in one packet */
        len = FFMIN(size, s->max_payload_size - hdr_size);

        /* set main header */
        bytestream_put_byte(&p, 0);
        bytestream_put_be24(&p, off);
        bytestream_put_byte(&p, type);
        bytestream_put_byte(&p, 255);
        bytestream_put_byte(&p, w);
        bytestream_put_byte(&p, h);

        if (off == 0 && nb_qtables) {
            /* set quantization tables header */
            bytestream_put_byte(&p, 0);
            bytestream_put_byte(&p, 0);
            bytestream_put_be16(&p, 64 * nb_qtables);

            for (i = 0; i < nb_qtables; i++)
                bytestream_put_buffer(&p, qtables[i], 64);
        }

        /* copy payload data */
        memcpy(p, buf, len);

        /* marker bit is last packet in frame */
        ff_rtp_send_data(s1, s->buf, len + hdr_size, size == len);

        buf  += len;
        size -= len;
        off  += len;
        p     = s->buf;
    }
}
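For reference, the eight header bytes assembled at the top of each iteration of the send loop above follow the RFC 2435 main JPEG header layout (the code itself cites RFC 2435 in its diagnostics). A hypothetical helper mirroring those writes, for illustration only:

    /* Sketch: the 8-byte RFC 2435 main JPEG header written per packet. */
    static void jpeg_put_main_header(uint8_t **p, uint32_t off, uint8_t type,
                                     uint8_t w_blocks, uint8_t h_blocks)
    {
        bytestream_put_byte(p, 0);        /* type-specific field, always 0 here */
        bytestream_put_be24(p, off);      /* fragment offset within the frame */
        bytestream_put_byte(p, type);     /* 0 = YUV 4:2:2, 1 = YUV 4:2:0 */
        bytestream_put_byte(p, 255);      /* Q = 255: quantization tables in-band */
        bytestream_put_byte(p, w_blocks); /* frame width in 8-pixel blocks */
        bytestream_put_byte(p, h_blocks); /* frame height in 8-pixel blocks */
    }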
int ff_rtmp_packet_write(URLContext *h, RTMPPacket *pkt,
                         int chunk_size, RTMPPacket *prev_pkt)
{
    uint8_t pkt_hdr[16], *p = pkt_hdr;
    int mode = RTMP_PS_TWELVEBYTES;
    int off = 0;
    int size = 0;

    pkt->ts_delta = pkt->timestamp - prev_pkt[pkt->channel_id].timestamp;

    //if channel_id = 0, this is first presentation of prev_pkt, send full hdr.
    if (prev_pkt[pkt->channel_id].channel_id &&
        pkt->extra == prev_pkt[pkt->channel_id].extra) {
        if (pkt->type == prev_pkt[pkt->channel_id].type &&
            pkt->data_size == prev_pkt[pkt->channel_id].data_size) {
            mode = RTMP_PS_FOURBYTES;
            if (pkt->ts_delta == prev_pkt[pkt->channel_id].ts_delta)
                mode = RTMP_PS_ONEBYTE;
        } else {
            mode = RTMP_PS_EIGHTBYTES;
        }
    }

    if (pkt->channel_id < 64) {
        bytestream_put_byte(&p, pkt->channel_id | (mode << 6));
    } else if (pkt->channel_id < 64 + 256) {
        bytestream_put_byte(&p, 0 | (mode << 6));
        bytestream_put_byte(&p, pkt->channel_id - 64);
    } else {
        bytestream_put_byte(&p, 1 | (mode << 6));
        bytestream_put_le16(&p, pkt->channel_id - 64);
    }
    if (mode != RTMP_PS_ONEBYTE) {
        uint32_t timestamp = pkt->timestamp;
        if (mode != RTMP_PS_TWELVEBYTES)
            timestamp = pkt->ts_delta;
        bytestream_put_be24(&p, timestamp >= 0xFFFFFF ? 0xFFFFFF : timestamp);
        if (mode != RTMP_PS_FOURBYTES) {
            bytestream_put_be24(&p, pkt->data_size);
            bytestream_put_byte(&p, pkt->type);
            if (mode == RTMP_PS_TWELVEBYTES)
                bytestream_put_le32(&p, pkt->extra);
        }
        if (timestamp >= 0xFFFFFF)
            bytestream_put_be32(&p, timestamp);
    }
    // save history
    prev_pkt[pkt->channel_id].channel_id = pkt->channel_id;
    prev_pkt[pkt->channel_id].type       = pkt->type;
    prev_pkt[pkt->channel_id].data_size  = pkt->data_size;
    prev_pkt[pkt->channel_id].timestamp  = pkt->timestamp;
    if (mode != RTMP_PS_TWELVEBYTES) {
        prev_pkt[pkt->channel_id].ts_delta = pkt->ts_delta;
    } else {
        prev_pkt[pkt->channel_id].ts_delta = pkt->timestamp;
    }
    prev_pkt[pkt->channel_id].extra      = pkt->extra;

    ffurl_write(h, pkt_hdr, p - pkt_hdr);
    size = p - pkt_hdr + pkt->data_size;

    while (off < pkt->data_size) {
        int towrite = FFMIN(chunk_size, pkt->data_size - off);
        ffurl_write(h, pkt->data + off, towrite);
        off += towrite;

        if (off < pkt->data_size) {
            uint8_t marker = 0xC0 | pkt->channel_id;
            ffurl_write(h, &marker, 1);
            size++;
        }
    }
    return size;
}
static int pcx_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                            const AVFrame *frame, int *got_packet)
{
    PCXContext *s = avctx->priv_data;
    AVFrame *const pict = &s->picture;
    const uint8_t *buf_end;
    uint8_t *buf;

    int bpp, nplanes, i, y, line_bytes, written, ret, max_pkt_size;
    const uint32_t *pal = NULL;
    uint32_t palette256[256];
    const uint8_t *src;

    *pict = *frame;
    pict->pict_type = AV_PICTURE_TYPE_I;
    pict->key_frame = 1;

    if (avctx->width > 65535 || avctx->height > 65535) {
        av_log(avctx, AV_LOG_ERROR, "image dimensions do not fit in 16 bits\n");
        return -1;
    }

    switch (avctx->pix_fmt) {
    case PIX_FMT_RGB24:
        bpp = 8;
        nplanes = 3;
        break;
    case PIX_FMT_RGB8:
    case PIX_FMT_BGR8:
    case PIX_FMT_RGB4_BYTE:
    case PIX_FMT_BGR4_BYTE:
    case PIX_FMT_GRAY8:
        bpp = 8;
        nplanes = 1;
        ff_set_systematic_pal2(palette256, avctx->pix_fmt);
        pal = palette256;
        break;
    case PIX_FMT_PAL8:
        bpp = 8;
        nplanes = 1;
        pal = (uint32_t *)pict->data[1];
        break;
    case PIX_FMT_MONOBLACK:
        bpp = 1;
        nplanes = 1;
        pal = monoblack_pal;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "unsupported pixfmt\n");
        return -1;
    }

    line_bytes = (avctx->width * bpp + 7) >> 3;
    line_bytes = (line_bytes + 1) & ~1;

    max_pkt_size = 128 + avctx->height * 2 * line_bytes * nplanes +
                   (pal ? 256 * 3 + 1 : 0);
    if ((ret = ff_alloc_packet2(avctx, pkt, max_pkt_size)) < 0)
        return ret;
    buf     = pkt->data;
    buf_end = pkt->data + pkt->size;

    bytestream_put_byte(&buf, 10);                  // manufacturer
    bytestream_put_byte(&buf, 5);                   // version
    bytestream_put_byte(&buf, 1);                   // encoding
    bytestream_put_byte(&buf, bpp);                 // bits per pixel per plane
    bytestream_put_le16(&buf, 0);                   // x min
    bytestream_put_le16(&buf, 0);                   // y min
    bytestream_put_le16(&buf, avctx->width - 1);    // x max
    bytestream_put_le16(&buf, avctx->height - 1);   // y max
    bytestream_put_le16(&buf, 0);                   // horizontal DPI
    bytestream_put_le16(&buf, 0);                   // vertical DPI
    for (i = 0; i < 16; i++)
        bytestream_put_be24(&buf, pal ? pal[i] : 0);// palette (<= 16 color only)
    bytestream_put_byte(&buf, 0);                   // reserved
    bytestream_put_byte(&buf, nplanes);             // number of planes
    bytestream_put_le16(&buf, line_bytes);          // scanline plane size in bytes

    while (buf - pkt->data < 128)
        *buf++ = 0;

    src = pict->data[0];

    for (y = 0; y < avctx->height; y++) {
        if ((written = pcx_rle_encode(buf, buf_end - buf,
                                      src, line_bytes, nplanes)) < 0) {
            av_log(avctx, AV_LOG_ERROR, "buffer too small\n");
            return -1;
        }
        buf += written;
        src += pict->linesize[0];
    }

    if (nplanes == 1 && bpp == 8) {
        if (buf_end - buf < 257) {
            av_log(avctx, AV_LOG_ERROR, "buffer too small\n");
            return -1;
        }
        bytestream_put_byte(&buf, 12);
        for (i = 0; i < 256; i++)
            bytestream_put_be24(&buf, pal[i]);
    }

    pkt->size   = buf - pkt->data;
    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;

    return 0;
}
void ff_rtp_send_jpeg(AVFormatContext *s1, const uint8_t *buf, int size)
{
    RTPMuxContext *s = s1->priv_data;
    const uint8_t *qtables = NULL;
    int nb_qtables = 0;
    uint8_t type;
    uint8_t w, h;
    uint8_t *p;
    int off = 0; /* fragment offset of the current JPEG frame */
    int len;
    int i;

    s->buf_ptr   = s->buf;
    s->timestamp = s->cur_timestamp;

    /* convert video pixel dimensions from pixels to blocks */
    w = (s1->streams[0]->codec->width  + 7) >> 3;
    h = (s1->streams[0]->codec->height + 7) >> 3;

    /* get the pixel format type or fail */
    if (s1->streams[0]->codec->pix_fmt == AV_PIX_FMT_YUVJ422P ||
        (s1->streams[0]->codec->color_range == AVCOL_RANGE_JPEG &&
         s1->streams[0]->codec->pix_fmt == AV_PIX_FMT_YUV422P)) {
        type = 0;
    } else if (s1->streams[0]->codec->pix_fmt == AV_PIX_FMT_YUVJ420P ||
               (s1->streams[0]->codec->color_range == AVCOL_RANGE_JPEG &&
                s1->streams[0]->codec->pix_fmt == AV_PIX_FMT_YUV420P)) {
        type = 1;
    } else {
        av_log(s1, AV_LOG_ERROR, "Unsupported pixel format\n");
        return;
    }

    /* preparse the header for getting some infos */
    for (i = 0; i < size; i++) {
        if (buf[i] != 0xff)
            continue;

        if (buf[i + 1] == DQT) {
            if (buf[i + 4])
                av_log(s1, AV_LOG_WARNING,
                       "Only 8-bit precision is supported.\n");

            /* a quantization table is 64 bytes long */
            nb_qtables = AV_RB16(&buf[i + 2]) / 65;
            if (i + 4 + nb_qtables * 65 > size) {
                av_log(s1, AV_LOG_ERROR, "Too short JPEG header. Aborted!\n");
                return;
            }

            qtables = &buf[i + 4];
        } else if (buf[i + 1] == SOF0) {
            if (buf[i + 14] != 17 || buf[i + 17] != 17) {
                av_log(s1, AV_LOG_ERROR,
                       "Only 1x1 chroma blocks are supported. Aborted!\n");
                return;
            }
        } else if (buf[i + 1] == SOS) {
            /* SOS is last marker in the header */
            i += AV_RB16(&buf[i + 2]) + 2;
            break;
        }
    }

    /* skip JPEG header */
    buf  += i;
    size -= i;

    for (i = size - 2; i >= 0; i--) {
        if (buf[i] == 0xff && buf[i + 1] == EOI) {
            /* Remove the EOI marker */
            size = i;
            break;
        }
    }

    p = s->buf_ptr;
    while (size > 0) {
        int hdr_size = 8;

        if (off == 0 && nb_qtables)
            hdr_size += 4 + 64 * nb_qtables;

        /* payload max in one packet */
        len = FFMIN(size, s->max_payload_size - hdr_size);

        /* set main header */
        bytestream_put_byte(&p, 0);
        bytestream_put_be24(&p, off);
        bytestream_put_byte(&p, type);
        bytestream_put_byte(&p, 255);
        bytestream_put_byte(&p, w);
        bytestream_put_byte(&p, h);

        if (off == 0 && nb_qtables) {
            /* set quantization tables header */
            bytestream_put_byte(&p, 0);
            bytestream_put_byte(&p, 0);
            bytestream_put_be16(&p, 64 * nb_qtables);

            for (i = 0; i < nb_qtables; i++)
                bytestream_put_buffer(&p, &qtables[65 * i + 1], 64);
        }

        /* copy payload data */
        memcpy(p, buf, len);

        /* marker bit is last packet in frame */
        ff_rtp_send_data(s1, s->buf, len + hdr_size, size == len);

        buf  += len;
        size -= len;
        off  += len;
        p     = s->buf;
    }
}
static int pam_encode_frame(AVCodecContext *avctx, unsigned char *outbuf,
                            int buf_size, void *data)
{
    PNMContext *s = avctx->priv_data;
    AVFrame *pict = data;
    AVFrame * const p = (AVFrame*)&s->picture;
    int i, h, w, n, linesize, depth, maxval;
    const char *tuple_type;
    uint8_t *ptr;

    if (buf_size < avpicture_get_size(avctx->pix_fmt,
                                      avctx->width, avctx->height) + 200) {
        av_log(avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

    *p           = *pict;
    p->pict_type = AV_PICTURE_TYPE_I;
    p->key_frame = 1;

    s->bytestream_start =
    s->bytestream       = outbuf;
    s->bytestream_end   = outbuf + buf_size;

    h = avctx->height;
    w = avctx->width;
    switch (avctx->pix_fmt) {
    case PIX_FMT_MONOBLACK:
        n          = (w + 7) >> 3;
        depth      = 1;
        maxval     = 1;
        tuple_type = "BLACKANDWHITE";
        break;
    case PIX_FMT_GRAY8:
        n          = w;
        depth      = 1;
        maxval     = 255;
        tuple_type = "GRAYSCALE";
        break;
    case PIX_FMT_GRAY16BE:
        n          = w * 2;
        depth      = 1;
        maxval     = 0xFFFF;
        tuple_type = "GRAYSCALE";
        break;
    case PIX_FMT_GRAY8A:
        n          = w * 2;
        depth      = 2;
        maxval     = 255;
        tuple_type = "GRAYSCALE_ALPHA";
        break;
    case PIX_FMT_RGB24:
        n          = w * 3;
        depth      = 3;
        maxval     = 255;
        tuple_type = "RGB";
        break;
    case PIX_FMT_RGB32:
        n          = w * 4;
        depth      = 4;
        maxval     = 255;
        tuple_type = "RGB_ALPHA";
        break;
    case PIX_FMT_RGB48BE:
        n          = w * 6;
        depth      = 3;
        maxval     = 0xFFFF;
        tuple_type = "RGB";
        break;
    default:
        return -1;
    }
    snprintf(s->bytestream, s->bytestream_end - s->bytestream,
             "P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\nTUPLTYPE %s\nENDHDR\n",
             w, h, depth, maxval, tuple_type);
    s->bytestream += strlen(s->bytestream);

    ptr      = p->data[0];
    linesize = p->linesize[0];

    if (avctx->pix_fmt == PIX_FMT_RGB32) {
        int j;
        unsigned int v;

        for (i = 0; i < h; i++) {
            for (j = 0; j < w; j++) {
                v = ((uint32_t *)ptr)[j];
                bytestream_put_be24(&s->bytestream, v);
                *s->bytestream++ = v >> 24;
            }
            ptr += linesize;
        }
    } else if (avctx->pix_fmt == PIX_FMT_MONOBLACK) {
static int pam_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                            const AVFrame *pict, int *got_packet)
{
    uint8_t *bytestream_start, *bytestream, *bytestream_end;
    const AVFrame * const p = pict;
    int i, h, w, n, linesize, depth, maxval, ret;
    const char *tuple_type;
    uint8_t *ptr;

    if ((ret = ff_alloc_packet(pkt, avpicture_get_size(avctx->pix_fmt,
                                                       avctx->width,
                                                       avctx->height) + 200)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return ret;
    }

    bytestream_start =
    bytestream       = pkt->data;
    bytestream_end   = pkt->data + pkt->size;

    h = avctx->height;
    w = avctx->width;
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_MONOWHITE:
        n          = (w + 7) >> 3;
        depth      = 1;
        maxval     = 1;
        tuple_type = "BLACKANDWHITE";
        break;
    case AV_PIX_FMT_GRAY8:
        n          = w;
        depth      = 1;
        maxval     = 255;
        tuple_type = "GRAYSCALE";
        break;
    case AV_PIX_FMT_RGB24:
        n          = w * 3;
        depth      = 3;
        maxval     = 255;
        tuple_type = "RGB";
        break;
    case AV_PIX_FMT_RGB32:
        n          = w * 4;
        depth      = 4;
        maxval     = 255;
        tuple_type = "RGB_ALPHA";
        break;
    default:
        return -1;
    }
    snprintf(bytestream, bytestream_end - bytestream,
             "P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\nTUPLTYPE %s\nENDHDR\n",
             w, h, depth, maxval, tuple_type);
    bytestream += strlen(bytestream);

    ptr      = p->data[0];
    linesize = p->linesize[0];

    if (avctx->pix_fmt == AV_PIX_FMT_RGB32) {
        int j;
        unsigned int v;

        for (i = 0; i < h; i++) {
            for (j = 0; j < w; j++) {
                v = ((uint32_t *)ptr)[j];
                bytestream_put_be24(&bytestream, v);
                *bytestream++ = v >> 24;
            }
            ptr += linesize;
        }
    } else {
int ff_rtmp_packet_write(URLContext *h, RTMPPacket *pkt,
                         int chunk_size, RTMPPacket **prev_pkt_ptr,
                         int *nb_prev_pkt)
{
    uint8_t pkt_hdr[16], *p = pkt_hdr;
    int mode = RTMP_PS_TWELVEBYTES;
    int off = 0;
    int written = 0;
    int ret;
    RTMPPacket *prev_pkt;
    int use_delta;      // flag if using timestamp delta, not RTMP_PS_TWELVEBYTES
    uint32_t timestamp; // full 32-bit timestamp or delta value

    if ((ret = ff_rtmp_check_alloc_array(prev_pkt_ptr, nb_prev_pkt,
                                         pkt->channel_id)) < 0)
        return ret;
    prev_pkt = *prev_pkt_ptr;

    //if channel_id = 0, this is first presentation of prev_pkt, send full hdr.
    use_delta = prev_pkt[pkt->channel_id].channel_id &&
                pkt->extra == prev_pkt[pkt->channel_id].extra &&
                pkt->timestamp >= prev_pkt[pkt->channel_id].timestamp;

    timestamp = pkt->timestamp;
    if (use_delta) {
        timestamp -= prev_pkt[pkt->channel_id].timestamp;
    }
    if (timestamp >= 0xFFFFFF) {
        pkt->ts_field = 0xFFFFFF;
    } else {
        pkt->ts_field = timestamp;
    }

    if (use_delta) {
        if (pkt->type == prev_pkt[pkt->channel_id].type &&
            pkt->size == prev_pkt[pkt->channel_id].size) {
            mode = RTMP_PS_FOURBYTES;
            if (pkt->ts_field == prev_pkt[pkt->channel_id].ts_field)
                mode = RTMP_PS_ONEBYTE;
        } else {
            mode = RTMP_PS_EIGHTBYTES;
        }
    }

    if (pkt->channel_id < 64) {
        bytestream_put_byte(&p, pkt->channel_id | (mode << 6));
    } else if (pkt->channel_id < 64 + 256) {
        bytestream_put_byte(&p, 0 | (mode << 6));
        bytestream_put_byte(&p, pkt->channel_id - 64);
    } else {
        bytestream_put_byte(&p, 1 | (mode << 6));
        bytestream_put_le16(&p, pkt->channel_id - 64);
    }
    if (mode != RTMP_PS_ONEBYTE) {
        bytestream_put_be24(&p, pkt->ts_field);
        if (mode != RTMP_PS_FOURBYTES) {
            bytestream_put_be24(&p, pkt->size);
            bytestream_put_byte(&p, pkt->type);
            if (mode == RTMP_PS_TWELVEBYTES)
                bytestream_put_le32(&p, pkt->extra);
        }
    }
    if (pkt->ts_field == 0xFFFFFF)
        bytestream_put_be32(&p, timestamp);
    // save history
    prev_pkt[pkt->channel_id].channel_id = pkt->channel_id;
    prev_pkt[pkt->channel_id].type       = pkt->type;
    prev_pkt[pkt->channel_id].size       = pkt->size;
    prev_pkt[pkt->channel_id].timestamp  = pkt->timestamp;
    prev_pkt[pkt->channel_id].ts_field   = pkt->ts_field;
    prev_pkt[pkt->channel_id].extra      = pkt->extra;

    if ((ret = ffurl_write(h, pkt_hdr, p - pkt_hdr)) < 0)
        return ret;
    written = p - pkt_hdr + pkt->size;

    while (off < pkt->size) {
        int towrite = FFMIN(chunk_size, pkt->size - off);
        if ((ret = ffurl_write(h, pkt->data + off, towrite)) < 0)
            return ret;
        off += towrite;

        if (off < pkt->size) {
            uint8_t marker = 0xC0 | pkt->channel_id;
            if ((ret = ffurl_write(h, &marker, 1)) < 0)
                return ret;
            written++;
        }
    }
    return written;
}
/* GIF header */
static int gif_image_write_header(uint8_t **bytestream,
                                  int width, int height, int loop_count,
                                  uint32_t *palette)
{
    int i;
    unsigned int v;

    bytestream_put_buffer(bytestream, "GIF", 3);
    bytestream_put_buffer(bytestream, "89a", 3);
    bytestream_put_le16(bytestream, width);
    bytestream_put_le16(bytestream, height);

    bytestream_put_byte(bytestream, 0xf7); /* flags: global clut, 256 entries */
    bytestream_put_byte(bytestream, 0x1f); /* background color index */
    bytestream_put_byte(bytestream, 0);    /* aspect ratio */

    /* the global palette */
    if (!palette) {
        bytestream_put_buffer(bytestream, (const unsigned char *)gif_clut, 216 * 3);
        for (i = 0; i < ((256 - 216) * 3); i++)
            bytestream_put_byte(bytestream, 0);
    } else {
        for (i = 0; i < 256; i++) {
            v = palette[i];
            bytestream_put_be24(bytestream, v);
        }
    }

    /* update: this is the 'NETSCAPE EXTENSION' that allows for looped animated
       gif, see http://members.aol.com/royalef/gifabout.htm#net-extension

       byte  1       : 33  (hex 0x21) GIF Extension code
       byte  2       : 255 (hex 0xFF) Application Extension Label
       byte  3       : 11  (hex 0x0B) Length of Application Block
                       (eleven bytes of data to follow)
       bytes 4 to 11 : "NETSCAPE"
       bytes 12 to 14: "2.0"
       byte  15      : 3   (hex 0x03) Length of Data Sub-Block
                       (three bytes of data to follow)
       byte  16      : 1   (hex 0x01)
       bytes 17 to 18: 0 to 65535, an unsigned integer in lo-hi byte format.
                       This indicates the number of iterations the loop
                       should be executed.
       byte  19      : 0   (hex 0x00) a Data Sub-Block Terminator
    */

    /* application extension header */
#ifdef GIF_ADD_APP_HEADER
    if (loop_count >= 0 && loop_count <= 65535) {
        bytestream_put_byte(bytestream, 0x21);
        bytestream_put_byte(bytestream, 0xff);
        bytestream_put_byte(bytestream, 0x0b);
        bytestream_put_buffer(bytestream, "NETSCAPE2.0", 11); // bytes 4 to 14
        bytestream_put_byte(bytestream, 0x03);                // byte  15
        bytestream_put_byte(bytestream, 0x01);                // byte  16
        bytestream_put_le16(bytestream, (uint16_t)loop_count);
        bytestream_put_byte(bytestream, 0x00);                // byte  19
    }
#endif
    return 0;
}
static int encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                        const AVFrame *frame, int *got_packet)
{
    int width, height, bits_pixel, i, j, length, ret;
    uint8_t *in_buf, *buf;

    avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
    avctx->coded_frame->key_frame = 1;

    width  = avctx->width;
    height = avctx->height;

    if (width > 65535 || height > 65535 ||
        width * height >= INT_MAX / 4 - ALIAS_HEADER_SIZE) {
        av_log(avctx, AV_LOG_ERROR, "Invalid image size %dx%d.\n", width, height);
        return AVERROR_INVALIDDATA;
    }

    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_GRAY8:
        bits_pixel = 8;
        break;
    case AV_PIX_FMT_BGR24:
        bits_pixel = 24;
        break;
    default:
        return AVERROR(EINVAL);
    }

    length = ALIAS_HEADER_SIZE + 4 * width * height; // max possible
    if ((ret = ff_alloc_packet(pkt, length)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error getting output packet of size %d.\n",
               length);
        return ret;
    }

    buf = pkt->data;

    /* Encode header. */
    bytestream_put_be16(&buf, width);
    bytestream_put_be16(&buf, height);
    bytestream_put_be32(&buf, 0); /* X, Y offset */
    bytestream_put_be16(&buf, bits_pixel);

    for (j = 0; j < height; j++) {
        in_buf = frame->data[0] + frame->linesize[0] * j;
        for (i = 0; i < width; ) {
            int count = 0;
            int pixel;

            if (avctx->pix_fmt == AV_PIX_FMT_GRAY8) {
                pixel = *in_buf;
                while (count < 255 && count + i < width && pixel == *in_buf) {
                    count++;
                    in_buf++;
                }
                bytestream_put_byte(&buf, count);
                bytestream_put_byte(&buf, pixel);
            } else { /* AV_PIX_FMT_BGR24 */
                pixel = AV_RB24(in_buf);
                while (count < 255 && count + i < width &&
                       pixel == AV_RB24(in_buf)) {
                    count++;
                    in_buf += 3;
                }
                bytestream_put_byte(&buf, count);
                bytestream_put_be24(&buf, pixel);
            }
            i += count;
        }
    }

    /* Total length */
    av_shrink_packet(pkt, buf - pkt->data);
    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;

    return 0;
}
static int pcm_encode_frame(AVCodecContext *avctx, unsigned char *frame,
                            int buf_size, void *data)
{
    int n, sample_size, v;
    const short *samples;
    unsigned char *dst;
    const uint8_t *srcu8;
    const int16_t *samples_int16_t;
    const int32_t *samples_int32_t;
    const int64_t *samples_int64_t;
    const uint16_t *samples_uint16_t;
    const uint32_t *samples_uint32_t;

    sample_size = av_get_bits_per_sample(avctx->codec->id) / 8;
    n           = buf_size / sample_size;
    samples     = data;
    dst         = frame;

    if (avctx->sample_fmt != avctx->codec->sample_fmts[0]) {
        av_log(avctx, AV_LOG_ERROR, "invalid sample_fmt\n");
        return -1;
    }

    switch (avctx->codec->id) {
    case CODEC_ID_PCM_U32LE:
        ENCODE(uint32_t, le32, samples, dst, n, 0, 0x80000000)
        break;
    case CODEC_ID_PCM_U32BE:
        ENCODE(uint32_t, be32, samples, dst, n, 0, 0x80000000)
        break;
    case CODEC_ID_PCM_S24LE:
        ENCODE(int32_t, le24, samples, dst, n, 8, 0)
        break;
    case CODEC_ID_PCM_S24BE:
        ENCODE(int32_t, be24, samples, dst, n, 8, 0)
        break;
    case CODEC_ID_PCM_U24LE:
        ENCODE(uint32_t, le24, samples, dst, n, 8, 0x800000)
        break;
    case CODEC_ID_PCM_U24BE:
        ENCODE(uint32_t, be24, samples, dst, n, 8, 0x800000)
        break;
    case CODEC_ID_PCM_S24DAUD:
        for (; n > 0; n--) {
            uint32_t tmp = av_reverse[(*samples >> 8) & 0xff] +
                           (av_reverse[*samples & 0xff] << 8);
            tmp <<= 4; // sync flags would go here
            bytestream_put_be24(&dst, tmp);
            samples++;
        }
        break;
    case CODEC_ID_PCM_U16LE:
        ENCODE(uint16_t, le16, samples, dst, n, 0, 0x8000)
        break;
    case CODEC_ID_PCM_U16BE:
        ENCODE(uint16_t, be16, samples, dst, n, 0, 0x8000)
        break;
    case CODEC_ID_PCM_S8:
        srcu8 = data;
        for (; n > 0; n--) {
            v      = *srcu8++;
            *dst++ = v - 128;
        }
        break;
#if HAVE_BIGENDIAN
    case CODEC_ID_PCM_F64LE:
        ENCODE(int64_t, le64, samples, dst, n, 0, 0)
        break;
    case CODEC_ID_PCM_S32LE:
    case CODEC_ID_PCM_F32LE:
        ENCODE(int32_t, le32, samples, dst, n, 0, 0)
        break;
    case CODEC_ID_PCM_S16LE:
        ENCODE(int16_t, le16, samples, dst, n, 0, 0)
        break;
    case CODEC_ID_PCM_F64BE:
    case CODEC_ID_PCM_F32BE:
    case CODEC_ID_PCM_S32BE:
    case CODEC_ID_PCM_S16BE:
#else
    case CODEC_ID_PCM_F64BE:
        ENCODE(int64_t, be64, samples, dst, n, 0, 0)
        break;
    case CODEC_ID_PCM_F32BE:
    case CODEC_ID_PCM_S32BE:
        ENCODE(int32_t, be32, samples, dst, n, 0, 0)
        break;
    case CODEC_ID_PCM_S16BE:
        ENCODE(int16_t, be16, samples, dst, n, 0, 0)
        break;
    case CODEC_ID_PCM_F64LE:
    case CODEC_ID_PCM_F32LE:
    case CODEC_ID_PCM_S32LE:
    case CODEC_ID_PCM_S16LE:
#endif /* HAVE_BIGENDIAN */
    case CODEC_ID_PCM_U8:
        memcpy(dst, samples, n * sample_size);
        dst += n * sample_size;
        break;
    case CODEC_ID_PCM_ZORK:
        for (; n > 0; n--) {
            v = *samples++ >> 8;
            if (v < 0)
                v = -v;
            else
                v += 128;
            *dst++ = v;
        }
        break;
    case CODEC_ID_PCM_ALAW:
        for (; n > 0; n--) {
            v      = *samples++;
            *dst++ = linear_to_alaw[(v + 32768) >> 2];
        }
        break;
    case CODEC_ID_PCM_MULAW:
        for (; n > 0; n--) {
            v      = *samples++;
            *dst++ = linear_to_ulaw[(v + 32768) >> 2];
        }
        break;
    default:
        return -1;
    }
    //avctx->frame_size = (dst - frame) / (sample_size * avctx->channels);

    return dst - frame;
}
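The ENCODE macro used throughout the switch above is not defined in this excerpt; the otherwise-unused samples_<type> pointer declarations at the top only make sense if the macro token-pastes the sample type into those names. A sketch consistent with that (an assumption about the definition, which lives elsewhere in pcm.c):

    /* Assumed shape of ENCODE: reinterpret the input as `type`, apply the
     * shift and unsigned-bias offset, and emit each sample with the matching
     * bytestream_put_<endian>() writer, advancing dst as it goes. */
    #define ENCODE(type, endian, src, dst, n, shift, offset)                \
        samples_ ## type = (const type *) src;                              \
        for (; n > 0; n--) {                                                \
            type v = (*samples_ ## type++ >> shift) + offset;               \
            bytestream_put_ ## endian(&dst, v);                             \
        }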