/* Pack three planar 10-bit 4:2:2 planes (uint16_t samples) into the v210
 * interleaved layout: 3 samples per little-endian 32-bit word (10 bits
 * each), 6 pixels per 16-byte group, then zero-pad each row to dst_stride. */
static void v210_convert(void *frame_bytes, picture_t *pic, int dst_stride)
{
    int width = pic->format.i_width;
    int height = pic->format.i_height;
    /* payload per row is ((width * 8 + 11) / 12) * 4 bytes (ceil to whole
     * 32-bit words); the remainder of the stride is zero padding */
    int line_padding = dst_stride - ((width * 8 + 11) / 12) * 4;
    int h, w;
    uint8_t *data = (uint8_t*)frame_bytes;
    const uint16_t *y = (const uint16_t*)pic->p[0].p_pixels;
    const uint16_t *u = (const uint16_t*)pic->p[1].p_pixels;
    const uint16_t *v = (const uint16_t*)pic->p[2].p_pixels;

/* Emit one 32-bit word from three samples, advancing each plane pointer.
 * `val` is the caller's accumulator; clip() bounds each sample to 10 bits. */
#define WRITE_PIXELS(a, b, c)       \
    do {                            \
        val = clip(*a++);           \
        val |= (clip(*b++) << 10) | \
               (clip(*c++) << 20);  \
        put_le32(&data, val);       \
    } while (0)

    for (h = 0; h < height; h++) {
        uint32_t val = 0;
        /* main loop: 6 pixels (4 words) per iteration */
        for (w = 0; w < width - 5; w += 6) {
            WRITE_PIXELS(u, y, v);
            WRITE_PIXELS(y, u, y);
            WRITE_PIXELS(v, y, u);
            WRITE_PIXELS(y, v, y);
        }
        /* tail: 2 or 4 leftover pixels */
        if (w < width - 1) {
            WRITE_PIXELS(u, y, v);

            val = clip(*y++);
            if (w == width - 2)
                put_le32(&data, val); /* exactly 2 pixels left */
#undef WRITE_PIXELS
        }
        if (w < width - 3) {
            /* 4 pixels left: finish the partially filled word, then one more */
            val |= (clip(*u++) << 10) | (clip(*y++) << 20);
            put_le32(&data, val);

            val = clip(*v++) | (clip(*y++) << 10);
            put_le32(&data, val);
        }

        memset(data, 0, line_padding);
        data += line_padding;

        /* advance to next row of each plane (pitch is in bytes, samples
         * are 16-bit, chroma is horizontally subsampled by 2) */
        y += pic->p[0].i_pitch / 2 - width;
        u += pic->p[1].i_pitch / 2 - width / 2;
        v += pic->p[2].i_pitch / 2 - width / 2;
    }
}
/* Write the legacy AVI "idx1" index chunk. Entries from all streams are
 * merged into one list ordered by file position, using each stream's
 * AVIStream.entry field as its merge cursor. No-op on streamed output. */
static int avi_write_idx1(AVFormatContext *s)
{
    ByteIOContext *pb = s->pb;
    AVIContext *avi = s->priv_data;
    int64_t idx_chunk;
    int i;
    char tag[5];

    if (!url_is_streamed(pb)) {
        AVIStream *avist;
        AVIIentry* ie = 0, *tie;
        int empty, stream_id = -1;

        idx_chunk = ff_start_tag(pb, "idx1");
        /* reset every stream's merge cursor */
        for(i=0; i<s->nb_streams; i++){
            avist= s->streams[i]->priv_data;
            avist->entry=0;
        }
        do {
            empty = 1;
            /* pick the pending entry with the smallest file offset */
            for (i=0; i<s->nb_streams; i++) {
                avist= s->streams[i]->priv_data;
                if (avist->indexes.entry <= avist->entry)
                    continue;

                tie = avi_get_ientry(&avist->indexes, avist->entry);
                if (empty || tie->pos < ie->pos) {
                    ie = tie;
                    stream_id = i;
                }
                empty = 0;
            }
            if (!empty) {
                avist= s->streams[stream_id]->priv_data;
                avi_stream2fourcc(&tag[0], stream_id,
                                  s->streams[stream_id]->codec->codec_type);
                put_tag(pb, &tag[0]);
                put_le32(pb, ie->flags);
                put_le32(pb, ie->pos);
                put_le32(pb, ie->len);
                avist->entry++;
            }
        } while (!empty);
        ff_end_tag(pb, idx_chunk);

        avi_write_counters(s, avi->riff_id);
    }
    return 0;
}
/* Write one length-prefixed UMF media-description atom per stream, plus
 * one for the internal timecode track (hence the inclusive <= loop bound:
 * i == nb_streams selects gxf->timecode_track). Each atom's length word
 * is patched in afterwards by seeking back. Returns total bytes written. */
static int gxf_write_umf_media_description(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    ByteIOContext *pb = s->pb;
    int64_t pos;
    int i, j;

    pos = url_ftell(pb);
    gxf->umf_media_offset = pos - gxf->umf_start_offset;
    for (i = 0; i <= s->nb_streams; ++i) {
        GXFStreamContext *sc;
        int64_t startpos, curpos;

        if (i == s->nb_streams)
            sc = &gxf->timecode_track;
        else
            sc = s->streams[i]->priv_data;

        startpos = url_ftell(pb);
        put_le16(pb, 0); /* length */
        put_le16(pb, sc->media_info);
        put_le16(pb, 0); /* reserved */
        put_le16(pb, 0); /* reserved */
        put_le32(pb, gxf->nb_fields);
        put_le32(pb, 0); /* attributes rw, ro */
        put_le32(pb, 0); /* mark in */
        put_le32(pb, gxf->nb_fields); /* mark out */
        put_buffer(pb, ES_NAME_PATTERN, strlen(ES_NAME_PATTERN));
        put_be16(pb, sc->media_info);
        /* pad the name field out to its fixed 88-byte size */
        for (j = strlen(ES_NAME_PATTERN)+2; j < 88; j++)
            put_byte(pb, 0);
        put_le32(pb, sc->track_type);
        put_le32(pb, sc->sample_rate);
        put_le32(pb, sc->sample_size);
        put_le32(pb, 0); /* reserved */

        if (sc == &gxf->timecode_track)
            gxf_write_umf_media_timecode(pb, sc); /* 8 0bytes */
        else {
            AVStream *st = s->streams[i];
            /* codec-specific trailer of the atom */
            switch (st->codec->codec_id) {
            case CODEC_ID_MPEG2VIDEO:
                gxf_write_umf_media_mpeg(pb, st);
                break;
            case CODEC_ID_PCM_S16LE:
                gxf_write_umf_media_audio(pb, sc);
                break;
            case CODEC_ID_DVVIDEO:
                gxf_write_umf_media_dv(pb, sc);
                break;
            }
        }

        /* patch the length word written first */
        curpos = url_ftell(pb);
        url_fseek(pb, startpos, SEEK_SET);
        put_le16(pb, curpos - startpos);
        url_fseek(pb, curpos, SEEK_SET);
    }
    return url_ftell(pb) - pos;
}
/* Write the fixed 20-byte UMF user-data section (all payload fields zero)
 * and record its offset relative to the UMF start. Returns the byte count. */
static int gxf_write_umf_user_data(ByteIOContext *pb, GXFContext *ctx)
{
    int64_t start = url_ftell(pb);
    int n;

    ctx->umf_user_data_offset = start - ctx->umf_start_offset;

    put_le32(pb, 20);  /* section size */
    put_le32(pb, 0);
    put_le16(pb, 0);
    put_le16(pb, 0);
    put_le32(pb, 0);
    for (n = 0; n < 4; n++)
        put_byte(pb, 0);

    return 20;
}
/* Emit a placeholder field-location-table (FLT) packet: header, the two
 * count words, then a fixed table of 1000 zeroed entries. */
static int gxf_write_flt_packet(ByteIOContext *pb, GXFContext *ctx)
{
    int64_t start = url_ftell(pb);
    int n;

    gxf_write_packet_header(pb, PKT_FLT);

    put_le32(pb, 1000); /* number of fields */
    put_le32(pb, 0);    /* number of active flt entries */

    for (n = 0; n < 1000; ++n)
        put_le32(pb, 0);

    return updatePacketSize(pb, start);
}
/* Finish the SWF file: release the audio fifo, emit the END tag and, when
 * the output is seekable and a video stream exists, back-patch the file
 * size and the frame-count fields written earlier in the header. */
static int swf_write_trailer(AVFormatContext *s)
{
    SWFContext *swf = s->priv_data;
    ByteIOContext *pb = s->pb;
    AVCodecContext *enc, *video_enc;
    int file_size, i;

    video_enc = NULL;
    for(i=0;i<s->nb_streams;i++) {
        enc = s->streams[i]->codec;
        if (enc->codec_type == CODEC_TYPE_VIDEO)
            video_enc = enc;
        else
            /* NOTE(review): the fifo is freed once per non-video stream;
             * looks unsafe if several audio streams were allowed — confirm
             * the muxer accepts at most one audio stream */
            av_fifo_free(&swf->audio_fifo);
    }

    put_swf_tag(s, TAG_END);
    put_swf_end_tag(s);

    put_flush_packet(s->pb);

    /* patch file size and number of frames if not streamed */
    if (!url_is_streamed(s->pb) && video_enc) {
        file_size = url_ftell(pb);
        url_fseek(pb, 4, SEEK_SET);
        put_le32(pb, file_size);
        url_fseek(pb, swf->duration_pos, SEEK_SET);
        put_le16(pb, swf->video_frame_number);
        url_fseek(pb, swf->vframes_pos, SEEK_SET);
        put_le16(pb, swf->video_frame_number);
        url_fseek(pb, file_size, SEEK_SET);
    }
    return 0;
}
/* D-Bus handler: queue a CSC control-point write that sets the sensor's
 * cumulative wheel revolution counter. Replies later from the GATT write
 * callback, so NULL is returned here; errors reply immediately. */
static DBusMessage *set_cumulative_wheel_rev(DBusConnection *conn,
                                             DBusMessage *msg, void *data)
{
    struct csc *csc = data;
    struct controlpoint_req *request;
    dbus_uint32_t wheel_rev;
    uint8_t value[5]; /* uint8 opcode + uint32 value */

    if (!dbus_message_get_args(msg, NULL, DBUS_TYPE_UINT32, &wheel_rev,
                               DBUS_TYPE_INVALID))
        return btd_error_invalid_args(msg);

    /* only one control-point operation may be outstanding at a time */
    if (csc->pending_req != NULL)
        return btd_error_in_progress(msg);

    request = g_new(struct controlpoint_req, 1);
    request->csc = csc;
    request->opcode = SET_CUMULATIVE_VALUE;
    request->msg = dbus_message_ref(msg);

    csc->pending_req = request;

    value[0] = SET_CUMULATIVE_VALUE;
    put_le32(wheel_rev, value + 1);

    gatt_write_char(csc->attrib, csc->controlpoint_val_handle, value,
                    sizeof(value), controlpoint_write_cb, request);

    return NULL;
}
/* Write one OpenDML "ix##" leaf index chunk per stream and enable the
 * matching entry in each stream's "indx" master index by seeking back and
 * patching it. Requires a seekable output (asserted). */
static int avi_write_ix(AVFormatContext *s)
{
    ByteIOContext *pb = s->pb;
    AVIContext *avi = s->priv_data;
    char tag[5];
    char ix_tag[] = "ix00";
    int i, j;

    assert(!url_is_streamed(pb));

    /* the master index holds at most AVI_MASTER_INDEX_SIZE entries */
    if (avi->riff_id > AVI_MASTER_INDEX_SIZE)
        return -1;

    for (i=0;i<s->nb_streams;i++) {
        AVIStream *avist= s->streams[i]->priv_data;
        int64_t ix, pos;

        avi_stream2fourcc(&tag[0], i, s->streams[i]->codec->codec_type);
        ix_tag[3] = '0' + i;

        /* Writing AVI OpenDML leaf index chunk */
        ix = url_ftell(pb);
        put_tag(pb, &ix_tag[0]);     /* ix?? */
        put_le32(pb, avist->indexes.entry * 8 + 24);
                                     /* chunk size */
        put_le16(pb, 2);             /* wLongsPerEntry */
        put_byte(pb, 0);             /* bIndexSubType (0 == frame index) */
        put_byte(pb, 1);             /* bIndexType (1 == AVI_INDEX_OF_CHUNKS) */
        put_le32(pb, avist->indexes.entry);
                                     /* nEntriesInUse */
        put_tag(pb, &tag[0]);        /* dwChunkId */
        put_le64(pb, avi->movi_list);/* qwBaseOffset */
        put_le32(pb, 0);             /* dwReserved_3 (must be 0) */

        for (j=0; j<avist->indexes.entry; j++) {
             AVIIentry* ie = avi_get_ientry(&avist->indexes, j);
             put_le32(pb, ie->pos + 8);
             /* top bit of the size is set for non-keyframes (flag 0x10
              * marks a keyframe here) */
             put_le32(pb, ((uint32_t)ie->len & ~0x80000000) |
                          (ie->flags & 0x10 ? 0 : 0x80000000));
         }
         put_flush_packet(pb);
         pos = url_ftell(pb);

         /* Updating one entry in the AVI OpenDML master index */
         url_fseek(pb, avist->indexes.indx_start - 8, SEEK_SET);
         put_tag(pb, "indx");            /* enabling this entry */
         url_fskip(pb, 8);
         put_le32(pb, avi->riff_id);     /* nEntriesInUse */
         url_fskip(pb, 16*avi->riff_id);
         put_le64(pb, ix);               /* qwOffset */
         put_le32(pb, pos - ix);         /* dwSize */
         put_le32(pb, avist->indexes.entry); /* dwDuration */

         url_fseek(pb, pos, SEEK_SET);
    }
    return 0;
}
/* Write the fixed 48-byte UMF material description. The timecode word
 * packs hh:mm:ss:ff one byte each (hours in the top byte); by operator
 * precedence each  x / d % m << s  term parses as ((x / d) % m) << s. */
static int gxf_write_umf_material_description(ByteIOContext *pb, GXFContext *ctx)
{
    // XXX drop frame
    uint32_t timecode =
        ctx->nb_fields / (ctx->sample_rate * 3600) % 24 << 24 | // hours
        ctx->nb_fields / (ctx->sample_rate * 60) % 60 << 16 |   // minutes
        ctx->nb_fields / ctx->sample_rate % 60 << 8 |           // seconds
        ctx->nb_fields % ctx->sample_rate;                      // fields

    put_le32(pb, ctx->flags);
    put_le32(pb, ctx->nb_fields); /* length of the longest track */
    put_le32(pb, ctx->nb_fields); /* length of the shortest track */
    put_le32(pb, 0); /* mark in */
    put_le32(pb, ctx->nb_fields); /* mark out */
    put_le32(pb, 0); /* timecode mark in */
    put_le32(pb, timecode); /* timecode mark out */
    put_le64(pb, ctx->fc->timestamp); /* modification time */
    put_le64(pb, ctx->fc->timestamp); /* creation time */
    put_le16(pb, 0); /* reserved */
    put_le16(pb, 0); /* reserved */
    put_le16(pb, ctx->audio_tracks);
    put_le16(pb, 0); /* timecode track count */
    put_le16(pb, 0); /* reserved */
    put_le16(pb, ctx->mpeg_tracks);
    return 48;
}
/* Write the fixed 48-byte UMF material description (AVFormatContext
 * variant). timecode_base is fields per second: 60 when time_base.den is
 * 60000 (NTSC rate), otherwise 50 (PAL). The timecode word packs
 * hh:mm:ss:ff one byte each; precedence makes each  x / d % m << s  term
 * parse as ((x / d) % m) << s. */
static int gxf_write_umf_material_description(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    ByteIOContext *pb = s->pb;
    int timecode_base = gxf->time_base.den == 60000 ? 60 : 50;

    // XXX drop frame
    uint32_t timecode =
        gxf->nb_fields / (timecode_base * 3600) % 24 << 24 | // hours
        gxf->nb_fields / (timecode_base * 60) % 60 << 16 |   // minutes
        gxf->nb_fields / timecode_base % 60 << 8 |           // seconds
        gxf->nb_fields % timecode_base;                      // fields

    put_le32(pb, gxf->flags);
    put_le32(pb, gxf->nb_fields); /* length of the longest track */
    put_le32(pb, gxf->nb_fields); /* length of the shortest track */
    put_le32(pb, 0); /* mark in */
    put_le32(pb, gxf->nb_fields); /* mark out */
    put_le32(pb, 0); /* timecode mark in */
    put_le32(pb, timecode); /* timecode mark out */
    put_le64(pb, s->timestamp); /* modification time */
    put_le64(pb, s->timestamp); /* creation time */
    put_le16(pb, 0); /* reserved */
    put_le16(pb, 0); /* reserved */
    put_le16(pb, gxf->audio_tracks);
    put_le16(pb, 1); /* timecode track count */
    put_le16(pb, 0); /* reserved */
    put_le16(pb, gxf->mpeg_tracks);
    return 48;
}
/* Write the ASF Simple Index Object: header GUID, object size, file GUID,
 * indexing interval, max packet count, entry count, then one
 * (packet_number, packet_count) pair per entry. */
static int asf_write_index(AVFormatContext *s, ASFIndex *index, uint16_t max, uint32_t count)
{
    ByteIOContext *pb = s->pb;
    uint32_t n;

    put_guid(pb, &ff_asf_simple_index_header);
    /* object size: 24-byte object header + 16 (guid) + 8 (interval)
     * + 4 + 4 (max/count) + 6 bytes per entry */
    put_le64(pb, 24 + 16 + 8 + 4 + 4 + (4 + 2) * count);
    put_guid(pb, &ff_asf_my_guid);
    put_le64(pb, ASF_INDEXED_INTERVAL);
    put_le32(pb, max);
    put_le32(pb, count);

    for (n = 0; n < count; ++n) {
        put_le32(pb, index[n].packet_number);
        put_le16(pb, index[n].packet_count);
    }

    return 0;
}
/* Close a RIFF-style chunk opened at `start`: seek back to the 32-bit
 * size word just before the payload, patch in the payload length, and
 * restore the write position. */
void end_tag(ByteIOContext *pb, offset_t start)
{
    offset_t cur = url_ftell(pb);

    url_fseek(pb, start - 4, SEEK_SET);
    put_le32(pb, (uint32_t)(cur - start));
    url_fseek(pb, cur, SEEK_SET);
}
/* Legacy avi_write_idx1 variant: indexes live in the AVIContext (one per
 * stream) and a local entry[] array holds the merge cursors. Entries from
 * all streams are written in file-position order. No-op when streamed. */
static int avi_write_idx1(AVFormatContext *s)
{
    ByteIOContext *pb = &s->pb;
    AVIContext *avi = s->priv_data;
    offset_t idx_chunk;
    int i;
    char tag[5];

    if (!url_is_streamed(pb)) {
        AVIIentry* ie = 0, *tie;
        int entry[MAX_STREAMS];
        int empty, stream_id = -1;

        idx_chunk = start_tag(pb, "idx1");
        memset(&entry[0], 0, sizeof(entry));
        do {
            empty = 1;
            /* pick the pending entry with the smallest file offset */
            for (i=0; i<s->nb_streams; i++) {
                if (avi->indexes[i].entry <= entry[i])
                    continue;

                tie = avi_get_ientry(&avi->indexes[i], entry[i]);
                if (empty || tie->pos < ie->pos) {
                    ie = tie;
                    stream_id = i;
                }
                empty = 0;
            }
            if (!empty) {
                avi_stream2fourcc(&tag[0], stream_id,
                                  s->streams[stream_id]->codec->codec_type);
                put_tag(pb, &tag[0]);
                put_le32(pb, ie->flags);
                put_le32(pb, ie->pos);
                put_le32(pb, ie->len);
                entry[stream_id]++;
            }
        } while (!empty);
        end_tag(pb, idx_chunk);

        avi_write_counters(s, avi->riff_id);
    }
    return 0;
}
/* Write the SoX native-format header. The magic and all header fields
 * follow the sample byte order: ".SoX" + little endian for pcm_s32le,
 * "XoS." + big endian for pcm_s32be; anything else is rejected. */
static int sox_write_header(AVFormatContext *s)
{
    SoXContext *sox = s->priv_data;
    ByteIOContext *pb = s->pb;
    AVCodecContext *enc = s->streams[0]->codec;
    AVMetadataTag *comment;
    size_t comment_len = 0, comment_size;

    comment = av_metadata_get(s->metadata, "comment", NULL, 0);
    if (comment)
        comment_len = strlen(comment->value);
    /* comment field is zero-padded up to a multiple of 8 bytes */
    comment_size = (comment_len + 7) & ~7;

    sox->header_size = SOX_FIXED_HDR + comment_size;

    if (enc->codec_id == CODEC_ID_PCM_S32LE) {
        put_tag(pb, ".SoX");
        put_le32(pb, sox->header_size);
        put_le64(pb, 0); /* number of samples */
        put_le64(pb, av_dbl2int(enc->sample_rate));
        put_le32(pb, enc->channels);
        put_le32(pb, comment_size);
    } else if (enc->codec_id == CODEC_ID_PCM_S32BE) {
        put_tag(pb, "XoS.");
        put_be32(pb, sox->header_size);
        put_be64(pb, 0); /* number of samples */
        put_be64(pb, av_dbl2int(enc->sample_rate));
        put_be32(pb, enc->channels);
        put_be32(pb, comment_size);
    } else {
        av_log(s, AV_LOG_ERROR, "invalid codec; use pcm_s32le or pcm_s32be\n");
        return -1;
    }

    if (comment_len)
        put_buffer(pb, comment->value, comment_len);

    /* zero-pad the comment field out to comment_size */
    for ( ; comment_size > comment_len; comment_len++)
        put_byte(pb, 0);

    put_flush_packet(pb);

    return 0;
}
/* Serialize a Bluetooth UUID into dst in little-endian byte order.
 * 16- and 32-bit UUIDs are written with the matching put_le helper;
 * 128-bit UUIDs are byte-swapped from their big-endian storage. */
static inline void put_uuid_le(const bt_uuid_t *src, void *dst)
{
    switch (src->type) {
    case BT_UUID16:
        put_le16(src->value.u16, dst);
        break;
    case BT_UUID32:
        put_le32(src->value.u32, dst);
        break;
    default:
        /* Convert from 128-bit BE to LE */
        bswap_128(&src->value.u128, dst);
        break;
    }
}
/* Write the 32-byte UMF timecode media trailer: a non-drop-frame flag
 * followed by seven reserved zero words. Returns the byte count. */
static int gxf_write_umf_media_timecode(ByteIOContext *pb, GXFStreamContext *sc)
{
    int n;

    put_le32(pb, 1); /* non drop frame */
    for (n = 0; n < 7; n++)
        put_le32(pb, 0); /* reserved */

    return 32;
}
/* Write one IVF frame: a 12-byte header (32-bit payload size plus 64-bit
 * pts, both little endian) followed by the raw packet data. */
static int ivf_write_packet(AVFormatContext *s, AVPacket *pkt)
{
    ByteIOContext *out = s->pb;

    put_le32(out, pkt->size);
    put_le64(out, pkt->pts);
    put_buffer(out, pkt->data, pkt->size);
    put_flush_packet(out);

    return 0;
}
/* Serialize a GUID in its on-disk mixed-endian layout: the first three
 * fields little endian, the trailing 8 bytes verbatim. */
static void put_guid(ByteIOContext *s, const GUID *g)
{
    int n;

    put_le32(s, g->v1);
    put_le16(s, g->v2);
    put_le16(s, g->v3);
    for (n = 0; n < 8; n++)
        put_byte(s, g->v4[n]);
}
/* Finalize the AVI: close the open chunks, write the idx1 or OpenDML
 * indexes depending on whether more than one RIFF chunk was started,
 * patch the dwTotalFrames field, and free the per-stream index clusters. */
static int avi_write_trailer(AVFormatContext *s)
{
    AVIContext *avi = s->priv_data;
    ByteIOContext *pb = s->pb;
    int res = 0;
    int i, j, n, nb_frames;
    int64_t file_size;

    if (!url_is_streamed(pb)){
        if (avi->riff_id == 1) {
            /* single RIFF chunk: classic idx1 index */
            ff_end_tag(pb, avi->movi_list);
            res = avi_write_idx1(s);
            ff_end_tag(pb, avi->riff_start);
        } else {
            /* multiple RIFF chunks: OpenDML indexes */
            avi_write_ix(s);
            ff_end_tag(pb, avi->movi_list);
            ff_end_tag(pb, avi->riff_start);

            file_size = url_ftell(pb);
            url_fseek(pb, avi->odml_list - 8, SEEK_SET);
            put_tag(pb, "LIST"); /* Making this AVI OpenDML one */
            url_fskip(pb, 16);

            /* dwTotalFrames: the largest video packet count, plus the
             * audio packet counts for MP2/MP3 streams */
            for (n=nb_frames=0;n<s->nb_streams;n++) {
                AVCodecContext *stream = s->streams[n]->codec;
                AVIStream *avist= s->streams[n]->priv_data;

                if (stream->codec_type == AVMEDIA_TYPE_VIDEO) {
                    if (nb_frames < avist->packet_count)
                        nb_frames = avist->packet_count;
                } else {
                    if (stream->codec_id == CODEC_ID_MP2 || stream->codec_id == CODEC_ID_MP3) {
                        nb_frames += avist->packet_count;
                    }
                }
            }
            put_le32(pb, nb_frames);
            url_fseek(pb, file_size, SEEK_SET);

            avi_write_counters(s, avi->riff_id);
        }
    }
    put_flush_packet(pb);

    /* release the index clusters allocated while muxing */
    for (i=0; i<s->nb_streams; i++) {
         AVIStream *avist= s->streams[i]->priv_data;
         for (j=0; j<avist->indexes.ents_allocated/AVI_INDEX_CLUSTER_SIZE; j++)
              av_free(avist->indexes.cluster[j]);
         av_freep(&avist->indexes.cluster);
         avist->indexes.ents_allocated = avist->indexes.entry = 0;
    }

    return res;
}
/* Older GXF UMF media-description writer: one length-prefixed atom per
 * stream. The 88-byte name field holds the ES name pattern, then the
 * media_info word big-endian, then zero padding from a pre-cleared local
 * buffer. Each atom's length word is patched in afterwards. */
static int gxf_write_umf_media_description(ByteIOContext *pb, GXFContext *ctx)
{
    int64_t pos;
    int i;

    pos = url_ftell(pb);
    ctx->umf_media_offset = pos - ctx->umf_start_offset;
    for (i = 0; i < ctx->fc->nb_streams; ++i) {
        GXFStreamContext *sc = &ctx->streams[i];
        char buffer[88];
        int64_t startpos, curpos;
        int path_size = strlen(ES_NAME_PATTERN);

        memset(buffer, 0, 88);
        startpos = url_ftell(pb);
        put_le16(pb, 0); /* length */
        put_le16(pb, sc->media_info);
        put_le16(pb, 0); /* reserved */
        put_le16(pb, 0); /* reserved */
        put_le32(pb, ctx->nb_frames);
        put_le32(pb, 0); /* attributes rw, ro */
        put_le32(pb, 0); /* mark in */
        put_le32(pb, ctx->nb_frames); /* mark out */
        strncpy(buffer, ES_NAME_PATTERN, path_size);
        put_buffer(pb, (uint8_t *)buffer, path_size);
        put_be16(pb, sc->media_info);
        /* the rest of the 88-byte name field is zero padding */
        put_buffer(pb, (uint8_t *)buffer + path_size + 2, 88 - path_size - 2);
        put_le32(pb, sc->track_type);
        put_le32(pb, sc->sample_rate);
        put_le32(pb, sc->sample_size);
        put_le32(pb, 0); /* reserved */
        /* codec-specific trailer of the atom */
        switch (sc->codec->codec_id) {
        case CODEC_ID_MPEG2VIDEO:
            gxf_write_umf_media_mpeg(pb, sc);
            break;
        case CODEC_ID_PCM_S16LE:
            gxf_write_umf_media_audio(pb, sc);
            break;
        case CODEC_ID_DVVIDEO:
            gxf_write_umf_media_dv(pb, sc);
            break;
        default:
            gxf_write_umf_media_timecode(pb, sc); /* 8 0bytes */
        }
        /* patch the leading length word */
        curpos = url_ftell(pb);
        url_fseek(pb, startpos, SEEK_SET);
        put_le16(pb, curpos - startpos);
        url_fseek(pb, curpos, SEEK_SET);
    }
    return url_ftell(pb) - pos;
}
/* Write one RIFF INFO sub-chunk: fourcc tag, 32-bit length, the string
 * with its NUL terminator, and an even-size pad byte when needed.
 * Empty strings are skipped entirely. */
static void avi_write_info_tag(ByteIOContext *pb, const char *tag, const char *str)
{
    int len = strlen(str);

    if (len <= 0)
        return;

    len++; /* stored length includes the terminating NUL */
    put_tag(pb, tag);
    put_le32(pb, len);
    put_strz(pb, str);
    if (len & 1)
        put_byte(pb, 0); /* RIFF chunks are padded to even size */
}
/* Write an ASF packet's payload parsing information section. The padding
 * length field is encoded as a byte or a word depending on its size, and
 * the field width itself is part of the padding budget (hence the -1/-2).
 * Returns the number of bytes written. */
static int put_payload_parsing_info(
                AVFormatContext *s,
                unsigned int    sendtime,
                unsigned int    duration,
                int             nb_payloads,
                int             padsize
                )
{
    ASFContext *asf = s->priv_data;
    ByteIOContext *pb = s->pb;
    int ppi_size, i;
    int64_t start= url_ftell(pb);

    int iLengthTypeFlags = ASF_PPI_LENGTH_TYPE_FLAGS;

    /* the fixed header (and the payload-count byte, when multiple
     * payloads are present) consume part of the padding budget */
    padsize -= PACKET_HEADER_MIN_SIZE;
    if(asf->multi_payloads_present)
        padsize--;
    assert(padsize>=0);

    put_byte(pb, ASF_PACKET_ERROR_CORRECTION_FLAGS);
    for (i = 0; i < ASF_PACKET_ERROR_CORRECTION_DATA_SIZE; i++){
        put_byte(pb, 0x0);
    }

    if (asf->multi_payloads_present)
        iLengthTypeFlags |= ASF_PPI_FLAG_MULTIPLE_PAYLOADS_PRESENT;

    if (padsize > 0) {
        if (padsize < 256)
            iLengthTypeFlags |= ASF_PPI_FLAG_PADDING_LENGTH_FIELD_IS_BYTE;
        else
            iLengthTypeFlags |= ASF_PPI_FLAG_PADDING_LENGTH_FIELD_IS_WORD;
    }
    put_byte(pb, iLengthTypeFlags);

    put_byte(pb, ASF_PPI_PROPERTY_FLAGS);

    /* the padding field's own size counts against the padding */
    if (iLengthTypeFlags & ASF_PPI_FLAG_PADDING_LENGTH_FIELD_IS_WORD)
        put_le16(pb, padsize - 2);
    if (iLengthTypeFlags & ASF_PPI_FLAG_PADDING_LENGTH_FIELD_IS_BYTE)
        put_byte(pb, padsize - 1);

    put_le32(pb, sendtime);
    put_le16(pb, duration);
    if (asf->multi_payloads_present)
        put_byte(pb, nb_payloads | ASF_PAYLOAD_FLAGS);

    ppi_size = url_ftell(pb) - start;

    return ppi_size;
}
/* Write one packet as a Creative Voice (VOC) block. The first packet also
 * emits the codec parameters: a "new voice data" block for 16-bit format
 * tags, otherwise the classic "voice data" block (preceded by an
 * "extended" block for multi-channel audio). Later packets use the
 * continuation block type. */
static int voc_write_packet(AVFormatContext *s, AVPacket *pkt)
{
    voc_enc_context_t *voc = s->priv_data;
    AVCodecContext *enc = s->streams[0]->codec;
    ByteIOContext *pb = &s->pb;

    if (!voc->param_written) {
        int format = codec_get_tag(voc_codec_tags, enc->codec_id);

        if (format > 0xFF) {
            put_byte(pb, VOC_TYPE_NEW_VOICE_DATA);
            put_le24(pb, pkt->size + 12); /* block size includes 12 header bytes */
            put_le32(pb, enc->sample_rate);
            put_byte(pb, enc->bits_per_sample);
            put_byte(pb, enc->channels);
            put_le16(pb, format);
            put_le32(pb, 0);
        } else {
            if (s->streams[0]->codec->channels > 1) {
                put_byte(pb, VOC_TYPE_EXTENDED);
                put_le24(pb, 4);
                /* extended time constant covers all channels */
                put_le16(pb, 65536-256000000/(enc->sample_rate*enc->channels));
                put_byte(pb, format);
                put_byte(pb, enc->channels - 1);
            }
            put_byte(pb, VOC_TYPE_VOICE_DATA);
            put_le24(pb, pkt->size + 2); /* block size includes 2 header bytes */
            put_byte(pb, 256 - 1000000 / enc->sample_rate); /* time constant */
            put_byte(pb, format);
        }
        voc->param_written = 1;
    } else {
        put_byte(pb, VOC_TYPE_VOICE_DATA_CONT);
        put_le24(pb, pkt->size);
    }

    put_buffer(pb, pkt->data, pkt->size);
    return 0;
}
/* Write the WAVE header: RIFF/WAVE preamble, "fmt " chunk, an optional
 * "fact" chunk for non-PCM seekable output, then open the "data" chunk.
 * The file length, fact sample count and data size placeholders are
 * patched in by the trailer. Returns 0 on success, -1 on unsupported codec.
 *
 * Fix: the error path previously called av_free(wav); wav is s->priv_data,
 * which is owned and freed by the generic layer, so freeing it here caused
 * a double free when the caller cleaned up the context. */
static int wav_write_header(AVFormatContext *s)
{
    WAVContext *wav = s->priv_data;
    ByteIOContext *pb = s->pb;
    int64_t fmt, fact;

    put_tag(pb, "RIFF");
    put_le32(pb, 0); /* file length: patched in the trailer */
    put_tag(pb, "WAVE");

    /* format header */
    fmt = ff_start_tag(pb, "fmt ");
    if (ff_put_wav_header(pb, s->streams[0]->codec) < 0) {
        av_log(s, AV_LOG_ERROR, "%s codec not supported in WAVE format\n",
               s->streams[0]->codec->codec ? s->streams[0]->codec->codec->name : "NONE");
        /* do NOT free s->priv_data here: the generic layer owns it */
        return -1;
    }
    ff_end_tag(pb, fmt);

    if (s->streams[0]->codec->codec_tag != 0x01 /* hence for all other than PCM */
        && !url_is_streamed(s->pb)) {
        fact = ff_start_tag(pb, "fact");
        put_le32(pb, 0); /* sample count: patched in the trailer */
        ff_end_tag(pb, fact);
    }

    av_set_pts_info(s->streams[0], 64, 1, s->streams[0]->codec->sample_rate);
    wav->maxpts = wav->last_duration = 0;
    wav->minpts = INT64_MAX;

    /* data header */
    wav->data = ff_start_tag(pb, "data");
    put_flush_packet(pb);
    return 0;
}
/* Write an ASF stream-chunk header (streaming output only): type, total
 * size (payload + 8 header bytes), running sequence number, flags, and
 * the size repeated as a confirmation word. */
static void put_chunk(AVFormatContext *s, int type, int payload_length, int flags)
{
    ASFContext *asf = s->priv_data;
    ByteIOContext *pb = s->pb;
    int total = payload_length + 8;

    put_le16(pb, type);
    put_le16(pb, total);      /* size */
    put_le32(pb, asf->seqno); /* sequence number */
    put_le16(pb, flags);      /* unknown bytes */
    put_le16(pb, total);      /* size_confirm */
    asf->seqno++;
}
/* Serialize one Ogg page into a dynamic buffer so the CRC over the whole
 * page can be computed and patched into its header, then copy the
 * finished page to the real output. */
static int ogg_write_page(AVFormatContext *s, OGGPage *page, int extra_flags)
{
    OGGStreamContext *oggstream = s->streams[page->stream_index]->priv_data;
    ByteIOContext *pb;
    int64_t crc_offset;
    int ret, size;
    uint8_t *buf;

    ret = url_open_dyn_buf(&pb);
    if (ret < 0)
        return ret;
    init_checksum(pb, ff_crc04C11DB7_update, 0);
    put_tag(pb, "OggS");
    put_byte(pb, 0); /* stream structure version */
    put_byte(pb, page->flags | extra_flags);
    put_le64(pb, page->granule);
    put_le32(pb, oggstream->serial_num);
    put_le32(pb, oggstream->page_counter++);
    crc_offset = url_ftell(pb);
    put_le32(pb, 0); // crc placeholder, patched below
    put_byte(pb, page->segments_count);
    put_buffer(pb, page->segments, page->segments_count);
    put_buffer(pb, page->data, page->size);
    /* patch the CRC word at crc_offset with the running checksum */
    ogg_update_checksum(s, pb, crc_offset);
    put_flush_packet(pb);
    size = url_close_dyn_buf(pb, &buf);
    if (size < 0)
        return size;
    put_buffer(s->pb, buf, size);
    put_flush_packet(s->pb);
    av_free(buf);
    oggstream->page_count--; /* one fewer queued page for this stream */
    return 0;
}
/* Start an SWF tag: remember its file position and type, then reserve
 * placeholder length bytes (2 for short tags, 2 + 4 for long tags) that
 * put_swf_end_tag patches once the tag body size is known. */
static void put_swf_tag(AVFormatContext *s, int tag)
{
    SWFContext *swf = s->priv_data;
    ByteIOContext *pb = s->pb;

    swf->tag_pos = url_ftell(pb);
    swf->tag = tag;

    put_le16(pb, 0);
    if (tag & TAG_LONG)
        put_le32(pb, 0); /* long tags carry an extra 32-bit length */
}
/* Write the field-location table (FLT) packet: a fixed 1000-entry table
 * holding a packet offset for every fields_per_flt-th field, used for
 * seeking. Unused entries are zeroed. */
static int gxf_write_flt_packet(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    ByteIOContext *pb = s->pb;
    int64_t pos = url_ftell(pb);
    /* spread the recorded fields over at most 1000 table entries */
    int fields_per_flt = (gxf->nb_fields+1) / 1000 + 1;
    int flt_entries = gxf->nb_fields / fields_per_flt - 1;
    int i = 0;

    gxf_write_packet_header(pb, PKT_FLT);

    put_le32(pb, fields_per_flt); /* number of fields */
    put_le32(pb, flt_entries); /* number of active flt entries */

    if (gxf->flt_entries) {
        /* flt_entries[] is indexed per frame; >>1 converts the field
         * index to a frame index */
        for (i = 0; i < flt_entries; i++)
            put_le32(pb, gxf->flt_entries[(i*fields_per_flt)>>1]);
    }

    /* zero the remainder of the fixed-size table */
    for (; i < 1000; i++)
        put_le32(pb, 0);

    return updatePacketSize(pb, pos);
}
/* Write one ASF payload header. The payload-length word is only present
 * in multi-payload packets; replicated data is fixed at 8 bytes (media
 * object size + presentation time). */
static void put_payload_header(
                AVFormatContext *s,
                ASFStream       *stream,
                int             presentation_time,
                int             m_obj_size,
                int             m_obj_offset,
                int             payload_len,
                int             flags
                )
{
    ASFContext *asf = s->priv_data;
    ByteIOContext *pb = &asf->pb;
    int val;

    /* stream number, with the key-frame bit set when appropriate */
    val = stream->num;
    if (flags & PKT_FLAG_KEY)
        val |= ASF_PL_FLAG_KEY_FRAME;
    put_byte(pb, val);

    put_byte(pb, stream->seq);  //Media object number
    put_le32(pb, m_obj_offset); //Offset Into Media Object

    // Replicated Data shall be at least 8 bytes long.
    // The first 4 bytes of data shall contain the
    // Size of the Media Object that the payload belongs to.
    // The next 4 bytes of data shall contain the
    // Presentation Time for the media object that the payload belongs to.
    put_byte(pb, ASF_PAYLOAD_REPLICATED_DATA_LENGTH);

    put_le32(pb, m_obj_size);       //Replicated Data - Media Object Size
    put_le32(pb, presentation_time);//Replicated Data - Presentation Time

    if (asf->multi_payloads_present){
        put_le16(pb, payload_len);   //payload length
    }
}
/* Older WAVE header writer: RIFF/WAVE preamble, "fmt " chunk, optional
 * "fact" chunk for non-PCM seekable output, then open the "data" chunk.
 * Placeholders are patched in by the trailer. Returns 0, or -1 when the
 * codec cannot be expressed in a WAVE header.
 *
 * Fix: the error path previously called av_free(wav); wav is s->priv_data,
 * which is owned and freed by the generic layer, so freeing it here caused
 * a double free when the caller cleaned up the context. */
static int wav_write_header(AVFormatContext *s)
{
    WAVContext *wav = s->priv_data;
    ByteIOContext *pb = &s->pb;
    offset_t fmt, fact;

    put_tag(pb, "RIFF");
    put_le32(pb, 0); /* file length: patched in the trailer */
    put_tag(pb, "WAVE");

    /* format header */
    fmt = start_tag(pb, "fmt ");
    if (put_wav_header(pb, s->streams[0]->codec) < 0) {
        /* do NOT free s->priv_data here: the generic layer owns it */
        return -1;
    }
    end_tag(pb, fmt);

    if(s->streams[0]->codec->codec_tag != 0x01 /* hence for all other than PCM */
       && !url_is_streamed(&s->pb)) {
        fact = start_tag(pb, "fact");
        put_le32(pb, 0); /* sample count: patched in the trailer */
        end_tag(pb, fact);
    }

    av_set_pts_info(s->streams[0], 64, 1, s->streams[0]->codec->sample_rate);
    wav->maxpts = wav->last_duration = 0;
    wav->minpts = INT64_MAX;

    /* data header */
    wav->data = start_tag(pb, "data");
    put_flush_packet(pb);
    return 0;
}