Example no. 1
static void flush_packet(AVFormatContext *s)
{
    FFMContext *ffm = s->priv_data;
    int fill_size, h;
    ByteIOContext *pb = s->pb;

    fill_size = ffm->packet_end - ffm->packet_ptr;
    memset(ffm->packet_ptr, 0, fill_size);

    if (url_ftell(pb) % ffm->packet_size)
        av_abort();

    /* put header */
    put_be16(pb, PACKET_ID);
    put_be16(pb, fill_size);
    put_be64(pb, ffm->dts);
    h = ffm->frame_offset;
    if (ffm->first_packet)
        h |= 0x8000;
    put_be16(pb, h);
    put_buffer(pb, ffm->packet, ffm->packet_end - ffm->packet);
    put_flush_packet(pb);

    /* prepare next packet */
    ffm->frame_offset = 0; /* no key frame */
    ffm->packet_ptr = ffm->packet;
    ffm->first_packet = 0;
}
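
The ffm muxer above uses the old FFmpeg ByteIOContext writers, where put_be16, put_be32 and put_be64 emit an integer in big-endian order, most significant byte first. As a point of reference, here is a minimal stand-alone sketch of that byte ordering; write_be16 and write_be64 are hypothetical helpers that store into a caller-supplied buffer instead of a ByteIOContext:

#include <stdint.h>

/* Hypothetical buffer-based helpers; the byte order matches what the
 * put_be16()/put_be64() calls above produce on the output stream. */
static void write_be16(uint8_t *buf, uint16_t v)
{
    buf[0] = (uint8_t)(v >> 8);   /* most significant byte first */
    buf[1] = (uint8_t)(v & 0xff);
}

static void write_be64(uint8_t *buf, uint64_t v)
{
    for (int i = 0; i < 8; i++)
        buf[i] = (uint8_t)(v >> (56 - 8 * i));
}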
Example no. 2
static void write_sgi_header(ByteIOContext *f, const SGIInfo *info)
{
    int i;

    put_be16(f, SGI_MAGIC);
    put_byte(f, info->rle);
    put_byte(f, info->bytes_per_channel);
    put_be16(f, info->dimension);
    put_be16(f, info->xsize);
    put_be16(f, info->ysize);
    put_be16(f, info->zsize);

    /* The rest are constant in this implementation */
    put_be32(f, 0L); /* pixmin */
    put_be32(f, 255L); /* pixmax */
    put_be32(f, 0L); /* dummy */

    /* name */
    for (i = 0; i < 80; i++) {
        put_byte(f, 0);
    }

    put_be32(f, 0L); /* colormap */

    /* The rest of the 512 byte header is unused. */
    for (i = 0; i < 404; i++) {
        put_byte(f, 0);
    }
}
Example no. 3
static int gxf_write_media_preamble(ByteIOContext *pb, GXFContext *ctx, AVPacket *pkt, int size)
{
    GXFStreamContext *sc = &ctx->streams[pkt->stream_index];
    int64_t dts = av_rescale_rnd(pkt->dts, ctx->sample_rate, sc->codec->time_base.den, AV_ROUND_UP);

    put_byte(pb, sc->media_type);
    put_byte(pb, sc->index);
    put_be32(pb, dts);
    if (sc->codec->codec_type == CODEC_TYPE_AUDIO) {
        put_be16(pb, 0);
        put_be16(pb, size / 2);
    } else if (sc->codec->codec_id == CODEC_ID_MPEG2VIDEO) {
        int frame_type = gxf_parse_mpeg_frame(sc, pkt->data, pkt->size);
        if (frame_type == FF_I_TYPE) {
            put_byte(pb, 0x0d);
            sc->iframes++;
        } else if (frame_type == FF_B_TYPE) {
            put_byte(pb, 0x0f);
            sc->bframes++;
        } else {
            put_byte(pb, 0x0e);
            sc->pframes++;
        }
        put_be24(pb, size);
    } else if (sc->codec->codec_id == CODEC_ID_DVVIDEO) {
        put_byte(pb, size / 4096);
        put_be24(pb, 0);
    } else
        put_be32(pb, size);
    put_be32(pb, dts);
    put_byte(pb, 1); /* flags */
    put_byte(pb, 0); /* reserved */
    return 16;
}
Example no. 4
static bool free_eraseblk(eraseblock& eb)
{
    if (   eb.e_type == eraseblock_type::dentry_inode
        || eb.e_type == eraseblock_type::dentry_clin
        || eb.e_type == eraseblock_type::file_inode
        || eb.e_type == eraseblock_type::file_clin)
    {
        // The given erase block contains inodes or indirect pointers
        // and therefore tracks its valid cluster count.
        // Set it to "free" if it doesn't contain any valid clusters.
        if (get_be16(eb.e_cvalid) == 0)
        {
            eb.e_type = eraseblock_type::empty;
            eb.e_lastwrite = put_be16(0);
            eb.e_writeops = put_be16(0);
            return true;
        }
    }
    else if (eb.e_type == eraseblock_type::ebin)
    {
        // TODO: implement me!
        log().error("Unable to free ebin erase block {}", eb);
    }
    // the given erase block cannot be freed
    return false;
}
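
The erase-block code above (and the next example, which uses the same helpers) comes from a C++ filesystem where put_be16 is a value-returning conversion rather than a stream writer: it takes a host-order integer and yields the big-endian representation stored directly in the on-disk fields (e_lastwrite, e_writeops), while get_be16 converts back for reading. A minimal sketch of that flavour, assuming a two-byte wrapper type in the spirit of those fields (the names here are illustrative, not the project's own):

#include <stdint.h>

/* Illustrative on-disk 16-bit big-endian field. */
struct be16 { uint8_t b[2]; };

/* Analogous to the value-returning put_be16(v) used above. */
static struct be16 host_to_be16(uint16_t v)
{
    struct be16 f = { { (uint8_t)(v >> 8), (uint8_t)(v & 0xff) } };
    return f;
}

/* Analogous to get_be16(field). */
static uint16_t be16_to_host(struct be16 f)
{
    return (uint16_t)((f.b[0] << 8) | f.b[1]);
}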
Example no. 5
void close_eraseblks(fs_context& fs)
{
    /* TODO: Error handling missing! */

    for (eb_id_t eb_id = 1; eb_id < fs.neraseblocks; ++eb_id)
    {
        if (fs.eb_usage[eb_id].e_type == eraseblock_type::ebin)
            continue; /* can never be "open" */
        if (fs.eb_usage[eb_id].e_type == eraseblock_type::empty)
            continue; /* can never be "open" */

        eraseblock_type eb_type = fs.eb_usage[eb_id].e_type;
        unsigned int writeops = get_be16(fs.eb_usage[eb_id].e_writeops);
        unsigned int max_writeops = fs.erasesize / fs.clustersize;

        if (writeops == max_writeops)
            continue; /* erase block is already finalized/closed */

        fs.eb_usage[eb_id].e_writeops = put_be16(max_writeops);

        if (!summary_required(fs, eb_type))
            continue;

        summary* eb_summary = summary_get(*fs.summary_cache, eb_type);

        summary_write(fs, eb_summary, eb_id);
        summary_close(*fs.summary_cache, eb_summary);

        /* tell gcinfo an erase block of a specific type was written */
        unsigned int write_time = gcinfo_update_writetime(fs, eb_type);
        fs.eb_usage[eb_id].e_lastwrite = put_be16(write_time);
    }
}
Example no. 6
int ff_isom_write_avcc(ByteIOContext *pb, const uint8_t *data, int len)
{
    if (len > 6) {
        /* check for h264 start code */
        if (AV_RB32(data) == 0x00000001 ||
            AV_RB24(data) == 0x000001) {
            uint8_t *buf=NULL, *end, *start;
            uint32_t sps_size=0, pps_size=0;
            uint8_t *sps=0, *pps=0;

            int ret = ff_avc_parse_nal_units_buf(data, &buf, &len);
            if (ret < 0)
                return ret;
            start = buf;
            end = buf + len;

            /* look for sps and pps */
            while (buf < end) {
                unsigned int size;
                uint8_t nal_type;
                size = AV_RB32(buf);
                nal_type = buf[4] & 0x1f;
                if (nal_type == 7) { /* SPS */
                    sps = buf + 4;
                    sps_size = size;
                } else if (nal_type == 8) { /* PPS */
                    pps = buf + 4;
                    pps_size = size;
                }
                buf += size + 4;
            }

            if (!sps || !pps)
                av_log(NULL, AV_LOG_ERROR, "Could not parse H.264 SPS/PPS from the NAL units\n");

            assert(sps);
            assert(pps);

            put_byte(pb, 1); /* version */
            put_byte(pb, sps[1]); /* profile */
            put_byte(pb, sps[2]); /* profile compat */
            put_byte(pb, sps[3]); /* level */
            put_byte(pb, 0xff); /* 6 bits reserved (111111) + 2 bits nal size length - 1 (11) */
            put_byte(pb, 0xe1); /* 3 bits reserved (111) + 5 bits number of sps (00001) */

            put_be16(pb, sps_size);
            put_buffer(pb, sps, sps_size);
            put_byte(pb, 1); /* number of pps */
            put_be16(pb, pps_size);
            put_buffer(pb, pps, pps_size);
            av_free(start);
        } else {
            put_buffer(pb, data, len);
        }
    }
    return 0;
}
Example no. 7
static int daud_write_packet(struct AVFormatContext *s, AVPacket *pkt)
{
    put_be16(s->pb, pkt->size);
    put_be16(s->pb, 0x8010); // unknown
    put_buffer(s->pb, pkt->data, pkt->size);
    put_flush_packet(s->pb);
    return 0;
}
Example no. 8
static int gxf_write_track_description(AVFormatContext *s, GXFStreamContext *sc, int index)
{
    ByteIOContext *pb = s->pb;
    int64_t pos;
    int mpeg = sc->track_type == 4 || sc->track_type == 9;

    /* track description section */
    put_byte(pb, sc->media_type + 0x80);
    put_byte(pb, index + 0xC0);

    pos = url_ftell(pb);
    put_be16(pb, 0); /* size */

    /* media file name */
    put_byte(pb, TRACK_NAME);
    put_byte(pb, strlen(ES_NAME_PATTERN) + 3);
    put_tag(pb, ES_NAME_PATTERN);
    put_be16(pb, sc->media_info);
    put_byte(pb, 0);

    if (!mpeg) {
        /* auxiliary information */
        put_byte(pb, TRACK_AUX);
        put_byte(pb, 8);
        if (sc->track_type == 3)
            gxf_write_timecode_auxiliary(pb, sc);
        else
            put_le64(pb, 0);
    }

    /* file system version */
    put_byte(pb, TRACK_VER);
    put_byte(pb, 4);
    put_be32(pb, 0);

    if (mpeg)
        gxf_write_mpeg_auxiliary(pb, s->streams[index]);

    /* frame rate */
    put_byte(pb, TRACK_FPS);
    put_byte(pb, 4);
    put_be32(pb, sc->frame_rate_index);

    /* lines per frame */
    put_byte(pb, TRACK_LINES);
    put_byte(pb, 4);
    put_be32(pb, sc->lines_index);

    /* fields per frame */
    put_byte(pb, TRACK_FPF);
    put_byte(pb, 4);
    put_be32(pb, sc->fields);

    return updateSize(pb, pos);
}
Example no. 9
static void output_match(ByteIOContext *out, int match_sample,
                         int match_offset, int match_len, int *entries)
{
    put_byte(out, 2); /* sample constructor */
    put_byte(out, 0); /* track reference */
    put_be16(out, match_len);
    put_be32(out, match_sample);
    put_be32(out, match_offset);
    put_be16(out, 1); /* bytes per block */
    put_be16(out, 1); /* samples per block */
    (*entries)++;
}
Example no. 10
static int gxf_write_track_description(ByteIOContext *pb, GXFStreamContext *stream)
{
    int64_t pos;

    /* track description section */
    put_byte(pb, stream->media_type + 0x80);
    put_byte(pb, stream->index + 0xC0);

    pos = url_ftell(pb);
    put_be16(pb, 0); /* size */

    /* media file name */
    put_byte(pb, TRACK_NAME);
    put_byte(pb, strlen(ES_NAME_PATTERN) + 3);
    put_tag(pb, ES_NAME_PATTERN);
    put_be16(pb, stream->media_info);
    put_byte(pb, 0);

    if (stream->codec->codec_id != CODEC_ID_MPEG2VIDEO) {
        /* auxiliary information */
        put_byte(pb, TRACK_AUX);
        put_byte(pb, 8);
        if (stream->codec->codec_id == CODEC_ID_NONE)
            gxf_write_timecode_auxiliary(pb, stream);
        else
            put_le64(pb, 0);
    }

    /* file system version */
    put_byte(pb, TRACK_VER);
    put_byte(pb, 4);
    put_be32(pb, 0);

    if (stream->codec->codec_id == CODEC_ID_MPEG2VIDEO)
        gxf_write_mpeg_auxiliary(pb, stream);

    /* frame rate */
    put_byte(pb, TRACK_FPS);
    put_byte(pb, 4);
    put_be32(pb, stream->frame_rate_index);

    /* lines per frame */
    put_byte(pb, TRACK_LINES);
    put_byte(pb, 4);
    put_be32(pb, stream->lines_index);

    /* fields per frame */
    put_byte(pb, TRACK_FPF);
    put_byte(pb, 4);
    put_be32(pb, stream->fields);

    return updateSize(pb, pos);
}
Example no. 11
static int gxf_write_umf_media_description(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    ByteIOContext *pb = s->pb;
    int64_t pos;
    int i, j;

    pos = url_ftell(pb);
    gxf->umf_media_offset = pos - gxf->umf_start_offset;
    for (i = 0; i <= s->nb_streams; ++i) {
        GXFStreamContext *sc;
        int64_t startpos, curpos;

        if (i == s->nb_streams)
            sc = &gxf->timecode_track;
        else
            sc = s->streams[i]->priv_data;

        startpos = url_ftell(pb);
        put_le16(pb, 0); /* length */
        put_le16(pb, sc->media_info);
        put_le16(pb, 0); /* reserved */
        put_le16(pb, 0); /* reserved */
        put_le32(pb, gxf->nb_fields);
        put_le32(pb, 0); /* attributes rw, ro */
        put_le32(pb, 0); /* mark in */
        put_le32(pb, gxf->nb_fields); /* mark out */
        put_buffer(pb, ES_NAME_PATTERN, strlen(ES_NAME_PATTERN));
        put_be16(pb, sc->media_info);
        for (j = strlen(ES_NAME_PATTERN)+2; j < 88; j++)
            put_byte(pb, 0);
        put_le32(pb, sc->track_type);
        put_le32(pb, sc->sample_rate);
        put_le32(pb, sc->sample_size);
        put_le32(pb, 0); /* reserved */

        if (sc == &gxf->timecode_track)
            gxf_write_umf_media_timecode(pb, sc); /* 8 zero bytes */
        else {
            AVStream *st = s->streams[i];
            switch (st->codec->codec_id) {
            case CODEC_ID_MPEG2VIDEO:
                gxf_write_umf_media_mpeg(pb, st);
                break;
            case CODEC_ID_PCM_S16LE:
                gxf_write_umf_media_audio(pb, sc);
                break;
            case CODEC_ID_DVVIDEO:
                gxf_write_umf_media_dv(pb, sc);
                break;
            }
        }

        curpos = url_ftell(pb);
        url_fseek(pb, startpos, SEEK_SET);
        put_le16(pb, curpos - startpos);
        url_fseek(pb, curpos, SEEK_SET);
    }
    return url_ftell(pb) - pos;
}
Example no. 12
File: rtpenc.c Project: kaone3/vsmm
/* send an rtcp sender report packet */
static void rtcp_send_sr(AVFormatContext *s1, int64_t ntp_time)
{
    RTPMuxContext *s = s1->priv_data;
    uint32_t rtp_ts;
#ifdef _MSC_VER
	AVRational rational = {1, 1000000};
#endif

    dprintf(s1, "RTCP: %02x %"PRIx64" %x\n", s->payload_type, ntp_time, s->timestamp);

    s->last_rtcp_ntp_time = ntp_time;
#ifdef _MSC_VER
    rtp_ts = av_rescale_q(ntp_time - s->first_rtcp_ntp_time, rational,
                          s1->streams[0]->time_base) + s->base_timestamp;
#else
    rtp_ts = av_rescale_q(ntp_time - s->first_rtcp_ntp_time, (AVRational){1, 1000000},
                          s1->streams[0]->time_base) + s->base_timestamp;
#endif
    put_byte(s1->pb, (RTP_VERSION << 6));
    put_byte(s1->pb, RTCP_SR);
    put_be16(s1->pb, 6); /* length in words - 1 */
    put_be32(s1->pb, s->ssrc);
    put_be32(s1->pb, ntp_time / 1000000);
    put_be32(s1->pb, ((ntp_time % 1000000) << 32) / 1000000);
    put_be32(s1->pb, rtp_ts);
    put_be32(s1->pb, s->packet_count);
    put_be32(s1->pb, s->octet_count);
    put_flush_packet(s1->pb);
}
Example no. 13
File: rsoenc.c Project: kaone3/vsmm
static int rso_write_trailer(AVFormatContext *s)
{
    ByteIOContext *pb = s->pb;
    int64_t file_size;
    uint16_t coded_file_size;

    file_size = url_ftell(pb);

    if (file_size < 0)
        return file_size;

    if (file_size > 0xffff + RSO_HEADER_SIZE) {
        av_log(s, AV_LOG_WARNING,
               "Output file is too big (%"PRId64" bytes >= 64kB)\n", file_size);
        coded_file_size = 0xffff;
    } else {
        coded_file_size = file_size - RSO_HEADER_SIZE;
    }

    /* update file size */
    url_fseek(pb, 2, SEEK_SET);
    put_be16(pb, coded_file_size);
    url_fseek(pb, file_size, SEEK_SET);

    put_flush_packet(pb);

    return 0;
}
Example no. 14
File: rmenc.c Project: AndyA/ffmbc
static void put_str(ByteIOContext *s, const char *tag)
{
    put_be16(s,strlen(tag));
    while (*tag) {
        put_byte(s, *tag++);
    }
}
Example no. 15
 void
 RTMPSession::sendConnectPacket()
 {
     RTMPChunk_0 metadata = {{0}};
     metadata.msg_stream_id = kControlChannelStreamId;
     metadata.msg_type_id = RTMP_PT_INVOKE;
     std::vector<uint8_t> buff;
     std::stringstream url ;
     if(m_uri.port > 0) {
         url << m_uri.protocol << "://" << m_uri.host << ":" << m_uri.port << "/" << m_app;
     } else {
         url << m_uri.protocol << "://" << m_uri.host << "/" << m_app;
     }
     put_string(buff, "connect");
     put_double(buff, ++m_numberOfInvokes);
     m_trackedCommands[m_numberOfInvokes] = "connect";
     put_byte(buff, kAMFObject);
     put_named_string(buff, "app", m_app.c_str());
     put_named_string(buff,"type", "nonprivate");
     put_named_string(buff, "tcUrl", url.str().c_str());
     put_named_bool(buff, "fpad", false);
     put_named_double(buff, "capabilities", 15.);
     put_named_double(buff, "audioCodecs", 10. );
     put_named_double(buff, "videoCodecs", 7.);
     put_named_double(buff, "videoFunction", 1.);
     put_be16(buff, 0);
     put_byte(buff, kAMFObjectEnd);
     
     metadata.msg_length.data = static_cast<int>( buff.size() );
     sendPacket(&buff[0], buff.size(), metadata);
 }
Example no. 16
int sap_connect_rsp(void *sap_device, uint8_t status)
{
	struct sap_server *server = sap_device;
	struct sap_connection *conn = server->conn;
	char buf[SAP_BUF_SIZE];
	struct sap_message *msg = (struct sap_message *) buf;
	struct sap_parameter *param = (struct sap_parameter *) msg->param;
	size_t size = sizeof(struct sap_message);

	if (!conn)
		return -EINVAL;

	DBG("state %d pr 0x%02x status 0x%02x", conn->state,
						conn->processing_req, status);

	if (conn->state != SAP_STATE_CONNECT_IN_PROGRESS)
		return -EPERM;

	memset(buf, 0, sizeof(buf));
	msg->id = SAP_CONNECT_RESP;
	msg->nparam = 0x01;

	/* Add connection status */
	param->id = SAP_PARAM_ID_CONN_STATUS;
	param->len = htons(SAP_PARAM_ID_CONN_STATUS_LEN);
	*param->val = status;
	size += PARAMETER_SIZE(SAP_PARAM_ID_CONN_STATUS_LEN);


	switch (status) {
	case SAP_STATUS_OK:
		sap_set_connected(server);
		break;
	case SAP_STATUS_OK_ONGOING_CALL:
		DBG("ongoing call. Wait for reset indication!");
		conn->state = SAP_STATE_CONNECT_MODEM_BUSY;
		break;
	case SAP_STATUS_MAX_MSG_SIZE_NOT_SUPPORTED: /* Add MaxMsgSize */
		msg->nparam++;
		param = (struct sap_parameter *) &buf[size];
		param->id = SAP_PARAM_ID_MAX_MSG_SIZE;
		param->len = htons(SAP_PARAM_ID_MAX_MSG_SIZE_LEN);
		put_be16(SAP_BUF_SIZE, &param->val);
		size += PARAMETER_SIZE(SAP_PARAM_ID_MAX_MSG_SIZE_LEN);

		/* fall */
	default:
		conn->state = SAP_STATE_DISCONNECTED;

		/* Timer will shutdown channel if client doesn't send
		 * CONNECT_REQ or doesn't shutdown channel itself.*/
		start_guard_timer(server, SAP_TIMER_NO_ACTIVITY);
		break;
	}

	conn->processing_req = SAP_NO_REQ;

	return send_message(conn, buf, size);
}
Example no. 17
static int gxf_write_media_preamble(AVFormatContext *s, AVPacket *pkt, int size)
{
    GXFContext *gxf = s->priv_data;
    ByteIOContext *pb = s->pb;
    AVStream *st = s->streams[pkt->stream_index];
    GXFStreamContext *sc = st->priv_data;
    unsigned field_nb;
    /* If the video is frame-encoded, the frame numbers shall be represented by
     * even field numbers.
     * see SMPTE360M-2004  6.4.2.1.3 Media field number */
    if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
        field_nb = gxf->nb_fields;
    } else {
        field_nb = av_rescale_rnd(pkt->dts, gxf->time_base.den,
                                  (int64_t)48000*gxf->time_base.num, AV_ROUND_UP);
    }

    put_byte(pb, sc->media_type);
    put_byte(pb, st->index);
    put_be32(pb, field_nb);
    if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
        put_be16(pb, 0);
        put_be16(pb, size / 2);
    } else if (st->codec->codec_id == CODEC_ID_MPEG2VIDEO) {
        int frame_type = gxf_parse_mpeg_frame(sc, pkt->data, pkt->size);
        if (frame_type == FF_I_TYPE) {
            put_byte(pb, 0x0d);
            sc->iframes++;
        } else if (frame_type == FF_B_TYPE) {
            put_byte(pb, 0x0f);
            sc->bframes++;
        } else {
            put_byte(pb, 0x0e);
            sc->pframes++;
        }
        put_be24(pb, size);
    } else if (st->codec->codec_id == CODEC_ID_DVVIDEO) {
        put_byte(pb, size / 4096);
        put_be24(pb, 0);
    } else
        put_be32(pb, size);
    put_be32(pb, field_nb);
    put_byte(pb, 1); /* flags */
    put_byte(pb, 0); /* reserved */
    return 16;
}
Example no. 18
File: rmenc.c Project: AndyA/ffmbc
static int rm_write_video(AVFormatContext *s, const uint8_t *buf, int size, int flags)
{
    RMMuxContext *rm = s->priv_data;
    ByteIOContext *pb = s->pb;
    StreamInfo *stream = rm->video_stream;
    int key_frame = !!(flags & AV_PKT_FLAG_KEY);

    /* XXX: this is incorrect: should be a parameter */

    /* Well, I spent some time finding the meaning of these bits. I am
       not sure I understood everything, but it works !! */
#if 1
    write_packet_header(s, stream, size + 7 + (size >= 0x4000)*4, key_frame);
    /* bit 7: '1' if final packet of a frame converted in several packets */
    put_byte(pb, 0x81);
    /* bit 7: '1' if I frame. bits 6..0 : sequence number in current
       frame starting from 1 */
    if (key_frame) {
        put_byte(pb, 0x81);
    } else {
        put_byte(pb, 0x01);
    }
    if(size >= 0x4000){
        put_be32(pb, size); /* total frame size */
        put_be32(pb, size); /* offset from the start or the end */
    }else{
        put_be16(pb, 0x4000 | size); /* total frame size */
        put_be16(pb, 0x4000 | size); /* offset from the start or the end */
    }
#else
    /* full frame */
    write_packet_header(s, size + 6);
    put_byte(pb, 0xc0);
    put_be16(pb, 0x4000 + size); /* total frame size */
    put_be16(pb, 0x4000 + packet_number * 126); /* position in stream */
#endif
    put_byte(pb, stream->nb_frames & 0xff);

    put_buffer(pb, buf, size);
    put_flush_packet(pb);

    stream->nb_frames++;
    return 0;
}
Example no. 19
static void put_amf_string( ByteIOContext *pb, const char *str )
{
    LogStr("Init");

    size_t len = strlen(str);
    put_be16(pb, len);
    put_buffer(pb, str, len);

    LogStr("Exit");
}
Example no. 20
File: rmenc.c Project: AndyA/ffmbc
static void write_packet_header(AVFormatContext *ctx, StreamInfo *stream,
                                int length, int key_frame)
{
    int timestamp;
    ByteIOContext *s = ctx->pb;

    stream->nb_packets++;
    stream->packet_total_size += length;
    if (length > stream->packet_max_size)
        stream->packet_max_size =  length;

    put_be16(s,0); /* version */
    put_be16(s,length + 12);
    put_be16(s, stream->num); /* stream number */
    timestamp = (1000 * (float)stream->nb_frames) / stream->frame_rate;
    put_be32(s, timestamp); /* timestamp */
    put_byte(s, 0); /* reserved */
    put_byte(s, key_frame ? 2 : 0); /* flags */
}
Example no. 21
static int64_t updateSize(ByteIOContext *pb, int64_t pos)
{
    int64_t curpos;

    curpos = url_ftell(pb);
    url_fseek(pb, pos, SEEK_SET);
    put_be16(pb, curpos - pos - 2);
    url_fseek(pb, curpos, SEEK_SET);
    return curpos - pos;
}
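
updateSize implements the back-patching idiom used throughout the GXF muxer: a caller first writes a 16-bit placeholder with put_be16(pb, 0), emits the section body, then calls updateSize(pb, pos) to seek back and overwrite the placeholder with the number of bytes that follow it (hence the "- 2"). Example no. 22 below shows a real caller; as a condensed sketch of the pattern, assuming the same old-FFmpeg ByteIOContext API as the surrounding examples and a hypothetical write_body():

/* Sketch only: back-patch a 16-bit section size. */
static int64_t write_sized_section(ByteIOContext *pb)
{
    int64_t pos = url_ftell(pb);
    put_be16(pb, 0);             /* size placeholder */
    write_body(pb);              /* hypothetical payload writer */
    return updateSize(pb, pos);  /* rewrites the placeholder, returns the section length */
}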
Example no. 22
static int gxf_write_track_description_section(ByteIOContext *pb, GXFContext *ctx)
{
    int64_t pos;
    int i;

    pos = url_ftell(pb);
    put_be16(pb, 0); /* size */
    for (i = 0; i < ctx->fc->nb_streams; ++i)
        gxf_write_track_description(pb, &ctx->streams[i]);
    return updateSize(pb, pos);
}
Example no. 23
 std::vector<uint8_t>
 H264Packetizer::configurationFromSpsAndPps()
 {
     std::vector<uint8_t> conf;
     
     put_byte(conf, 1); // version
     put_byte(conf, m_sps[1]); // profile
     put_byte(conf, m_sps[2]); // compat
     put_byte(conf, m_sps[3]); // level
     put_byte(conf, 0xff);   // 6 bits reserved + 2 bits nal size length - 1 (11)
     put_byte(conf, 0xe1);   // 3 bits reserved + 5 bits number of sps (00001)
     put_be16(conf, m_sps.size());
     put_buff(conf, &m_sps[0], m_sps.size());
     put_byte(conf, 1);
     put_be16(conf, m_pps.size());
     put_buff(conf, &m_pps[0], m_pps.size());
     
     return conf;
     
 }
Example no. 24
void CMp4Builder::put_VideoTrackBox(AM_UINT TrackId, AM_UINT Duration)
{
  PRINT_FUNCTION_NAME;
  //TrackBox
  put_be32(VideoTrackBox_SIZE);//uint32 size
  put_boxtype("trak");         //'trak'

  //TrackHeaderBox
  put_be32(TrackHeaderBox_SIZE);//uint32 size
  put_boxtype("tkhd");          //'tkhd'
  put_byte(0);                  //uint8 version
  //0x01:track_enabled, 0x02:track_in_movie, 0x04:track_in_preview
  put_be24(0x07);               //bits24 flags
  //uint32 creation_time [version==0] uint64 creation_time [version==1]
  put_be32(_create_time);
  //uint32 modification_time [version==0] uint64 modification_time [version==1]
  put_be32(_create_time);
  put_be32(TrackId);            //uint32 track_ID
  put_be32(0);                  //uint32 reserved
  //uint32 duration [version==0] uint64 duration [version==1]
  put_be32(Duration);
  put_be32(0);                  //uint32 reserved[2]
  put_be32(0);
  put_be16(0);                  //int16 layer
  put_be16(0);                  //int16 alternate_group
  put_be16(0x0000);             //int16 volume
  put_be16(0);                  //uint16 reserved
  put_be32(0x00010000);         //int32 matrix[9]
  put_be32(0);
  put_be32(0);
  put_be32(0);
  put_be32(0x00010000);
  put_be32(0);
  put_be32(0);
  put_be32(0);
  put_be32(0x40000000);
  put_be32(mH264Info.width<<16); //uint32 width  //16.16 fixed-point
  put_be32(mH264Info.height<<16);//uint32 height //16.16 fixed-point

  put_VideoMediaBox(Duration);
}
Example no. 25
 void
 RTMPSession::sendPong()
 {
     m_jobQueue.enqueue([&] {
         
         int streamId = 0;
         
         std::vector<uint8_t> buff;
         
         put_byte(buff, 2); // chunk stream ID 2
         put_be24(buff, 0); // ts
         put_be24(buff, 6); // size (6 bytes)
         put_byte(buff, RTMP_PT_PING); // chunk type
         
         put_buff(buff, (uint8_t*)&streamId, sizeof(int32_t)); // msg stream id is little-endian
         put_be16(buff, 7);
         put_be16(buff, 0);
         put_be16(buff, 0);
         
         write(&buff[0], buff.size());
     });
 }
Example no. 26
File: rsoenc.c Project: kaone3/vsmm
static int rso_write_header(AVFormatContext *s)
{
    ByteIOContext  *pb  = s->pb;
    AVCodecContext *enc = s->streams[0]->codec;

    if (!enc->codec_tag)
        return AVERROR_INVALIDDATA;

    if (enc->channels != 1) {
        av_log(s, AV_LOG_ERROR, "RSO only supports mono\n");
        return AVERROR_INVALIDDATA;
    }

    if (url_is_streamed(s->pb)) {
        av_log(s, AV_LOG_ERROR, "muxer does not support non seekable output\n");
        return AVERROR_INVALIDDATA;
    }

    /* XXX: find legal sample rates (if any) */
    if (enc->sample_rate >= 1u<<16) {
        av_log(s, AV_LOG_ERROR, "Sample rate must be < 65536\n");
        return AVERROR_INVALIDDATA;
    }

    if (enc->codec_id == CODEC_ID_ADPCM_IMA_WAV) {
        av_log(s, AV_LOG_ERROR, "ADPCM in RSO not implemented\n");
        return AVERROR_PATCHWELCOME;
    }

    /* format header */
    put_be16(pb, enc->codec_tag);   /* codec ID */
    put_be16(pb, 0);                /* data size, will be written at EOF */
    put_be16(pb, enc->sample_rate);
    put_be16(pb, 0x0000);           /* play mode ? (0x0000 = don't loop) */

    put_flush_packet(pb);

    return 0;
}
Example no. 27
void CMp4Builder::put_AudioMediaBox()
{
  PRINT_FUNCTION_NAME;
  //MediaBox
  put_be32(AudioMediaBox_SIZE);               //uint32 size
  put_boxtype("mdia");                        //'mdia'

  //MediaHeaderBox
  put_be32(MediaHeaderBox_SIZE);              //uint32 size
  put_boxtype("mdhd");                        //'mdhd'
  put_byte(0);                                //uint8 version
  put_be24(0);                                //bits24 flags
  //uint32 creation_time [version==0] uint64 creation_time [version==1]
  put_be32(_create_time);
  //uint32 modification_time [version==0] uint64 modification_time [version==1]
  put_be32(_create_time);
  //Audio's timescale is the same as Video, 90000
  put_be32(mH264Info.scale);                  //uint32 timescale
  //uint32 duration [version==0] uint64 duration [version==1]
  put_be32(mAudioDuration);
  put_be16(0);                  //bits5 language[3]  //ISO-639-2/T language code
  put_be16(0);                                //uint16 pre_defined

  //HandlerReferenceBox
  put_be32(AudioHandlerReferenceBox_SIZE);    //uint32 size
  put_boxtype("hdlr");	//'hdlr'
  put_byte(0);                                //uint8 version
  put_be24(0);                                //bits24 flags
  put_be32(0);                                //uint32 pre_defined
  put_boxtype("soun");	//'soun':audio track
  put_be32(0);                                //uint32 reserved[3]
  put_be32(0);
  put_be32(0);
  //char name[], name[0] is actual length
  put_byte(AUDIO_HANDLER_NAME_LEN);
  put_buffer((AM_U8 *)AUDIO_HANDLER_NAME, AUDIO_HANDLER_NAME_LEN-1);

  put_AudioMediaInformationBox();
}
Example no. 28
static int gxf_write_umf_media_description(ByteIOContext *pb, GXFContext *ctx)
{
    int64_t pos;
    int i;

    pos = url_ftell(pb);
    ctx->umf_media_offset = pos - ctx->umf_start_offset;
    for (i = 0; i < ctx->fc->nb_streams; ++i) {
        GXFStreamContext *sc = &ctx->streams[i];
        char buffer[88];
        int64_t startpos, curpos;
        int path_size = strlen(ES_NAME_PATTERN);

        memset(buffer, 0, 88);
        startpos = url_ftell(pb);
        put_le16(pb, 0); /* length */
        put_le16(pb, sc->media_info);
        put_le16(pb, 0); /* reserved */
        put_le16(pb, 0); /* reserved */
        put_le32(pb, ctx->nb_frames);
        put_le32(pb, 0); /* attributes rw, ro */
        put_le32(pb, 0); /* mark in */
        put_le32(pb, ctx->nb_frames); /* mark out */
        strncpy(buffer, ES_NAME_PATTERN, path_size);
        put_buffer(pb, (uint8_t *)buffer, path_size);
        put_be16(pb, sc->media_info);
        put_buffer(pb, (uint8_t *)buffer + path_size + 2, 88 - path_size - 2);
        put_le32(pb, sc->track_type);
        put_le32(pb, sc->sample_rate);
        put_le32(pb, sc->sample_size);
        put_le32(pb, 0); /* reserved */
        switch (sc->codec->codec_id) {
        case CODEC_ID_MPEG2VIDEO:
            gxf_write_umf_media_mpeg(pb, sc);
            break;
        case CODEC_ID_PCM_S16LE:
            gxf_write_umf_media_audio(pb, sc);
            break;
        case CODEC_ID_DVVIDEO:
            gxf_write_umf_media_dv(pb, sc);
            break;
        default:
            gxf_write_umf_media_timecode(pb, sc); /* 8 zero bytes */
        }
        curpos = url_ftell(pb);
        url_fseek(pb, startpos, SEEK_SET);
        put_le16(pb, curpos - startpos);
        url_fseek(pb, curpos, SEEK_SET);
    }
    return url_ftell(pb) - pos;
}
Example no. 29
void CMp4Builder::put_VideoMediaBox(AM_UINT Duration)
{
  PRINT_FUNCTION_NAME;
  INFO("Video duration is %lu", mVideoDuration);
  //MediaBox
  put_be32(VideoMediaBox_SIZE); //uint32 size
  put_boxtype("mdia");          //'mdia'

  //MediaHeaderBox
  put_be32(MediaHeaderBox_SIZE); //uint32 size
  put_boxtype("mdhd");           //'mdhd'
  put_byte(0);                   //uint8 version
  put_be24(0);                   //bits24 flags
  //uint32 creation_time [version==0] uint64 creation_time [version==1]
  put_be32(_create_time);
  //uint32 modification_time [version==0] uint64 modification_time [version==1]
  put_be32(_create_time);
  put_be32(mH264Info.scale);     //uint32 timescale
  //uint32 duration [version==0] uint64 duration [version==1]
  put_be32(Duration);
  put_be16(0);                  //bits5 language[3]  //ISO-639-2/T language code
  put_be16(0);                  //uint16 pre_defined

  //HandlerReferenceBox
  put_be32(VideoHandlerReferenceBox_SIZE); //uint32 size
  put_boxtype("hdlr");                     //'hdlr'
  put_byte(0);                             //uint8 version
  put_be24(0);                             //bits24 flags
  put_be32(0);                             //uint32 pre_defined
  put_boxtype("vide");                     //'vide'
  put_be32(0);                             //uint32 reserved[3]
  put_be32(0);
  put_be32(0);
  put_byte(VIDEO_HANDLER_NAME_LEN);   //char name[], name[0] is actual length
  put_buffer((AM_U8 *)VIDEO_HANDLER_NAME, VIDEO_HANDLER_NAME_LEN-1);

  put_VideoMediaInformationBox();
}
Example no. 30
/**
 * Configures the device to advertise iBeacons.
 *
 * @param uuid                  The 128-bit UUID to advertise.
 * @param major                 The major version number to include in
 *                                  iBeacons.
 * @param minor                 The minor version number to include in
 *                                  iBeacons.
 *
 * @return                      0 on success;
 *                              BLE_HS_EBUSY if advertising is in progress;
 *                              Other nonzero on failure.
 */
int
ble_ibeacon_set_adv_data(void *uuid128, uint16_t major, uint16_t minor)
{
    struct ble_hs_adv_fields fields;
    uint8_t buf[BLE_IBEACON_MFG_DATA_SIZE];
    int8_t tx_pwr;
    int rc;

    /** Company identifier (Apple). */
    buf[0] = 0x4c;
    buf[1] = 0x00;

    /** iBeacon indicator. */
    buf[2] = 0x02;
    buf[3] = 0x15;

    /** UUID. */
    memcpy(buf + 4, uuid128, 16);

    /** Version number. */
    put_be16(buf + 20, major);
    put_be16(buf + 22, minor);

    /** Last byte (tx power level) filled in after HCI exchange. */

    rc = ble_hs_hci_util_read_adv_tx_pwr(&tx_pwr);
    if (rc != 0) {
        return rc;
    }
    buf[24] = tx_pwr;

    memset(&fields, 0, sizeof fields);
    fields.mfg_data = buf;
    fields.mfg_data_len = sizeof buf;

    rc = ble_gap_adv_set_fields(&fields);
    return rc;
}
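
Unlike the stream-based examples above, the NimBLE code here uses a buffer-oriented put_be16(dst, val) that stores a 16-bit value big-endian at an arbitrary offset of a byte array (buf + 20, buf + 22). A minimal sketch of that flavour, assuming it simply stores the most significant byte first (the helper name here is illustrative):

#include <stdint.h>

/* Illustrative buffer store with the same semantics assumed for
 * put_be16(buf + 20, major) above: MSB at dst[0], LSB at dst[1]. */
static void be16_store(void *dst, uint16_t val)
{
    uint8_t *p = (uint8_t *)dst;
    p[0] = (uint8_t)(val >> 8);
    p[1] = (uint8_t)(val & 0xff);
}

For instance, be16_store(buf + 20, major) would place the major version at offsets 20-21, matching the manufacturer-data layout built above.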