Example #1
int ff_mov_add_hinted_packet(AVFormatContext *s, AVPacket *pkt,
                             int track_index, int sample)
{
    MOVMuxContext *mov = s->priv_data;
    MOVTrack *trk = &mov->tracks[track_index];
    AVFormatContext *rtp_ctx = trk->rtp_ctx;
    uint8_t *buf = NULL;
    int size;
    AVIOContext *hintbuf = NULL;
    AVPacket hint_pkt;
    int ret = 0, count;

    if (!rtp_ctx)
        return AVERROR(ENOENT);
    if (!rtp_ctx->pb)
        return AVERROR(ENOMEM);

    sample_queue_push(&trk->sample_queue, pkt, sample);

    /* Feed the packet to the RTP muxer */
    ff_write_chained(rtp_ctx, 0, pkt, s);

    /* Fetch the output from the RTP muxer, open a new output buffer
     * for next time. */
    size = url_close_dyn_buf(rtp_ctx->pb, &buf);
    if ((ret = url_open_dyn_packet_buf(&rtp_ctx->pb,
                                       RTP_MAX_PACKET_SIZE)) < 0)
        goto done;

    if (size <= 0)
        goto done;

    /* Open a buffer for writing the hint */
    if ((ret = url_open_dyn_buf(&hintbuf)) < 0)
        goto done;
    av_init_packet(&hint_pkt);
    count = write_hint_packets(hintbuf, buf, size, trk, &hint_pkt.dts);
    av_freep(&buf);

    /* Write the hint data into the hint track */
    hint_pkt.size = size = url_close_dyn_buf(hintbuf, &buf);
    hint_pkt.data = buf;
    hint_pkt.pts  = hint_pkt.dts;
    hint_pkt.stream_index = track_index;
    if (pkt->flags & AV_PKT_FLAG_KEY)
        hint_pkt.flags |= AV_PKT_FLAG_KEY;
    if (count > 0)
        ff_mov_write_packet(s, &hint_pkt);
done:
    av_free(buf);
    sample_queue_retain(&trk->sample_queue);
    return ret;
}
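
Every example in this list revolves around the same dynamic-buffer lifecycle: open an in-memory buffer, write through the ordinary I/O API, then close the buffer to take ownership of the accumulated bytes, which the caller must free. A minimal sketch of that cycle, using the legacy url_*_dyn_buf names that appear throughout (dyn_buf_roundtrip is a made-up name for illustration):

static int dyn_buf_roundtrip(void)
{
    AVIOContext *pb;
    uint8_t *buf = NULL;
    int size, ret;

    /* Open a growing in-memory buffer. */
    if ((ret = url_open_dyn_buf(&pb)) < 0)
        return ret;

    /* Write through the normal I/O API. */
    avio_wb32(pb, 0x12345678);

    /* Closing hands us the written bytes; we own the allocation now. */
    size = url_close_dyn_buf(pb, &buf);

    /* ... use buf[0..size) ... */

    av_free(buf); /* free even when size == 0 */
    return size;
}
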
Example #2
static int tcp_write_packet(AVFormatContext *s, RTSPStream *rtsp_st)
{
    RTSPState *rt = s->priv_data;
    AVFormatContext *rtpctx = rtsp_st->transport_priv;
    uint8_t *buf, *ptr;
    int size;
    uint8_t interleave_header[4];

    size = url_close_dyn_buf(rtpctx->pb, &buf);
    ptr = buf;
    while (size > 4) {
        uint32_t packet_len = AV_RB32(ptr);
        int id;
        ptr += 4;
        size -= 4;
        if (packet_len > size || packet_len < 2)
            break;
        if (ptr[1] >= 200 && ptr[1] <= 204)
            id = rtsp_st->interleaved_max; /* RTCP */
        else
            id = rtsp_st->interleaved_min; /* RTP */
        interleave_header[0] = '$';
        interleave_header[1] = id;
        AV_WB16(interleave_header + 2, packet_len);
        url_write(rt->rtsp_hd, interleave_header, 4);
        url_write(rt->rtsp_hd, ptr, packet_len);
        ptr += packet_len;
        size -= packet_len;
    }
    av_free(buf);
    url_open_dyn_packet_buf(&rtpctx->pb, RTSP_TCP_MAX_PACKET_SIZE);
    return 0;
}
Example #3
AVFormatContext *ff_rtp_chain_mux_open(AVFormatContext *s, AVStream *st,
                                       URLContext *handle, int packet_size)
{
    AVFormatContext *rtpctx;
    int ret;
    AVOutputFormat *rtp_format = av_guess_format("rtp", NULL, NULL);

    if (!rtp_format)
        return NULL;

    /* Allocate an AVFormatContext for each output stream */
    rtpctx = avformat_alloc_context();
    if (!rtpctx)
        return NULL;

    rtpctx->oformat = rtp_format;
    if (!av_new_stream(rtpctx, 0)) {
        av_free(rtpctx);
        return NULL;
    }
    /* Copy the max delay setting; the rtp muxer reads this. */
    rtpctx->max_delay = s->max_delay;
    /* Copy other stream parameters. */
    rtpctx->streams[0]->sample_aspect_ratio = st->sample_aspect_ratio;

    /* Set the synchronized start time. */
    rtpctx->start_time_realtime = s->start_time_realtime;

    /* Remove the local codec, link to the original codec
     * context instead, to give the rtp muxer access to
     * codec parameters. */
    av_free(rtpctx->streams[0]->codec);
    rtpctx->streams[0]->codec = st->codec;

    if (handle) {
        url_fdopen(&rtpctx->pb, handle);
    } else
        url_open_dyn_packet_buf(&rtpctx->pb, packet_size);
    ret = av_write_header(rtpctx);

    if (ret) {
        if (handle) {
            url_fclose(rtpctx->pb);
        } else {
            uint8_t *ptr;
            url_close_dyn_buf(rtpctx->pb, &ptr);
            av_free(ptr);
        }
        av_free(rtpctx->streams[0]);
        av_free(rtpctx);
        return NULL;
    }

    /* Copy the RTP AVStream timebase back to the original AVStream */
    st->time_base = rtpctx->streams[0]->time_base;
    return rtpctx;
}
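
When ff_rtp_chain_mux_open() is given no URLContext, the chained muxer writes into a dynamic packet buffer, and the caller is expected to drain that buffer after feeding packets, much as Examples #1 and #2 do. A hedged sketch of that drain-and-rearm step; send_buf is a hypothetical transmit callback standing in for whatever actually moves the bytes:

/* Sketch: feed one packet to a chained RTP muxer opened with
 * url_open_dyn_packet_buf(), then drain and re-arm the buffer. */
static int drain_chained_rtp(AVFormatContext *s, AVFormatContext *rtpctx,
                             AVPacket *pkt,
                             void (*send_buf)(const uint8_t *, int))
{
    uint8_t *buf = NULL;
    int size, ret;

    ff_write_chained(rtpctx, 0, pkt, s);

    size = url_close_dyn_buf(rtpctx->pb, &buf);
    /* Reopen immediately so the muxer always has a valid pb. */
    ret = url_open_dyn_packet_buf(&rtpctx->pb, RTP_MAX_PACKET_SIZE);
    if (size > 0)
        send_buf(buf, size);
    av_free(buf);
    return ret;
}
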
Example #4
static void svq3_extradata_free(PayloadContext *sv)
{
    if (sv->pktbuf) {
        uint8_t *buf;
        url_close_dyn_buf(sv->pktbuf, &buf);
        av_free(buf);
    }
    av_free(sv);
}
Example #5
static void prepare_packet(AVPacket *pkt, PayloadContext *vp8, int stream)
{
    av_init_packet(pkt);
    pkt->stream_index = stream;
    pkt->flags        = vp8->is_keyframe ? AV_PKT_FLAG_KEY : 0;
    pkt->size         = url_close_dyn_buf(vp8->data, &pkt->data);
    pkt->destruct     = av_destruct_packet;
    vp8->data         = NULL;
}
Example #6
static void vp8_free_context(PayloadContext *vp8)
{
    if (vp8->data) {
        uint8_t *tmp;
        url_close_dyn_buf(vp8->data, &tmp);
        av_free(tmp);
    }
    av_free(vp8);
}
Example #7
static void asfrtp_free_context(PayloadContext *asf)
{
    if (asf->pktbuf) {
        uint8_t *p = NULL;
        url_close_dyn_buf(asf->pktbuf, &p);
        asf->pktbuf = NULL;
        av_free(p);
    }
    av_freep(&asf->buf);
    av_free(asf);
}
Example #8
int ff_avc_parse_nal_units_buf(const uint8_t *buf_in, uint8_t **buf, int *size)
{
    ByteIOContext *pb;
    int ret = url_open_dyn_buf(&pb);
    if(ret < 0)
        return ret;

    ff_avc_parse_nal_units(pb, buf_in, *size);

    av_freep(buf);
    *size = url_close_dyn_buf(pb, buf);
    return 0;
}
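
Note the in-place replacement idiom here: av_freep(buf) releases the caller's previous buffer and url_close_dyn_buf() stores the converted data back through the same pointer, so the input is consumed and replaced in one call. A small usage sketch under that assumption (to_mp4_nals is a hypothetical wrapper; *data must be a heap buffer the caller owns):

/* Convert an Annex B H.264 buffer to length-prefixed (MP4-style) NAL
 * units in place. On success *data points at a freshly allocated buffer
 * and *size holds its new length; the old buffer has been freed. */
static int to_mp4_nals(uint8_t **data, int *size)
{
    return ff_avc_parse_nal_units_buf(*data, data, size);
}
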
Example #9
static void put_str16(ByteIOContext *s, const char *tag)
{
    int len;
    uint8_t *pb;
    ByteIOContext *dyn_buf;
    if (url_open_dyn_buf(&dyn_buf) < 0)
        return;

    avio_put_str16le(dyn_buf, tag);
    len = url_close_dyn_buf(dyn_buf, &pb);
    put_le16(s, len);
    put_buffer(s, pb, len);
    av_freep(&pb);
}
Example #10
void rtp_send_punch_packets(URLContext* rtp_handle)
{
    AVIOContext *pb;
    uint8_t *buf;
    int len;

    /* Send a small RTP packet */
    if (url_open_dyn_buf(&pb) < 0)
        return;

    avio_w8(pb, (RTP_VERSION << 6));
    avio_w8(pb, 0); /* Payload type */
    avio_wb16(pb, 0); /* Seq */
    avio_wb32(pb, 0); /* Timestamp */
    avio_wb32(pb, 0); /* SSRC */

    avio_flush(pb);
    len = url_close_dyn_buf(pb, &buf);
    if ((len > 0) && buf)
        url_write(rtp_handle, buf, len);
    av_free(buf);

    /* Send a minimal RTCP RR */
    if (url_open_dyn_buf(&pb) < 0)
        return;

    avio_w8(pb, (RTP_VERSION << 6));
    avio_w8(pb, RTCP_RR); /* receiver report */
    avio_wb16(pb, 1); /* length in words - 1 */
    avio_wb32(pb, 0); /* our own SSRC */

    avio_flush(pb);
    len = url_close_dyn_buf(pb, &buf);
    if ((len > 0) && buf)
        url_write(rtp_handle, buf, len);
    av_free(buf);
}
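
The twelve bytes written for each punch packet are exactly the fixed RTP header of RFC 3550, with every field except the version left at zero, since the packet only needs to open NAT bindings. For reference, the layout the avio_* calls above produce:

/* Byte layout of the minimal RTP punch packet written above (RFC 3550):
 *
 *   byte 0      : V=2 (RTP_VERSION << 6), P=0, X=0, CC=0
 *   byte 1      : M=0, PT=0
 *   bytes 2-3   : sequence number (0)
 *   bytes 4-7   : timestamp (0)
 *   bytes 8-11  : SSRC (0)
 *
 * Total: 12 bytes, the size of the fixed RTP header. */
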
Example #11
void ff_mov_close_hinting(MOVTrack *track) {
    AVFormatContext* rtp_ctx = track->rtp_ctx;
    uint8_t *ptr;

    av_freep(&track->enc);
    sample_queue_free(&track->sample_queue);
    if (!rtp_ctx)
        return;
    if (rtp_ctx->pb) {
        av_write_trailer(rtp_ctx);
        url_close_dyn_buf(rtp_ctx->pb, &ptr);
        av_free(ptr);
    }
    avformat_free_context(rtp_ctx);
}
Example #12
int ff_avc_parse_nal_units(const uint8_t *buf_in, uint8_t **buf, int *size)
{
    ByteIOContext *pb;
    const uint8_t *p = buf_in;
    const uint8_t *end = p + *size;
    const uint8_t *nal_start, *nal_end;
    int ret = url_open_dyn_buf(&pb);
    if(ret < 0)
        return ret;

    nal_start = ff_avc_find_startcode(p, end);
    while (nal_start < end) {
        while(!*(nal_start++));
        nal_end = ff_avc_find_startcode(nal_start, end);
        put_be32(pb, nal_end - nal_start);
        put_buffer(pb, nal_start, nal_end - nal_start);
        nal_start = nal_end;
    }
    av_freep(buf);
    *size = url_close_dyn_buf(pb, buf);
    return 0;
}
Example #13
static int tcp_write_packet(AVFormatContext *s, RTSPStream *rtsp_st)
{
    RTSPState *rt = s->priv_data;
    AVFormatContext *rtpctx = rtsp_st->transport_priv;
    uint8_t *buf, *ptr;
    int size;
    uint8_t *interleave_header, *interleaved_packet;

    size = url_close_dyn_buf(rtpctx->pb, &buf);
    ptr = buf;
    while (size > 4) {
        uint32_t packet_len = AV_RB32(ptr);
        int id;
        /* The interleaving header is exactly 4 bytes, which happens to be
         * the same size as the packet length header from
         * url_open_dyn_packet_buf. So by writing the interleaving header
         * over these bytes, we get a consecutive interleaved packet
         * that can be written in one call. */
        interleaved_packet = interleave_header = ptr;
        ptr += 4;
        size -= 4;
        if (packet_len > size || packet_len < 2)
            break;
        if (ptr[1] >= RTCP_SR && ptr[1] <= RTCP_APP)
            id = rtsp_st->interleaved_max; /* RTCP */
        else
            id = rtsp_st->interleaved_min; /* RTP */
        interleave_header[0] = '$';
        interleave_header[1] = id;
        AV_WB16(interleave_header + 2, packet_len);
        url_write(rt->rtsp_hd_out, interleaved_packet, 4 + packet_len);
        ptr += packet_len;
        size -= packet_len;
    }
    av_free(buf);
    url_open_dyn_packet_buf(&rtpctx->pb, RTSP_TCP_MAX_PACKET_SIZE);
    return 0;
}
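
To spell out the trick described in the comment: url_open_dyn_packet_buf() prefixes each packet with a 4-byte big-endian length, and the RTSP interleave header ('$', channel id, 16-bit length) happens to be 4 bytes as well, so rewriting the prefix in place yields one contiguous packet:

/* What the in-place rewrite does to the dynamic buffer. Each packet from
 * url_open_dyn_packet_buf() arrives with a 4-byte big-endian length prefix:
 *
 *   before: | len (4 bytes, AV_RB32) | RTP/RTCP payload (len bytes) | ...
 *   after:  | '$' | channel | len (AV_WB16) | RTP/RTCP payload      | ...
 *
 * Since both headers are exactly 4 bytes, the interleaved packet becomes
 * contiguous and can go out in a single url_write() call. */
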
Example #14
static int ogg_write_page(AVFormatContext *s, OGGPage *page, int extra_flags)
{
    OGGStreamContext *oggstream = s->streams[page->stream_index]->priv_data;
    ByteIOContext *pb;
    int64_t crc_offset;
    int ret, size;
    uint8_t *buf;

    ret = url_open_dyn_buf(&pb);
    if (ret < 0)
        return ret;
    init_checksum(pb, ff_crc04C11DB7_update, 0);
    put_tag(pb, "OggS");
    put_byte(pb, 0);
    put_byte(pb, page->flags | extra_flags);
    put_le64(pb, page->granule);
    put_le32(pb, oggstream->serial_num);
    put_le32(pb, oggstream->page_counter++);
    crc_offset = url_ftell(pb);
    put_le32(pb, 0); // crc
    put_byte(pb, page->segments_count);
    put_buffer(pb, page->segments, page->segments_count);
    put_buffer(pb, page->data, page->size);

    ogg_update_checksum(s, pb, crc_offset);
    put_flush_packet(pb);

    size = url_close_dyn_buf(pb, &buf);
    if (size < 0)
        return size;

    put_buffer(s->pb, buf, size);
    put_flush_packet(s->pb);
    av_free(buf);
    oggstream->page_count--;
    return 0;
}
Example #15
int rtp_check_and_send_back_rr(RTPDemuxContext *s, int count)
{
    AVIOContext *pb;
    uint8_t *buf;
    int len;
    int rtcp_bytes;
    RTPStatistics *stats= &s->statistics;
    uint32_t lost;
    uint32_t extended_max;
    uint32_t expected_interval;
    uint32_t received_interval;
    uint32_t lost_interval;
    uint32_t expected;
    uint32_t fraction;
    uint64_t ntp_time= s->last_rtcp_ntp_time; // TODO: Get local ntp time?

    if (!s->rtp_ctx || (count < 1))
        return -1;

    /* TODO: I think this is way too often; RFC 1889 has algorithm for this */
    /* XXX: mpeg pts hardcoded. RTCP send every 0.5 seconds */
    s->octet_count += count;
    rtcp_bytes = ((s->octet_count - s->last_octet_count) * RTCP_TX_RATIO_NUM) /
        RTCP_TX_RATIO_DEN;
    rtcp_bytes /= 50; // mmu_man: that's enough for me... VLC sends much less btw !?
    if (rtcp_bytes < 28)
        return -1;
    s->last_octet_count = s->octet_count;

    if (url_open_dyn_buf(&pb) < 0)
        return -1;

    // Receiver Report
    avio_w8(pb, (RTP_VERSION << 6) + 1); /* 1 report block */
    avio_w8(pb, RTCP_RR);
    avio_wb16(pb, 7); /* length in words - 1 */
    // our own SSRC: we use the server's SSRC + 1 to avoid conflicts
    avio_wb32(pb, s->ssrc + 1);
    avio_wb32(pb, s->ssrc); // server SSRC
    // some placeholders we should really fill...
    // RFC 1889/p64
    extended_max= stats->cycles + stats->max_seq;
    expected= extended_max - stats->base_seq + 1;
    lost= expected - stats->received;
    lost= FFMIN(lost, 0xffffff); // clamp it since it's only 24 bits...
    expected_interval= expected - stats->expected_prior;
    stats->expected_prior= expected;
    received_interval= stats->received - stats->received_prior;
    stats->received_prior= stats->received;
    lost_interval= expected_interval - received_interval;
    if (expected_interval==0 || lost_interval<=0) fraction= 0;
    else fraction = (lost_interval<<8)/expected_interval;

    fraction= (fraction<<24) | lost;

    avio_wb32(pb, fraction); /* 8 bits of fraction, 24 bits of total packets lost */
    avio_wb32(pb, extended_max); /* max sequence received */
    avio_wb32(pb, stats->jitter>>4); /* jitter */

    if(s->last_rtcp_ntp_time==AV_NOPTS_VALUE)
    {
        avio_wb32(pb, 0); /* last SR timestamp */
        avio_wb32(pb, 0); /* delay since last SR */
    } else {
        uint32_t middle_32_bits= s->last_rtcp_ntp_time>>16; // this is valid, right? do we need to handle 64 bit values special?
        uint32_t delay_since_last= ntp_time - s->last_rtcp_ntp_time;

        avio_wb32(pb, middle_32_bits); /* last SR timestamp */
        avio_wb32(pb, delay_since_last); /* delay since last SR */
    }

    // CNAME
    avio_w8(pb, (RTP_VERSION << 6) + 1); /* 1 report block */
    avio_w8(pb, RTCP_SDES);
    len = strlen(s->hostname);
    avio_wb16(pb, (6 + len + 3) / 4); /* length in words - 1 */
    avio_wb32(pb, s->ssrc);
    avio_w8(pb, 0x01);
    avio_w8(pb, len);
    avio_write(pb, s->hostname, len);
    // padding
    for (len = (6 + len) % 4; len % 4; len++) {
        avio_w8(pb, 0);
    }

    avio_flush(pb);
    len = url_close_dyn_buf(pb, &buf);
    if ((len > 0) && buf) {
        int result;
        av_dlog(s->ic, "sending %d bytes of RR\n", len);
        result= url_write(s->rtp_ctx, buf, len);
        av_dlog(s->ic, "result from url_write: %d\n", result);
        av_free(buf);
    }
    return 0;
}
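
The fraction/lost arithmetic inlined above follows RFC 3550, appendix A.3: the cumulative loss is clamped to 24 bits, and the interval loss becomes an 8-bit fixed-point fraction packed into the top byte. A distilled sketch of just that computation, using a signed interval so the <= 0 guard behaves as the RFC intends:

/* Sketch of the RFC 3550 (A.3) receiver-report loss fields. */
static uint32_t rr_fraction_and_lost(RTPStatistics *stats)
{
    uint32_t extended_max      = stats->cycles + stats->max_seq;
    uint32_t expected          = extended_max - stats->base_seq + 1;
    uint32_t lost              = FFMIN(expected - stats->received, 0xffffff);
    uint32_t expected_interval = expected - stats->expected_prior;
    uint32_t received_interval = stats->received - stats->received_prior;
    int32_t  lost_interval     = (int32_t)(expected_interval - received_interval);
    uint32_t fraction = (expected_interval == 0 || lost_interval <= 0)
                        ? 0 : ((uint32_t)lost_interval << 8) / expected_interval;

    stats->expected_prior = expected;
    stats->received_prior = stats->received;
    /* 8-bit fraction in the high byte, 24-bit cumulative loss below. */
    return (fraction << 24) | lost;
}
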
Example #16
/** return 0 on packet, <0 on partial packet or error... */
static int svq3_parse_packet (AVFormatContext *s, PayloadContext *sv,
                              AVStream *st, AVPacket *pkt,
                              uint32_t *timestamp,
                              const uint8_t *buf, int len, int flags)
{
    int config_packet, start_packet, end_packet;

    if (len < 2)
        return AVERROR_INVALIDDATA;

    config_packet = buf[0] & 0x40;
    start_packet  = buf[0] & 0x20;
    end_packet    = buf[0] & 0x10;
    buf += 2;     // ignore buf[1]
    len -= 2;

    if (config_packet) {

        av_freep(&st->codec->extradata);
        st->codec->extradata_size = 0;

        if (len < 2 || !(st->codec->extradata =
                         av_malloc(len + 8 + FF_INPUT_BUFFER_PADDING_SIZE)))
            return AVERROR_INVALIDDATA;

        st->codec->extradata_size = len + 8;
        memcpy(st->codec->extradata, "SEQH", 4);
        AV_WB32(st->codec->extradata + 4, len);
        memcpy(st->codec->extradata + 8, buf, len);

        /* We set codec_id to CODEC_ID_NONE initially to
         * delay decoder initialization since extradata is
         * carried within the RTP stream, not SDP. Here,
         * by setting codec_id to CODEC_ID_SVQ3, we are signalling
         * to the decoder that it is OK to initialize. */
        st->codec->codec_id = CODEC_ID_SVQ3;

        return AVERROR(EAGAIN);
    }

    if (start_packet) {
        int res;

        if (sv->pktbuf) {
            uint8_t *tmp;
            url_close_dyn_buf(sv->pktbuf, &tmp);
            av_free(tmp);
        }
        if ((res = url_open_dyn_buf(&sv->pktbuf)) < 0)
            return res;
        sv->timestamp   = *timestamp;
    }

    if (!sv->pktbuf)
        return AVERROR_INVALIDDATA;

    put_buffer(sv->pktbuf, buf, len);

    if (end_packet) {
        av_init_packet(pkt);
        pkt->stream_index = st->index;
        *timestamp        = sv->timestamp;
        pkt->size         = url_close_dyn_buf(sv->pktbuf, &pkt->data);
        pkt->destruct     = av_destruct_packet;
        sv->pktbuf        = NULL;
        return 0;
    }

    return AVERROR(EAGAIN);
}
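
The SVQ3 handler above, like the VP8 and ASF handlers in Examples #5 and #17, follows one reassembly pattern: a start fragment (re)opens a dynamic buffer, middle fragments append, and the end fragment closes the buffer straight into the AVPacket. A generic sketch of that pattern; struct frag_ctx and its field are hypothetical stand-ins for a real PayloadContext:

struct frag_ctx { AVIOContext *pktbuf; };

static int reassemble(struct frag_ctx *ctx, AVPacket *pkt, int stream_index,
                      const uint8_t *frag, int len, int start, int end)
{
    int ret;
    if (start) {                      /* first fragment: (re)open the buffer */
        if (ctx->pktbuf) {            /* drop a stale, unfinished packet */
            uint8_t *tmp;
            url_close_dyn_buf(ctx->pktbuf, &tmp);
            av_free(tmp);
            ctx->pktbuf = NULL;
        }
        if ((ret = url_open_dyn_buf(&ctx->pktbuf)) < 0)
            return ret;
    }
    if (!ctx->pktbuf)
        return AVERROR_INVALIDDATA;   /* middle fragment without a start */
    avio_write(ctx->pktbuf, frag, len);
    if (!end)
        return AVERROR(EAGAIN);       /* wait for more fragments */

    av_init_packet(pkt);
    pkt->stream_index = stream_index;
    pkt->size         = url_close_dyn_buf(ctx->pktbuf, &pkt->data);
    pkt->destruct     = av_destruct_packet;
    ctx->pktbuf       = NULL;
    return 0;
}
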
Example #17
/**
 * @return 0 when a packet was written into \p pkt, and no more data is left;
 *         1 when a packet was written into \p pkt, and more packets might be left;
 *        <0 when not enough data was provided to return a full packet, or on error.
 */
static int asfrtp_parse_packet(AVFormatContext *s, PayloadContext *asf,
                               AVStream *st, AVPacket *pkt,
                               uint32_t *timestamp,
                               const uint8_t *buf, int len, int flags)
{
    ByteIOContext *pb = &asf->pb;
    int res, mflags, len_off;
    RTSPState *rt = s->priv_data;

    if (!rt->asf_ctx)
        return -1;

    if (len > 0) {
        int off, out_len = 0;

        if (len < 4)
            return -1;

        av_freep(&asf->buf);

        init_put_byte(pb, buf, len, 0, NULL, NULL, NULL, NULL);

        while (url_ftell(pb) + 4 < len) {
            int start_off = url_ftell(pb);

            mflags = get_byte(pb);
            if (mflags & 0x80)
                flags |= RTP_FLAG_KEY;
            len_off = get_be24(pb);
            if (mflags & 0x20)   /**< relative timestamp */
                url_fskip(pb, 4);
            if (mflags & 0x10)   /**< has duration */
                url_fskip(pb, 4);
            if (mflags & 0x8)    /**< has location ID */
                url_fskip(pb, 4);
            off = url_ftell(pb);

            if (!(mflags & 0x40)) {
                /**
                 * If 0x40 is not set, the len_off field specifies an offset
                 * of this packet's payload data in the complete (reassembled)
                 * ASF packet. This is used to spread one ASF packet over
                 * multiple RTP packets.
                 */
                if (asf->pktbuf && len_off != url_ftell(asf->pktbuf)) {
                    uint8_t *p;
                    url_close_dyn_buf(asf->pktbuf, &p);
                    asf->pktbuf = NULL;
                    av_free(p);
                }
                if (!len_off && !asf->pktbuf &&
                    (res = url_open_dyn_buf(&asf->pktbuf)) < 0)
                    return res;
                if (!asf->pktbuf)
                    return AVERROR(EIO);

                put_buffer(asf->pktbuf, buf + off, len - off);
                url_fskip(pb, len - off);
                if (!(flags & RTP_FLAG_MARKER))
                    return -1;
                out_len     = url_close_dyn_buf(asf->pktbuf, &asf->buf);
                asf->pktbuf = NULL;
            } else {
                /**
                 * If 0x40 is set, the len_off field specifies the length of
                 * the next ASF packet that can be read from this payload
                 * data alone. This is commonly the same as the payload size,
                 * but could be less in case of packet splitting (i.e.
                 * multiple ASF packets in one RTP packet).
                 */

                int cur_len = start_off + len_off - off;
                int prev_len = out_len;
                out_len += cur_len;
                asf->buf = av_realloc(asf->buf, out_len);
                memcpy(asf->buf + prev_len, buf + off,
                       FFMIN(cur_len, len - off));
                url_fskip(pb, cur_len);
            }
        }

        init_packetizer(pb, asf->buf, out_len);
        pb->pos += rt->asf_pb_pos;
        pb->eof_reached = 0;
        rt->asf_ctx->pb = pb;
    }

    for (;;) {
        int i;

        res = av_read_packet(rt->asf_ctx, pkt);
        rt->asf_pb_pos = url_ftell(pb);
        if (res != 0)
            break;
        for (i = 0; i < s->nb_streams; i++) {
            if (s->streams[i]->id == rt->asf_ctx->streams[pkt->stream_index]->id) {
                pkt->stream_index = i;
                return 1; // FIXME: return 0 if last packet
            }
        }
        av_free_packet(pkt);
    }

    return res == 1 ? -1 : res;
}
Example #18
/* XXX: This class should really pass the picture parameters by a separate API
 * so that we can, by contract, enforce that the frame size can't suddenly
 * change on us. */
void Java_org_devtcg_rojocam_ffmpeg_RtpOutputContext_nativeWriteFrame(JNIEnv *env,
        jclass clazz, jint nativeInt, jbyteArray data, jlong frameTime,
        jint frameFormat, jint frameWidth, jint frameHeight,
        jint frameBitsPerPixel) {
    RtpOutputContext *rtpContext = (RtpOutputContext *)nativeInt;
    AVFormatContext *avContext;
    AVCodecContext *codec;
    AVStream *outputStream;
    AVPacket pkt;
    jbyte *data_c;
    int max_packet_size;
    uint8_t *rtp_data;
    int rtp_data_len;

    avContext = rtpContext->avContext;
    outputStream = avContext->streams[0];
    codec = outputStream->codec;

    if (rtpContext->tempFrame == NULL) {
        if (!first_frame_init(env, rtpContext, frameFormat,
                frameWidth, frameHeight)) {
            LOGE("Error initializing encoding buffers, cannot stream");
            return;
        }
    }

    data_c = (*env)->GetByteArrayElements(env, data, NULL);

    /* Convert the input arguments to an AVPacket, simulating it as though we
     * read this from the ffmpeg libraries but there was no need to do this as
     * it was passed into us already as a raw video frame. */
    int frameDuration = frameTime - rtpContext->lastFrameTime;
    bool frameEncoded = encode_video_frame(rtpContext,
            outputStream, rtpContext->tempFrame, rtpContext->imgConvert,
            rtpContext->tempEncodedBuf, sizeof(rtpContext->tempEncodedBuf),
            data_c, frameTime, frameDuration, frameFormat,
            frameWidth, frameHeight, frameBitsPerPixel, &pkt);
    rtpContext->lastFrameTime = frameTime;

    (*env)->ReleaseByteArrayElements(env, data, data_c, JNI_ABORT);

    if (frameEncoded) {
#if PROFILE_WRITE_FRAME
        struct timeval then;
        gettimeofday(&then, NULL);
#endif

        max_packet_size = url_get_max_packet_size(rtpContext->urlContext);
        url_open_dyn_packet_buf(&avContext->pb, max_packet_size);

        avContext->pb->seekable = 0;

        /* This organizes our encoded packet into RTP packet segments (but it
         * doesn't actually send anything over the network yet). */
        if (av_write_frame(avContext, &pkt) < 0) {
            jniThrowException(env, "java/io/IOException", "Error writing frame to output");
        }

        /* Actually deliver the packetized RTP data to the remote peer. */
        rtp_data_len = url_close_dyn_buf(avContext->pb, &rtp_data);
        exhaustive_send(rtpContext->urlContext, rtp_data, rtp_data_len);
        av_free(rtp_data);

        /* XXX: I dunno, ffserver.c does this... */
        outputStream->codec->frame_number++;

#if PROFILE_WRITE_FRAME
        store_elapsed(&rtpContext->write_time, &then);
#endif
    } else {
#if PROFILE_WRITE_FRAME
        rtpContext->write_time = 0;
#endif
    }

#if PROFILE_WRITE_FRAME
    //LOGI("resample@%ld ms; encode@%ld ms; write@%ld ms",
     //       rtpContext->resampling_time, rtpContext->encoding_time,
      //      rtpContext->write_time);
#endif
}
Example #19
jint Java_org_devtcg_rojocam_ffmpeg_RtpOutputContext_nativeCreate(JNIEnv *env,
        jclass clazz, jint streamConfigNativeInt, jlong nowNanoTime,
        jstring hostAddress, jint rtpPort) {
    FFStreamConfig *config = (FFStreamConfig *)streamConfigNativeInt;
    RtpOutputContext *rtpContext = NULL;
    AVFormatContext *avContext = NULL;
    AVStream *st = NULL;
    uint8_t *dummy_buf;
    int max_packet_size;

    rtpContext = av_mallocz(sizeof(RtpOutputContext));
    if (rtpContext == NULL) {
        jniThrowOOM(env);
        goto cleanup;
    }

    rtpContext->lastFrameTime = nowNanoTime;
    rtpContext->config = config;

    avContext = avformat_alloc_context();
    if (avContext == NULL) {
        jniThrowOOM(env);
        goto cleanup;
    }
    avContext->oformat = av_guess_format("rtp", NULL, NULL);
    if (avContext->oformat == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                "rtp avformat is not available");
        goto cleanup;
    }

    rtpContext->avContext = avContext;

    st = av_mallocz(sizeof(AVStream));
    if (st == NULL) {
        jniThrowOOM(env);
        goto cleanup;
    }
    avContext->nb_streams = 1;
    avContext->streams = av_malloc(avContext->nb_streams * sizeof(*avContext->streams));
    avContext->streams[0] = st;

    /* XXX: What would we be doing if we supported audio as well? */
    memcpy(st, config->streams[0], sizeof(AVStream));
    st->priv_data = NULL;

    const jbyte *hostAddress_str = (*env)->GetStringUTFChars(env,
            hostAddress, NULL);
    snprintf(avContext->filename, sizeof(avContext->filename),
            "rtp://%s:%d?localrtpport=5000&localrtcpport=5001",
            hostAddress_str, rtpPort);
    (*env)->ReleaseStringUTFChars(env, hostAddress, hostAddress_str);

    if (url_open(&rtpContext->urlContext,
            avContext->filename, URL_WRONLY) < 0) {
        LOGE("Cannot open url context for filename=%s", avContext->filename);
        jniThrowException(env, "java/io/IOException", "Unable to open URL");
        goto cleanup;
    }

    max_packet_size = url_get_max_packet_size(rtpContext->urlContext);

    /* XXX: No idea what purpose this serves... */
    url_open_dyn_packet_buf(&avContext->pb, max_packet_size);

    av_set_parameters(avContext, NULL);
    if (av_write_header(avContext) < 0) {
        jniThrowException(env, "java/io/IOException", "Unexpected error writing dummy RTP header");
        goto cleanup;
    }

    url_close_dyn_buf(avContext->pb, &dummy_buf);
    av_free(dummy_buf);

    return (jint)rtpContext;

cleanup:
    rtp_output_context_free(rtpContext);
    assert((*env)->ExceptionOccurred(env));

    return 0;
}
Example #20
/* write the header (used two times if non streamed) */
static int asf_write_header1(AVFormatContext *s, int64_t file_size, int64_t data_chunk_size)
{
    ASFContext *asf = s->priv_data;
    ByteIOContext *pb = s->pb;
    AVMetadataTag *tags[5];
    int header_size, n, extra_size, extra_size2, wav_extra_size, file_time;
    int has_title;
    int metadata_count;
    AVCodecContext *enc;
    int64_t header_offset, cur_pos, hpos;
    int bit_rate;
    int64_t duration;

    ff_metadata_conv(&s->metadata, ff_asf_metadata_conv, NULL);

    tags[0] = av_metadata_get(s->metadata, "title"    , NULL, 0);
    tags[1] = av_metadata_get(s->metadata, "author"   , NULL, 0);
    tags[2] = av_metadata_get(s->metadata, "copyright", NULL, 0);
    tags[3] = av_metadata_get(s->metadata, "comment"  , NULL, 0);
    tags[4] = av_metadata_get(s->metadata, "rating"   , NULL, 0);

    duration = asf->duration + PREROLL_TIME * 10000;
    has_title = tags[0] || tags[1] || tags[2] || tags[3] || tags[4];
    metadata_count = s->metadata ? s->metadata->count : 0;

    bit_rate = 0;
    for(n=0;n<s->nb_streams;n++) {
        enc = s->streams[n]->codec;

        av_set_pts_info(s->streams[n], 32, 1, 1000); /* 32 bit pts in ms */

        bit_rate += enc->bit_rate;
    }

    if (asf->is_streamed) {
        put_chunk(s, 0x4824, 0, 0xc00); /* start of stream (length will be patched later) */
    }

    put_guid(pb, &ff_asf_header);
    put_le64(pb, -1); /* header length, will be patched after */
    put_le32(pb, 3 + has_title + !!metadata_count + s->nb_streams); /* number of chunks in header */
    put_byte(pb, 1); /* ??? */
    put_byte(pb, 2); /* ??? */

    /* file header */
    header_offset = url_ftell(pb);
    hpos = put_header(pb, &ff_asf_file_header);
    put_guid(pb, &ff_asf_my_guid);
    put_le64(pb, file_size);
    file_time = 0;
    put_le64(pb, unix_to_file_time(file_time));
    put_le64(pb, asf->nb_packets); /* number of packets */
    put_le64(pb, duration); /* end time stamp (in 100ns units) */
    put_le64(pb, asf->duration); /* duration (in 100ns units) */
    put_le64(pb, PREROLL_TIME); /* start time stamp */
    put_le32(pb, (asf->is_streamed || url_is_streamed(pb)) ? 3 : 2); /* ??? */
    put_le32(pb, s->packet_size); /* packet size */
    put_le32(pb, s->packet_size); /* packet size */
    put_le32(pb, bit_rate); /* Nominal data rate in bps */
    end_header(pb, hpos);

    /* unknown headers */
    hpos = put_header(pb, &ff_asf_head1_guid);
    put_guid(pb, &ff_asf_head2_guid);
    put_le32(pb, 6);
    put_le16(pb, 0);
    end_header(pb, hpos);

    /* title and other infos */
    if (has_title) {
        int len;
        uint8_t *buf;
        ByteIOContext *dyn_buf;

        if (url_open_dyn_buf(&dyn_buf) < 0)
            return AVERROR(ENOMEM);

        hpos = put_header(pb, &ff_asf_comment_header);

        for (n = 0; n < FF_ARRAY_ELEMS(tags); n++) {
            len = tags[n] ? avio_put_str16le(dyn_buf, tags[n]->value) : 0;
            put_le16(pb, len);
        }
        len = url_close_dyn_buf(dyn_buf, &buf);
        put_buffer(pb, buf, len);
        av_freep(&buf);
        end_header(pb, hpos);
    }
    if (metadata_count) {
        AVMetadataTag *tag = NULL;
        hpos = put_header(pb, &ff_asf_extended_content_header);
        put_le16(pb, metadata_count);
        while ((tag = av_metadata_get(s->metadata, "", tag, AV_METADATA_IGNORE_SUFFIX))) {
            put_str16(pb, tag->key);
            put_le16(pb, 0);
            put_str16(pb, tag->value);
        }
        end_header(pb, hpos);
    }

    /* stream headers */
    for(n=0;n<s->nb_streams;n++) {
        int64_t es_pos;
        //        ASFStream *stream = &asf->streams[n];

        enc = s->streams[n]->codec;
        asf->streams[n].num = n + 1;
        asf->streams[n].seq = 0;


        switch(enc->codec_type) {
        case AVMEDIA_TYPE_AUDIO:
            wav_extra_size = 0;
            extra_size = 18 + wav_extra_size;
            extra_size2 = 8;
            break;
        default:
        case AVMEDIA_TYPE_VIDEO:
            wav_extra_size = enc->extradata_size;
            extra_size = 0x33 + wav_extra_size;
            extra_size2 = 0;
            break;
        }

        hpos = put_header(pb, &ff_asf_stream_header);
        if (enc->codec_type == AVMEDIA_TYPE_AUDIO) {
            put_guid(pb, &ff_asf_audio_stream);
            put_guid(pb, &ff_asf_audio_conceal_spread);
        } else {
            put_guid(pb, &ff_asf_video_stream);
            put_guid(pb, &ff_asf_video_conceal_none);
        }
        put_le64(pb, 0); /* ??? */
        es_pos = url_ftell(pb);
        put_le32(pb, extra_size); /* wav header len */
        put_le32(pb, extra_size2); /* additional data len */
        put_le16(pb, n + 1); /* stream number */
        put_le32(pb, 0); /* ??? */

        if (enc->codec_type == AVMEDIA_TYPE_AUDIO) {
            /* WAVEFORMATEX header */
            int wavsize = ff_put_wav_header(pb, enc);
            if ((enc->codec_id != CODEC_ID_MP3) && (enc->codec_id != CODEC_ID_MP2) && (enc->codec_id != CODEC_ID_ADPCM_IMA_WAV) && (enc->extradata_size==0)) {
                wavsize += 2;
                put_le16(pb, 0);
            }

            if (wavsize < 0)
                return -1;
            if (wavsize != extra_size) {
                cur_pos = url_ftell(pb);
                url_fseek(pb, es_pos, SEEK_SET);
                put_le32(pb, wavsize); /* wav header len */
                url_fseek(pb, cur_pos, SEEK_SET);
            }
            /* ERROR Correction */
            put_byte(pb, 0x01);
            if(enc->codec_id == CODEC_ID_ADPCM_G726 || !enc->block_align){
                put_le16(pb, 0x0190);
                put_le16(pb, 0x0190);
            }else{
                put_le16(pb, enc->block_align);
                put_le16(pb, enc->block_align);
            }
            put_le16(pb, 0x01);
            put_byte(pb, 0x00);
        } else {
            put_le32(pb, enc->width);
            put_le32(pb, enc->height);
            put_byte(pb, 2); /* ??? */
            put_le16(pb, 40 + enc->extradata_size); /* size */

            /* BITMAPINFOHEADER header */
            ff_put_bmp_header(pb, enc, ff_codec_bmp_tags, 1);
        }
        end_header(pb, hpos);
    }

    /* media comments */

    hpos = put_header(pb, &ff_asf_codec_comment_header);
    put_guid(pb, &ff_asf_codec_comment1_header);
    put_le32(pb, s->nb_streams);
    for(n=0;n<s->nb_streams;n++) {
        AVCodec *p;
        const char *desc;
        int len;
        uint8_t *buf;
        ByteIOContext *dyn_buf;

        enc = s->streams[n]->codec;
        p = avcodec_find_encoder(enc->codec_id);

        if(enc->codec_type == AVMEDIA_TYPE_AUDIO)
            put_le16(pb, 2);
        else if(enc->codec_type == AVMEDIA_TYPE_VIDEO)
            put_le16(pb, 1);
        else
            put_le16(pb, -1);

        if(enc->codec_id == CODEC_ID_WMAV2)
            desc = "Windows Media Audio V8";
        else
            desc = p ? p->name : enc->codec_name;

        if (url_open_dyn_buf(&dyn_buf) < 0)
            return AVERROR(ENOMEM);

        avio_put_str16le(dyn_buf, desc);
        len = url_close_dyn_buf(dyn_buf, &buf);
        put_le16(pb, len / 2); // "number of characters" = length in bytes / 2

        put_buffer(pb, buf, len);
        av_freep(&buf);

        put_le16(pb, 0); /* no parameters */


        /* id */
        if (enc->codec_type == AVMEDIA_TYPE_AUDIO) {
            put_le16(pb, 2);
            put_le16(pb, enc->codec_tag);
        } else {
            put_le16(pb, 4);
            put_le32(pb, enc->codec_tag);
        }
        if(!enc->codec_tag)
            return -1;
    }
    end_header(pb, hpos);

    /* patch the header size fields */

    cur_pos = url_ftell(pb);
    header_size = cur_pos - header_offset;
    if (asf->is_streamed) {
        header_size += 8 + 30 + 50;

        url_fseek(pb, header_offset - 10 - 30, SEEK_SET);
        put_le16(pb, header_size);
        url_fseek(pb, header_offset - 2 - 30, SEEK_SET);
        put_le16(pb, header_size);

        header_size -= 8 + 30 + 50;
    }
    header_size += 24 + 6;
    url_fseek(pb, header_offset - 14, SEEK_SET);
    put_le64(pb, header_size);
    url_fseek(pb, cur_pos, SEEK_SET);

    /* movie chunk, followed by packets of packet_size */
    asf->data_offset = cur_pos;
    put_guid(pb, &ff_asf_data_header);
    put_le64(pb, data_chunk_size);
    put_guid(pb, &ff_asf_my_guid);
    put_le64(pb, asf->nb_packets); /* nb packets */
    put_byte(pb, 1); /* ??? */
    put_byte(pb, 1); /* ??? */
    return 0;
}
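
A pattern worth calling out in this header writer: sizes that aren't known up front (the header length, the wav header length) are written as placeholders and patched by seeking back once the real value is known; put_header()/end_header() wrap exactly this around each GUID, and it only works because pb is seekable in the non-streamed case. A minimal sketch of the idiom (patch_le32_at is a hypothetical helper):

/* Remember where a size field goes, write it later: seek back to 'pos',
 * overwrite the placeholder, and restore the write position. */
static void patch_le32_at(ByteIOContext *pb, int64_t pos, uint32_t value)
{
    int64_t cur = url_ftell(pb);
    url_fseek(pb, pos, SEEK_SET);
    put_le32(pb, value);
    url_fseek(pb, cur, SEEK_SET);
}
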
Example #21
int ff_mov_init_hinting(AVFormatContext *s, int index, int src_index)
{
    MOVMuxContext *mov  = s->priv_data;
    MOVTrack *track     = &mov->tracks[index];
    MOVTrack *src_track = &mov->tracks[src_index];
    AVStream *src_st    = s->streams[src_index];
    int ret = AVERROR(ENOMEM);
    AVOutputFormat *rtp_format = av_guess_format("rtp", NULL, NULL);

    track->tag = MKTAG('r','t','p',' ');
    track->src_track = src_index;

    if (!rtp_format) {
        ret = AVERROR(ENOENT);
        goto fail;
    }

    track->enc = avcodec_alloc_context();
    if (!track->enc)
        goto fail;
    track->enc->codec_type = AVMEDIA_TYPE_DATA;
    track->enc->codec_tag  = track->tag;

    track->rtp_ctx = avformat_alloc_context();
    if (!track->rtp_ctx)
        goto fail;
    track->rtp_ctx->oformat = rtp_format;
    if (!av_new_stream(track->rtp_ctx, 0))
        goto fail;

    /* Copy stream parameters */
    track->rtp_ctx->streams[0]->sample_aspect_ratio =
                        src_st->sample_aspect_ratio;

    /* Remove the allocated codec context, link to the original one
     * instead, to give the rtp muxer access to codec parameters. */
    av_free(track->rtp_ctx->streams[0]->codec);
    track->rtp_ctx->streams[0]->codec = src_st->codec;

    if ((ret = url_open_dyn_packet_buf(&track->rtp_ctx->pb,
                                       RTP_MAX_PACKET_SIZE)) < 0)
        goto fail;
    ret = av_write_header(track->rtp_ctx);
    if (ret)
        goto fail;

    /* Copy the RTP AVStream timebase back to the hint AVStream */
    track->timescale = track->rtp_ctx->streams[0]->time_base.den;

    /* Mark the hinted track that packets written to it should be
     * sent to this track for hinting. */
    src_track->hint_track = index;
    return 0;
fail:
    av_log(s, AV_LOG_WARNING,
           "Unable to initialize hinting of stream %d\n", src_index);
    if (track->rtp_ctx && track->rtp_ctx->pb) {
        uint8_t *buf;
        url_close_dyn_buf(track->rtp_ctx->pb, &buf);
        av_free(buf);
    }
    if (track->rtp_ctx && track->rtp_ctx->streams[0]) {
        av_metadata_free(&track->rtp_ctx->streams[0]->metadata);
        av_free(track->rtp_ctx->streams[0]);
    }
    if (track->rtp_ctx) {
        av_metadata_free(&track->rtp_ctx->metadata);
        av_free(track->rtp_ctx->priv_data);
        av_freep(&track->rtp_ctx);
    }
    av_freep(&track->enc);
    /* Set a default timescale, to avoid crashes in dump_format */
    track->timescale = 90000;
    return ret;
}