Example #1
static int x265_close(HEVCEncoderContext *s, uint8_t **pbuf)
{
    int buf_len, ret, i;
    uint32_t nal_count;
    x265_nal *p_nal;
    
    /* get last compressed pictures */
    for(;;) {
        ret = x265_encoder_encode(s->enc, &p_nal, &nal_count, NULL, NULL);
        if (ret <= 0)
            break;
        for(i = 0; i < nal_count; i++) {
            add_nal(s, p_nal[i].payload, p_nal[i].sizeBytes);
        }
    }

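    /* shrink the output buffer to the exact bitstream length before handing it back */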
    if (s->buf_len < s->buf_size) {
        s->buf = realloc(s->buf, s->buf_len);
    }

    *pbuf = s->buf;
    buf_len = s->buf_len;

    x265_encoder_close(s->enc);
    x265_picture_free(s->pic);
    free(s);
    return buf_len;
}
Example #2
static int x265_encode(HEVCEncoderContext *s, Image *img)
{
    int c_count, i, ret;
    x265_picture *pic;
    uint32_t nal_count;
    x265_nal *p_nal;
    
    pic = s->pic;

    if (img->format == BPG_FORMAT_GRAY)
        c_count = 1;
    else
        c_count = 3;
    for(i = 0; i < c_count; i++) {
        pic->planes[i] = img->data[i];
        pic->stride[i] = img->linesize[i];
    }
    pic->bitDepth = img->bit_depth;

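    /* ret > 0 means one or more NAL units were produced; encode errors (ret < 0)
       are silently ignored by this wrapper */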
    ret = x265_encoder_encode(s->enc, &p_nal, &nal_count, pic, NULL);
    if (ret > 0) {
        for(i = 0; i < nal_count; i++) {
            add_nal(s, p_nal[i].payload, p_nal[i].sizeBytes);
        }
    }
    return 0;
}
Example #3
bool Video_Encoder_H265::operator ()(uint8_t **yuv_data, int *linesize, int top_or_bottom_field)
{
    x265_nal *pp_nal = NULL;
    uint32_t pi_nal = 0;

    for (int i = 0; i < 3; ++i)
    {
        _x265_picture->planes[i] = yuv_data[i];
        _x265_picture->stride[i] = linesize[i];
    }

    x265_picture pic_out;
    if(_insert_idr)
    {
        // force an IDR by flagging the *input* picture; setting sliceType on
        // pic_out has no effect, since the encoder only writes to it
        _x265_picture->sliceType = X265_TYPE_IDR;
        _insert_idr = false;
    }
    else
    {
        _x265_picture->sliceType = X265_TYPE_AUTO;
    }


    /*When the last of the raw input pictures has been sent to the encoder,
     * x265_encoder_encode() must still be called repeatedly with a pic_in argument of 0,
     * indicating a pipeline flush, until the function returns a value less than or equal to 0
     * (indicating the output bitstream is complete).*/
    int rc = x265_encoder_encode(_x265_encoder, &pp_nal, &pi_nal, _x265_picture, &pic_out);
    if(rc < 0)
    {
        //error...
        printf("x265_encoder_encode error~\n");
        return false;
    }

    // rc == 1 means a picture and an access unit were output; rc == 0 means
    // the encoder is still buffering and produced no output for this call

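    // derive a 90 kHz timestamp from the wall-clock time elapsed since the encoder started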
    gettimeofday(&_tv_current, NULL);
    struct timeval tv_result;
    timersub(&_tv_current, &_tv_start, &tv_result);
    u_int64_t  ts = tv_result.tv_sec * 1000000 + tv_result.tv_usec;
    ts -= 1000;
    ts = ( ts * 90000 ) / 1000000;

    for(unsigned int i=0; i<pi_nal; ++i)
    {
//        printf("@@@@@@@@@@@@@@@@@@@@@@@@%d%d%d, size=%d\n", i, i, i, pp_nal[i].sizeBytes);
        (*_callback)(pp_nal[i].payload, pp_nal[i].sizeBytes, pp_nal[i].type, ts);
    }

//    x265_stats *stats;
//    uint32_t statsSizeBytes;
    //At any time during this process, the application may query running statistics from the encoder:
//    x265_encoder_get_stats(_x265_encoder, stats, statsSizeBytes);

    return true;
}
Example #4
static int libx265_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                                const AVFrame *pic, int *got_packet)
{
    libx265Context *ctx = avctx->priv_data;
    x265_picture x265pic;
    x265_picture x265pic_out = { { 0 } };
    x265_nal *nal;
    uint8_t *dst;
    int payload = 0;
    int nnal;
    int ret;
    int i;

    x265_picture_init(ctx->params, &x265pic);

    if (pic) {
        for (i = 0; i < 3; i++) {
           x265pic.planes[i] = pic->data[i];
           x265pic.stride[i] = pic->linesize[i];
        }

        x265pic.pts      = pic->pts;
        x265pic.bitDepth = av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth_minus1 + 1;
    }

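    /* pic == NULL tells the encoder to flush any frames it is still buffering */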
    ret = x265_encoder_encode(ctx->encoder, &nal, &nnal,
                              pic ? &x265pic : NULL, &x265pic_out);
    if (ret < 0)
        return AVERROR_UNKNOWN;

    if (!nnal)
        return 0;

    for (i = 0; i < nnal; i++)
        payload += nal[i].sizeBytes;

    ret = ff_alloc_packet(pkt, payload);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error getting output packet.\n");
        return ret;
    }
    dst = pkt->data;

    for (i = 0; i < nnal; i++) {
        memcpy(dst, nal[i].payload, nal[i].sizeBytes);
        dst += nal[i].sizeBytes;

        if (is_keyframe(nal[i].type))
            pkt->flags |= AV_PKT_FLAG_KEY;
    }

    pkt->pts = x265pic_out.pts;
    pkt->dts = x265pic_out.dts;

    *got_packet = 1;
    return 0;
}
Example #5
int encx265Work(hb_work_object_t *w, hb_buffer_t **buf_in, hb_buffer_t **buf_out)
{
    hb_work_private_t *pv = w->private_data;
    hb_buffer_t       *in = *buf_in;

    if (in->size <= 0)
    {
        uint32_t nnal;
        x265_nal *nal;
        x265_picture pic_out;
        hb_buffer_t *last_buf = NULL;

        // flush delayed frames
        while (x265_encoder_encode(pv->x265, &nal, &nnal, NULL, &pic_out) > 0)
        {
            hb_buffer_t *buf = nal_encode(w, &pic_out, nal, nnal);
            if (buf != NULL)
            {
                if (last_buf == NULL)
                {
                    *buf_out = buf;
                }
                else
                {
                    last_buf->next = buf;
                }
                last_buf = buf;
            }
        }

        // add the EOF to the end of the chain
        if (last_buf == NULL)
        {
            *buf_out = in;
        }
        else
        {
            last_buf->next = in;
        }

        *buf_in = NULL;
        return HB_WORK_DONE;
    }

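    // normal path: encode this input frame and pass along any output buffer it yields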
    *buf_out = x265_encode(w, in);
    return HB_WORK_OK;
}
Example #6
bool VideoEncoderX265::encodeFrame(VideoFrame* codedFrame)
{
    int success;
    unsigned piNal;
    x265_nal* nals;
    SlicedVideoFrame* slicedFrame;

    slicedFrame = dynamic_cast<SlicedVideoFrame*> (codedFrame);

    if (!slicedFrame || !encoder) {
        utils::errorMsg("Could not encode x265 video frame. Target frame or encoder are NULL");
        return false;
    }

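    // honor a pending keyframe request by forcing the next picture to be coded as an I-slice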
    if (forceIntra) {
        picIn->sliceType = X265_TYPE_I;
        forceIntra = false;
    } else {
        picIn->sliceType = X265_TYPE_AUTO;
    }

    picIn->pts = pts;
    success = x265_encoder_encode(encoder, &nals, &piNal, picIn, picOut);

    pts++;

    if (success < 0) {
        utils::errorMsg("X265 Encoder: Could not encode video frame");
        return false;
    } else if (success == 0) {
        utils::debugMsg("X265 Encoder: NAL not retrieved after encoding");
        return false;
    }

    for (unsigned i = 0; i < piNal; i++) {
        if (!slicedFrame->setSlice(nals[i].payload, nals[i].sizeBytes)) {
            utils::errorMsg("X265 Encoder: too many NALs for one slicedFrame");
            return false;
        }
    }

    return true;
}
Example #7
static hb_buffer_t* x265_encode(hb_work_object_t *w, hb_buffer_t *in)
{
    hb_work_private_t *pv = w->private_data;
    hb_job_t *job         = pv->job;
    x265_picture pic_in, pic_out;
    x265_nal *nal;
    uint32_t nnal;

    x265_picture_init(pv->param, &pic_in);

    pic_in.stride[0] = in->plane[0].stride;
    pic_in.stride[1] = in->plane[1].stride;
    pic_in.stride[2] = in->plane[2].stride;
    pic_in.planes[0] = in->plane[0].data;
    pic_in.planes[1] = in->plane[1].data;
    pic_in.planes[2] = in->plane[2].data;
    pic_in.poc       = pv->frames_in++;
    pic_in.pts       = in->s.start;
    pic_in.bitDepth  = 8;

    if (in->s.new_chap && job->chapter_markers)
    {
        if (pv->next_chapter_pts == AV_NOPTS_VALUE)
        {
            pv->next_chapter_pts = in->s.start;
        }
        /*
         * Chapter markers are sometimes so close we can get a new one before
         * the previous marker has been through the encoding queue.
         *
         * Dropping markers can cause weird side-effects downstream, including
         * but not limited to missing chapters in the output, so we need to save
         * it somehow.
         */
        struct chapter_s *item = malloc(sizeof(struct chapter_s));
        if (item != NULL)
        {
            item->start = in->s.start;
            item->index = in->s.new_chap;
            hb_list_add(pv->delayed_chapters, item);
        }
        /* don't let 'work_loop' put a chapter mark on the wrong buffer */
        in->s.new_chap = 0;
        /*
         * Chapters have to start with an IDR frame so request that this frame be
         * coded as IDR. Since there may be up to 16 frames currently buffered in
         * the encoder, remember the timestamp so when this frame finally pops out
         * of the encoder we'll mark its buffer as the start of a chapter.
         */
        pic_in.sliceType = X265_TYPE_IDR;
    }
    else
    {
        pic_in.sliceType = X265_TYPE_AUTO;
    }

    if (pv->last_stop != in->s.start)
    {
        hb_log("encx265 input continuity err: last stop %"PRId64"  start %"PRId64,
               pv->last_stop, in->s.start);
    }
    pv->last_stop = in->s.stop;
    save_frame_info(pv, in);

    if (x265_encoder_encode(pv->x265, &nal, &nnal, &pic_in, &pic_out) > 0)
    {
        return nal_encode(w, &pic_out, nal, nnal);
    }
    return NULL;
}
Example #8
static GstFlowReturn
gst_x265_enc_encode_frame (GstX265Enc * encoder, x265_picture * pic_in,
    GstVideoCodecFrame * input_frame, guint32 * i_nal, gboolean send)
{
  GstVideoCodecFrame *frame = NULL;
  GstBuffer *out_buf = NULL;
  x265_picture pic_out;
  x265_nal *nal;
  int i_size, i, offset;
  int encoder_return;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean update_latency = FALSE;

  if (G_UNLIKELY (encoder->x265enc == NULL)) {
    if (input_frame)
      gst_video_codec_frame_unref (input_frame);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  GST_OBJECT_LOCK (encoder);
  if (encoder->reconfig) {
    // x265_encoder_reconfig is not yet implemented thus we shut down and re-create encoder
    gst_x265_enc_init_encoder (encoder);
    update_latency = TRUE;
  }

  if (pic_in && input_frame) {
    if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (input_frame)) {
      GST_INFO_OBJECT (encoder, "Forcing key frame");
      pic_in->sliceType = X265_TYPE_IDR;
    }
  }
  GST_OBJECT_UNLOCK (encoder);

  if (G_UNLIKELY (update_latency))
    gst_x265_enc_set_latency (encoder);

  encoder_return = x265_encoder_encode (encoder->x265enc,
      &nal, i_nal, pic_in, &pic_out);

  GST_DEBUG_OBJECT (encoder, "encoder result (%d) with %u nal units",
      encoder_return, *i_nal);

  if (encoder_return < 0) {
    GST_ELEMENT_ERROR (encoder, STREAM, ENCODE, ("Encode x265 frame failed."),
        ("x265_encoder_encode return code=%d", encoder_return));
    ret = GST_FLOW_ERROR;
    /* Make sure we finish this frame */
    frame = input_frame;
    goto out;
  }

  /* Input frame is now queued */
  if (input_frame)
    gst_video_codec_frame_unref (input_frame);

  if (!*i_nal) {
    ret = GST_FLOW_OK;
    GST_LOG_OBJECT (encoder, "no output yet");
    goto out;
  }

  frame = gst_video_encoder_get_frame (GST_VIDEO_ENCODER (encoder),
      GPOINTER_TO_INT (pic_out.userData));
  g_assert (frame || !send);

  GST_DEBUG_OBJECT (encoder,
      "output picture ready POC=%d system=%d frame found %d", pic_out.poc,
      GPOINTER_TO_INT (pic_out.userData), frame != NULL);

  if (!send || !frame) {
    GST_LOG_OBJECT (encoder, "not sending (%d) or frame not found (%d)", send,
        frame != NULL);
    ret = GST_FLOW_OK;
    goto out;
  }

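  /* first pass totals the NAL sizes, second pass copies them into one output buffer */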
  i_size = 0;
  offset = 0;
  for (i = 0; i < *i_nal; i++)
    i_size += nal[i].sizeBytes;
  out_buf = gst_buffer_new_allocate (NULL, i_size, NULL);
  for (i = 0; i < *i_nal; i++) {
    gst_buffer_fill (out_buf, offset, nal[i].payload, nal[i].sizeBytes);
    offset += nal[i].sizeBytes;
  }

  frame->output_buffer = out_buf;

  if (encoder->push_header) {
    GstBuffer *header;

    header = gst_x265_enc_get_header_buffer (encoder);
    frame->output_buffer = gst_buffer_append (header, frame->output_buffer);
    encoder->push_header = FALSE;
  }

  GST_LOG_OBJECT (encoder,
      "output: dts %" G_GINT64_FORMAT " pts %" G_GINT64_FORMAT,
      (gint64) pic_out.dts, (gint64) pic_out.pts);

  frame->dts = pic_out.dts + encoder->dts_offset;

out:
  if (frame) {
    gst_x265_enc_dequeue_frame (encoder, frame);
    ret = gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (encoder), frame);
  }

  return ret;
}
Example #9
File: x265.c  Project: 0xheart0/vlc
static block_t *Encode(encoder_t *p_enc, picture_t *p_pict)
{
    encoder_sys_t *p_sys = p_enc->p_sys;
    x265_picture pic;

    x265_picture_init(&p_sys->param, &pic);

    if (likely(p_pict)) {
        pic.pts = p_pict->date;
        if (unlikely(p_sys->initial_date == 0)) {
            p_sys->initial_date = p_pict->date;
#ifndef NDEBUG
            p_sys->start = mdate();
#endif
        }

        for (int i = 0; i < p_pict->i_planes; i++) {
            pic.planes[i] = p_pict->p[i].p_pixels;
            pic.stride[i] = p_pict->p[i].i_pitch;
        }
    }

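    /* passing a NULL input picture below puts the encoder into flush mode */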
    x265_nal *nal;
    uint32_t i_nal = 0;
    x265_encoder_encode(p_sys->h, &nal, &i_nal,
            likely(p_pict) ? &pic : NULL, &pic);

    if (!i_nal)
        return NULL;

    int i_out = 0;
    for (uint32_t i = 0; i < i_nal; i++)
        i_out += nal[i].sizeBytes;

    block_t *p_block = block_Alloc(i_out);
    if (!p_block)
        return NULL;

    /* all payloads are sequentially laid out in memory */
    memcpy(p_block->p_buffer, nal[0].payload, i_out);

    /* This isn't really valid for streams with B-frames */
    p_block->i_length = CLOCK_FREQ *
        p_enc->fmt_in.video.i_frame_rate_base /
            p_enc->fmt_in.video.i_frame_rate;

    p_block->i_pts = p_sys->initial_date + pic.poc * p_block->i_length;
    p_block->i_dts = p_sys->initial_date + p_sys->dts++ * p_block->i_length;

    switch (pic.sliceType)
    {
    case X265_TYPE_I:
    case X265_TYPE_IDR:
        p_block->i_flags |= BLOCK_FLAG_TYPE_I;
        break;
    case X265_TYPE_P:
        p_block->i_flags |= BLOCK_FLAG_TYPE_P;
        break;
    case X265_TYPE_B:
    case X265_TYPE_BREF:
        p_block->i_flags |= BLOCK_FLAG_TYPE_B;
        break;
    }

#ifndef NDEBUG
    msg_Dbg(p_enc, "%zu bytes (frame %"PRId64", %.2ffps)", p_block->i_buffer,
        p_sys->dts, (float)p_sys->dts * CLOCK_FREQ / (mdate() - p_sys->start));
#endif

    return p_block;
}
Example #10
int h265_encode(struct videnc_state *st, bool update,
		const struct vidframe *frame,
		videnc_packet_h *pkth, void *arg)
{
	x265_picture *pic_in = NULL, pic_out;
	x265_nal *nalv;
	uint32_t i, nalc = 0;
	int n, err = 0;

	if (!st || !frame || !pkth || frame->fmt != VID_FMT_YUV420P)
		return EINVAL;

	if (!st->x265 || !vidsz_cmp(&st->size, &frame->size)) {

		err = open_encoder(st, &frame->size);
		if (err)
			return err;

		st->size = frame->size;
	}

	if (update) {
		debug("h265: encode: picture update was requested\n");
	}

	pic_in = x265_picture_alloc();
	if (!pic_in) {
		warning("h265: x265_picture_alloc failed\n");
		return ENOMEM;
	}

	x265_picture_init(st->param, pic_in);

	pic_in->sliceType  = update ? X265_TYPE_IDR : X265_TYPE_AUTO;
	pic_in->pts        = ++st->pts;      /* XXX: add PTS to API */
	pic_in->colorSpace = X265_CSP_I420;

	for (i=0; i<3; i++) {
		pic_in->planes[i] = frame->data[i];
		pic_in->stride[i] = frame->linesize[i];
	}

	/* NOTE: important to get the PTS of the "out" picture */
	n = x265_encoder_encode(st->x265, &nalv, &nalc, pic_in, &pic_out);
	if (n <= 0)
		goto out;

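	/* packetize each NAL unit; 'marker' flags the last NAL of this access unit */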
	for (i=0; i<nalc; i++) {

		x265_nal *nal = &nalv[i];
		uint8_t *p = nal->payload;
		size_t len = nal->sizeBytes;
		bool marker;

#if 1
		debug("h265: encode: %s type=%2d  %s\n",
			  h265_is_keyframe(nal->type) ? "<KEY>" : "     ",
			  nal->type, h265_nalunit_name(nal->type));
#endif

		h265_skip_startcode(&p, &len);

		/* XXX: use pic_out.pts */

		marker = (i+1)==nalc;  /* last NAL */

		err = packetize(marker, p, len, st->pktsize, pkth, arg);
		if (err)
			goto out;
	}

 out:
	if (pic_in)
		x265_picture_free(pic_in);

	return err;
}
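
All of the examples above share the same shape: fill an x265_picture with plane
pointers and strides, call x265_encoder_encode(), emit whatever NAL units come
back, and finally flush by passing a NULL input picture until the encoder is
drained. The sketch below condenses that pattern; it assumes an 8-bit I420
source, and read_frame()/write_nals() are hypothetical callbacks supplied by the
caller (read_frame() is expected to fill pic->planes, pic->stride and pic->pts).

#include <stdint.h>
#include <x265.h>

static int encode_stream(int width, int height,
                         int (*read_frame)(x265_picture *pic, void *opaque),
                         void (*write_nals)(const x265_nal *nal, uint32_t count,
                                            void *opaque),
                         void *opaque)
{
    x265_param *param = x265_param_alloc();
    x265_picture *pic = x265_picture_alloc();
    x265_encoder *enc = NULL;
    x265_nal *nal;
    uint32_t nnal;
    int ret = -1;

    if (!param || !pic)
        goto out;

    x265_param_default_preset(param, "medium", NULL);
    param->sourceWidth  = width;
    param->sourceHeight = height;
    param->fpsNum       = 25;
    param->fpsDenom     = 1;
    param->internalCsp  = X265_CSP_I420;

    enc = x265_encoder_open(param);
    if (!enc)
        goto out;

    x265_picture_init(param, pic);

    /* feed raw pictures; a positive return value means NAL units are ready */
    while (read_frame(pic, opaque) > 0) {
        ret = x265_encoder_encode(enc, &nal, &nnal, pic, NULL);
        if (ret < 0)
            goto out;
        if (ret > 0)
            write_nals(nal, nnal, opaque);
    }

    /* flush: keep passing a NULL input picture until the encoder is drained */
    while ((ret = x265_encoder_encode(enc, &nal, &nnal, NULL, NULL)) > 0)
        write_nals(nal, nnal, opaque);

out:
    if (enc)
        x265_encoder_close(enc);
    if (pic)
        x265_picture_free(pic);
    if (param)
        x265_param_free(param);
    return ret < 0 ? -1 : 0;
}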