Example No. 1
static GstFlowReturn
theora_push_packet (GstTheoraEnc * enc, ogg_packet * packet)
{
    GstVideoEncoder *benc;
    GstFlowReturn ret;
    GstVideoCodecFrame *frame;

    benc = GST_VIDEO_ENCODER (enc);

    frame = gst_video_encoder_get_oldest_frame (benc);
    if (gst_video_encoder_allocate_output_frame (benc, frame,
            packet->bytes) != GST_FLOW_OK) {
        GST_WARNING_OBJECT (enc, "Could not allocate buffer");
        gst_video_codec_frame_unref (frame);
        ret = GST_FLOW_ERROR;
        goto done;
    }

    gst_buffer_fill (frame->output_buffer, 0, packet->packet, packet->bytes);

    /* the second most significant bit of the first data byte is cleared
     * for keyframes */
    if (packet->bytes > 0 && (packet->packet[0] & 0x40) == 0) {
        GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
    } else {
        GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
    }
    enc->packetno++;

    ret = gst_video_encoder_finish_frame (benc, frame);

done:
    return ret;
}
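The keyframe test above depends on a Theora bitstream convention. As a minimal standalone sketch (helper name ours, not part of the element):

#include <glib.h>
#include <ogg/ogg.h>

/* TRUE when an ogg_packet carries a Theora keyframe: bit 0x40 of the
 * first data byte is cleared for intra frames, as the comment in the
 * example above notes. */
static gboolean
theora_packet_is_keyframe (const ogg_packet * packet)
{
  return packet->bytes > 0 && (packet->packet[0] & 0x40) == 0;
}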
Example No. 2
static GstFlowReturn
gst_y4m_encode_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstY4mEncode *filter = GST_Y4M_ENCODE (encoder);
  GstClockTime timestamp;

  /* check we got some decent info from caps */
  if (GST_VIDEO_INFO_FORMAT (&filter->info) == GST_VIDEO_FORMAT_UNKNOWN)
    goto not_negotiated;

  timestamp = GST_BUFFER_TIMESTAMP (frame->input_buffer);

  if (G_UNLIKELY (!filter->header)) {
    gboolean tff = FALSE;

    if (GST_VIDEO_INFO_IS_INTERLACED (&filter->info)) {
      tff =
          GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
          GST_VIDEO_BUFFER_FLAG_TFF);
    }
    frame->output_buffer = gst_y4m_encode_get_stream_header (filter, tff);
    filter->header = TRUE;
    frame->output_buffer =
        gst_buffer_append (frame->output_buffer,
        gst_y4m_encode_get_frame_header (filter));
  } else {
    frame->output_buffer = gst_y4m_encode_get_frame_header (filter);
  }

  frame->output_buffer =
      gst_buffer_append (frame->output_buffer,
      gst_buffer_copy (frame->input_buffer));

  /* decorate */
  frame->output_buffer = gst_buffer_make_writable (frame->output_buffer);
  GST_BUFFER_TIMESTAMP (frame->output_buffer) = timestamp;

  return gst_video_encoder_finish_frame (encoder, frame);

not_negotiated:
  {
    GST_ELEMENT_ERROR (filter, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated"));

    return GST_FLOW_NOT_NEGOTIATED;
  }
}
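Note the repeated reassignment of frame->output_buffer above: gst_buffer_append() consumes both of its arguments and returns the combined buffer. A minimal sketch of that ownership rule (helper name ours):

#include <gst/gst.h>

/* gst_buffer_append() takes ownership of both buffers; the caller may
 * only use the returned buffer afterwards. */
static GstBuffer *
prepend_header (GstBuffer * header, GstBuffer * payload)
{
  return gst_buffer_append (header, payload);
}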
Example No. 3
static GstFlowReturn
gst_av1_enc_process (GstAV1Enc * encoder)
{
  aom_codec_iter_t iter = NULL;
  const aom_codec_cx_pkt_t *pkt;
  GstVideoCodecFrame *frame;
  GstVideoEncoder *video_encoder;
  GstFlowReturn ret = GST_FLOW_CUSTOM_SUCCESS;

  video_encoder = GST_VIDEO_ENCODER (encoder);

  while ((pkt = aom_codec_get_cx_data (&encoder->encoder, &iter)) != NULL) {
    if (pkt->kind == AOM_CODEC_STATS_PKT) {
      GST_WARNING_OBJECT (encoder, "Unhandled stats packet");
    } else if (pkt->kind == AOM_CODEC_FPMB_STATS_PKT) {
      GST_WARNING_OBJECT (encoder, "Unhandled FPMB pkt");
    } else if (pkt->kind == AOM_CODEC_PSNR_PKT) {
      GST_WARNING_OBJECT (encoder, "Unhandled PSNR packet");
    } else if (pkt->kind == AOM_CODEC_CX_FRAME_PKT) {
      frame = gst_video_encoder_get_oldest_frame (video_encoder);
      g_assert (frame != NULL);
      if ((pkt->data.frame.flags & AOM_FRAME_IS_KEY) != 0) {
        GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
      } else {
        GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
      }

      frame->output_buffer =
          gst_buffer_new_wrapped (g_memdup (pkt->data.frame.buf,
              pkt->data.frame.sz), pkt->data.frame.sz);

      if ((pkt->data.frame.flags & AOM_FRAME_IS_DROPPABLE) != 0)
        GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DROPPABLE);
      if ((pkt->data.frame.flags & AOM_FRAME_IS_INVISIBLE) != 0)
        GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DECODE_ONLY);

      ret = gst_video_encoder_finish_frame (video_encoder, frame);
      if (ret != GST_FLOW_OK)
        break;
    }
  }

  return ret;
}
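The flag mapping in the AOM_CODEC_CX_FRAME_PKT branch can be isolated into a small helper. A sketch (helper name ours, flag constants as used above):

#include <gst/gst.h>
#include <aom/aom_encoder.h>

/* Translate libaom frame flags into the corresponding GStreamer buffer
 * flags, mirroring the branch in the example above. */
static void
apply_aom_frame_flags (GstBuffer * buf, aom_codec_frame_flags_t flags)
{
  if (flags & AOM_FRAME_IS_DROPPABLE)
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DROPPABLE);
  if (flags & AOM_FRAME_IS_INVISIBLE)
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DECODE_ONLY);
}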
Example No. 4
static GstFlowReturn
gst_video_encoder_tester_handle_frame (GstVideoEncoder * dec,
    GstVideoCodecFrame * frame)
{
  guint8 *data;
  GstMapInfo map;
  guint64 input_num;

  gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ);
  input_num = *((guint64 *) map.data);
  gst_buffer_unmap (frame->input_buffer, &map);

  data = g_malloc (sizeof (guint64));
  *(guint64 *) data = input_num;

  frame->output_buffer = gst_buffer_new_wrapped (data, sizeof (guint64));
  frame->pts = GST_BUFFER_PTS (frame->input_buffer);
  frame->duration = GST_BUFFER_DURATION (frame->input_buffer);

  return gst_video_encoder_finish_frame (dec, frame);
}
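The tester never frees `data` itself because gst_buffer_new_wrapped() takes ownership of the malloc'd memory and releases it with g_free() when the buffer is destroyed. A sketch of that pattern (helper name ours):

#include <gst/gst.h>

/* Wrap a 64-bit value in a GstBuffer; the buffer owns the allocation. */
static GstBuffer *
wrap_u64 (guint64 value)
{
  guint64 *data = g_malloc (sizeof (guint64));

  *data = value;
  return gst_buffer_new_wrapped (data, sizeof (guint64));
}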
Example No. 5
static GstFlowReturn
gst_amc_video_enc_handle_output_frame (GstAmcVideoEnc * self,
    GstAmcBuffer * buf, const GstAmcBufferInfo * buffer_info,
    GstVideoCodecFrame * frame)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstVideoEncoder *encoder = GST_VIDEO_ENCODER_CAST (self);

  /* The BUFFER_FLAG_CODEC_CONFIG logic is borrowed from
   * gst-omx. see *_handle_output_frame in
   * gstomxvideoenc.c and gstomxh264enc.c */
  if ((buffer_info->flags & BUFFER_FLAG_CODEC_CONFIG)
      && buffer_info->size > 0) {
    GstStructure *s;
    GstVideoCodecState *state;

    state = gst_video_encoder_get_output_state (encoder);
    s = gst_caps_get_structure (state->caps, 0);
    if (!strcmp (gst_structure_get_name (s), "video/x-h264")) {
      gst_video_codec_state_unref (state);

      if (buffer_info->size > 4 &&
          GST_READ_UINT32_BE (buf->data + buffer_info->offset) == 0x00000001) {
        GList *l = NULL;
        GstBuffer *hdrs;

        GST_DEBUG_OBJECT (self, "got codecconfig in byte-stream format");

        hdrs = gst_buffer_new_and_alloc (buffer_info->size);
        gst_buffer_fill (hdrs, 0, buf->data + buffer_info->offset,
            buffer_info->size);

        l = g_list_append (l, hdrs);
        gst_video_encoder_set_headers (encoder, l);
      }
    } else {
      GstBuffer *codec_data;

      GST_DEBUG_OBJECT (self, "Handling codec data");

      codec_data = gst_buffer_new_and_alloc (buffer_info->size);
      gst_buffer_fill (codec_data, 0, buf->data + buffer_info->offset,
          buffer_info->size);
      state->codec_data = codec_data;
      gst_video_codec_state_unref (state);

      if (!gst_video_encoder_negotiate (encoder)) {
        gst_video_codec_frame_unref (frame);
        return GST_FLOW_NOT_NEGOTIATED;
      }

      return GST_FLOW_OK;
    }
  }

  if (buffer_info->size > 0) {
    GstBuffer *out_buf;
    GstPad *srcpad;

    srcpad = GST_VIDEO_ENCODER_SRC_PAD (encoder);
    out_buf =
        gst_video_encoder_allocate_output_buffer (encoder, buffer_info->size);
    gst_buffer_fill (out_buf, 0, buf->data + buffer_info->offset,
        buffer_info->size);

    GST_BUFFER_PTS (out_buf) =
        gst_util_uint64_scale (buffer_info->presentation_time_us, GST_USECOND,
        1);

    if (frame) {
      frame->output_buffer = out_buf;
      flow_ret = gst_video_encoder_finish_frame (encoder, frame);
    } else {
      /* This sometimes happens at EOS or if the input is not properly framed,
       * let's handle it gracefully by allocating a new buffer for the current
       * caps and filling it
       */

      GST_ERROR_OBJECT (self, "No corresponding frame found");
      flow_ret = gst_pad_push (srcpad, out_buf);
    }
  } else if (frame) {
    flow_ret = gst_video_encoder_finish_frame (encoder, frame);
  }

  return flow_ret;
}
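MediaCodec reports timestamps in microseconds, which is why the PTS above is scaled to GstClockTime nanoseconds with gst_util_uint64_scale(). A sketch of that conversion (helper name ours):

#include <gst/gst.h>

/* GST_USECOND is 1000 ns, so this multiplies the microsecond timestamp
 * by 1000 with overflow-safe 64-bit scaling. */
static GstClockTime
us_to_gst_time (gint64 presentation_time_us)
{
  return gst_util_uint64_scale (presentation_time_us, GST_USECOND, 1);
}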
Example No. 6
static GstVideoCodecFrame *
_find_nearest_frame (GstAmcVideoEnc * self, GstClockTime reference_timestamp)
{
  GList *l, *best_l = NULL;
  GList *finish_frames = NULL;
  GstVideoCodecFrame *best = NULL;
  guint64 best_timestamp = 0;
  guint64 best_diff = G_MAXUINT64;
  BufferIdentification *best_id = NULL;
  GList *frames;

  frames = gst_video_encoder_get_frames (GST_VIDEO_ENCODER (self));

  for (l = frames; l; l = l->next) {
    GstVideoCodecFrame *tmp = l->data;
    BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
    guint64 timestamp, diff;

    /* This happens for frames that were just added but
     * which were not passed to the component yet. Ignore
     * them here!
     */
    if (!id)
      continue;

    timestamp = id->timestamp;

    if (timestamp > reference_timestamp)
      diff = timestamp - reference_timestamp;
    else
      diff = reference_timestamp - timestamp;

    if (best == NULL || diff < best_diff) {
      best = tmp;
      best_timestamp = timestamp;
      best_diff = diff;
      best_l = l;
      best_id = id;

      /* For frames without timestamp we simply take the first frame */
      if ((reference_timestamp == 0 && timestamp == 0) || diff == 0)
        break;
    }
  }

  if (best_id) {
    for (l = frames; l && l != best_l; l = l->next) {
      GstVideoCodecFrame *tmp = l->data;
      BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
      guint64 diff_time, diff_frames;

      if (id->timestamp > best_timestamp)
        break;

      if (id->timestamp == 0 || best_timestamp == 0)
        diff_time = 0;
      else
        diff_time = best_timestamp - id->timestamp;
      diff_frames = best->system_frame_number - tmp->system_frame_number;

      if (diff_time > MAX_FRAME_DIST_TIME
          || diff_frames > MAX_FRAME_DIST_FRAMES) {
        finish_frames =
            g_list_prepend (finish_frames, gst_video_codec_frame_ref (tmp));
      }
    }
  }

  if (finish_frames) {
    g_warning ("%s: Too old frames, bug in encoder -- please file a bug",
        GST_ELEMENT_NAME (self));
    for (l = finish_frames; l; l = l->next) {
      gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (self), l->data);
    }
  }

  if (best)
    gst_video_codec_frame_ref (best);

  g_list_foreach (frames, (GFunc) gst_video_codec_frame_unref, NULL);
  g_list_free (frames);

  return best;
}
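The timestamp matching above computes an unsigned absolute difference with an explicit branch, because subtracting the larger guint64 from the smaller would wrap around. As an isolated sketch (helper name ours):

#include <glib.h>

/* Unsigned absolute difference without underflow. */
static guint64
abs_diff_u64 (guint64 a, guint64 b)
{
  return a > b ? a - b : b - a;
}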
Example No. 7
static GstFlowReturn
gst_openh264enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstOpenh264Enc *openh264enc = GST_OPENH264ENC (encoder);
  SSourcePicture *src_pic = NULL;
  GstVideoFrame video_frame;
  gboolean force_keyframe;
  gint ret;
  SFrameBSInfo frame_info;
  gfloat fps;
  GstMapInfo map;
  gint i, j;
  gsize buf_length = 0;

  if (frame) {
    /* note: plain `new` throws std::bad_alloc rather than returning NULL,
     * so this check is only defensive */
    src_pic = new SSourcePicture;

    if (src_pic == NULL) {
      /* frame is known to be non-NULL here; no inner re-check needed */
      gst_video_codec_frame_unref (frame);
      return GST_FLOW_ERROR;
    }
    /* fill default src_pic */
    src_pic->iColorFormat = videoFormatI420;
    src_pic->uiTimeStamp = frame->pts / GST_MSECOND;
  }

  openh264enc->frame_count++;
  if (frame) {
    if (G_UNLIKELY (openh264enc->frame_count == 1)) {
      openh264enc->time_per_frame = (GST_SECOND / openh264enc->framerate);
      openh264enc->previous_timestamp = frame->pts;
    } else {
      openh264enc->time_per_frame =
          openh264enc->time_per_frame * 0.8 + (frame->pts -
          openh264enc->previous_timestamp) * 0.2;
      openh264enc->previous_timestamp = frame->pts;
      if (openh264enc->frame_count % 10 == 0) {
        fps = GST_SECOND / (gdouble) openh264enc->time_per_frame;
        openh264enc->encoder->SetOption (ENCODER_OPTION_FRAME_RATE, &fps);
      }
    }
  }

  if (frame) {
    gst_video_frame_map (&video_frame, &openh264enc->input_state->info,
        frame->input_buffer, GST_MAP_READ);
    src_pic->iPicWidth = GST_VIDEO_FRAME_WIDTH (&video_frame);
    src_pic->iPicHeight = GST_VIDEO_FRAME_HEIGHT (&video_frame);
    src_pic->iStride[0] = GST_VIDEO_FRAME_COMP_STRIDE (&video_frame, 0);
    src_pic->iStride[1] = GST_VIDEO_FRAME_COMP_STRIDE (&video_frame, 1);
    src_pic->iStride[2] = GST_VIDEO_FRAME_COMP_STRIDE (&video_frame, 2);
    src_pic->pData[0] = GST_VIDEO_FRAME_COMP_DATA (&video_frame, 0);
    src_pic->pData[1] = GST_VIDEO_FRAME_COMP_DATA (&video_frame, 1);
    src_pic->pData[2] = GST_VIDEO_FRAME_COMP_DATA (&video_frame, 2);

    force_keyframe = GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame);
    if (force_keyframe) {
      openh264enc->encoder->ForceIntraFrame (true);
      GST_DEBUG_OBJECT (openh264enc,
          "Got force key unit event, next frame coded as intra picture");
    }
  }

  memset (&frame_info, 0, sizeof (SFrameBSInfo));
  ret = openh264enc->encoder->EncodeFrame (src_pic, &frame_info);
  if (ret != cmResultSuccess) {
    if (frame) {
      gst_video_frame_unmap (&video_frame);
      gst_video_codec_frame_unref (frame);
      delete src_pic;
      GST_ELEMENT_ERROR (openh264enc, STREAM, ENCODE,
          ("Could not encode frame"), ("Openh264 returned %d", ret));
      return GST_FLOW_ERROR;
    } else {
      return GST_FLOW_EOS;
    }
  }

  if (videoFrameTypeSkip == frame_info.eFrameType) {
    if (frame) {
      gst_video_frame_unmap (&video_frame);
      gst_video_encoder_finish_frame (encoder, frame);
      delete src_pic;
    }

    return GST_FLOW_OK;
  }

  if (frame) {
    gst_video_frame_unmap (&video_frame);
    gst_video_codec_frame_unref (frame);
    delete src_pic;
    src_pic = NULL;
    frame = NULL;
  }

  /* FIXME: openh264 has no way for us to get a connection
   * between the input and output frames, we just have to
   * guess based on the input */
  frame = gst_video_encoder_get_oldest_frame (encoder);
  if (!frame) {
    GST_ELEMENT_ERROR (openh264enc, STREAM, ENCODE,
        ("Could not encode frame"), ("openh264enc returned %d", ret));
    /* frame is NULL here, so there is nothing to unref */
    return GST_FLOW_ERROR;
  }

  if (videoFrameTypeIDR == frame_info.eFrameType) {
    GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
  } else {
    GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
  }

  for (i = 0; i < frame_info.iLayerNum; i++) {
    for (j = 0; j < frame_info.sLayerInfo[i].iNalCount; j++) {
      buf_length += frame_info.sLayerInfo[i].pNalLengthInByte[j];
    }
  }

  frame->output_buffer =
      gst_video_encoder_allocate_output_buffer (encoder, buf_length);
  gst_buffer_map (frame->output_buffer, &map, GST_MAP_WRITE);

  buf_length = 0;
  for (i = 0; i < frame_info.iLayerNum; i++) {
    gsize layer_size = 0;
    for (j = 0; j < frame_info.sLayerInfo[i].iNalCount; j++) {
      layer_size += frame_info.sLayerInfo[i].pNalLengthInByte[j];
    }
    memcpy (map.data + buf_length, frame_info.sLayerInfo[i].pBsBuf, layer_size);
    buf_length += layer_size;
  }

  gst_buffer_unmap (frame->output_buffer, &map);

  GST_LOG_OBJECT (openh264enc, "openh264 picture %scoded OK!",
      (ret != cmResultSuccess) ? "NOT " : "");

  return gst_video_encoder_finish_frame (encoder, frame);
}
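The double loop over layers and NALs computes the total size of one encoded access unit. As a standalone sketch (helper name ours, OpenH264 types as used above):

#include <glib.h>
#include <wels/codec_api.h>

/* Sum the byte lengths of all NAL units across all layers of a
 * SFrameBSInfo, mirroring the size computation in the example above. */
static gsize
frame_bs_size (const SFrameBSInfo * info)
{
  gsize size = 0;
  int i, j;

  for (i = 0; i < info->iLayerNum; i++)
    for (j = 0; j < info->sLayerInfo[i].iNalCount; j++)
      size += info->sLayerInfo[i].pNalLengthInByte[j];
  return size;
}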
Example No. 8
static GstFlowReturn
gst_ffmpegvidenc_flush_buffers (GstFFMpegVidEnc * ffmpegenc, gboolean send)
{
    GstVideoCodecFrame *frame;
    GstFlowReturn flow_ret = GST_FLOW_OK;
    GstBuffer *outbuf;
    gint ret;
    AVPacket *pkt;
    int have_data = 0;

    GST_DEBUG_OBJECT (ffmpegenc, "flushing buffers with sending %d", send);

    /* no need to empty codec if there is none */
    if (!ffmpegenc->opened)
        goto done;

    while ((frame =
                gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (ffmpegenc)))) {
        pkt = g_slice_new0 (AVPacket);
        have_data = 0;

        ret = avcodec_encode_video2 (ffmpegenc->context, pkt, NULL, &have_data);

        if (ret < 0) {              /* there should be something, notify and give up */
#ifndef GST_DISABLE_GST_DEBUG
            GstFFMpegVidEncClass *oclass =
                (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
            GST_WARNING_OBJECT (ffmpegenc,
                                "avenc_%s: failed to flush buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
            g_slice_free (AVPacket, pkt);
            gst_video_codec_frame_unref (frame);
            break;
        }

        /* save stats info if there is some as well as a stats file */
        if (ffmpegenc->file && ffmpegenc->context->stats_out)
            if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
                GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
                                   (("Could not write to file \"%s\"."), ffmpegenc->filename),
                                   GST_ERROR_SYSTEM);

        if (send && have_data) {
            outbuf =
                gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, pkt->data,
                                             pkt->size, 0, pkt->size, pkt, gst_ffmpegvidenc_free_avpacket);
            frame->output_buffer = outbuf;

            if (pkt->flags & AV_PKT_FLAG_KEY)
                GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
            else
                GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);

            flow_ret =
                gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc), frame);
        } else {
            /* no frame data to attach, so the frame will be skipped and
             * removed from the frame list; free the unused packet too */
            av_packet_unref (pkt);
            g_slice_free (AVPacket, pkt);
            gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc), frame);
        }
    }

done:

    return flow_ret;
}
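The destroy notify handed to gst_buffer_new_wrapped_full() is not shown in the example. A plausible implementation (name taken from the call site, body assumed) releases the AVPacket once the wrapping GstBuffer is freed:

#include <glib.h>
#include <libavcodec/avcodec.h>

/* Assumed shape of the helper referenced above: drop the packet's
 * payload, then free the g_slice-allocated AVPacket struct itself. */
static void
gst_ffmpegvidenc_free_avpacket (gpointer pkt)
{
  av_packet_unref ((AVPacket *) pkt);
  g_slice_free (AVPacket, pkt);
}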
Example No. 9
static GstFlowReturn
gst_x265_enc_encode_frame (GstX265Enc * encoder, x265_picture * pic_in,
    GstVideoCodecFrame * input_frame, guint32 * i_nal, gboolean send)
{
  GstVideoCodecFrame *frame = NULL;
  GstBuffer *out_buf = NULL;
  x265_picture pic_out;
  x265_nal *nal;
  int i_size, i, offset;
  int encoder_return;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean update_latency = FALSE;

  if (G_UNLIKELY (encoder->x265enc == NULL)) {
    if (input_frame)
      gst_video_codec_frame_unref (input_frame);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  GST_OBJECT_LOCK (encoder);
  if (encoder->reconfig) {
    /* x265_encoder_reconfig is not implemented yet, so we shut down and
     * re-create the encoder */
    gst_x265_enc_init_encoder (encoder);
    update_latency = TRUE;
  }

  if (pic_in && input_frame) {
    if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (input_frame)) {
      GST_INFO_OBJECT (encoder, "Forcing key frame");
      pic_in->sliceType = X265_TYPE_IDR;
    }
  }
  GST_OBJECT_UNLOCK (encoder);

  if (G_UNLIKELY (update_latency))
    gst_x265_enc_set_latency (encoder);

  encoder_return = x265_encoder_encode (encoder->x265enc,
      &nal, i_nal, pic_in, &pic_out);

  GST_DEBUG_OBJECT (encoder, "encoder result (%d) with %u nal units",
      encoder_return, *i_nal);

  if (encoder_return < 0) {
    GST_ELEMENT_ERROR (encoder, STREAM, ENCODE, ("Encode x265 frame failed."),
        ("x265_encoder_encode return code=%d", encoder_return));
    ret = GST_FLOW_ERROR;
    /* Make sure we finish this frame */
    frame = input_frame;
    goto out;
  }

  /* Input frame is now queued */
  if (input_frame)
    gst_video_codec_frame_unref (input_frame);

  if (!*i_nal) {
    ret = GST_FLOW_OK;
    GST_LOG_OBJECT (encoder, "no output yet");
    goto out;
  }

  frame = gst_video_encoder_get_frame (GST_VIDEO_ENCODER (encoder),
      GPOINTER_TO_INT (pic_out.userData));
  g_assert (frame || !send);

  GST_DEBUG_OBJECT (encoder,
      "output picture ready POC=%d system=%d frame found %d", pic_out.poc,
      GPOINTER_TO_INT (pic_out.userData), frame != NULL);

  if (!send || !frame) {
    GST_LOG_OBJECT (encoder, "not sending (%d) or frame not found (%d)", send,
        frame != NULL);
    ret = GST_FLOW_OK;
    goto out;
  }

  i_size = 0;
  offset = 0;
  for (i = 0; i < *i_nal; i++)
    i_size += nal[i].sizeBytes;
  out_buf = gst_buffer_new_allocate (NULL, i_size, NULL);
  for (i = 0; i < *i_nal; i++) {
    gst_buffer_fill (out_buf, offset, nal[i].payload, nal[i].sizeBytes);
    offset += nal[i].sizeBytes;
  }

  frame->output_buffer = out_buf;

  if (encoder->push_header) {
    GstBuffer *header;

    header = gst_x265_enc_get_header_buffer (encoder);
    frame->output_buffer = gst_buffer_append (header, frame->output_buffer);
    encoder->push_header = FALSE;
  }

  GST_LOG_OBJECT (encoder,
      "output: dts %" G_GINT64_FORMAT " pts %" G_GINT64_FORMAT,
      (gint64) pic_out.dts, (gint64) pic_out.pts);

  frame->dts = pic_out.dts + encoder->dts_offset;

out:
  if (frame) {
    gst_x265_enc_dequeue_frame (encoder, frame);
    ret = gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (encoder), frame);
  }

  return ret;
}
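The gst_video_encoder_get_frame() lookup above only works because the system frame number was stashed in the x265 picture's userData when the frame was submitted for encoding. A sketch of that producer side (assumed from the call site, helper name ours):

#include <gst/video/gstvideoencoder.h>
#include <x265.h>

/* Store the frame id so it round-trips through the encoder and comes
 * back in pic_out.userData, where GPOINTER_TO_INT() recovers it. */
static void
stash_frame_id (x265_picture * pic_in, GstVideoCodecFrame * frame)
{
  pic_in->userData = GINT_TO_POINTER (frame->system_frame_number);
}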
Example No. 10
static GstFlowReturn
gst_pngenc_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
{
  GstPngEnc *pngenc;
  gint row_index;
  png_byte **row_pointers;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoInfo *info;
  GstVideoFrame vframe;

  pngenc = GST_PNGENC (encoder);
  info = &pngenc->input_state->info;

  GST_DEBUG_OBJECT (pngenc, "BEGINNING");

  /* initialize png struct stuff */
  pngenc->png_struct_ptr = png_create_write_struct (PNG_LIBPNG_VER_STRING,
      (png_voidp) NULL, user_error_fn, user_warning_fn);
  if (pngenc->png_struct_ptr == NULL)
    goto struct_init_fail;

  pngenc->png_info_ptr = png_create_info_struct (pngenc->png_struct_ptr);
  if (!pngenc->png_info_ptr)
    goto png_info_fail;

  /* non-0 return is from a longjmp inside of libpng */
  if (setjmp (png_jmpbuf (pngenc->png_struct_ptr)) != 0)
    goto longjmp_fail;

  png_set_filter (pngenc->png_struct_ptr, 0,
      PNG_FILTER_NONE | PNG_FILTER_VALUE_NONE);
  png_set_compression_level (pngenc->png_struct_ptr, pngenc->compression_level);

  png_set_IHDR (pngenc->png_struct_ptr,
      pngenc->png_info_ptr,
      GST_VIDEO_INFO_WIDTH (info),
      GST_VIDEO_INFO_HEIGHT (info),
      pngenc->depth,
      pngenc->png_color_type,
      PNG_INTERLACE_NONE,
      PNG_COMPRESSION_TYPE_DEFAULT, PNG_FILTER_TYPE_DEFAULT);

  png_set_write_fn (pngenc->png_struct_ptr, pngenc,
      (png_rw_ptr) user_write_data, user_flush_data);

  row_pointers = g_new (png_byte *, GST_VIDEO_INFO_HEIGHT (info));
  if (!gst_video_frame_map (&vframe, &pngenc->input_state->info,
          frame->input_buffer, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (pngenc, STREAM, FORMAT, (NULL),
        ("Failed to map video frame, caps problem?"));
    /* release the row pointers and png structs allocated above */
    g_free (row_pointers);
    png_destroy_write_struct (&pngenc->png_struct_ptr, &pngenc->png_info_ptr);
    ret = GST_FLOW_ERROR;
    goto done;
  }

  for (row_index = 0; row_index < GST_VIDEO_INFO_HEIGHT (info); row_index++) {
    row_pointers[row_index] = GST_VIDEO_FRAME_COMP_DATA (&vframe, 0) +
        (row_index * GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0));
  }

  /* allocate the output buffer */
  pngenc->buffer_out = gst_buffer_new ();

  png_write_info (pngenc->png_struct_ptr, pngenc->png_info_ptr);
  png_write_image (pngenc->png_struct_ptr, row_pointers);
  png_write_end (pngenc->png_struct_ptr, NULL);

  g_free (row_pointers);
  gst_video_frame_unmap (&vframe);

  png_destroy_info_struct (pngenc->png_struct_ptr, &pngenc->png_info_ptr);
  png_destroy_write_struct (&pngenc->png_struct_ptr, (png_infopp) NULL);

  /* Set final size and store */
  frame->output_buffer = pngenc->buffer_out;

  pngenc->buffer_out = NULL;

  if ((ret = gst_video_encoder_finish_frame (encoder, frame)) != GST_FLOW_OK)
    goto done;

  if (pngenc->snapshot)
    ret = GST_FLOW_EOS;

done:
  GST_DEBUG_OBJECT (pngenc, "END, ret:%d", ret);

  return ret;

  /* ERRORS */
struct_init_fail:
  {
    GST_ELEMENT_ERROR (pngenc, LIBRARY, INIT, (NULL),
        ("Failed to initialize png structure"));
    return GST_FLOW_ERROR;
  }

png_info_fail:
  {
    png_destroy_write_struct (&(pngenc->png_struct_ptr), (png_infopp) NULL);
    GST_ELEMENT_ERROR (pngenc, LIBRARY, INIT, (NULL),
        ("Failed to initialize the png info structure"));
    return GST_FLOW_ERROR;
  }

longjmp_fail:
  {
    png_destroy_write_struct (&pngenc->png_struct_ptr, &pngenc->png_info_ptr);
    GST_ELEMENT_ERROR (pngenc, LIBRARY, FAILED, (NULL),
        ("returning from longjmp"));
    return GST_FLOW_ERROR;
  }
}
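libpng delivers the compressed stream through the write callback registered with png_set_write_fn(). A plausible shape for it (assumed, not the element's actual code; it relies on the buffer_out field used above):

#include <png.h>
#include <gst/gst.h>

/* Append each chunk of compressed PNG data that libpng emits to the
 * output buffer being accumulated in pngenc->buffer_out. */
static void
user_write_data (png_structp png_ptr, png_bytep data, png_size_t length)
{
  GstPngEnc *pngenc = (GstPngEnc *) png_get_io_ptr (png_ptr);
  GstBuffer *chunk = gst_buffer_new_allocate (NULL, length, NULL);

  gst_buffer_fill (chunk, 0, data, length);
  pngenc->buffer_out = gst_buffer_append (pngenc->buffer_out, chunk);
}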
Example No. 11
static GstFlowReturn
gst_vaapiencode_push_frame (GstVaapiEncode * encode, gint64 timeout)
{
  GstVideoEncoder *const venc = GST_VIDEO_ENCODER_CAST (encode);
  GstVaapiEncodeClass *const klass = GST_VAAPIENCODE_GET_CLASS (encode);
  GstVideoCodecFrame *out_frame;
  GstVaapiCodedBufferProxy *codedbuf_proxy = NULL;
  GstVaapiEncoderStatus status;
  GstBuffer *out_buffer;
  GstFlowReturn ret;

  status = gst_vaapi_encoder_get_buffer_with_timeout (encode->encoder,
      &codedbuf_proxy, timeout);
  if (status == GST_VAAPI_ENCODER_STATUS_NO_BUFFER)
    return GST_VAAPI_ENCODE_FLOW_TIMEOUT;
  if (status != GST_VAAPI_ENCODER_STATUS_SUCCESS)
    goto error_get_buffer;

  out_frame = gst_vaapi_coded_buffer_proxy_get_user_data (codedbuf_proxy);
  if (!out_frame)
    goto error_get_buffer;
  gst_video_codec_frame_ref (out_frame);
  gst_video_codec_frame_set_user_data (out_frame, NULL, NULL);

  /* Update output state */
  GST_VIDEO_ENCODER_STREAM_LOCK (encode);
  if (!ensure_output_state (encode))
    goto error_output_state;
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encode);

  /* Allocate and copy buffer into system memory */
  out_buffer = NULL;
  ret = klass->alloc_buffer (encode,
      GST_VAAPI_CODED_BUFFER_PROXY_BUFFER (codedbuf_proxy), &out_buffer);
  gst_vaapi_coded_buffer_proxy_replace (&codedbuf_proxy, NULL);
  if (ret != GST_FLOW_OK)
    goto error_allocate_buffer;

  gst_buffer_replace (&out_frame->output_buffer, out_buffer);
  gst_buffer_unref (out_buffer);

  GST_TRACE_OBJECT (encode, "output:%" GST_TIME_FORMAT ", size:%zu",
      GST_TIME_ARGS (out_frame->pts), gst_buffer_get_size (out_buffer));

  return gst_video_encoder_finish_frame (venc, out_frame);

  /* ERRORS */
error_get_buffer:
  {
    GST_ERROR ("failed to get encoded buffer (status %d)", status);
    if (codedbuf_proxy)
      gst_vaapi_coded_buffer_proxy_unref (codedbuf_proxy);
    return GST_FLOW_ERROR;
  }
error_allocate_buffer:
  {
    GST_ERROR ("failed to allocate encoded buffer in system memory");
    if (out_buffer)
      gst_buffer_unref (out_buffer);
    gst_video_codec_frame_unref (out_frame);
    return ret;
  }
error_output_state:
  {
    GST_ERROR ("failed to negotiate output state (status %d)", status);
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encode);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
Example No. 12
static GstFlowReturn gst_imx_vpu_base_enc_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *frame)
{
	VpuEncRetCode enc_ret;
	VpuEncEncParam enc_enc_param;
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxVpuBaseEncClass *klass;
	GstImxVpuBaseEnc *vpu_base_enc;
	VpuFrameBuffer input_framebuf;
	GstBuffer *input_buffer;
	gint src_stride;

	vpu_base_enc = GST_IMX_VPU_BASE_ENC(encoder);
	klass = GST_IMX_VPU_BASE_ENC_CLASS(G_OBJECT_GET_CLASS(vpu_base_enc));

	g_assert(klass->set_frame_enc_params != NULL);

	memset(&enc_enc_param, 0, sizeof(enc_enc_param));
	memset(&input_framebuf, 0, sizeof(input_framebuf));

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(frame->input_buffer);

	/* If the incoming frame's buffer is not using physically contiguous memory,
	 * it needs to be copied to the internal input buffer, otherwise the VPU
	 * encoder cannot read the frame */
	if (phys_mem_meta == NULL)
	{
		/* No physical memory metadata found -> buffer is not physically contiguous */

		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		GST_LOG_OBJECT(vpu_base_enc, "input buffer not physically contiguous - frame copy is necessary");

		if (vpu_base_enc->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_base_enc->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;
				GstAllocator *allocator;

				GST_DEBUG_OBJECT(vpu_base_enc, "creating internal bufferpool");

				caps = gst_video_info_to_caps(&(vpu_base_enc->video_info));
				vpu_base_enc->internal_bufferpool = gst_imx_phys_mem_buffer_pool_new(FALSE);
				allocator = gst_imx_vpu_enc_allocator_obtain();

				config = gst_buffer_pool_get_config(vpu_base_enc->internal_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_base_enc->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_base_enc->internal_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_base_enc->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_base_enc, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_base_enc->internal_bufferpool))
				gst_buffer_pool_set_active(vpu_base_enc->internal_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_base_enc->internal_bufferpool, &(vpu_base_enc->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
				return flow_ret;
			}
		}

		/* The internal input buffer exists at this point. Since the incoming frame
		 * is not stored in physical memory, copy its pixels to the internal
		 * input buffer, so the encoder can read them. */

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_base_enc->video_info), frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_base_enc->video_info), vpu_base_enc->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		/* Set the internal input buffer as the encoder's input */
		input_buffer = vpu_base_enc->internal_input_buffer;
		/* And use the internal input buffer's physical memory metadata */
		phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(vpu_base_enc->internal_input_buffer);
	}
	else
	{
		/* Physical memory metadata found -> buffer is physically contiguous
		 * It can be used directly as input for the VPU encoder */
		input_buffer = frame->input_buffer;
	}

	/* Set up physical addresses for the input framebuffer */
	{
		gsize *plane_offsets;
		gint *plane_strides;
		GstVideoMeta *video_meta;
		unsigned char *phys_ptr;

		/* Try to use plane offset and stride information from the video
		 * metadata if present, since these can be more accurate than
		 * the information from the video info */
		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			plane_offsets = video_meta->offset;
			plane_strides = video_meta->stride;
		}
		else
		{
			plane_offsets = vpu_base_enc->video_info.offset;
			plane_strides = vpu_base_enc->video_info.stride;
		}

		phys_ptr = (unsigned char*)(phys_mem_meta->phys_addr);

		input_framebuf.pbufY = phys_ptr;
		input_framebuf.pbufCb = phys_ptr + plane_offsets[1];
		input_framebuf.pbufCr = phys_ptr + plane_offsets[2];
		input_framebuf.pbufMvCol = NULL; /* not used by the VPU encoder */
		input_framebuf.nStrideY = plane_strides[0];
		input_framebuf.nStrideC = plane_strides[1];

		/* this is needed for framebuffers registration below */
		src_stride = plane_strides[0];

		GST_TRACE_OBJECT(vpu_base_enc, "width: %d   height: %d   stride 0: %d   stride 1: %d   offset 0: %" G_GSIZE_FORMAT "   offset 1: %" G_GSIZE_FORMAT "   offset 2: %" G_GSIZE_FORMAT, GST_VIDEO_INFO_WIDTH(&(vpu_base_enc->video_info)), GST_VIDEO_INFO_HEIGHT(&(vpu_base_enc->video_info)), plane_strides[0], plane_strides[1], plane_offsets[0], plane_offsets[1], plane_offsets[2]);
	}

	/* Create framebuffers structure (if not already present) */
	if (vpu_base_enc->framebuffers == NULL)
	{
		GstImxVpuFramebufferParams fbparams;
		gst_imx_vpu_framebuffers_enc_init_info_to_params(&(vpu_base_enc->init_info), &fbparams);
		fbparams.pic_width = vpu_base_enc->open_param.nPicWidth;
		fbparams.pic_height = vpu_base_enc->open_param.nPicHeight;

		vpu_base_enc->framebuffers = gst_imx_vpu_framebuffers_new(&fbparams, gst_imx_vpu_enc_allocator_obtain());
		if (vpu_base_enc->framebuffers == NULL)
		{
			GST_ELEMENT_ERROR(vpu_base_enc, RESOURCE, NO_SPACE_LEFT, ("could not create framebuffers structure"), (NULL));
			return GST_FLOW_ERROR;
		}

		gst_imx_vpu_framebuffers_register_with_encoder(vpu_base_enc->framebuffers, vpu_base_enc->handle, src_stride);
	}

	/* Allocate physical buffer for output data (if not already present) */
	if (vpu_base_enc->output_phys_buffer == NULL)
	{
		vpu_base_enc->output_phys_buffer = (GstImxPhysMemory *)gst_allocator_alloc(gst_imx_vpu_enc_allocator_obtain(), vpu_base_enc->framebuffers->total_size, NULL);

		if (vpu_base_enc->output_phys_buffer == NULL)
		{
			GST_ERROR_OBJECT(vpu_base_enc, "could not allocate physical buffer for output data");
			return GST_FLOW_ERROR;
		}
	}

	/* Set up encoding parameters */
	enc_enc_param.nInVirtOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->mapped_virt_addr); /* TODO */
	enc_enc_param.nInPhyOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->phys_addr);
	enc_enc_param.nInOutputBufLen = vpu_base_enc->output_phys_buffer->mem.size;
	enc_enc_param.nPicWidth = vpu_base_enc->framebuffers->pic_width;
	enc_enc_param.nPicHeight = vpu_base_enc->framebuffers->pic_height;
	enc_enc_param.nFrameRate = vpu_base_enc->open_param.nFrameRate;
	enc_enc_param.pInFrame = &input_framebuf;
	enc_enc_param.nForceIPicture = 0;

	/* Force I-frame if either IS_FORCE_KEYFRAME or IS_FORCE_KEYFRAME_HEADERS is set for the current frame. */
	if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME(frame) || GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS(frame))
	{
		enc_enc_param.nForceIPicture = 1;
		GST_LOG_OBJECT(vpu_base_enc, "got request to make this a keyframe - forcing I frame");
		GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT(frame);
	}

	/* Give the derived class a chance to set up encoding parameters too */
	if (!klass->set_frame_enc_params(vpu_base_enc, &enc_enc_param, &(vpu_base_enc->open_param)))
	{
		GST_ERROR_OBJECT(vpu_base_enc, "derived class could not set frame enc params");
		return GST_FLOW_ERROR;
	}

	/* Main encoding block */
	{
		GstBuffer *output_buffer = NULL;
		gsize output_buffer_offset = 0;
		gboolean frame_finished = FALSE;

		frame->output_buffer = NULL;

		/* Run in a loop until the VPU reports the input as used */
		do
		{
			/* Feed input data */
			enc_ret = VPU_EncEncodeFrame(vpu_base_enc->handle, &enc_enc_param);
			if (enc_ret != VPU_ENC_RET_SUCCESS)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "failed to encode frame: %s", gst_imx_vpu_strerror(enc_ret));
				VPU_EncReset(vpu_base_enc->handle);
				return GST_FLOW_ERROR;
			}

			if (frame_finished)
			{
				GST_WARNING_OBJECT(vpu_base_enc, "frame was already finished for the current input, but input not yet marked as used");
				continue;
			}

			if (enc_enc_param.eOutRetCode & (VPU_ENC_OUTPUT_DIS | VPU_ENC_OUTPUT_SEQHEADER))
			{
				/* Create an output buffer on demand */
				if (output_buffer == NULL)
				{
					output_buffer = gst_video_encoder_allocate_output_buffer(
						encoder,
						vpu_base_enc->output_phys_buffer->mem.size
					);
					frame->output_buffer = output_buffer;
				}

				GST_LOG_OBJECT(vpu_base_enc, "processing output data: %u bytes, output buffer offset %" G_GSIZE_FORMAT, enc_enc_param.nOutOutputSize, output_buffer_offset);

				if (klass->fill_output_buffer != NULL)
				{
					/* Derived class fills data on its own */

					gsize cur_offset = output_buffer_offset;
					output_buffer_offset += klass->fill_output_buffer(
						vpu_base_enc,
						frame,
						cur_offset,
						vpu_base_enc->output_phys_buffer->mapped_virt_addr,
						enc_enc_param.nOutOutputSize,
						enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_SEQHEADER
					);
				}
				else
				{
					/* Use default data filling (= copy input to output) */

					gst_buffer_fill(
						output_buffer,
						output_buffer_offset,
						vpu_base_enc->output_phys_buffer->mapped_virt_addr,
						enc_enc_param.nOutOutputSize
					);
					output_buffer_offset += enc_enc_param.nOutOutputSize;
				}

				if (enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_DIS)
				{
					g_assert(output_buffer != NULL);

					/* Set the output buffer's size to the actual number of bytes
					 * filled by the derived class */
					gst_buffer_set_size(output_buffer, output_buffer_offset);

					/* Set the frame DTS */
					frame->dts = frame->pts;

					/* And finish the frame, handing the output data over to the base class */
					gst_video_encoder_finish_frame(encoder, frame);

					output_buffer = NULL;
					frame_finished = TRUE;

					if (!(enc_enc_param.eOutRetCode & VPU_ENC_INPUT_USED))
						GST_WARNING_OBJECT(vpu_base_enc, "frame finished, but VPU did not report the input as used");

					break;
				}
			}
		}
		while (!(enc_enc_param.eOutRetCode & VPU_ENC_INPUT_USED)); /* VPU_ENC_INPUT_NOT_USED has value 0x0 - cannot use it for flag checks */

		/* If output_buffer is NULL at this point, it means VPU_ENC_OUTPUT_DIS was never communicated
		 * by the VPU, and the buffer is unfinished. -> Drop it. */
		if (output_buffer != NULL)
		{
			GST_WARNING_OBJECT(vpu_base_enc, "frame unfinished; dropping");
			gst_buffer_unref(output_buffer);
			frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
			gst_video_encoder_finish_frame(encoder, frame);
		}
	}

	return GST_FLOW_OK;
}
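Both this example and the next rely on the same drop idiom: leaving output_buffer NULL makes gst_video_encoder_finish_frame() drop the frame instead of pushing data downstream. As an isolated sketch (helper name ours):

#include <gst/video/gstvideoencoder.h>

/* Drop a frame: with no output buffer attached, finish_frame() removes
 * the frame from the list without sending anything downstream. */
static GstFlowReturn
drop_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
{
  frame->output_buffer = NULL;
  return gst_video_encoder_finish_frame (encoder, frame);
}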
Example No. 13
static GstFlowReturn gst_imx_vpu_encoder_base_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *input_frame)
{
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxVpuEncoderBaseClass *klass;
	GstImxVpuEncoderBase *vpu_encoder_base;
	GstBuffer *input_buffer;
	ImxVpuEncParams enc_params;

	vpu_encoder_base = GST_IMX_VPU_ENCODER_BASE(encoder);
	klass = GST_IMX_VPU_ENCODER_BASE_CLASS(G_OBJECT_GET_CLASS(vpu_encoder_base));

	if (vpu_encoder_base->drop)
	{
		input_frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
		gst_video_encoder_finish_frame(encoder, input_frame);
		return GST_FLOW_OK;
	}

	/* Get access to the input buffer's physical address */

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(input_frame->input_buffer);

	/* If the incoming frame's buffer is not using physically contiguous memory,
	 * it needs to be copied to the internal input buffer, otherwise the VPU
	 * encoder cannot read the frame */
	if (phys_mem_meta == NULL)
	{
		/* No physical memory metadata found -> buffer is not physically contiguous */

		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		GST_LOG_OBJECT(vpu_encoder_base, "input buffer not physically contiguous - frame copy is necessary");

		if (vpu_encoder_base->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_encoder_base->internal_input_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;

				GST_DEBUG_OBJECT(vpu_encoder_base, "creating internal bufferpool");

				caps = gst_video_info_to_caps(&(vpu_encoder_base->video_info));
				vpu_encoder_base->internal_input_bufferpool = gst_imx_phys_mem_buffer_pool_new(FALSE);

				gst_object_ref(vpu_encoder_base->phys_mem_allocator);

				config = gst_buffer_pool_get_config(vpu_encoder_base->internal_input_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_encoder_base->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, vpu_encoder_base->phys_mem_allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_encoder_base->internal_input_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_encoder_base->internal_input_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_encoder_base, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_encoder_base->internal_input_bufferpool))
				gst_buffer_pool_set_active(vpu_encoder_base->internal_input_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_encoder_base->internal_input_bufferpool, &(vpu_encoder_base->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_encoder_base, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
				return flow_ret;
			}
		}

		/* The internal input buffer exists at this point. Since the incoming frame
		 * is not stored in physical memory, copy its pixels to the internal
		 * input buffer, so the encoder can read them. */

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_encoder_base->video_info), input_frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_encoder_base->video_info), vpu_encoder_base->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		/* Set the input buffer as the encoder's input */
		input_buffer = vpu_encoder_base->internal_input_buffer;
		/* And use the input buffer's physical memory metadata */
		phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(vpu_encoder_base->internal_input_buffer);
	}
	else
	{
		/* Physical memory metadata found -> buffer is physically contiguous
		 * It can be used directly as input for the VPU encoder */
		input_buffer = input_frame->input_buffer;
	}


	/* Prepare the input buffer's information (strides, plane offsets ..) for encoding */

	{
		GstVideoMeta *video_meta;

		/* Try to use plane offset and stride information from the video
		 * metadata if present, since these can be more accurate than
		 * the information from the video info */
		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			vpu_encoder_base->input_framebuffer.y_stride = video_meta->stride[0];
			vpu_encoder_base->input_framebuffer.cbcr_stride = video_meta->stride[1];

			vpu_encoder_base->input_framebuffer.y_offset = video_meta->offset[0];
			vpu_encoder_base->input_framebuffer.cb_offset = video_meta->offset[1];
			vpu_encoder_base->input_framebuffer.cr_offset = video_meta->offset[2];
		}
		else
		{
			vpu_encoder_base->input_framebuffer.y_stride = GST_VIDEO_INFO_PLANE_STRIDE(&(vpu_encoder_base->video_info), 0);
			vpu_encoder_base->input_framebuffer.cbcr_stride = GST_VIDEO_INFO_PLANE_STRIDE(&(vpu_encoder_base->video_info), 1);

			vpu_encoder_base->input_framebuffer.y_offset = GST_VIDEO_INFO_PLANE_OFFSET(&(vpu_encoder_base->video_info), 0);
			vpu_encoder_base->input_framebuffer.cb_offset = GST_VIDEO_INFO_PLANE_OFFSET(&(vpu_encoder_base->video_info), 1);
			vpu_encoder_base->input_framebuffer.cr_offset = GST_VIDEO_INFO_PLANE_OFFSET(&(vpu_encoder_base->video_info), 2);
		}

		vpu_encoder_base->input_framebuffer.mvcol_offset = 0; /* this is not used by the encoder */
		vpu_encoder_base->input_framebuffer.context = GUINT_TO_POINTER(input_frame->system_frame_number);

		vpu_encoder_base->input_dmabuffer.fd = -1;
		vpu_encoder_base->input_dmabuffer.physical_address = phys_mem_meta->phys_addr;
		vpu_encoder_base->input_dmabuffer.size = gst_buffer_get_size(input_buffer);
	}


	/* Prepare the encoding parameters */

	memset(&enc_params, 0, sizeof(enc_params));
	imx_vpu_enc_set_default_encoding_params(vpu_encoder_base->encoder, &enc_params);
	enc_params.force_I_frame = 0;
	enc_params.acquire_output_buffer = gst_imx_vpu_encoder_base_acquire_output_buffer;
	enc_params.finish_output_buffer = gst_imx_vpu_encoder_base_finish_output_buffer;
	enc_params.output_buffer_context = vpu_encoder_base;

	/* Force I-frame if either IS_FORCE_KEYFRAME or IS_FORCE_KEYFRAME_HEADERS is set for the current frame. */
	if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME(input_frame) || GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS(input_frame))
	{
		enc_params.force_I_frame = 1;
		GST_LOG_OBJECT(vpu_encoder_base, "got request to make this a keyframe - forcing I frame");
		GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT(input_frame);
	}

	/* Give the derived class a chance to set up encoding parameters too */
	if ((klass->set_frame_enc_params != NULL) && !klass->set_frame_enc_params(vpu_encoder_base, &enc_params))
	{
		GST_ERROR_OBJECT(vpu_encoder_base, "derived class could not set frame enc params");
		return GST_FLOW_ERROR;
	}


	/* Main encoding block */
	{
		ImxVpuEncReturnCodes enc_ret;
		unsigned int output_code = 0;
		ImxVpuEncodedFrame encoded_data_frame;

		vpu_encoder_base->output_buffer = NULL;

		/* The actual encoding call */
		memset(&encoded_data_frame, 0, sizeof(ImxVpuEncodedFrame));
		enc_ret = imx_vpu_enc_encode(vpu_encoder_base->encoder, &(vpu_encoder_base->input_frame), &encoded_data_frame, &enc_params, &output_code);
		if (enc_ret != IMX_VPU_ENC_RETURN_CODE_OK)
		{
			GST_ERROR_OBJECT(vpu_encoder_base, "failed to encode frame: %s", imx_vpu_enc_error_string(enc_ret));
			if (vpu_encoder_base->output_buffer != NULL)
				gst_buffer_unref(vpu_encoder_base->output_buffer);
			return GST_FLOW_ERROR;
		}

		/* Give the derived class a chance to process the output_block_buffer */
		if ((klass->process_output_buffer != NULL) && !klass->process_output_buffer(vpu_encoder_base, input_frame, &(vpu_encoder_base->output_buffer)))
		{
			GST_ERROR_OBJECT(vpu_encoder_base, "derived class reports failure while processing encoded output");
			if (vpu_encoder_base->output_buffer != NULL)
				gst_buffer_unref(vpu_encoder_base->output_buffer);
			return GST_FLOW_ERROR;
		}

		if (output_code & IMX_VPU_ENC_OUTPUT_CODE_ENCODED_FRAME_AVAILABLE)
		{
			GST_LOG_OBJECT(vpu_encoder_base, "VPU outputs encoded frame");

			/* TODO: make use of the frame context that is retrieved with get_frame(i)
			 * This is not strictly necessary, since the VPU encoder does not
			 * do frame reordering, nor does it produce delays, but it would
			 * be a bit cleaner. */

			input_frame->dts = input_frame->pts;

			/* Take all of the encoded bits. The adapter contains an encoded frame
			 * at this point. */
			input_frame->output_buffer = vpu_encoder_base->output_buffer;

			/* And finish the frame, handing the output data over to the base class */
			gst_video_encoder_finish_frame(encoder, input_frame);
		}
		else
		{
			/* If at this point IMX_VPU_ENC_OUTPUT_CODE_ENCODED_FRAME_AVAILABLE is not set
			 * in the output_code, it means the input was used up before a frame could be
			 * encoded. Therefore, no output frame can be pushed downstream. Note that this
			 * should not happen during normal operation, so a warning is logged. */

			if (vpu_encoder_base->output_buffer != NULL)
				gst_buffer_unref(vpu_encoder_base->output_buffer);

			GST_WARNING_OBJECT(vpu_encoder_base, "frame unfinished; dropping");
			input_frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
			gst_video_encoder_finish_frame(encoder, input_frame);
		}
	}


	return GST_FLOW_OK;
}
Example No. 14
static GstFlowReturn
gst_ffmpegvidenc_flush_buffers (GstFFMpegVidEnc * ffmpegenc, gboolean send)
{
  GstVideoCodecFrame *frame;
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstBuffer *outbuf;
  gint ret_size;

  GST_DEBUG_OBJECT (ffmpegenc, "flushing buffers with sending %d", send);

  /* no need to empty codec if there is none */
  if (!ffmpegenc->opened)
    goto done;

  while ((frame =
          gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (ffmpegenc)))) {

    ffmpegenc_setup_working_buf (ffmpegenc);

    ret_size = avcodec_encode_video (ffmpegenc->context,
        ffmpegenc->working_buf, ffmpegenc->working_buf_size, NULL);

    if (ret_size < 0) {         /* there should be something, notify and give up */
#ifndef GST_DISABLE_GST_DEBUG
      GstFFMpegVidEncClass *oclass =
          (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
      GST_WARNING_OBJECT (ffmpegenc,
          "avenc_%s: failed to flush buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
      gst_video_codec_frame_unref (frame);
      break;
    }

    /* save stats info if there is some as well as a stats file */
    if (ffmpegenc->file && ffmpegenc->context->stats_out)
      if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
        GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
            (("Could not write to file \"%s\"."), ffmpegenc->filename),
            GST_ERROR_SYSTEM);

    if (send) {
      if (gst_video_encoder_allocate_output_frame (GST_VIDEO_ENCODER
              (ffmpegenc), frame, ret_size) != GST_FLOW_OK) {
#ifndef GST_DISABLE_GST_DEBUG
        GstFFMpegVidEncClass *oclass =
            (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
        GST_WARNING_OBJECT (ffmpegenc,
            "avenc_%s: failed to allocate buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
        gst_video_codec_frame_unref (frame);
        break;
      }
      outbuf = frame->output_buffer;
      gst_buffer_fill (outbuf, 0, ffmpegenc->working_buf, ret_size);

      if (ffmpegenc->context->coded_frame->key_frame)
        GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);

      flow_ret =
          gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (ffmpegenc), frame);
    } else {
      gst_video_codec_frame_unref (frame);
    }
  }

done:

  return flow_ret;
}
Example No. 15
static GstFlowReturn
gst_ffmpegvidenc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstFFMpegVidEnc *ffmpegenc = (GstFFMpegVidEnc *) encoder;
  GstBuffer *outbuf;
  gint ret_size = 0, c;
  GstVideoInfo *info = &ffmpegenc->input_state->info;
  GstVideoFrame vframe;

  if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame))
    ffmpegenc->picture->pict_type = AV_PICTURE_TYPE_I;

  if (!gst_video_frame_map (&vframe, info, frame->input_buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (encoder, "Failed to map input buffer");
    return GST_FLOW_ERROR;
  }

  /* Fill avpicture */
  for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {
    if (c < GST_VIDEO_INFO_N_COMPONENTS (info)) {
      ffmpegenc->picture->data[c] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, c);
      ffmpegenc->picture->linesize[c] =
          GST_VIDEO_FRAME_COMP_STRIDE (&vframe, c);
    } else {
      ffmpegenc->picture->data[c] = NULL;
      ffmpegenc->picture->linesize[c] = 0;
    }
  }

  ffmpegenc->picture->pts =
      gst_ffmpeg_time_gst_to_ff (frame->pts /
      ffmpegenc->context->ticks_per_frame, ffmpegenc->context->time_base);

  ffmpegenc_setup_working_buf (ffmpegenc);

  ret_size = avcodec_encode_video (ffmpegenc->context,
      ffmpegenc->working_buf, ffmpegenc->working_buf_size, ffmpegenc->picture);

  gst_video_frame_unmap (&vframe);

  if (ret_size < 0)
    goto encode_fail;

  /* Encoder needs more data */
  if (!ret_size)
    return GST_FLOW_OK;

  /* save stats info if there is some as well as a stats file */
  if (ffmpegenc->file && ffmpegenc->context->stats_out)
    if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
      GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
          (("Could not write to file \"%s\"."), ffmpegenc->filename),
          GST_ERROR_SYSTEM);

  gst_video_codec_frame_unref (frame);

  /* Get oldest frame */
  frame = gst_video_encoder_get_oldest_frame (encoder);

  /* Allocate output buffer */
  if (gst_video_encoder_allocate_output_frame (encoder, frame,
          ret_size) != GST_FLOW_OK) {
    gst_video_codec_frame_unref (frame);
    goto alloc_fail;
  }

  outbuf = frame->output_buffer;
  gst_buffer_fill (outbuf, 0, ffmpegenc->working_buf, ret_size);

  /* buggy codec may not set coded_frame */
  if (ffmpegenc->context->coded_frame) {
    if (ffmpegenc->context->coded_frame->key_frame)
      GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
  } else
    GST_WARNING_OBJECT (ffmpegenc, "codec did not provide keyframe info");

  /* Reset frame type */
  if (ffmpegenc->picture->pict_type)
    ffmpegenc->picture->pict_type = 0;

  return gst_video_encoder_finish_frame (encoder, frame);

  /* ERRORS */
encode_fail:
  {
#ifndef GST_DISABLE_GST_DEBUG
    GstFFMpegVidEncClass *oclass =
        (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
    GST_ERROR_OBJECT (ffmpegenc,
        "avenc_%s: failed to encode buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
    return GST_FLOW_OK;
  }
alloc_fail:
  {
#ifndef GST_DISABLE_GST_DEBUG
    GstFFMpegVidEncClass *oclass =
        (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
    GST_ERROR_OBJECT (ffmpegenc,
        "avenc_%s: failed to allocate buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
    return GST_FLOW_ERROR;
  }
}
Example No. 16
static GstFlowReturn gst_fsl_vpu_base_enc_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *frame)
{
	VpuEncRetCode enc_ret;
	VpuEncEncParam enc_enc_param;
	GstFslPhysMemMeta *phys_mem_meta;
	GstFslVpuBaseEncClass *klass;
	GstFslVpuBaseEnc *vpu_base_enc;
	VpuFrameBuffer input_framebuf;
	GstBuffer *input_buffer;

	vpu_base_enc = GST_FSL_VPU_BASE_ENC(encoder);
	klass = GST_FSL_VPU_BASE_ENC_CLASS(G_OBJECT_GET_CLASS(vpu_base_enc));

	g_assert(klass->set_frame_enc_params != NULL);

	memset(&enc_enc_param, 0, sizeof(enc_enc_param));
	memset(&input_framebuf, 0, sizeof(input_framebuf));

	phys_mem_meta = GST_FSL_PHYS_MEM_META_GET(frame->input_buffer);

	if (phys_mem_meta == NULL)
	{
		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		if (vpu_base_enc->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_base_enc->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;
				GstAllocator *allocator;

				caps = gst_video_info_to_caps(&(vpu_base_enc->video_info));
				vpu_base_enc->internal_bufferpool = gst_fsl_phys_mem_buffer_pool_new(FALSE);
				allocator = gst_fsl_vpu_enc_allocator_obtain();

				config = gst_buffer_pool_get_config(vpu_base_enc->internal_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_base_enc->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_FSL_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_base_enc->internal_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_base_enc->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_base_enc, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_base_enc->internal_bufferpool))
				gst_buffer_pool_set_active(vpu_base_enc->internal_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_base_enc->internal_bufferpool, &(vpu_base_enc->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
				return flow_ret;
			}
		}

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_base_enc->video_info), frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_base_enc->video_info), vpu_base_enc->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		input_buffer = vpu_base_enc->internal_input_buffer;
		phys_mem_meta = GST_FSL_PHYS_MEM_META_GET(vpu_base_enc->internal_input_buffer);
	}
	else
		input_buffer = frame->input_buffer;

	{
		gsize *plane_offsets;
		gint *plane_strides;
		GstVideoMeta *video_meta;
		unsigned char *phys_ptr;

		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			plane_offsets = video_meta->offset;
			plane_strides = video_meta->stride;
		}
		else
		{
			plane_offsets = vpu_base_enc->video_info.offset;
			plane_strides = vpu_base_enc->video_info.stride;
		}

		phys_ptr = (unsigned char*)(phys_mem_meta->phys_addr);

		input_framebuf.pbufY = phys_ptr;
		input_framebuf.pbufCb = phys_ptr + plane_offsets[1];
		input_framebuf.pbufCr = phys_ptr + plane_offsets[2];
		input_framebuf.pbufMvCol = NULL;
		input_framebuf.nStrideY = plane_strides[0];
		input_framebuf.nStrideC = plane_strides[1];

		GST_TRACE_OBJECT(vpu_base_enc,
			"width: %d  height: %d  stride 0: %d  stride 1: %d  "
			"offset 0: %" G_GSIZE_FORMAT "  offset 1: %" G_GSIZE_FORMAT "  offset 2: %" G_GSIZE_FORMAT,
			GST_VIDEO_INFO_WIDTH(&(vpu_base_enc->video_info)),
			GST_VIDEO_INFO_HEIGHT(&(vpu_base_enc->video_info)),
			plane_strides[0], plane_strides[1],
			plane_offsets[0], plane_offsets[1], plane_offsets[2]);

		if (vpu_base_enc->framebuffers == NULL)
		{
			GstFslVpuFramebufferParams fbparams;
			gst_fsl_vpu_framebuffers_enc_init_info_to_params(&(vpu_base_enc->init_info), &fbparams);
			fbparams.pic_width = vpu_base_enc->open_param.nPicWidth;
			fbparams.pic_height = vpu_base_enc->open_param.nPicHeight;
			vpu_base_enc->framebuffers = gst_fsl_vpu_framebuffers_new(&fbparams, gst_fsl_vpu_enc_allocator_obtain());
			gst_fsl_vpu_framebuffers_register_with_encoder(vpu_base_enc->framebuffers, vpu_base_enc->handle, plane_strides[0]);
		}

		if (vpu_base_enc->output_phys_buffer == NULL)
		{
			vpu_base_enc->output_phys_buffer = (GstFslPhysMemory *)gst_allocator_alloc(gst_fsl_vpu_enc_allocator_obtain(), vpu_base_enc->framebuffers->total_size, NULL);

			if (vpu_base_enc->output_phys_buffer == NULL)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "could not allocate physical buffer for output data");
				return GST_FLOW_ERROR;
			}
		}
	}

	enc_enc_param.nInVirtOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->mapped_virt_addr); /* TODO */
	enc_enc_param.nInPhyOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->phys_addr);
	enc_enc_param.nInOutputBufLen = vpu_base_enc->output_phys_buffer->mem.size;
	enc_enc_param.nPicWidth = vpu_base_enc->framebuffers->pic_width;
	enc_enc_param.nPicHeight = vpu_base_enc->framebuffers->pic_height;
	enc_enc_param.nFrameRate = vpu_base_enc->open_param.nFrameRate;
	enc_enc_param.pInFrame = &input_framebuf;

	if (!klass->set_frame_enc_params(vpu_base_enc, &enc_enc_param, &(vpu_base_enc->open_param)))
	{
		GST_ERROR_OBJECT(vpu_base_enc, "derived class could not frame enc params");
		return GST_FLOW_ERROR;
	}

	enc_ret = VPU_EncEncodeFrame(vpu_base_enc->handle, &enc_enc_param);
	if (enc_ret != VPU_ENC_RET_SUCCESS)
	{
		GST_ERROR_OBJECT(vpu_base_enc, "failed to encode frame: %s", gst_fsl_vpu_strerror(enc_ret));
		VPU_EncReset(vpu_base_enc->handle);
		return GST_FLOW_ERROR;
	}

	GST_LOG_OBJECT(vpu_base_enc, "out ret code: 0x%x  out size: %u", enc_enc_param.eOutRetCode, enc_enc_param.nOutOutputSize);

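	/* Only push output downstream when the VPU actually produced something:
	 * either an encoded frame (VPU_ENC_OUTPUT_DIS) or a sequence header.
	 * Any other return code means the input was consumed without output. */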
	if ((enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_DIS) || (enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_SEQHEADER))
	{
		if (gst_video_encoder_allocate_output_frame(encoder, frame, enc_enc_param.nOutOutputSize) != GST_FLOW_OK)
		{
			GST_ERROR_OBJECT(vpu_base_enc, "could not allocate output frame");
			return GST_FLOW_ERROR;
		}

		gst_buffer_fill(frame->output_buffer, 0, vpu_base_enc->output_phys_buffer->mapped_virt_addr, enc_enc_param.nOutOutputSize);
		return gst_video_encoder_finish_frame(encoder, frame);
	}

	return GST_FLOW_OK;
}
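
For context, a minimal sketch (untested) of how an application drives a GstVideoEncoder-based element such as the one above; "vpuenc" is a placeholder element name, and the base class calls handle_frame() once per raw input frame:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  /* "vpuenc" is hypothetical; substitute any encoder element */
  pipeline = gst_parse_launch (
      "videotestsrc num-buffers=100 ! vpuenc ! fakesink", NULL);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Block until the stream finishes or errors out */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
  if (msg != NULL)
    gst_message_unref (msg);

  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}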
static GstFlowReturn
gst_webp_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstWebpEnc *enc = GST_WEBP_ENC (encoder);
  GstBuffer *out_buffer = NULL;
  GstVideoFrame vframe;

  GST_LOG_OBJECT (enc, "got new frame");

  gst_webp_set_picture_params (enc);

  if (!gst_video_frame_map (&vframe, &enc->input_state->info,
          frame->input_buffer, GST_MAP_READ))
    return GST_FLOW_ERROR;

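  /* For planar YUV input the WebPPicture points straight at the mapped
   * planes, avoiding a copy; for RGB(A) the WebPPictureImport* helpers
   * convert and copy the pixels into the picture instead. */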
  if (!enc->use_argb) {
    enc->webp_picture.y = GST_VIDEO_FRAME_COMP_DATA (&vframe, 0);
    enc->webp_picture.u = GST_VIDEO_FRAME_COMP_DATA (&vframe, 1);
    enc->webp_picture.v = GST_VIDEO_FRAME_COMP_DATA (&vframe, 2);

    enc->webp_picture.y_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0);
    enc->webp_picture.uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 1);

  } else {
    switch (enc->rgb_format) {
      case GST_VIDEO_FORMAT_RGB:
        WebPPictureImportRGB (&enc->webp_picture,
            GST_VIDEO_FRAME_COMP_DATA (&vframe, 0),
            GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0));
        break;
      case GST_VIDEO_FORMAT_RGBA:
        WebPPictureImportRGBA (&enc->webp_picture,
            GST_VIDEO_FRAME_COMP_DATA (&vframe, 0),
            GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0));
        break;
      default:
        break;
    }
  }

  if (!WebPEncode (&enc->webp_config, &enc->webp_picture)) {
    GST_ERROR_OBJECT (enc, "Failed to encode WebPPicture");
    WebPPictureFree (&enc->webp_picture);
    gst_video_frame_unmap (&vframe);
    return GST_FLOW_ERROR;
  }

  WebPPictureFree (&enc->webp_picture);
  gst_video_frame_unmap (&vframe);

  out_buffer = gst_buffer_new_allocate (NULL, enc->webp_writer.size, NULL);
  if (!out_buffer) {
    GST_ERROR_OBJECT (enc, "Failed to create output buffer");
    free (enc->webp_writer.mem);
    return GST_FLOW_ERROR;
  }

  gst_buffer_fill (out_buffer, 0, enc->webp_writer.mem,
      enc->webp_writer.size);
  free (enc->webp_writer.mem);

  frame->output_buffer = out_buffer;
  return gst_video_encoder_finish_frame (encoder, frame);
}
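
Where the fine-grained WebPConfig/WebPPicture control above is not required, libwebp also provides a one-shot encoding API. A minimal sketch; the wrapper function is illustrative and not part of the element:

#include <stdint.h>
#include <webp/encode.h>

/* Encode one packed RGB frame in a single call. Returns the encoded
 * size (0 on failure); the caller releases *output with WebPFree(). */
static size_t
encode_rgb_frame (const uint8_t * rgb, int width, int height,
    int stride, float quality, uint8_t ** output)
{
  return WebPEncodeRGB (rgb, width, height, stride, quality, output);
}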
Example #18
static GstFlowReturn
gst_pnmenc_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
{
  GstPnmenc *pnmenc;
  guint size, pixels;
  GstMapInfo omap, imap;
  gchar *header = NULL;
  GstVideoInfo *info;
  GstFlowReturn ret = GST_FLOW_OK;
  guint i_rowstride, o_rowstride;
  guint bytes = 0, index, head_size;
  guint i, j;

  pnmenc = GST_PNMENC (encoder);
  info = &pnmenc->input_state->info;

  switch (GST_VIDEO_INFO_FORMAT (info)) {
    case GST_VIDEO_FORMAT_RGB:
      pixels = size = pnmenc->info.width * pnmenc->info.height * 3;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      pixels = size = pnmenc->info.width * pnmenc->info.height * 1;
      break;
    default:
      ret = GST_FLOW_NOT_NEGOTIATED;
      goto done;
  }

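  /* PNM magic numbers: P1/P2/P3 are the ASCII bitmap/graymap/pixmap
   * variants, P4/P5/P6 the raw (binary) ones. Given the enum values this
   * expression relies on (types 1..3, ASCII encoding equal to 1),
   * "type + 3 * (1 - encoding)" produces exactly that mapping. */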
  header = g_strdup_printf ("P%i\n%i %i\n%i\n",
      pnmenc->info.type + 3 * (1 - pnmenc->info.encoding), pnmenc->info.width,
      pnmenc->info.height, pnmenc->info.max);

  if (pnmenc->info.encoding == GST_PNM_ENCODING_ASCII) {
    /* Per component 4 bytes are used in case of ASCII encoding */
    size = size * 4;
    size += strlen (header);
    frame->output_buffer =
        gst_video_encoder_allocate_output_buffer (encoder, (size + size / 20));
  } else {
    size += strlen (header);
    frame->output_buffer =
        gst_video_encoder_allocate_output_buffer (encoder, size);
  }

  if (gst_buffer_map (frame->output_buffer, &omap, GST_MAP_WRITE) == FALSE) {
    ret = GST_FLOW_ERROR;
    goto done;
  }
  if (gst_buffer_map (frame->input_buffer, &imap, GST_MAP_READ) == FALSE) {
    /* Unmap already mapped buffer */
    gst_buffer_unmap (frame->output_buffer, &omap);
    ret = GST_FLOW_ERROR;
    goto done;
  }
  memcpy (omap.data, header, strlen (header));

  head_size = strlen (header);
  if (pnmenc->info.encoding == GST_PNM_ENCODING_ASCII) {
    /* We need to convert to ASCII */
    if (pnmenc->info.width % 4 != 0) {
      /* Convert from gstreamer rowstride to PNM rowstride */
      if (pnmenc->info.type == GST_PNM_TYPE_PIXMAP) {
        o_rowstride = 3 * pnmenc->info.width;
      } else {
        o_rowstride = pnmenc->info.width;
      }
      i_rowstride = GST_VIDEO_INFO_COMP_STRIDE (info, 0);

      for (i = 0; i < pnmenc->info.height; i++) {
        index = i * i_rowstride;
        for (j = 0; j < o_rowstride; j++, bytes++, index++) {
          g_snprintf ((char *) omap.data + head_size, 4, "%3i",
              imap.data[index]);
          head_size += 3;
          omap.data[head_size++] = ' ';
          /* Add a newline so that the file does not end up as one single big line */
          if (!((bytes + 1) % 20))
            omap.data[head_size++] = '\n';
        }
      }
    } else {
      for (i = 0; i < pixels; i++) {
        g_snprintf ((char *) omap.data + head_size, 4, "%3i", imap.data[i]);
        head_size += 3;
        omap.data[head_size++] = ' ';
        if (!((i + 1) % 20))
          omap.data[head_size++] = '\n';
      }
    }
  } else {
    /* Need to convert from GStreamer rowstride to PNM rowstride */
    if (pnmenc->info.width % 4 != 0) {
      if (pnmenc->info.type == GST_PNM_TYPE_PIXMAP) {
        o_rowstride = 3 * pnmenc->info.width;
      } else {
        o_rowstride = pnmenc->info.width;
      }
      i_rowstride = GST_VIDEO_INFO_COMP_STRIDE (info, 0);

      for (i = 0; i < pnmenc->info.height; i++)
        memcpy (omap.data + head_size + o_rowstride * i,
            imap.data + i_rowstride * i, o_rowstride);
    } else {
      /* size contains the complete image size including the header size;
         exclude the header while copying the payload */
      memcpy (omap.data + head_size, imap.data, (size - head_size));
    }
  }

  gst_buffer_unmap (frame->output_buffer, &omap);
  gst_buffer_unmap (frame->input_buffer, &imap);

  ret = gst_video_encoder_finish_frame (encoder, frame);

done:
  g_free (header);
  return ret;
}
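
For reference, the raw (P5) graymap layout produced above can be written with plain stdio as well; a free-standing sketch, assuming tightly packed rows (stride == width):

#include <stdio.h>

/* Write an 8-bit grayscale image as a binary PGM (P5) file */
static int
write_pgm (const char *path, const unsigned char *pixels, int width,
    int height)
{
  FILE *f = fopen (path, "wb");

  if (f == NULL)
    return -1;

  /* Header: magic number, dimensions, maximum sample value */
  fprintf (f, "P5\n%d %d\n255\n", width, height);
  fwrite (pixels, 1, (size_t) width * height, f);

  return fclose (f);
}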
Example #19
static GstFlowReturn
gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;
#ifdef HAVE_OPENJPEG_1
  opj_cinfo_t *enc;
  GstMapInfo map;
  guint length;
  opj_cio_t *io;
#else
  opj_codec_t *enc;
  opj_stream_t *stream;
  MemStream mstream;
#endif
  opj_image_t *image;
  GstVideoFrame vframe;

  GST_DEBUG_OBJECT (self, "Handling frame");

  enc = opj_create_compress (self->codec_format);
  if (!enc)
    goto initialization_error;

#ifdef HAVE_OPENJPEG_1
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_event_mgr_t callbacks;

    callbacks.error_handler = gst_openjpeg_enc_opj_error;
    callbacks.warning_handler = gst_openjpeg_enc_opj_warning;
    callbacks.info_handler = gst_openjpeg_enc_opj_info;
    opj_set_event_mgr ((opj_common_ptr) enc, &callbacks, self);
  } else {
    opj_set_event_mgr ((opj_common_ptr) enc, NULL, NULL);
  }
#else
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_set_info_handler (enc, gst_openjpeg_enc_opj_info, self);
    opj_set_warning_handler (enc, gst_openjpeg_enc_opj_warning, self);
    opj_set_error_handler (enc, gst_openjpeg_enc_opj_error, self);
  } else {
    opj_set_info_handler (enc, NULL, NULL);
    opj_set_warning_handler (enc, NULL, NULL);
    opj_set_error_handler (enc, NULL, NULL);
  }
#endif

  if (!gst_video_frame_map (&vframe, &self->input_state->info,
          frame->input_buffer, GST_MAP_READ))
    goto map_read_error;

  image = gst_openjpeg_enc_fill_image (self, &vframe);
  if (!image)
    goto fill_image_error;
  gst_video_frame_unmap (&vframe);

  opj_setup_encoder (enc, &self->params, image);

#ifdef HAVE_OPENJPEG_1
  io = opj_cio_open ((opj_common_ptr) enc, NULL, 0);
  if (!io)
    goto open_error;

  if (!opj_encode (enc, io, image, NULL))
    goto encode_error;

  opj_image_destroy (image);

  length = cio_tell (io);

  ret =
      gst_video_encoder_allocate_output_frame (encoder, frame,
      length + (self->is_jp2c ? 8 : 0));
  if (ret != GST_FLOW_OK)
    goto allocate_error;

  gst_buffer_fill (frame->output_buffer, self->is_jp2c ? 8 : 0, io->buffer,
      length);
  if (self->is_jp2c) {
    gst_buffer_map (frame->output_buffer, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, length + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_buffer_unmap (frame->output_buffer, &map);
  }

  opj_cio_close (io);
  opj_destroy_compress (enc);
#else
  stream = opj_stream_create (4096, OPJ_FALSE);
  if (!stream)
    goto open_error;

  mstream.allocsize = 4096;
  mstream.data = g_malloc (mstream.allocsize);
  mstream.offset = 0;
  mstream.size = 0;

  opj_stream_set_read_function (stream, read_fn);
  opj_stream_set_write_function (stream, write_fn);
  opj_stream_set_skip_function (stream, skip_fn);
  opj_stream_set_seek_function (stream, seek_fn);
  opj_stream_set_user_data (stream, &mstream);
  opj_stream_set_user_data_length (stream, mstream.size);

  if (!opj_start_compress (enc, image, stream))
    goto encode_error;

  if (!opj_encode (enc, stream))
    goto encode_error;

  if (!opj_end_compress (enc, stream))
    goto encode_error;

  opj_image_destroy (image);
  opj_stream_destroy (stream);
  opj_destroy_codec (enc);

  frame->output_buffer = gst_buffer_new ();

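  /* The jp2c variant prepends an 8-byte JPEG 2000 box header: a 32-bit
   * big-endian box length followed by the 'jp2c' fourcc, marking a
   * contiguous codestream box. */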
  if (self->is_jp2c) {
    GstMapInfo map;
    GstMemory *mem;

    mem = gst_allocator_alloc (NULL, 8, NULL);
    gst_memory_map (mem, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, mstream.size + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_memory_unmap (mem, &map);
    gst_buffer_append_memory (frame->output_buffer, mem);
  }

  gst_buffer_append_memory (frame->output_buffer,
      gst_memory_new_wrapped (0, mstream.data, mstream.allocsize, 0,
          mstream.size, NULL, (GDestroyNotify) g_free));
#endif

  ret = gst_video_encoder_finish_frame (encoder, frame);

  return ret;

initialization_error:
  {
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize OpenJPEG encoder"), (NULL));
    return GST_FLOW_ERROR;
  }
map_read_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to map input buffer"), (NULL));
    return GST_FLOW_ERROR;
  }
fill_image_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_frame_unmap (&vframe);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to fill OpenJPEG image"), (NULL));
    return GST_FLOW_ERROR;
  }
open_error:
  {
    opj_image_destroy (image);
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to open OpenJPEG data"), (NULL));
    return GST_FLOW_ERROR;
  }
encode_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_cio_close (io);
    opj_image_destroy (image);
    opj_destroy_compress (enc);
#else
    opj_stream_destroy (stream);
    g_free (mstream.data);
    opj_image_destroy (image);
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, STREAM, ENCODE,
        ("Failed to encode OpenJPEG stream"), (NULL));
    return GST_FLOW_ERROR;
  }
#ifdef HAVE_OPENJPEG_1
allocate_error:
  {
    opj_cio_close (io);
    opj_destroy_compress (enc);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to allocate output buffer"), (NULL));
    return ret;
  }
#endif
}
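
The opj_stream callbacks registered above (read_fn, write_fn, skip_fn, seek_fn) live elsewhere in the element; a plausible sketch of write_fn, assuming the MemStream backing store grows geometrically and the file's usual GLib/string.h includes:

static OPJ_SIZE_T
write_fn (void *buf, OPJ_SIZE_T nb_bytes, void *user_data)
{
  MemStream *mstream = user_data;

  /* Grow the backing allocation until the write fits */
  while (mstream->offset + nb_bytes > mstream->allocsize) {
    mstream->allocsize *= 2;
    mstream->data = g_realloc (mstream->data, mstream->allocsize);
  }

  memcpy (mstream->data + mstream->offset, buf, nb_bytes);
  mstream->offset += nb_bytes;
  if (mstream->size < mstream->offset)
    mstream->size = mstream->offset;

  return nb_bytes;
}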
Example #20
static GstFlowReturn
gst_ffmpegvidenc_handle_frame (GstVideoEncoder * encoder,
                               GstVideoCodecFrame * frame)
{
    GstFFMpegVidEnc *ffmpegenc = (GstFFMpegVidEnc *) encoder;
    GstBuffer *outbuf;
    gint ret = 0, c;
    GstVideoInfo *info = &ffmpegenc->input_state->info;
    AVPacket *pkt;
    int have_data = 0;
    BufferInfo *buffer_info;

    if (ffmpegenc->interlaced) {
        ffmpegenc->picture->interlaced_frame = TRUE;
        /* if this is not the case, a filter element should be used to swap fields */
        ffmpegenc->picture->top_field_first =
            GST_BUFFER_FLAG_IS_SET (frame->input_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    }

    if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame))
        ffmpegenc->picture->pict_type = AV_PICTURE_TYPE_I;

    buffer_info = g_slice_new0 (BufferInfo);
    buffer_info->buffer = gst_buffer_ref (frame->input_buffer);

    if (!gst_video_frame_map (&buffer_info->vframe, info, frame->input_buffer,
                              GST_MAP_READ)) {
        GST_ERROR_OBJECT (encoder, "Failed to map input buffer");
        gst_buffer_unref (buffer_info->buffer);
        g_slice_free (BufferInfo, buffer_info);
        gst_video_codec_frame_unref (frame);
        return GST_FLOW_ERROR;
    }

    /* Fill avpicture */
    ffmpegenc->picture->buf[0] =
        av_buffer_create (NULL, 0, buffer_info_free, buffer_info, 0);
    for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {
        if (c < GST_VIDEO_INFO_N_COMPONENTS (info)) {
            ffmpegenc->picture->data[c] =
                GST_VIDEO_FRAME_PLANE_DATA (&buffer_info->vframe, c);
            ffmpegenc->picture->linesize[c] =
                GST_VIDEO_FRAME_COMP_STRIDE (&buffer_info->vframe, c);
        } else {
            ffmpegenc->picture->data[c] = NULL;
            ffmpegenc->picture->linesize[c] = 0;
        }
    }

    ffmpegenc->picture->format = ffmpegenc->context->pix_fmt;
    ffmpegenc->picture->width = GST_VIDEO_FRAME_WIDTH (&buffer_info->vframe);
    ffmpegenc->picture->height = GST_VIDEO_FRAME_HEIGHT (&buffer_info->vframe);

    ffmpegenc->picture->pts =
        gst_ffmpeg_time_gst_to_ff (frame->pts /
                                   ffmpegenc->context->ticks_per_frame, ffmpegenc->context->time_base);

    have_data = 0;
    pkt = g_slice_new0 (AVPacket);

    ret =
        avcodec_encode_video2 (ffmpegenc->context, pkt, ffmpegenc->picture,
                               &have_data);

    av_frame_unref (ffmpegenc->picture);

    if (ret < 0 || !have_data)
        g_slice_free (AVPacket, pkt);

    if (ret < 0)
        goto encode_fail;

    /* Encoder needs more data */
    if (!have_data) {
        gst_video_codec_frame_unref (frame);
        return GST_FLOW_OK;
    }

    /* save stats info if there is some as well as a stats file */
    if (ffmpegenc->file && ffmpegenc->context->stats_out)
        if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
            GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
                               (("Could not write to file \"%s\"."), ffmpegenc->filename),
                               GST_ERROR_SYSTEM);

    gst_video_codec_frame_unref (frame);

    /* Get oldest frame */
    frame = gst_video_encoder_get_oldest_frame (encoder);

    outbuf =
        gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, pkt->data,
                                     pkt->size, 0, pkt->size, pkt, gst_ffmpegvidenc_free_avpacket);
    frame->output_buffer = outbuf;

    if (pkt->flags & AV_PKT_FLAG_KEY)
        GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
    else
        GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);

    return gst_video_encoder_finish_frame (encoder, frame);

    /* ERRORS */
encode_fail:
    {
#ifndef GST_DISABLE_GST_DEBUG
        GstFFMpegVidEncClass *oclass =
            (GstFFMpegVidEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
        GST_ERROR_OBJECT (ffmpegenc,
                          "avenc_%s: failed to encode buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
        /* avoid frame (and ts etc) piling up */
        return gst_video_encoder_finish_frame (encoder, frame);
    }
}
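
The two cleanup callbacks referenced above fall outside this excerpt; plausible sketches, matching the signatures expected by av_buffer_create() and gst_buffer_new_wrapped_full():

/* Runs when FFmpeg drops its last reference to the input frame data:
 * unmap the video frame and release our extra buffer reference. */
static void
buffer_info_free (void *opaque, guint8 * data)
{
    BufferInfo *info = opaque;

    gst_video_frame_unmap (&info->vframe);
    gst_buffer_unref (info->buffer);
    g_slice_free (BufferInfo, info);
}

/* Runs when the wrapped output buffer is destroyed: release the packet
 * payload and the packet struct itself. */
static void
gst_ffmpegvidenc_free_avpacket (gpointer pkt)
{
    av_packet_unref ((AVPacket *) pkt);
    g_slice_free (AVPacket, pkt);
}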