static GstFlowReturn
gst_span_plc_chain (GstPad * pad, GstBuffer * buffer)
{
  GstSpanPlc *plc = GST_SPAN_PLC (GST_PAD_PARENT (pad));
  GstClockTime buffer_duration;

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    plc->last_stop = GST_BUFFER_TIMESTAMP (buffer);
  else
    GST_WARNING_OBJECT (plc, "Buffer has no timestamp!");

  if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
    buffer_duration = GST_BUFFER_DURATION (buffer);
  } else {
    GST_WARNING_OBJECT (plc, "Buffer has no duration!");
    /* scale before dividing so sub-second durations don't truncate to zero */
    buffer_duration = gst_util_uint64_scale (GST_BUFFER_SIZE (buffer),
        GST_SECOND, plc->sample_rate * sizeof (guint16));
    GST_DEBUG_OBJECT (plc, "Buffer duration : %" GST_TIME_FORMAT,
        GST_TIME_ARGS (buffer_duration));
  }

  plc->last_stop += buffer_duration;

  if (plc->plc_state->missing_samples != 0)
    buffer = gst_buffer_make_writable (buffer);
  plc_rx (plc->plc_state, (int16_t *) GST_BUFFER_DATA (buffer),
      GST_BUFFER_SIZE (buffer) / 2);

  return gst_pad_push (plc->srcpad, buffer);
}
Example #2
/* Handles decrypted buffers only */
static GstFlowReturn
gst_hls_demux_handle_buffer (GstAdaptiveDemux * demux,
    GstAdaptiveDemuxStream * stream, GstBuffer * buffer, gboolean force)
{
  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);

  if (G_UNLIKELY (hlsdemux->do_typefind && buffer != NULL)) {
    GstCaps *caps = NULL;
    GstMapInfo info;
    gsize buffer_size;
    GstTypeFindProbability prob = GST_TYPE_FIND_NONE;

    if (hlsdemux->pending_typefind_buffer)
      buffer = gst_buffer_append (hlsdemux->pending_typefind_buffer, buffer);
    hlsdemux->pending_typefind_buffer = NULL;

    gst_buffer_map (buffer, &info, GST_MAP_READ);
    buffer_size = info.size;

    /* Typefind could miss if buffer is too small. In this case we
     * will retry later */
    if (buffer_size >= (2 * 1024)) {
      caps =
          gst_type_find_helper_for_data (GST_OBJECT_CAST (hlsdemux), info.data,
          info.size, &prob);
    }
    gst_buffer_unmap (buffer, &info);

    if (G_UNLIKELY (!caps)) {
      /* Only fail typefinding if we already have a good amount of data
       * and we still don't know the type */
      if (buffer_size > (2 * 1024 * 1024) || force) {
        GST_ELEMENT_ERROR (hlsdemux, STREAM, TYPE_NOT_FOUND,
            ("Could not determine type of stream"), (NULL));
        gst_buffer_unref (buffer);
        return GST_FLOW_NOT_NEGOTIATED;
      } else {
        hlsdemux->pending_typefind_buffer = buffer;
        return GST_FLOW_OK;
      }
    }

    GST_DEBUG_OBJECT (hlsdemux, "Typefind result: %" GST_PTR_FORMAT " prob:%d",
        caps, prob);

    gst_adaptive_demux_stream_set_caps (stream, caps);
    hlsdemux->do_typefind = FALSE;
  }

  g_assert (hlsdemux->pending_typefind_buffer == NULL);

  if (buffer) {
    buffer = gst_buffer_make_writable (buffer);
    GST_BUFFER_OFFSET (buffer) = hlsdemux->current_offset;
    hlsdemux->current_offset += gst_buffer_get_size (buffer);
    GST_BUFFER_OFFSET_END (buffer) = hlsdemux->current_offset;
    return gst_adaptive_demux_stream_push_buffer (stream, buffer);
  }
  return GST_FLOW_OK;
}
static GstFlowReturn
gst_kate_parse_queue_buffer (GstKateParse * parse, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  gint64 granpos;

  buf = gst_buffer_make_writable (buf);

  /* oggdemux stores the granule pos in the offset end */
  granpos = GST_BUFFER_OFFSET_END (buf);
  GST_LOG_OBJECT (parse, "granpos %16" G_GINT64_MODIFIER "x", granpos);
  g_queue_push_tail (parse->buffer_queue, buf);

#if 1
  /* if getting buffers from matroska, we won't have a granpos here... */
  //if (GST_BUFFER_OFFSET_END_IS_VALID (buf)) {
  ret = gst_kate_parse_drain_queue (parse, granpos);
  //}
#else
  if (granpos >= 0) {
    ret = gst_kate_parse_drain_queue (parse, granpos);
  } else {
    GST_ELEMENT_ERROR (parse, STREAM, DECODE, (NULL),
        ("Bad granulepos %" G_GINT64_FORMAT, granpos));
    ret = GST_FLOW_ERROR;
  }
#endif

  return ret;
}
static void
theora_parse_set_header_on_caps (GstTheoraParse * parse, GstCaps * caps)
{
  GstBuffer **bufs;
  GstStructure *structure;
  gint i;
  GValue array = { 0 };
  GValue value = { 0 };

  bufs = parse->streamheader;
  structure = gst_caps_get_structure (caps, 0);
  g_value_init (&array, GST_TYPE_ARRAY);

  for (i = 0; i < 3; i++) {
    if (bufs[i] == NULL)
      continue;

    bufs[i] = gst_buffer_make_writable (bufs[i]);
    GST_BUFFER_FLAG_SET (bufs[i], GST_BUFFER_FLAG_HEADER);

    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, bufs[i]);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
  }

  gst_structure_take_value (structure, "streamheader", &array);
}
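/* A minimal usage sketch (not part of the original sources): a helper like
 * theora_parse_set_header_on_caps is typically invoked while building the
 * source pad caps.  The bare caps and the "srcpad" member are illustrative
 * assumptions only. */
static void
theora_parse_set_src_caps_example (GstTheoraParse * parse)
{
  GstCaps *caps;

  caps = gst_caps_new_empty_simple ("video/x-theora");
  theora_parse_set_header_on_caps (parse, caps);

  /* send the caps (with the attached streamheader array) downstream */
  gst_pad_set_caps (parse->srcpad, caps);
  gst_caps_unref (caps);
}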
/* chain function
 * this function does the actual processing
 */
static GstFlowReturn
gst_opencv_text_overlay_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buf)
{
  GstOpencvTextOverlay *filter;
  GstMapInfo map_info;

  filter = GST_OPENCV_TEXT_OVERLAY (parent);

  /* make the buffer writable before mapping it: gst_buffer_make_writable()
   * may return a different buffer, which would invalidate an earlier map */
  buf = gst_buffer_make_writable (buf);
  gst_buffer_map (buf, &map_info, GST_MAP_READWRITE);

  filter->cvImage->imageData = (char *) map_info.data;

  cvInitFont (&(filter->font), CV_FONT_VECTOR0, filter->width, filter->height,
      0, filter->thickness, 0);
  cvPutText (filter->cvImage, filter->textbuf, cvPoint (filter->xpos,
          filter->ypos), &(filter->font), cvScalar (filter->colorR,
          filter->colorG, filter->colorB, 0));

  gst_buffer_unmap (buf, &map_info);
  return gst_pad_push (filter->srcpad, buf);
}
static GstFlowReturn
gst_opencv_video_filter_transform_ip (GstBaseTransform * trans,
    GstBuffer * buffer)
{
  GstOpencvVideoFilter *transform;
  GstOpencvVideoFilterClass *fclass;
  GstMapInfo map;
  GstFlowReturn ret;

  transform = GST_OPENCV_VIDEO_FILTER (trans);
  fclass = GST_OPENCV_VIDEO_FILTER_GET_CLASS (transform);

  g_return_val_if_fail (fclass->cv_trans_ip_func != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (transform->cvImage != NULL, GST_FLOW_ERROR);

  /* TODO this is not always needed and should be solved at BaseTransform
   * level */
  buffer = gst_buffer_make_writable (buffer);

  gst_buffer_map (buffer, &map, GST_MAP_READWRITE);
  transform->cvImage->imageData = (char *) map.data;

  ret = fclass->cv_trans_ip_func (transform, buffer, transform->cvImage);

  gst_buffer_unmap (buffer, &map);

  return ret;
}
Example #7
static void	wb_image_c (GstTcamWhitebalance* self, GstBuffer* buf, byte wb_r, byte wb_g, byte wb_b)
{
    GstMapInfo info;
    /* the caller must hand in a writable buffer here: gst_buffer_make_writable()
     * returns a (possibly different) buffer, so its result must not be discarded */
    buf = gst_buffer_make_writable (buf);

    gst_buffer_map(buf, &info, GST_MAP_WRITE);

    guint* data = (guint*)info.data;

    unsigned int dim_x = self->image_size.width;
    unsigned int dim_y = self->image_size.height;

    guint pitch = 8 * dim_x / 8;

    tBY8Pattern odd = next_line(self->pattern);

    guint y;
    for (y = 0 ; y < (dim_y - 1); y += 2)
    {
        byte* line0 = (byte*)data + y * pitch;
        byte* line1 = (byte*)data + (y + 1) * pitch;

        wb_line_c(line0, line0, dim_x, wb_r, wb_g, wb_b, self->pattern);
        wb_line_c(line1, line1, dim_x, wb_r, wb_g, wb_b, odd);
    }

    if (y == (dim_y - 1))
    {
        byte* line = (byte*)data + y * pitch;
        wb_line_c(line, line, dim_x, wb_r, wb_g, wb_b, self->pattern);
    }

    gst_buffer_unmap(buf, &info);
}
static GstFlowReturn
gst_valve_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstValve *valve = GST_VALVE (parent);
  GstFlowReturn ret = GST_FLOW_OK;

  if (g_atomic_int_get (&valve->drop)) {
    gst_buffer_unref (buffer);
    valve->discont = TRUE;
  } else {
    if (valve->discont) {
      buffer = gst_buffer_make_writable (buffer);
      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
      valve->discont = FALSE;
    }

    if (valve->need_repush_sticky)
      gst_valve_repush_sticky (valve);

    ret = gst_pad_push (valve->srcpad, buffer);
  }


  /* Ignore errors if "drop" was changed while the thread was blocked
   * downstream
   */
  if (g_atomic_int_get (&valve->drop))
    ret = GST_FLOW_OK;

  return ret;
}
static gboolean
process_list_item (GstBuffer ** buffer, guint idx, gpointer user_data)
{
  struct BufferListData *bd = user_data;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

  *buffer = gst_buffer_make_writable (*buffer);

  gst_rtp_buffer_map (*buffer, GST_MAP_READWRITE, &rtpbuffer);

  bd->drop = !process_buffer_locked (bd->rtp_mux, bd->padpriv, &rtpbuffer);

  gst_rtp_buffer_unmap (&rtpbuffer);

  if (bd->drop)
    return FALSE;

  if (GST_BUFFER_DURATION_IS_VALID (*buffer) &&
      GST_BUFFER_PTS_IS_VALID (*buffer))
    bd->rtp_mux->last_stop = GST_BUFFER_PTS (*buffer) +
        GST_BUFFER_DURATION (*buffer);
  else
    bd->rtp_mux->last_stop = GST_CLOCK_TIME_NONE;

  return TRUE;
}
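/* A minimal sketch (not part of the original sources) of how a
 * GstBufferListFunc such as process_list_item is usually driven: the caller
 * makes the list writable and walks it with gst_buffer_list_foreach().
 * The element/pad-private type names, the "srcpad" member and the function
 * name are illustrative assumptions. */
static GstFlowReturn
rtp_mux_push_buffer_list_example (GstRTPMux * rtp_mux,
    GstRTPMuxPadPrivate * padpriv, GstBufferList * list)
{
  struct BufferListData bd;

  bd.rtp_mux = rtp_mux;
  bd.padpriv = padpriv;
  bd.drop = FALSE;

  /* the callback replaces each buffer with a writable copy before mapping */
  list = gst_buffer_list_make_writable (list);
  gst_buffer_list_foreach (list, process_list_item, &bd);

  if (bd.drop) {
    gst_buffer_list_unref (list);
    return GST_FLOW_OK;
  }

  return gst_pad_push_list (rtp_mux->srcpad, list);
}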
static GstFlowReturn
gst_tag_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstTagMux *mux = GST_TAG_MUX (parent);
  GstFlowReturn ret;
  gsize length;

  if (mux->priv->render_start_tag) {

    GST_INFO_OBJECT (mux, "Adding tags to stream");
    ret = gst_tag_mux_render_start_tag (mux);
    if (ret != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (mux, "flow: %s", gst_flow_get_name (ret));
      gst_buffer_unref (buffer);
      return ret;
    }

    /* Now send the cached newsegment event that we got from upstream */
    if (mux->priv->newsegment_ev) {
      GstEvent *newseg;
      GstSegment segment;

      GST_DEBUG_OBJECT (mux, "sending cached newsegment event");
      newseg = gst_tag_mux_adjust_event_offsets (mux, mux->priv->newsegment_ev);
      gst_event_unref (mux->priv->newsegment_ev);
      mux->priv->newsegment_ev = NULL;

      gst_event_copy_segment (newseg, &segment);

      gst_pad_push_event (mux->priv->srcpad, newseg);
      mux->priv->current_offset = segment.start;
      mux->priv->max_offset =
          MAX (mux->priv->max_offset, mux->priv->current_offset);
    } else {
      /* upstream sent no newsegment event or only one in a non-BYTE format */
    }

    mux->priv->render_start_tag = FALSE;
  }

  buffer = gst_buffer_make_writable (buffer);

  if (GST_BUFFER_OFFSET (buffer) != GST_BUFFER_OFFSET_NONE) {
    GST_LOG_OBJECT (mux, "Adjusting buffer offset from %" G_GINT64_FORMAT
        " to %" G_GINT64_FORMAT, GST_BUFFER_OFFSET (buffer),
        GST_BUFFER_OFFSET (buffer) + mux->priv->start_tag_size);
    GST_BUFFER_OFFSET (buffer) += mux->priv->start_tag_size;
  }

  length = gst_buffer_get_size (buffer);

  ret = gst_pad_push (mux->priv->srcpad, buffer);

  mux->priv->current_offset += length;
  mux->priv->max_offset =
      MAX (mux->priv->max_offset, mux->priv->current_offset);

  return ret;
}
static GstFlowReturn
gst_inter_sub_src_create (GstBaseSrc * src, guint64 offset, guint size,
                          GstBuffer ** buf)
{
    GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
    GstBuffer *buffer;

    GST_DEBUG_OBJECT (intersubsrc, "create");

    buffer = NULL;

    g_mutex_lock (&intersubsrc->surface->mutex);
    if (intersubsrc->surface->sub_buffer) {
        buffer = gst_buffer_ref (intersubsrc->surface->sub_buffer);
        //intersubsrc->surface->sub_buffer_count++;
        //if (intersubsrc->surface->sub_buffer_count >= 30) {
        gst_buffer_unref (intersubsrc->surface->sub_buffer);
        intersubsrc->surface->sub_buffer = NULL;
        //}
    }
    g_mutex_unlock (&intersubsrc->surface->mutex);

    if (buffer == NULL) {
        GstMapInfo map;

        buffer = gst_buffer_new_and_alloc (1);

        gst_buffer_map (buffer, &map, GST_MAP_WRITE);
        map.data[0] = 0;
        gst_buffer_unmap (buffer, &map);
    }

    buffer = gst_buffer_make_writable (buffer);

    /* FIXME: does this make sense? Rate is always 0 */
#if 0
    GST_BUFFER_TIMESTAMP (buffer) =
        gst_util_uint64_scale_int (GST_SECOND, intersubsrc->n_frames,
                                   intersubsrc->rate);
    GST_DEBUG_OBJECT (intersubsrc, "create ts %" GST_TIME_FORMAT,
                      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
    GST_BUFFER_DURATION (buffer) =
        gst_util_uint64_scale_int (GST_SECOND, (intersubsrc->n_frames + 1),
                                   intersubsrc->rate) - GST_BUFFER_TIMESTAMP (buffer);
#endif
    GST_BUFFER_OFFSET (buffer) = intersubsrc->n_frames;
    GST_BUFFER_OFFSET_END (buffer) = -1;
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
    if (intersubsrc->n_frames == 0) {
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    }
    intersubsrc->n_frames++;

    *buf = buffer;

    return GST_FLOW_OK;
}
Example #12
/* With SPU LOCK */
static void
gst_dvd_spu_redraw_still (GstDVDSpu * dvdspu, gboolean force)
{
  /* If we have an active SPU command set and a reference frame, copy the
   * frame, redraw the SPU and store it as the pending frame for output */
  if (dvdspu->ref_frame) {
    gboolean redraw = (dvdspu->spu_state.flags & SPU_STATE_FORCED_DSP);
    redraw |= (dvdspu->spu_state.flags & SPU_STATE_FORCED_ONLY) == 0 &&
        (dvdspu->spu_state.flags & SPU_STATE_DISPLAY);

    if (redraw) {
      GstBuffer *buf = gst_buffer_ref (dvdspu->ref_frame);

      buf = gst_buffer_make_writable (buf);

      GST_LOG_OBJECT (dvdspu, "Redraw due to Still Frame with ref %p",
          dvdspu->ref_frame);
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
      GST_BUFFER_TIMESTAMP (buf) = GST_CLOCK_TIME_NONE;
      GST_BUFFER_DURATION (buf) = GST_CLOCK_TIME_NONE;

      /* Render the SPU overlay onto the buffer */
      gstspu_render (dvdspu, buf);
      gst_buffer_replace (&dvdspu->pending_frame, buf);
      gst_buffer_unref (buf);
    } else if (force) {
      /* Simply output the reference frame */
      GstBuffer *buf = gst_buffer_ref (dvdspu->ref_frame);
      buf = gst_buffer_make_writable (buf);
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
      GST_BUFFER_TIMESTAMP (buf) = GST_CLOCK_TIME_NONE;
      GST_BUFFER_DURATION (buf) = GST_CLOCK_TIME_NONE;

      GST_DEBUG_OBJECT (dvdspu, "Pushing reference frame at start of still");

      gst_buffer_replace (&dvdspu->pending_frame, buf);
      gst_buffer_unref (buf);
    } else {
      GST_LOG_OBJECT (dvdspu, "Redraw due to Still Frame skipped");
    }
  } else {
    GST_LOG_OBJECT (dvdspu, "Not redrawing still frame - no ref frame");
  }
}
Example #13
static GstBuffer *
gst_rtp_L16_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
  GstRtpL16Depay *rtpL16depay;
  GstBuffer *outbuf;
  gint payload_len;
  gboolean marker;
  GstRTPBuffer rtp = { NULL };

  rtpL16depay = GST_RTP_L16_DEPAY (depayload);

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
  payload_len = gst_rtp_buffer_get_payload_len (&rtp);

  if (payload_len <= 0)
    goto empty_packet;

  GST_DEBUG_OBJECT (rtpL16depay, "got payload of %d bytes", payload_len);

  outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
  marker = gst_rtp_buffer_get_marker (&rtp);

  if (marker) {
    /* mark talk spurt with DISCONT */
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
  }

  outbuf = gst_buffer_make_writable (outbuf);
  if (rtpL16depay->order &&
      !gst_audio_buffer_reorder_channels (outbuf,
          rtpL16depay->info.finfo->format, rtpL16depay->info.channels,
          rtpL16depay->info.position, rtpL16depay->order->pos)) {
    goto reorder_failed;
  }

  gst_rtp_buffer_unmap (&rtp);

  return outbuf;

  /* ERRORS */
empty_packet:
  {
    GST_ELEMENT_WARNING (rtpL16depay, STREAM, DECODE,
        ("Empty Payload."), (NULL));
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
reorder_failed:
  {
    GST_ELEMENT_ERROR (rtpL16depay, STREAM, DECODE,
        ("Channel reordering failed."), (NULL));
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
}
/* chain function
 * this function does the actual processing
 */
static GstFlowReturn
gst_face_blur_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstFaceBlur *filter;
  CvSeq *faces;
  GstMapInfo info;
  int i;

  filter = GST_FACE_BLUR (GST_OBJECT_PARENT (pad));

  buf = gst_buffer_make_writable (buf);
  gst_buffer_map (buf, &info, GST_MAP_READWRITE);
  filter->cvImage->imageData = (char *) info.data;

  cvCvtColor (filter->cvImage, filter->cvGray, CV_RGB2GRAY);
  cvClearMemStorage (filter->cvStorage);

  if (filter->cvCascade) {
    faces =
        cvHaarDetectObjects (filter->cvGray, filter->cvCascade,
        filter->cvStorage, 1.1, 2, 0, cvSize (30, 30)
#if (CV_MAJOR_VERSION >= 2) && (CV_MINOR_VERSION >= 2)
        , cvSize (32, 32)
#endif
        );

    /* buf was already made writable and mapped above */
    for (i = 0; i < (faces ? faces->total : 0); i++) {
      CvRect *r = (CvRect *) cvGetSeqElem (faces, i);
      cvSetImageROI (filter->cvImage, *r);
      cvSmooth (filter->cvImage, filter->cvImage, CV_BLUR, 11, 11, 0, 0);
      cvSmooth (filter->cvImage, filter->cvImage, CV_GAUSSIAN, 11, 11, 0, 0);
      cvResetImageROI (filter->cvImage);
    }
  }

  /* these filters operate in place, so we push the same buffer */

  return gst_pad_push (filter->srcpad, buf);
}
Example #15
static GstFlowReturn
gst_kate_tiger_video_chain (GstPad * pad, GstBuffer * buf)
{
  GstKateTiger *tiger = GST_KATE_TIGER (gst_pad_get_parent (pad));
  GstFlowReturn rflow = GST_FLOW_OK;
  unsigned char *ptr;
  int ret;

  g_mutex_lock (tiger->mutex);

  GST_LOG_OBJECT (tiger, "got video frame, %u bytes", GST_BUFFER_SIZE (buf));

  /* draw on it */
  buf = gst_buffer_make_writable (buf);
  if (G_UNLIKELY (!buf)) {
    GST_WARNING_OBJECT (tiger, "Failed to make video buffer writable");
  } else {
    ptr = GST_BUFFER_DATA (buf);
    if (!ptr) {
      GST_WARNING_OBJECT (tiger,
          "Failed to get a pointer to video buffer data");
    } else {
      ret = tiger_renderer_set_buffer (tiger->tr, ptr, tiger->video_width, tiger->video_height, tiger->video_width * 4, 0);     // TODO: stride ?
      if (G_UNLIKELY (ret < 0)) {
        GST_WARNING_OBJECT (tiger,
            "Tiger renderer failed to set buffer to video frame: %d", ret);
      } else {
        kate_float t = GST_BUFFER_TIMESTAMP (buf) / (gdouble) GST_SECOND;
        ret = tiger_renderer_update (tiger->tr, t, 1);
        if (G_UNLIKELY (ret < 0)) {
          GST_WARNING_OBJECT (tiger, "Tiger renderer failed to update: %d",
              ret);
        } else {
          ret = tiger_renderer_render (tiger->tr);
          if (G_UNLIKELY (ret < 0)) {
            GST_WARNING_OBJECT (tiger,
                "Tiger renderer failed to render to video frame: %d", ret);
          } else {
            GST_LOG_OBJECT (tiger,
                "Tiger renderer rendered on video frame at %f", t);
          }
        }
      }
    }
  }
  rflow = gst_pad_push (tiger->srcpad, buf);

  /* release the mutex before dropping our reference to the element */
  g_mutex_unlock (tiger->mutex);

  gst_object_unref (tiger);

  return rflow;
}
Example #16
static gboolean
process_buffer_from_list (GstBuffer ** buffer, guint idx, gpointer user_data)
{
  GstDtlsDec *self = GST_DTLS_DEC (user_data);
  gint size;

  *buffer = gst_buffer_make_writable (*buffer);
  size = process_buffer (self, *buffer);
  if (size <= 0)
    gst_buffer_replace (buffer, NULL);

  return TRUE;
}
Example #17
static GstFlowReturn
gst_span_plc_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstSpanPlc *plc = GST_SPAN_PLC (parent);
  GstMapInfo map;

  buffer = gst_buffer_make_writable (buffer);
  gst_buffer_map (buffer, &map, GST_MAP_READWRITE);
  plc_rx (plc->plc_state, (int16_t *) map.data, map.size / 2);
  gst_buffer_unmap (buffer, &map);

  return gst_pad_push (plc->srcpad, buffer);
}
Example #18
/* Internal method only. Tries to merge buffers at the head of the queue
 * to form a single larger buffer of size 'size'.
 *
 * Returns TRUE if it managed to merge anything.
 */
static gboolean
gst_adapter_try_to_merge_up (GstAdapter * adapter, gsize size)
{
  GstBuffer *cur, *head;
  GSList *g;
  gboolean ret = FALSE;
  gsize hsize;

  g = adapter->buflist;
  if (g == NULL)
    return FALSE;

  head = g->data;

  hsize = gst_buffer_get_size (head);

  /* Remove skipped part from the buffer (otherwise the buffer might grow indefinitely) */
  head = gst_buffer_make_writable (head);
  gst_buffer_resize (head, adapter->skip, hsize - adapter->skip);
  hsize -= adapter->skip;
  adapter->skip = 0;
  g->data = head;

  g = g_slist_next (g);

  while (g != NULL && hsize < size) {
    cur = g->data;
    /* Merge the head buffer and the next in line */
    GST_LOG_OBJECT (adapter, "Merging buffers of size %" G_GSIZE_FORMAT " & %"
        G_GSIZE_FORMAT " in search of target %" G_GSIZE_FORMAT,
        hsize, gst_buffer_get_size (cur), size);

    head = gst_buffer_append (head, cur);
    hsize = gst_buffer_get_size (head);
    ret = TRUE;

    /* Delete the front list item, and store our new buffer in the 2nd list
     * item */
    adapter->buflist = g_slist_delete_link (adapter->buflist, adapter->buflist);
    g->data = head;

    /* invalidate scan position */
    adapter->scan_offset = 0;
    adapter->scan_entry = NULL;

    g = g_slist_next (g);
  }

  return ret;
}
static GstFlowReturn
gst_y4m_encode_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstY4mEncode *filter = GST_Y4M_ENCODE (encoder);
  GstClockTime timestamp;

  /* check we got some decent info from caps */
  if (GST_VIDEO_INFO_FORMAT (&filter->info) == GST_VIDEO_FORMAT_UNKNOWN)
    goto not_negotiated;

  timestamp = GST_BUFFER_TIMESTAMP (frame->input_buffer);

  if (G_UNLIKELY (!filter->header)) {
    gboolean tff = FALSE;

    if (GST_VIDEO_INFO_IS_INTERLACED (&filter->info)) {
      tff =
          GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
          GST_VIDEO_BUFFER_FLAG_TFF);
    }
    frame->output_buffer = gst_y4m_encode_get_stream_header (filter, tff);
    filter->header = TRUE;
    frame->output_buffer =
        gst_buffer_append (frame->output_buffer,
        gst_y4m_encode_get_frame_header (filter));
  } else {
    frame->output_buffer = gst_y4m_encode_get_frame_header (filter);
  }

  frame->output_buffer =
      gst_buffer_append (frame->output_buffer,
      gst_buffer_copy (frame->input_buffer));

  /* decorate */
  frame->output_buffer = gst_buffer_make_writable (frame->output_buffer);
  GST_BUFFER_TIMESTAMP (frame->output_buffer) = timestamp;

  return gst_video_encoder_finish_frame (encoder, frame);

not_negotiated:
  {
    GST_ELEMENT_ERROR (filter, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated"));

    return GST_FLOW_NOT_NEGOTIATED;
  }
}
Example #20
static GstFlowReturn
gst_fake_h264_decoder_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buf)
{
  GstElement *self = GST_ELEMENT (parent);
  GstPad *otherpad = gst_element_get_static_pad (self, "src");
  GstFlowReturn ret = GST_FLOW_OK;

  buf = gst_buffer_make_writable (buf);

  ret = gst_pad_push (otherpad, buf);

  gst_object_unref (otherpad);

  return ret;
}
static GstBuffer *
gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
    GstBuffer *outbuf;
    GstRTPBuffer rtpbuf = { NULL, };

    gst_rtp_buffer_map (buf, GST_MAP_READ, &rtpbuf);
    outbuf = gst_rtp_buffer_get_payload_buffer (&rtpbuf);
    gst_rtp_buffer_unmap (&rtpbuf);

    outbuf = gst_buffer_make_writable (outbuf);
    /* Filter away all metas that are not sensible to copy */
    gst_buffer_foreach_meta (outbuf, foreach_metadata, depayload);

    return outbuf;
}
Example #22
GstBuffer *
gst_rm_utils_descramble_dnet_buffer (GstBuffer * buf)
{
  guint8 *data, *end;

  buf = gst_buffer_make_writable (buf);

  /* dnet = byte-order swapped AC3 */
  data = GST_BUFFER_DATA (buf);
  end = GST_BUFFER_DATA (buf) + GST_BUFFER_SIZE (buf);
  while ((data + 1) < end) {
    /* byte-swap in an alignment-safe way */
    GST_WRITE_UINT16_BE (data, GST_READ_UINT16_LE (data));
    data += sizeof (guint16);
  }
  return buf;
}
static GstFlowReturn
gst_wavenc_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstWavEnc *wavenc = GST_WAVENC (parent);
  GstFlowReturn flow = GST_FLOW_OK;

  if (wavenc->channels <= 0) {
    GST_ERROR_OBJECT (wavenc, "Got data without caps");
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (G_UNLIKELY (!wavenc->sent_header)) {
    gst_pad_set_caps (wavenc->srcpad,
        gst_static_pad_template_get_caps (&src_factory));

    /* starting a file, means we have to finish it properly */
    wavenc->finished_properly = FALSE;

    /* push initial bogus header, it will be updated on EOS */
    flow = gst_wavenc_push_header (wavenc);
    if (flow != GST_FLOW_OK) {
      GST_WARNING_OBJECT (wavenc, "error pushing header: %s",
          gst_flow_get_name (flow));
      return flow;
    }
    GST_DEBUG_OBJECT (wavenc, "wrote dummy header");
    wavenc->audio_length = 0;
    wavenc->sent_header = TRUE;
  }

  GST_LOG_OBJECT (wavenc,
      "pushing %" G_GSIZE_FORMAT " bytes raw audio, ts=%" GST_TIME_FORMAT,
      gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  buf = gst_buffer_make_writable (buf);

  GST_BUFFER_OFFSET (buf) = WAV_HEADER_LEN + wavenc->audio_length;
  GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;

  wavenc->audio_length += gst_buffer_get_size (buf);

  flow = gst_pad_push (wavenc->srcpad, buf);

  return flow;
}
Example #24
static GstFlowReturn
gst_y4m_encode_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstY4mEncode *filter = GST_Y4M_ENCODE (parent);
  GstBuffer *outbuf;
  GstClockTime timestamp;

  /* check we got some decent info from caps */
  if (GST_VIDEO_INFO_FORMAT (&filter->info) == GST_VIDEO_FORMAT_UNKNOWN)
    goto not_negotiated;

  timestamp = GST_BUFFER_TIMESTAMP (buf);

  if (G_UNLIKELY (!filter->header)) {
    gboolean tff = FALSE;

    if (GST_VIDEO_INFO_IS_INTERLACED (&filter->info)) {
      tff = GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_TFF);
    }
    outbuf = gst_y4m_encode_get_stream_header (filter, tff);
    filter->header = TRUE;
    outbuf =
        gst_buffer_append (outbuf, gst_y4m_encode_get_frame_header (filter));
  } else {
    outbuf = gst_y4m_encode_get_frame_header (filter);
  }
  /* join with data, FIXME, strides are all wrong etc */
  outbuf = gst_buffer_append (outbuf, buf);
  /* decorate */
  outbuf = gst_buffer_make_writable (outbuf);

  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;

  return gst_pad_push (filter->srcpad, outbuf);

  /* ERRORS */
not_negotiated:
  {
    GST_ELEMENT_ERROR (filter, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before chain function"));
    gst_buffer_unref (buf);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
static GstFlowReturn
theora_parse_queue_buffer (GstTheoraParse * parse, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;

  buf = gst_buffer_make_writable (buf);

  g_queue_push_tail (parse->buffer_queue, buf);

  if (GST_BUFFER_OFFSET_END_IS_VALID (buf)) {
    if (parse->prev_keyframe < 0) {
      parse_granulepos (parse, GST_BUFFER_OFFSET_END (buf),
          &parse->prev_keyframe, NULL);
    }
    ret = theora_parse_drain_queue (parse, GST_BUFFER_OFFSET_END (buf));
  }

  return ret;
}
/* chain function
 * this function does the actual processing
 */
static GstFlowReturn
gst_opencv_text_overlay_chain (GstPad * pad, GstBuffer * buf)
{
  GstOpencvTextOverlay *filter;

  filter = GST_OPENCV_TEXT_OVERLAY (GST_OBJECT_PARENT (pad));

  /* make the buffer writable before taking its data pointer, otherwise
   * cvPutText() could draw into a copy other than the buffer being pushed */
  buf = gst_buffer_make_writable (buf);
  filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);

  cvInitFont (&(filter->font), CV_FONT_VECTOR0, filter->width, filter->height,
      0, filter->thickness, 0);
  cvPutText (filter->cvImage, filter->textbuf, cvPoint (filter->xpos,
          filter->ypos), &(filter->font), cvScalar (filter->colorR,
          filter->colorG, filter->colorB, 0));

  return gst_pad_push (filter->srcpad, buf);
}
static GstFlowReturn
gst_rtp_L16_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRtpL16Pay *rtpL16pay;

  rtpL16pay = GST_RTP_L16_PAY (basepayload);
  buffer = gst_buffer_make_writable (buffer);

  if (rtpL16pay->order &&
      !gst_audio_buffer_reorder_channels (buffer, rtpL16pay->info.finfo->format,
          rtpL16pay->info.channels, rtpL16pay->info.position,
          rtpL16pay->order->pos)) {
    return GST_FLOW_ERROR;
  }

  return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->handle_buffer (basepayload,
      buffer);
}
Example #28
static GstFlowReturn
sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstDtlsDec *self = GST_DTLS_DEC (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  gint size;
  GstPad *other_pad;

  if (!self->agent) {
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }

  GST_DEBUG_OBJECT (self, "received buffer from %s with length %"
      G_GSIZE_FORMAT, self->connection_id, gst_buffer_get_size (buffer));

  buffer = gst_buffer_make_writable (buffer);
  size = process_buffer (self, buffer);

  if (size <= 0) {
    gst_buffer_unref (buffer);

    return GST_FLOW_OK;
  }

  g_mutex_lock (&self->src_mutex);
  other_pad = self->src;
  if (other_pad)
    gst_object_ref (other_pad);
  g_mutex_unlock (&self->src_mutex);

  if (other_pad) {
    GST_LOG_OBJECT (self, "decoded buffer with length %d, pushing", size);
    ret = gst_pad_push (other_pad, buffer);
    gst_object_unref (other_pad);
  } else {
    GST_LOG_OBJECT (self, "dropped buffer with length %d, not linked", size);
    gst_buffer_unref (buffer);
  }

  return ret;
}
Example #29
static GstBuffer *
gst_wildmidi_clip_buffer (GstWildmidi * wildmidi, GstBuffer * buffer)
{
  guint64 start, stop;
  guint64 new_start, new_stop;
  gint64 offset, length;
  guint64 bpf;

  /* clipping disabled for now */
  return buffer;

  start = GST_BUFFER_OFFSET (buffer);
  stop = GST_BUFFER_OFFSET_END (buffer);

  if (!gst_segment_clip (wildmidi->o_segment, GST_FORMAT_DEFAULT,
          start, stop, &new_start, &new_stop)) {
    gst_buffer_unref (buffer);
    return NULL;
  }

  if (start == new_start && stop == new_stop)
    return buffer;


  offset = new_start - start;
  length = new_stop - new_start;

  bpf = wildmidi->bytes_per_frame;
  buffer = gst_buffer_make_writable (buffer);
  gst_buffer_resize (buffer, offset * bpf, length * bpf);

  GST_BUFFER_OFFSET (buffer) = new_start;
  GST_BUFFER_OFFSET_END (buffer) = new_stop;
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (new_start, GST_SECOND, WILDMIDI_RATE);
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int (new_stop, GST_SECOND, WILDMIDI_RATE) -
      GST_BUFFER_TIMESTAMP (buffer);

  return buffer;
}
Example #30
static GstFlowReturn
gst_opencv_base_transform_transform_ip (GstBaseTransform * trans,
    GstBuffer * buffer)
{
  GstOpencvBaseTransform *transform;
  GstOpencvBaseTransformClass *fclass;

  transform = GST_OPENCV_BASE_TRANSFORM (trans);
  fclass = GST_OPENCV_BASE_TRANSFORM_GET_CLASS (transform);

  g_return_val_if_fail (fclass->cv_trans_ip_func != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (transform->cvImage != NULL, GST_FLOW_ERROR);

  buffer = gst_buffer_make_writable (buffer);

  transform->cvImage->imageData = (char *) GST_BUFFER_DATA (buffer);


  /* FIXME how to release buffer? */
  return fclass->cv_trans_ip_func (transform, buffer, transform->cvImage);
}