Example #1
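/* Chain function of the input-selector's sink pads: a buffer arriving on
 * the currently active sink pad is forwarded downstream with gst_pad_push()
 * after any pending close/start newsegment events; buffers from inactive
 * pads are dropped and the pad is marked discont. */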
static GstFlowReturn
gst_selector_pad_chain (GstPad * pad, GstBuffer * buf)
{
  GstInputSelector *sel;
  GstFlowReturn res;
  GstPad *active_sinkpad;
  GstPad *prev_active_sinkpad;
  GstSelectorPad *selpad;
  GstClockTime start_time;
  GstSegment *seg;
  GstEvent *close_event = NULL, *start_event = NULL;
  GstCaps *caps;

  sel = GST_INPUT_SELECTOR (gst_pad_get_parent (pad));
  selpad = GST_SELECTOR_PAD_CAST (pad);
  seg = &selpad->segment;

  GST_INPUT_SELECTOR_LOCK (sel);
  /* wait or check for flushing */
  if (gst_input_selector_wait (sel, pad))
    goto flushing;

  GST_DEBUG_OBJECT (pad, "getting active pad");

  prev_active_sinkpad = sel->active_sinkpad;
  active_sinkpad = gst_input_selector_activate_sinkpad (sel, pad);

  /* update the segment on the srcpad */
  start_time = GST_BUFFER_TIMESTAMP (buf);
  if (GST_CLOCK_TIME_IS_VALID (start_time)) {
    GST_DEBUG_OBJECT (pad, "received start time %" GST_TIME_FORMAT,
        GST_TIME_ARGS (start_time));
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      GST_DEBUG_OBJECT (pad, "received end time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (start_time + GST_BUFFER_DURATION (buf)));

    GST_OBJECT_LOCK (pad);
    gst_segment_set_last_stop (seg, seg->format, start_time);
    GST_OBJECT_UNLOCK (pad);
  }

  /* Ignore buffers from pads except the selected one */
  if (pad != active_sinkpad)
    goto ignore;

  if (G_UNLIKELY (sel->pending_close)) {
    GstSegment *cseg = &sel->segment;

    GST_DEBUG_OBJECT (sel,
        "pushing close NEWSEGMENT update %d, rate %lf, applied rate %lf, "
        "format %d, "
        "%" G_GINT64_FORMAT " -- %" G_GINT64_FORMAT ", time %"
        G_GINT64_FORMAT, TRUE, cseg->rate, cseg->applied_rate, cseg->format,
        cseg->start, cseg->stop, cseg->time);

    /* create update segment */
    close_event = gst_event_new_new_segment_full (TRUE, cseg->rate,
        cseg->applied_rate, cseg->format, cseg->start, cseg->stop, cseg->time);

    sel->pending_close = FALSE;
  }
  /* if we have a pending segment, push it out now */
  if (G_UNLIKELY (selpad->segment_pending)) {
    GST_DEBUG_OBJECT (pad,
        "pushing pending NEWSEGMENT update %d, rate %lf, applied rate %lf, "
        "format %d, "
        "%" G_GINT64_FORMAT " -- %" G_GINT64_FORMAT ", time %"
        G_GINT64_FORMAT, FALSE, seg->rate, seg->applied_rate, seg->format,
        seg->start, seg->stop, seg->time);

    start_event = gst_event_new_new_segment_full (FALSE, seg->rate,
        seg->applied_rate, seg->format, seg->start, seg->stop, seg->time);

    selpad->segment_pending = FALSE;
  }
  GST_INPUT_SELECTOR_UNLOCK (sel);

  if (prev_active_sinkpad != active_sinkpad && pad == active_sinkpad)
    g_object_notify (G_OBJECT (sel), "active-pad");

  if (close_event)
    gst_pad_push_event (sel->srcpad, close_event);

  if (start_event)
    gst_pad_push_event (sel->srcpad, start_event);

  if (selpad->discont) {
    buf = gst_buffer_make_metadata_writable (buf);

    GST_DEBUG_OBJECT (pad, "Marking discont buffer %p", buf);
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    selpad->discont = FALSE;
  }

  /* forward */
  GST_DEBUG_OBJECT (pad, "Forwarding buffer %p from pad %s:%s", buf,
      GST_DEBUG_PAD_NAME (pad));

  if ((caps = GST_BUFFER_CAPS (buf))) {
    if (GST_PAD_CAPS (sel->srcpad) != caps)
      gst_pad_set_caps (sel->srcpad, caps);
  }

  res = gst_pad_push (sel->srcpad, buf);

done:
  gst_object_unref (sel);
  return res;

  /* dropped buffers */
ignore:
  {
    GST_DEBUG_OBJECT (pad, "Pad not active, discard buffer %p", buf);
    /* when we drop a buffer, we're creating a discont on this pad */
    selpad->discont = TRUE;
    GST_INPUT_SELECTOR_UNLOCK (sel);
    gst_buffer_unref (buf);

    /* figure out what to return upstream */
    GST_OBJECT_LOCK (selpad);
    if (selpad->always_ok)
      res = GST_FLOW_OK;
    else
      res = GST_FLOW_NOT_LINKED;
    GST_OBJECT_UNLOCK (selpad);

    goto done;
  }
flushing:
  {
    GST_DEBUG_OBJECT (pad, "We are flushing, discard buffer %p", buf);
    GST_INPUT_SELECTOR_UNLOCK (sel);
    gst_buffer_unref (buf);
    res = GST_FLOW_WRONG_STATE;
    goto done;
  }
}
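Example #2
/* Flushes the FIR filter's residue: convolves the remaining samples with
 * zeros via self->process(), timestamps the resulting buffer from the
 * values saved while handling regular buffers and pushes it on the
 * base-transform source pad. */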
void
gst_audio_fx_base_fir_filter_push_residue (GstAudioFXBaseFIRFilter * self)
{
  GstBuffer *outbuf;
  GstFlowReturn res;
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  gint outsize, outsamples;
  GstMapInfo map;
  guint8 *in, *out;

  if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }

  /* Calculate the number of samples and their memory size that
   * should be pushed from the residue */
  outsamples = self->nsamples_in - (self->nsamples_out - self->latency);
  if (outsamples <= 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }
  outsize = outsamples * channels * bps;

  if (!self->fft || self->low_latency) {
    gint64 diffsize, diffsamples;

    /* Process the difference between latency and residue length samples
     * to start at the actual data instead of starting at the zeros before
     * when we only got one buffer smaller than latency */
    diffsamples =
        ((gint64) self->latency) - ((gint64) self->buffer_fill) / channels;
    if (diffsamples > 0) {
      diffsize = diffsamples * channels * bps;
      in = g_new0 (guint8, diffsize);
      out = g_new0 (guint8, diffsize);
      self->nsamples_out += self->process (self, in, out, diffsamples);
      g_free (in);
      g_free (out);
    }

    outbuf = gst_buffer_new_and_alloc (outsize);

    /* Convolve the residue with zeros to get the actual remaining data */
    in = g_new0 (guint8, outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
    self->nsamples_out += self->process (self, in, map.data, outsamples);
    gst_buffer_unmap (outbuf, &map);

    g_free (in);
  } else {
    guint gensamples = 0;

    outbuf = gst_buffer_new_and_alloc (outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);

    while (gensamples < outsamples) {
      guint step_insamples = self->block_length - self->buffer_fill;
      guint8 *zeroes = g_new0 (guint8, step_insamples * channels * bps);
      guint8 *out = g_new (guint8, self->block_length * channels * bps);
      guint step_gensamples;

      step_gensamples = self->process (self, zeroes, out, step_insamples);
      g_free (zeroes);

      memcpy (map.data + gensamples * bps, out, MIN (step_gensamples,
              outsamples - gensamples) * bps);
      gensamples += MIN (step_gensamples, outsamples - gensamples);

      g_free (out);
    }
    self->nsamples_out += gensamples;

    gst_buffer_unmap (outbuf, &map);
  }

  /* Set timestamp, offset, etc from the values we
   * saved when processing the regular buffers */
  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    GST_BUFFER_TIMESTAMP (outbuf) = self->start_ts;
  else
    GST_BUFFER_TIMESTAMP (outbuf) = 0;
  GST_BUFFER_TIMESTAMP (outbuf) +=
      gst_util_uint64_scale_int (self->nsamples_out - outsamples -
      self->latency, GST_SECOND, rate);

  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (outsamples, GST_SECOND, rate);

  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) =
        self->start_off + self->nsamples_out - outsamples - self->latency;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET (outbuf) + outsamples;
  }

  GST_DEBUG_OBJECT (self,
      "Pushing residue buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
      gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
      GST_BUFFER_OFFSET_END (outbuf), outsamples);

  res = gst_pad_push (GST_BASE_TRANSFORM_CAST (self)->srcpad, outbuf);

  if (G_UNLIKELY (res != GST_FLOW_OK)) {
    GST_WARNING_OBJECT (self, "failed to push residue");
  }

  self->buffer_fill = 0;
}
Example #3
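/* Chain function of an OpenMAX IL (goo) JPEG encoder wrapper: input data is
 * copied or handed over to OMX input buffers, the encoded result is popped
 * from the OMX output port, wrapped in a GstBuffer with the input
 * timestamps and pushed on the source pad. */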
static GstFlowReturn
gst_goo_encjpeg_chain (GstPad* pad, GstBuffer* buffer)
{
	GST_LOG ("");

	GstGooEncJpeg* self = GST_GOO_ENCJPEG (gst_pad_get_parent (pad));
	GstGooEncJpegPrivate* priv = GST_GOO_ENCJPEG_GET_PRIVATE (self);
	GstFlowReturn ret = GST_FLOW_OK;
	GstGooAdapter* adapter = self->adapter;
	OMX_BUFFERHEADERTYPE* omx_buffer = NULL;

	GstClockTime timestamp, duration;
	guint64 offset, offsetend;
	GstBuffer* outbuf = NULL;

	if (goo_port_is_tunneled (self->inport))
	{
		GST_INFO ("Inport is tunneled");
		ret = GST_FLOW_OK;
		priv->incount++;
		goto process_output;
	}

	if (goo_port_is_eos (self->inport))
	{
		GST_INFO ("port is eos");
		ret = GST_FLOW_UNEXPECTED;
		goto fail;
	}

	if (self->component->cur_state != OMX_StateExecuting)
	{
		goto fail;
	}

	/* let's copy the timestamp meta data */
	timestamp = GST_BUFFER_TIMESTAMP (buffer);
	duration  = GST_BUFFER_DURATION (buffer);
	offset	  = GST_BUFFER_OFFSET (buffer);
	offsetend = GST_BUFFER_OFFSET_END (buffer);

	if (GST_IS_GOO_BUFFER (buffer) &&
	    goo_port_is_my_buffer (self->inport,
				   GST_GOO_BUFFER (buffer)->omx_buffer))
	{
		GST_INFO ("My own OMX buffer");
		priv->incount++;
		gst_buffer_unref (buffer); /* let's push the buffer to omx */
		ret = GST_FLOW_OK;
	}
	else if (GST_IS_GOO_BUFFER (buffer) &&
		 !goo_port_is_my_buffer (self->inport,
					 GST_GOO_BUFFER (buffer)->omx_buffer))
	{
		GST_INFO ("Other OMX buffer");

		if (GST_BUFFER_SIZE (buffer) != priv->omxbufsiz)
		{
			GST_ELEMENT_ERROR (self, STREAM, FORMAT,
					   ("Frame is incomplete (%u!=%u)",
					    GST_BUFFER_SIZE (buffer),
					    priv->omxbufsiz),
					   ("Frame is incomplete (%u!=%u)",
					    GST_BUFFER_SIZE (buffer),
					    priv->omxbufsiz));
			ret = GST_FLOW_ERROR;
		}

		omx_buffer = goo_port_grab_buffer (self->inport);
		memcpy (omx_buffer->pBuffer, GST_BUFFER_DATA (buffer),
			priv->omxbufsiz);
		omx_buffer->nFilledLen = priv->omxbufsiz;
		priv->incount++;
		goo_component_release_buffer (self->component, omx_buffer);
		gst_buffer_unref (buffer);
		ret = GST_FLOW_OK;
	}
	else
	{
		if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
		{
			gst_goo_adapter_clear (adapter);
		}

		GST_LOG ("size = %d bytes", GST_BUFFER_SIZE (buffer));
		gst_goo_adapter_push (adapter, buffer);

		guint tmp = priv->incount;
		while (gst_goo_adapter_available (adapter) >= priv->omxbufsiz &&
		       ret == GST_FLOW_OK)
		{
			GST_DEBUG ("Pushing data to OMX");
			OMX_BUFFERHEADERTYPE* omx_buffer;
			omx_buffer = goo_port_grab_buffer (self->inport);
			gst_goo_adapter_peek (adapter, priv->omxbufsiz,
					      omx_buffer);
			omx_buffer->nFilledLen = priv->omxbufsiz;
			gst_goo_adapter_flush (adapter, priv->omxbufsiz);
			priv->incount++;
			goo_component_release_buffer (self->component,
						      omx_buffer);
			ret = GST_FLOW_OK;
		}

		if (tmp == priv->incount)
		{
			goto done;
		}
	}

process_output:
	if (goo_port_is_tunneled (self->outport))
	{
		outbuf = gst_ghost_buffer_new ();
		GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_READONLY);
		GST_BUFFER_DATA (outbuf)       = NULL;
		GST_BUFFER_SIZE (outbuf)       = 0;
		GST_BUFFER_TIMESTAMP (outbuf)  = timestamp;
		GST_BUFFER_DURATION (outbuf)   = duration;
		GST_BUFFER_OFFSET (outbuf)     = offset;
		GST_BUFFER_OFFSET_END (outbuf) = offsetend;
		gst_buffer_set_caps (outbuf, GST_PAD_CAPS (self->srcpad));
		gst_pad_push (self->srcpad, outbuf);
		goto done;
	}

	GST_DEBUG ("Poping out buffer from OMX");
	omx_buffer = goo_port_grab_buffer (self->outport);

	if (omx_buffer->nFilledLen <= 0)
	{
		ret = GST_FLOW_ERROR;
		goto done;
	}

	if (gst_pad_alloc_buffer (self->srcpad, priv->outcount,
				  omx_buffer->nFilledLen,
				  GST_PAD_CAPS (self->srcpad),
				  &outbuf) == GST_FLOW_OK)
	{
		priv->outcount++;

		/* if the buffer is a goo buffer of the peer element */
		if (GST_IS_GOO_BUFFER (outbuf))
		{
			GST_INFO ("It is a OMX buffer!");
			memcpy (GST_GOO_BUFFER (outbuf)->omx_buffer->pBuffer,
				omx_buffer->pBuffer, omx_buffer->nFilledLen);
			GST_GOO_BUFFER (outbuf)->omx_buffer->nFilledLen =
				omx_buffer->nFilledLen;
			GST_GOO_BUFFER (outbuf)->omx_buffer->nFlags =
				omx_buffer->nFlags;
			GST_GOO_BUFFER (outbuf)->omx_buffer->nTimeStamp =
				GST2OMX_TIMESTAMP (timestamp);
			goo_component_release_buffer (self->component,
						      omx_buffer);
		}
		else
		{
			/* @fixme! */
			/* we do this because there is a buffer extarbation
			 * when a filesink is used.
			 * Maybe using multiple buffers it could be solved.
			 */
			memcpy (GST_BUFFER_DATA (outbuf),
				omx_buffer->pBuffer, omx_buffer->nFilledLen);
			goo_component_release_buffer (self->component,
						      omx_buffer);

/* 			gst_buffer_unref (outbuf); */
/* 			outbuf = GST_BUFFER (gst_goo_buffer_new ()); */
/* 			gst_goo_buffer_set_data (outbuf, */
/* 						 self->component, */
/* 						 omx_buffer); */
		}

		GST_BUFFER_TIMESTAMP (outbuf)  = timestamp;
		GST_BUFFER_DURATION (outbuf)   = duration;
		GST_BUFFER_OFFSET (outbuf)     = offset;
		GST_BUFFER_OFFSET_END (outbuf) = offsetend;

		gst_buffer_set_caps (outbuf, GST_PAD_CAPS (self->srcpad));
		g_signal_emit (G_OBJECT (self),
			       gst_goo_encjpeg_signals[FRAME_ENCODED], 0);
	
		ret = gst_pad_push (self->srcpad, outbuf);
		if (omx_buffer->nFlags & OMX_BUFFERFLAG_EOS ||
		   	goo_port_is_eos (self->outport))
		{
			GST_INFO ("EOS flag found in output buffer (%d)",
			  	omx_buffer->nFilledLen);
			goo_component_set_done (self->component);

		}

		goto done;
	}
	else
	{
		ret = GST_FLOW_ERROR;
		goto done;
	}

fail:
	gst_buffer_unref (buffer);
	gst_goo_adapter_clear (adapter);

done:
	gst_object_unref (self);
	return ret;

}
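Example #4
/* DirectShow fake audio sink callback: converts the media sample's
 * REFERENCE_TIME values to GstClockTime, clips the data against the current
 * segment, copies it into a newly allocated GstBuffer (trimming with a
 * sub-buffer if needed) and pushes it on the decoder's source pad. */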
HRESULT AudioFakeSink::DoRenderSample(IMediaSample *pMediaSample)
{
  GstBuffer *out_buf = NULL;
  gboolean in_seg = FALSE;
  GstClockTime buf_start, buf_stop;
  gint64 clip_start = 0, clip_stop = 0;
  guint start_offset = 0, stop_offset;
  GstClockTime duration;

  if(pMediaSample)
  {
    BYTE *pBuffer = NULL;
    LONGLONG lStart = 0, lStop = 0;
    long size = pMediaSample->GetActualDataLength();

    pMediaSample->GetPointer(&pBuffer);
    pMediaSample->GetTime(&lStart, &lStop);
    
    if (!GST_CLOCK_TIME_IS_VALID (mDec->timestamp)) {
      // Convert REFERENCE_TIME to GST_CLOCK_TIME
      mDec->timestamp = (GstClockTime)lStart * 100;
    }
    duration = (lStop - lStart) * 100;

    buf_start = mDec->timestamp;
    buf_stop = mDec->timestamp + duration;

    /* save stop position to start next buffer with it */
    mDec->timestamp = buf_stop;

    /* check if this buffer is in our current segment */
    in_seg = gst_segment_clip (mDec->segment, GST_FORMAT_TIME,
        buf_start, buf_stop, &clip_start, &clip_stop);

    /* if the buffer is out of segment do not push it downstream */
    if (!in_seg) {
      GST_DEBUG_OBJECT (mDec,
          "buffer is out of segment, start %" GST_TIME_FORMAT " stop %"
          GST_TIME_FORMAT, GST_TIME_ARGS (buf_start), GST_TIME_ARGS (buf_stop));
      goto done;
    }

    /* buffer is entirely or partially in-segment, so allocate a
     * GstBuffer for output, and clip if required */

    /* allocate a new buffer for raw audio */
    mDec->last_ret = gst_pad_alloc_buffer (mDec->srcpad, 
        GST_BUFFER_OFFSET_NONE,
        size,
        GST_PAD_CAPS (mDec->srcpad), &out_buf);
    if (!out_buf) {
      GST_WARNING_OBJECT (mDec, "cannot allocate a new GstBuffer");
      goto done;
    }

    /* set buffer properties */
    GST_BUFFER_TIMESTAMP (out_buf) = buf_start;
    GST_BUFFER_DURATION (out_buf) = duration;
    memcpy (GST_BUFFER_DATA (out_buf), pBuffer,
        MIN ((unsigned int)size, GST_BUFFER_SIZE (out_buf)));

    /* we have to remove some heading samples */
    if ((GstClockTime) clip_start > buf_start) {
      start_offset = (guint)gst_util_uint64_scale_int (clip_start - buf_start,
          mDec->rate, GST_SECOND) * mDec->depth / 8 * mDec->channels;
    }
    else
      start_offset = 0;
    /* we have to remove some trailing samples */
    if ((GstClockTime) clip_stop < buf_stop) {
      stop_offset = (guint)gst_util_uint64_scale_int (buf_stop - clip_stop,
          mDec->rate, GST_SECOND) * mDec->depth / 8 * mDec->channels;
    }
    else
      stop_offset = size;

    /* truncating */
    if ((start_offset != 0) || (stop_offset != (size_t) size)) {
      GstBuffer *subbuf = gst_buffer_create_sub (out_buf, start_offset,
          stop_offset - start_offset);

      if (subbuf) {
        gst_buffer_set_caps (subbuf, GST_PAD_CAPS (mDec->srcpad));
        gst_buffer_unref (out_buf);
        out_buf = subbuf;
      }
    }

    GST_BUFFER_TIMESTAMP (out_buf) = clip_start;
    GST_BUFFER_DURATION (out_buf) = clip_stop - clip_start;

    /* replace the saved stop position by the clipped one */
    mDec->timestamp = clip_stop;

    GST_DEBUG_OBJECT (mDec,
        "push_buffer (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
        " duration %" GST_TIME_FORMAT, size,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out_buf)),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out_buf) +
            GST_BUFFER_DURATION (out_buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (out_buf)));

    mDec->last_ret = gst_pad_push (mDec->srcpad, out_buf);
  }

done:
  return S_OK;
}
Example #5
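/* Serializes the element's TOC into RIFF "cue ", "labl" and "note" chunks,
 * writes them into a freshly allocated buffer and pushes that buffer on the
 * WAV encoder's source pad. */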
static GstFlowReturn
gst_wavenc_write_toc (GstWavEnc * wavenc)
{
  GList *list;
  GstToc *toc;
  GstTocEntry *entry, *subentry;
  GstBuffer *buf;
  GstMapInfo map;
  guint8 *data;
  guint32 ncues, size, cues_size, labls_size, notes_size;

  if (!wavenc->toc) {
    GST_DEBUG_OBJECT (wavenc, "have no toc, checking toc_setter");
    wavenc->toc = gst_toc_setter_get_toc (GST_TOC_SETTER (wavenc));
  }
  if (!wavenc->toc) {
    GST_WARNING_OBJECT (wavenc, "have no toc");
    return GST_FLOW_OK;
  }

  toc = gst_toc_ref (wavenc->toc);
  size = 0;
  cues_size = 0;
  labls_size = 0;
  notes_size = 0;

  /* check if the TOC entries is valid */
  list = gst_toc_get_entries (toc);
  entry = list->data;
  if (gst_toc_entry_is_alternative (entry)) {
    list = gst_toc_entry_get_sub_entries (entry);
    while (list) {
      subentry = list->data;
      if (!gst_toc_entry_is_sequence (subentry))
        return FALSE;
      list = g_list_next (list);
    }
    list = gst_toc_entry_get_sub_entries (entry);
  }
  if (gst_toc_entry_is_sequence (entry)) {
    while (list) {
      entry = list->data;
      if (!gst_toc_entry_is_sequence (entry))
        return FALSE;
      list = g_list_next (list);
    }
    list = gst_toc_get_entries (toc);
  }

  ncues = g_list_length (list);
  GST_DEBUG_OBJECT (wavenc, "number of cue entries: %d", ncues);

  while (list) {
    guint32 id = 0;
    gint64 id64;
    const gchar *uid;

    entry = list->data;
    uid = gst_toc_entry_get_uid (entry);
    id64 = g_ascii_strtoll (uid, NULL, 0);
    /* check if id unique compatible with guint32 else generate random */
    if (id64 >= 0 && gst_wavenc_is_cue_id_unique (id64, wavenc->cues)) {
      id = (guint32) id64;
    } else {
      do {
        id = g_random_int ();
      } while (!gst_wavenc_is_cue_id_unique (id, wavenc->cues));
    }
    gst_wavenc_parse_cue (wavenc, id, entry);
    gst_wavenc_parse_labl (wavenc, id, entry);
    gst_wavenc_parse_note (wavenc, id, entry);
    list = g_list_next (list);
  }

  /* count cues size */
  if (wavenc->cues) {
    cues_size = 24 * g_list_length (wavenc->cues);
    size += 12 + cues_size;
  } else {
    GST_WARNING_OBJECT (wavenc, "cue's not found");
    return FALSE;
  }
  /* count labls size */
  if (wavenc->labls) {
    list = wavenc->labls;
    while (list) {
      GstWavEncLabl *labl;
      labl = list->data;
      labls_size += 8 + GST_ROUND_UP_2 (labl->chunk_data_size);
      list = g_list_next (list);
    }
    size += labls_size;
  }
  /* count notes size */
  if (wavenc->notes) {
    list = wavenc->notes;
    while (list) {
      GstWavEncNote *note;
      note = list->data;
      notes_size += 8 + GST_ROUND_UP_2 (note->chunk_data_size);
      list = g_list_next (list);
    }
    size += notes_size;
  }
  if (wavenc->labls || wavenc->notes) {
    size += 12;
  }

  buf = gst_buffer_new_and_alloc (size);
  gst_buffer_map (buf, &map, GST_MAP_WRITE);
  data = map.data;
  memset (data, 0, size);

  /* write Cue Chunk */
  if (wavenc->cues) {
    memcpy (data, (gchar *) "cue ", 4);
    GST_WRITE_UINT32_LE (data + 4, 4 + cues_size);
    GST_WRITE_UINT32_LE (data + 8, ncues);
    data += 12;
    gst_wavenc_write_cues (&data, wavenc->cues);

    /* write Associated Data List Chunk */
    if (wavenc->labls || wavenc->notes) {
      memcpy (data, (gchar *) "LIST", 4);
      GST_WRITE_UINT32_LE (data + 4, 4 + labls_size + notes_size);
      memcpy (data + 8, (gchar *) "adtl", 4);
      data += 12;
      if (wavenc->labls)
        gst_wavenc_write_labls (&data, wavenc->labls);
      if (wavenc->notes)
        gst_wavenc_write_notes (&data, wavenc->notes);
    }
  }

  /* free resources */
  if (toc)
    gst_toc_unref (toc);
  if (wavenc->cues)
    g_list_free_full (wavenc->cues, g_free);
  if (wavenc->labls)
    g_list_free_full (wavenc->labls, g_free);
  if (wavenc->notes)
    g_list_free_full (wavenc->notes, g_free);

  gst_buffer_unmap (buf, &map);
  wavenc->meta_length += gst_buffer_get_size (buf);

  return gst_pad_push (wavenc->srcpad, buf);
}
Example #6
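/* Streaming loop of the AIFF parser: takes the next chunk of sample data
 * from the adapter (or pulls it from the sink pad), sends pending
 * segment/tag events, stamps offsets and timestamps on the buffer and
 * pushes it downstream, iterating while adapter data remains. */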
static GstFlowReturn
gst_aiff_parse_stream_data (GstAiffParse * aiff)
{
  GstBuffer *buf = NULL;
  GstFlowReturn res = GST_FLOW_OK;
  guint64 desired, obtained;
  GstClockTime timestamp, next_timestamp, duration;
  guint64 pos, nextpos;

iterate_adapter:
  GST_LOG_OBJECT (aiff,
      "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT " , dataleft: %"
      G_GINT64_FORMAT, aiff->offset, aiff->end_offset, aiff->dataleft);

  /* Get the next n bytes and output them */
  if (aiff->dataleft == 0 || aiff->dataleft < aiff->bytes_per_sample)
    goto found_eos;

  /* scale the amount of data by the segment rate so we get equal
   * amounts of data regardless of the playback rate */
  desired =
      MIN (gst_guint64_to_gdouble (aiff->dataleft),
      MAX_BUFFER_SIZE * aiff->segment.abs_rate);

  if (desired >= aiff->bytes_per_sample && aiff->bytes_per_sample > 0)
    desired -= (desired % aiff->bytes_per_sample);

  GST_LOG_OBJECT (aiff, "Fetching %" G_GINT64_FORMAT " bytes of data "
      "from the sinkpad", desired);

  if (aiff->streaming) {
    guint avail = gst_adapter_available (aiff->adapter);

    if (avail < desired) {
      GST_LOG_OBJECT (aiff, "Got only %d bytes of data from the sinkpad",
          avail);
      return GST_FLOW_OK;
    }

    buf = gst_adapter_take_buffer (aiff->adapter, desired);
  } else {
    if ((res = gst_pad_pull_range (aiff->sinkpad, aiff->offset,
                desired, &buf)) != GST_FLOW_OK)
      goto pull_error;
  }

  /* If we have a pending close/start segment, send it now. */
  if (G_UNLIKELY (aiff->close_segment != NULL)) {
    gst_pad_push_event (aiff->srcpad, aiff->close_segment);
    aiff->close_segment = NULL;
  }
  if (G_UNLIKELY (aiff->start_segment != NULL)) {
    gst_pad_push_event (aiff->srcpad, aiff->start_segment);
    aiff->start_segment = NULL;
  }
  if (G_UNLIKELY (aiff->tags != NULL)) {
    gst_element_found_tags_for_pad (GST_ELEMENT_CAST (aiff), aiff->srcpad,
        aiff->tags);
    aiff->tags = NULL;
  }

  obtained = GST_BUFFER_SIZE (buf);

  /* our positions in bytes */
  pos = aiff->offset - aiff->datastart;
  nextpos = pos + obtained;

  /* update offsets, does not overflow. */
  GST_BUFFER_OFFSET (buf) = pos / aiff->bytes_per_sample;
  GST_BUFFER_OFFSET_END (buf) = nextpos / aiff->bytes_per_sample;

  if (aiff->bps > 0) {
    /* and timestamps if we have a bitrate, be careful for overflows */
    timestamp =
        gst_util_uint64_scale_ceil (pos, GST_SECOND, (guint64) aiff->bps);
    next_timestamp =
        gst_util_uint64_scale_ceil (nextpos, GST_SECOND, (guint64) aiff->bps);
    duration = next_timestamp - timestamp;

    /* update current running segment position */
    gst_segment_set_last_stop (&aiff->segment, GST_FORMAT_TIME, next_timestamp);
  } else {
    /* no bitrate, all we know is that the first sample has timestamp 0, all
     * other positions and durations have unknown timestamp. */
    if (pos == 0)
      timestamp = 0;
    else
      timestamp = GST_CLOCK_TIME_NONE;
    duration = GST_CLOCK_TIME_NONE;
    /* update current running segment position with byte offset */
    gst_segment_set_last_stop (&aiff->segment, GST_FORMAT_BYTES, nextpos);
  }
  if (aiff->discont) {
    GST_DEBUG_OBJECT (aiff, "marking DISCONT");
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    aiff->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;
  gst_buffer_set_caps (buf, aiff->caps);

  GST_LOG_OBJECT (aiff,
      "Got buffer. timestamp:%" GST_TIME_FORMAT " , duration:%" GST_TIME_FORMAT
      ", size:%u", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration),
      GST_BUFFER_SIZE (buf));

  if ((res = gst_pad_push (aiff->srcpad, buf)) != GST_FLOW_OK)
    goto push_error;

  if (obtained < aiff->dataleft) {
    aiff->offset += obtained;
    aiff->dataleft -= obtained;
  } else {
    aiff->offset += aiff->dataleft;
    aiff->dataleft = 0;
  }

  /* Iterate until need more data, so adapter size won't grow */
  if (aiff->streaming) {
    GST_LOG_OBJECT (aiff,
        "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT, aiff->offset,
        aiff->end_offset);
    goto iterate_adapter;
  }
  return res;

  /* ERROR */
found_eos:
  {
    GST_DEBUG_OBJECT (aiff, "found EOS");
    return GST_FLOW_UNEXPECTED;
  }
pull_error:
  {
    /* check if we got EOS */
    if (res == GST_FLOW_UNEXPECTED)
      goto found_eos;

    GST_WARNING_OBJECT (aiff,
        "Error getting %" G_GINT64_FORMAT " bytes from the "
        "sinkpad (dataleft = %" G_GINT64_FORMAT ")", desired, aiff->dataleft);
    return res;
  }
push_error:
  {
    GST_INFO_OBJECT (aiff,
        "Error pushing on srcpad %s:%s, reason %s, is linked? = %d",
        GST_DEBUG_PAD_NAME (aiff->srcpad), gst_flow_get_name (res),
        gst_pad_is_linked (aiff->srcpad));
    return res;
  }
}
Example #7
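/* Chain function of the funnel element: re-timestamps incoming buffers to
 * running time, sends an initial newsegment if none was sent yet,
 * renegotiates caps on the source pad when the buffer caps differ and
 * forwards the buffer with gst_pad_push(). */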
static GstFlowReturn
fs_funnel_chain (GstPad * pad, GstBuffer * buffer)
{
  GstFlowReturn res;
  FsFunnel *funnel = FS_FUNNEL (gst_pad_get_parent (pad));
  FsFunnelPadPrivate *priv = gst_pad_get_element_private (pad);
  GstEvent *event = NULL;
  GstClockTime newts;
  GstCaps *padcaps;

  GST_DEBUG_OBJECT (funnel, "received buffer %p", buffer);

  GST_OBJECT_LOCK (funnel);
  if (priv->segment.format == GST_FORMAT_UNDEFINED) {
    GST_WARNING_OBJECT (funnel, "Got buffer without segment,"
        " setting segment [0,inf[");
     gst_segment_set_newsegment_full (&priv->segment, FALSE, 1.0, 1.0,
         GST_FORMAT_TIME, 0, -1, 0);
  }

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (buffer)))
    gst_segment_set_last_stop (&priv->segment, priv->segment.format,
        GST_BUFFER_TIMESTAMP (buffer));

  newts = gst_segment_to_running_time (&priv->segment,
      priv->segment.format, GST_BUFFER_TIMESTAMP (buffer));
  if (newts != GST_BUFFER_TIMESTAMP (buffer)) {
    buffer = gst_buffer_make_metadata_writable (buffer);
    GST_BUFFER_TIMESTAMP (buffer) = newts;
  }

  if (!funnel->has_segment)
  {
    event = gst_event_new_new_segment_full (FALSE, 1.0, 1.0, GST_FORMAT_TIME,
        0, -1, 0);
    funnel->has_segment = TRUE;
  }
  GST_OBJECT_UNLOCK (funnel);

  if (event) {
    if (!gst_pad_push_event (funnel->srcpad, event))
      GST_WARNING_OBJECT (funnel, "Could not push out newsegment event");
  }


  GST_OBJECT_LOCK (pad);
  padcaps = GST_PAD_CAPS (funnel->srcpad);
  GST_OBJECT_UNLOCK (pad);

  if (GST_BUFFER_CAPS (buffer) && GST_BUFFER_CAPS (buffer) != padcaps) {
    if (!gst_pad_set_caps (funnel->srcpad, GST_BUFFER_CAPS (buffer))) {
      res = GST_FLOW_NOT_NEGOTIATED;
      goto out;
    }
  }

  res = gst_pad_push (funnel->srcpad, buffer);

  GST_LOG_OBJECT (funnel, "handled buffer %s", gst_flow_get_name (res));

 out:
  gst_object_unref (funnel);

  return res;
}
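Example #8
/* Source pad task of the HLS demuxer: pops cached fragments from the
 * internal queue, switches source pads and sends a newsegment when caps
 * change, pushes each fragment downstream and handles end-of-playlist,
 * cache and flow errors. */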
static void
gst_hls_demux_loop (GstHLSDemux * demux)
{
  GstBuffer *buf;
  GstFlowReturn ret;

  /* Loop for the source pad task. The task is started when we have
   * received the main playlist from the source element. It tries first to
   * cache the first fragments and then it waits until it has more data in the
   * queue. This task is woken up when we push a new fragment to the queue or
   * when we reached the end of the playlist  */

  if (G_UNLIKELY (demux->need_cache)) {
    if (!gst_hls_demux_cache_fragments (demux))
      goto cache_error;

    /* we can start now the updates thread (only if on playing) */
    if (GST_STATE (demux) == GST_STATE_PLAYING)
      gst_hls_demux_start_update (demux);
    GST_INFO_OBJECT (demux, "First fragments cached successfully");
  }

  if (g_queue_is_empty (demux->queue)) {
    if (demux->end_of_playlist)
      goto end_of_playlist;

    goto pause_task;
  }

  buf = g_queue_pop_head (demux->queue);

  /* Figure out if we need to create/switch pads */
  if (G_UNLIKELY (!demux->srcpad
          || GST_BUFFER_CAPS (buf) != GST_PAD_CAPS (demux->srcpad)
          || demux->need_segment)) {
    switch_pads (demux, GST_BUFFER_CAPS (buf));
    demux->need_segment = TRUE;
  }
  if (demux->need_segment) {
    GstClockTime start = demux->position + demux->position_shift;
    /* And send a newsegment */
    GST_DEBUG_OBJECT (demux, "Sending new-segment. segment start:%"
        GST_TIME_FORMAT, GST_TIME_ARGS (start));
    gst_pad_push_event (demux->srcpad,
        gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
            start, GST_CLOCK_TIME_NONE, start));
    demux->need_segment = FALSE;
    demux->position_shift = 0;
  }

  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
    demux->position += GST_BUFFER_DURATION (buf);

  ret = gst_pad_push (demux->srcpad, buf);
  if (ret != GST_FLOW_OK)
    goto error;

  return;

end_of_playlist:
  {
    GST_DEBUG_OBJECT (demux, "Reached end of playlist, sending EOS");
    gst_pad_push_event (demux->srcpad, gst_event_new_eos ());
    gst_hls_demux_stop (demux);
    return;
  }

cache_error:
  {
    gst_task_pause (demux->task);
    if (!demux->cancelled) {
      GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND,
          ("Could not cache the first fragments"), (NULL));
      gst_hls_demux_stop (demux);
    }
    return;
  }

error:
  {
    /* FIXME: handle error */
    GST_DEBUG_OBJECT (demux, "error, stopping task");
    gst_hls_demux_stop (demux);
    return;
  }

pause_task:
  {
    gst_task_pause (demux->task);
    return;
  }
}
Example #9
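/* Source pad task of the live adder: waits on the pipeline clock until the
 * head buffer's sync time (timestamp + base time + latency) is reached,
 * fixes up timestamps and discont flags, sends a pending newsegment and
 * pushes the buffer on the source pad. */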
static void
gst_live_adder_loop (gpointer data)
{
  GstLiveAdder *adder = GST_LIVE_ADDER (data);
  GstClockTime buffer_timestamp = 0;
  GstClockTime sync_time = 0;
  GstClock *clock = NULL;
  GstClockID id = NULL;
  GstClockReturn ret;
  GstBuffer *buffer = NULL;
  GstFlowReturn result;
  GstEvent *newseg_event = NULL;

  GST_OBJECT_LOCK (adder);

again:

  for (;;) {
    if (adder->srcresult != GST_FLOW_OK)
      goto flushing;
    if (!g_queue_is_empty (adder->buffers))
      break;
    if (check_eos_locked (adder))
      goto eos;
    g_cond_wait (adder->not_empty_cond, GST_OBJECT_GET_LOCK (adder));
  }

  buffer_timestamp = GST_BUFFER_TIMESTAMP (g_queue_peek_head (adder->buffers));

  clock = GST_ELEMENT_CLOCK (adder);

  /* If we have no clock, then we can't do anything.. error */
  if (!clock) {
    if (adder->playing)
      goto no_clock;
    else
      goto push_buffer;
  }

  GST_DEBUG_OBJECT (adder, "sync to timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (buffer_timestamp));

  sync_time = buffer_timestamp + GST_ELEMENT_CAST (adder)->base_time;
  /* add latency, this includes our own latency and the peer latency. */
  sync_time += adder->latency_ms * GST_MSECOND;
  sync_time += adder->peer_latency;

  /* create an entry for the clock */
  id = adder->clock_id = gst_clock_new_single_shot_id (clock, sync_time);
  GST_OBJECT_UNLOCK (adder);

  ret = gst_clock_id_wait (id, NULL);

  GST_OBJECT_LOCK (adder);

  /* and free the entry */
  gst_clock_id_unref (id);
  adder->clock_id = NULL;

  /* at this point, the clock could have been unlocked by a timeout, a new
   * head element was added to the queue or because we are shutting down. Check
   * for shutdown first. */

  if (adder->srcresult != GST_FLOW_OK)
    goto flushing;

  if (ret == GST_CLOCK_UNSCHEDULED) {
    GST_DEBUG_OBJECT (adder,
        "Wait got unscheduled, will retry to push with new buffer");
    goto again;
  }

  if (ret != GST_CLOCK_OK && ret != GST_CLOCK_EARLY)
    goto clock_error;

push_buffer:

  buffer = g_queue_pop_head (adder->buffers);

  if (!buffer)
    goto again;

  /*
   * We make sure the timestamps are exactly contiguous
   * If its only small skew (due to rounding errors), we correct it
   * silently. Otherwise we put the discont flag
   */
  if (GST_CLOCK_TIME_IS_VALID (adder->next_timestamp) &&
      GST_BUFFER_TIMESTAMP (buffer) != adder->next_timestamp) {
    GstClockTimeDiff diff = GST_CLOCK_DIFF (GST_BUFFER_TIMESTAMP (buffer),
        adder->next_timestamp);
    if (diff < 0)
      diff = -diff;

    if (diff < GST_SECOND / adder->rate) {
      GST_BUFFER_TIMESTAMP (buffer) = adder->next_timestamp;
      GST_DEBUG_OBJECT (adder, "Correcting slight skew");
      GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
    } else {
      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
      GST_DEBUG_OBJECT (adder, "Expected buffer at %" GST_TIME_FORMAT
          ", but is at %" GST_TIME_FORMAT ", setting discont",
          GST_TIME_ARGS (adder->next_timestamp),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
    }
  } else {
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  }

  GST_BUFFER_OFFSET (buffer) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_OFFSET_END (buffer) = GST_BUFFER_OFFSET_NONE;

  if (GST_BUFFER_DURATION_IS_VALID (buffer))
    adder->next_timestamp = GST_BUFFER_TIMESTAMP (buffer) +
        GST_BUFFER_DURATION (buffer);
  else
    adder->next_timestamp = GST_CLOCK_TIME_NONE;

  if (adder->segment_pending) {
    /*
     * We set the start at 0, because we re-timestamps to the running time
     */
    newseg_event = gst_event_new_new_segment_full (FALSE, 1.0, 1.0,
        GST_FORMAT_TIME, 0, -1, 0);

    adder->segment_pending = FALSE;
  }

  GST_OBJECT_UNLOCK (adder);

  if (newseg_event)
    gst_pad_push_event (adder->srcpad, newseg_event);

  GST_LOG_OBJECT (adder, "About to push buffer time:%" GST_TIME_FORMAT
      " duration:%" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));

  result = gst_pad_push (adder->srcpad, buffer);
  if (result != GST_FLOW_OK)
    goto pause;

  return;

flushing:
  {
    GST_DEBUG_OBJECT (adder, "we are flushing");
    gst_pad_pause_task (adder->srcpad);
    GST_OBJECT_UNLOCK (adder);
    return;
  }

clock_error:
  {
    gst_pad_pause_task (adder->srcpad);
    GST_OBJECT_UNLOCK (adder);
    GST_ELEMENT_ERROR (adder, STREAM, MUX, ("Error with the clock"),
        ("Error with the clock: %d", ret));
    GST_ERROR_OBJECT (adder, "Error with the clock: %d", ret);
    return;
  }

no_clock:
  {
    gst_pad_pause_task (adder->srcpad);
    GST_OBJECT_UNLOCK (adder);
    GST_ELEMENT_ERROR (adder, STREAM, MUX, ("No available clock"),
        ("No available clock"));
    GST_ERROR_OBJECT (adder, "No available clock");
    return;
  }

pause:
  {
    GST_DEBUG_OBJECT (adder, "pausing task, reason %s",
        gst_flow_get_name (result));

    GST_OBJECT_LOCK (adder);

    /* store result */
    adder->srcresult = result;
    /* we don't post errors or anything because upstream will do that for us
     * when we pass the return value upstream. */
    gst_pad_pause_task (adder->srcpad);
    GST_OBJECT_UNLOCK (adder);
    return;
  }

eos:
  {
    /* store result, we are flushing now */
    GST_DEBUG_OBJECT (adder, "We are EOS, pushing EOS downstream");
    adder->srcresult = GST_FLOW_UNEXPECTED;
    gst_pad_pause_task (adder->srcpad);
    GST_OBJECT_UNLOCK (adder);
    gst_pad_push_event (adder->srcpad, gst_event_new_eos ());
    return;
  }
}
Example #10
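/* pre_push vfunc of the VP8 encoder: first pushes any "invisible" frames
 * stored in the frame's user data (marked DECODE_ONLY, with granulepos
 * offsets), then sets the flags and offsets on the visible output buffer
 * before the base class pushes it. */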
static GstFlowReturn
gst_vp8_enc_pre_push (GstVideoEncoder * video_encoder,
    GstVideoCodecFrame * frame)
{
  GstVP8Enc *encoder;
  GstVPXEnc *vpx_enc;
  GstBuffer *buf;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVP8EncUserData *user_data = gst_video_codec_frame_get_user_data (frame);
  GList *l;
  gint inv_count;
  GstVideoInfo *info;

  GST_DEBUG_OBJECT (video_encoder, "pre_push");

  encoder = GST_VP8_ENC (video_encoder);
  vpx_enc = GST_VPX_ENC (encoder);

  info = &vpx_enc->input_state->info;

  g_assert (user_data != NULL);

  for (inv_count = 0, l = user_data->invisible; l; inv_count++, l = l->next) {
    buf = l->data;
    l->data = NULL;

    /* FIXME : All of this should have already been handled by base classes, no ? */
    if (l == user_data->invisible
        && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance = 0;
    } else {
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance++;
    }

    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DECODE_ONLY);
    GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->output_buffer);
    GST_BUFFER_DURATION (buf) = 0;
    if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) {
      GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
      GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
    } else {
      GST_BUFFER_OFFSET_END (buf) =
          _to_granulepos (frame->presentation_frame_number + 1,
          inv_count, encoder->keyframe_distance);
      GST_BUFFER_OFFSET (buf) =
          gst_util_uint64_scale (frame->presentation_frame_number + 1,
          GST_SECOND * GST_VIDEO_INFO_FPS_D (info),
          GST_VIDEO_INFO_FPS_N (info));
    }

    ret = gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (video_encoder), buf);

    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (encoder, "flow error %d", ret);
      goto done;
    }
  }

  buf = frame->output_buffer;

  /* FIXME : All of this should have already been handled by base classes, no ? */
  if (!user_data->invisible && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance = 0;
  } else {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance++;
  }

  if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) {
    GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
  } else {
    GST_BUFFER_OFFSET_END (buf) =
        _to_granulepos (frame->presentation_frame_number + 1, 0,
        encoder->keyframe_distance);
    GST_BUFFER_OFFSET (buf) =
        gst_util_uint64_scale (frame->presentation_frame_number + 1,
        GST_SECOND * GST_VIDEO_INFO_FPS_D (info), GST_VIDEO_INFO_FPS_N (info));
  }

  GST_LOG_OBJECT (video_encoder, "src ts: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

done:
  return ret;
}
Example #11
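/* Buffer handler installed on a pad by Brasero's transcoder to honour
 * segment boundaries: buffers outside [segment_start, segment_end] are
 * dropped or trimmed, the trimmed part is re-pushed on the peer pad, and
 * EOS is sent once the end of the segment is reached. */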
static gboolean
brasero_transcode_buffer_handler (GstPad *pad,
				  GstBuffer *buffer,
				  BraseroTranscode *self)
{
	BraseroTranscodePrivate *priv;
	GstPad *peer;
	gint64 size;

	priv = BRASERO_TRANSCODE_PRIVATE (self);

	size = GST_BUFFER_SIZE (buffer);

	if (priv->segment_start <= 0 && priv->segment_end <= 0)
		return TRUE;

	/* what we do here is more or less what gstreamer does when seeking:
	 * it reads and processes from 0 to the seek position (I tried).
	 * It even forwards the data before the seek position to the sink (which
	 * is a problem in our case as it would be written) */
	if (priv->size > priv->segment_end) {
		priv->size += size;
		return FALSE;
	}

	if (priv->size + size > priv->segment_end) {
		GstBuffer *new_buffer;
		int data_size;

		/* the entire buffer is not interesting for us */
		/* create a new buffer and push it on the pad:
		 * NOTE: we're going to receive it ... */
		data_size = priv->segment_end - priv->size;
		new_buffer = gst_buffer_new_and_alloc (data_size);
		memcpy (GST_BUFFER_DATA (new_buffer), GST_BUFFER_DATA (buffer), data_size);

		/* Recursive: the following calls ourselves BEFORE we finish */
		peer = gst_pad_get_peer (pad);
		gst_pad_push (peer, new_buffer);

		priv->size += size - data_size;

		/* post an EOS event to stop pipeline */
		gst_pad_push_event (peer, gst_event_new_eos ());
		gst_object_unref (peer);
		return FALSE;
	}

	/* see if the buffer is in the segment */
	if (priv->size < priv->segment_start) {
		GstBuffer *new_buffer;
		gint data_size;

		/* see if all the buffer is interesting for us */
		if (priv->size + size < priv->segment_start) {
			priv->size += size;
			return FALSE;
		}

		/* create a new buffer and push it on the pad:
		 * NOTE: we're going to receive it ... */
		data_size = priv->size + size - priv->segment_start;
		new_buffer = gst_buffer_new_and_alloc (data_size);
		memcpy (GST_BUFFER_DATA (new_buffer),
			GST_BUFFER_DATA (buffer) +
			GST_BUFFER_SIZE (buffer) -
			data_size,
			data_size);
		GST_BUFFER_TIMESTAMP (new_buffer) = GST_BUFFER_TIMESTAMP (buffer) + data_size;

		/* move forward by the size of bytes we dropped */
		priv->size += size - data_size;

		/* this is recursive the following calls ourselves 
		 * BEFORE we finish */
		peer = gst_pad_get_peer (pad);
		gst_pad_push (peer, new_buffer);
		gst_object_unref (peer);

		return FALSE;
	}

	priv->size += size;
	priv->pos += size;

	return TRUE;
}
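Example #12
/* Drains the delayed (B-frame) queue of the FFmpeg video encoder: keeps
 * calling avcodec_encode_video() with a NULL picture, wraps each encoded
 * chunk in a GstBuffer carrying the queued input buffer's timestamps and
 * either pushes it downstream or drops it, depending on 'send'. */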
static void
gst_ffmpegenc_flush_buffers (GstFFMpegEnc * ffmpegenc, gboolean send)
{
  GstBuffer *outbuf, *inbuf;
  gint ret_size;

  GST_DEBUG_OBJECT (ffmpegenc, "flushing buffers with sending %d", send);

  /* no need to empty codec if there is none */
  if (!ffmpegenc->opened)
    goto flush;

  while (!g_queue_is_empty (ffmpegenc->delay)) {

    ffmpegenc_setup_working_buf (ffmpegenc);

    ret_size = avcodec_encode_video (ffmpegenc->context,
        ffmpegenc->working_buf, ffmpegenc->working_buf_size, NULL);

    if (ret_size < 0) {         /* there should be something, notify and give up */
#ifndef GST_DISABLE_GST_DEBUG
      GstFFMpegEncClass *oclass =
          (GstFFMpegEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
      GST_WARNING_OBJECT (ffmpegenc,
          "ffenc_%s: failed to flush buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
      break;
    }

    /* save stats info if there is some as well as a stats file */
    if (ffmpegenc->file && ffmpegenc->context->stats_out)
      if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
        GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
            (("Could not write to file \"%s\"."), ffmpegenc->filename),
            GST_ERROR_SYSTEM);

    /* handle b-frame delay when no output, so we don't output empty frames */
    inbuf = g_queue_pop_head (ffmpegenc->delay);

    outbuf = gst_buffer_new_and_alloc (ret_size);
    memcpy (GST_BUFFER_DATA (outbuf), ffmpegenc->working_buf, ret_size);
    GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (inbuf);
    GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (inbuf);

    if (!ffmpegenc->context->coded_frame->key_frame)
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
    gst_buffer_set_caps (outbuf, GST_PAD_CAPS (ffmpegenc->srcpad));

    gst_buffer_unref (inbuf);

    if (send)
      gst_pad_push (ffmpegenc->srcpad, outbuf);
    else
      gst_buffer_unref (outbuf);
  }

flush:
  {
    /* make sure that we empty the queue, is still needed if we had to break */
    while (!g_queue_is_empty (ffmpegenc->delay))
      gst_buffer_unref (g_queue_pop_head (ffmpegenc->delay));
  }
}
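Example #13
/* Chain function of the FFmpeg video encoder: fills an AVPicture from the
 * input buffer, encodes it, copies the encoded bytes into a new GstBuffer
 * with the (delay-corrected) input timestamps, optionally emits a
 * GstForceKeyUnit event and pushes the buffer on the source pad. */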
static GstFlowReturn
gst_ffmpegenc_chain_video (GstPad * pad, GstBuffer * inbuf)
{
  GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) (GST_PAD_PARENT (pad));
  GstBuffer *outbuf;
  gint ret_size = 0, frame_size;
  gboolean force_keyframe;

  GST_DEBUG_OBJECT (ffmpegenc,
      "Received buffer of time %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf)));

  GST_OBJECT_LOCK (ffmpegenc);
  force_keyframe = ffmpegenc->force_keyframe;
  ffmpegenc->force_keyframe = FALSE;
  GST_OBJECT_UNLOCK (ffmpegenc);

  if (force_keyframe)
    ffmpegenc->picture->pict_type = FF_I_TYPE;

  frame_size = gst_ffmpeg_avpicture_fill ((AVPicture *) ffmpegenc->picture,
      GST_BUFFER_DATA (inbuf),
      ffmpegenc->context->pix_fmt,
      ffmpegenc->context->width, ffmpegenc->context->height);
  g_return_val_if_fail (frame_size == GST_BUFFER_SIZE (inbuf), GST_FLOW_ERROR);

  ffmpegenc->picture->pts =
      gst_ffmpeg_time_gst_to_ff (GST_BUFFER_TIMESTAMP (inbuf) /
      ffmpegenc->context->ticks_per_frame, ffmpegenc->context->time_base);

  ffmpegenc_setup_working_buf (ffmpegenc);

  ret_size = avcodec_encode_video (ffmpegenc->context,
      ffmpegenc->working_buf, ffmpegenc->working_buf_size, ffmpegenc->picture);

  if (ret_size < 0) {
#ifndef GST_DISABLE_GST_DEBUG
    GstFFMpegEncClass *oclass =
        (GstFFMpegEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
    GST_ERROR_OBJECT (ffmpegenc,
        "ffenc_%s: failed to encode buffer", oclass->in_plugin->name);
#endif /* GST_DISABLE_GST_DEBUG */
    gst_buffer_unref (inbuf);
    return GST_FLOW_OK;
  }

  /* handle b-frame delay when no output, so we don't output empty frames;
   * timestamps and so can permute a bit between coding and display order
   * but keyframes should still end up with the proper metadata */
  g_queue_push_tail (ffmpegenc->delay, inbuf);
  if (ret_size)
    inbuf = g_queue_pop_head (ffmpegenc->delay);
  else
    return GST_FLOW_OK;

  /* save stats info if there is some as well as a stats file */
  if (ffmpegenc->file && ffmpegenc->context->stats_out)
    if (fprintf (ffmpegenc->file, "%s", ffmpegenc->context->stats_out) < 0)
      GST_ELEMENT_ERROR (ffmpegenc, RESOURCE, WRITE,
          (("Could not write to file \"%s\"."), ffmpegenc->filename),
          GST_ERROR_SYSTEM);

  outbuf = gst_buffer_new_and_alloc (ret_size);
  memcpy (GST_BUFFER_DATA (outbuf), ffmpegenc->working_buf, ret_size);
  GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (inbuf);
  GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (inbuf);
  /* buggy codec may not set coded_frame */
  if (ffmpegenc->context->coded_frame) {
    if (!ffmpegenc->context->coded_frame->key_frame)
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
  } else
    GST_WARNING_OBJECT (ffmpegenc, "codec did not provide keyframe info");
  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (ffmpegenc->srcpad));

  gst_buffer_unref (inbuf);

  /* Reset frame type */
  if (ffmpegenc->picture->pict_type)
    ffmpegenc->picture->pict_type = 0;

  if (force_keyframe) {
    gst_pad_push_event (ffmpegenc->srcpad,
        gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
            gst_structure_new ("GstForceKeyUnit",
                "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP (outbuf),
                NULL)));
  }

  return gst_pad_push (ffmpegenc->srcpad, outbuf);
}
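Example #14
/* Pushes the queued Kate stream headers: sets them on the source pad caps,
 * feeds each header to kate_decode_headerin(), then pushes the header
 * buffers downstream before normal parsing continues. */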
static GstFlowReturn
gst_kate_parse_push_headers (GstKateParse * parse)
{
    /* mark and put on caps */
    GstCaps *caps;
    GstBuffer *outbuf;
    kate_packet packet;
    GList *headers, *outbuf_list = NULL;
    int ret;
    gboolean res;

    /* get the headers into the caps, passing them to kate as we go */
    caps =
        gst_kate_util_set_header_on_caps (&parse->element,
                                          gst_pad_get_negotiated_caps (parse->sinkpad), parse->streamheader);

    if (G_UNLIKELY (!caps)) {
        GST_ELEMENT_ERROR (parse, STREAM, DECODE, (NULL),
                           ("Failed to set headers on caps"));
        return GST_FLOW_ERROR;
    }

    GST_DEBUG_OBJECT (parse, "here are the caps: %" GST_PTR_FORMAT, caps);
    res = gst_pad_set_caps (parse->srcpad, caps);
    gst_caps_unref (caps);
    if (G_UNLIKELY (!res)) {
        GST_WARNING_OBJECT (parse->srcpad, "Failed to set caps on source pad");
        return GST_FLOW_NOT_NEGOTIATED;
    }

    headers = parse->streamheader;
    while (headers) {
        outbuf = GST_BUFFER_CAST (headers->data);
        kate_packet_wrap (&packet, GST_BUFFER_SIZE (outbuf),
                          GST_BUFFER_DATA (outbuf));
        ret = kate_decode_headerin (&parse->ki, &parse->kc, &packet);
        if (G_UNLIKELY (ret < 0)) {
            GST_WARNING_OBJECT (parse, "kate_decode_headerin returned %d", ret);
        }
        /* takes ownership of outbuf, which was previously in parse->streamheader */
        outbuf_list = g_list_append (outbuf_list, outbuf);
        headers = headers->next;
    }

    /* first process queued events */
    gst_kate_parse_drain_event_queue (parse);

    /* push out buffers, ignoring return value... */
    headers = outbuf_list;
    while (headers) {
        outbuf = GST_BUFFER_CAST (headers->data);
        gst_buffer_set_caps (outbuf, GST_PAD_CAPS (parse->srcpad));
        gst_pad_push (parse->srcpad, outbuf);
        headers = headers->next;
    }

    g_list_free (outbuf_list);
    g_list_free (parse->streamheader);
    parse->streamheader = NULL;

    parse->streamheader_sent = TRUE;

    return GST_FLOW_OK;
}
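Example #15
/* Handles a lost RTP packet in the ULPFEC decoder: if the missing seqnum
 * can be rebuilt from the stored FEC packets, a re-sequenced copy is pushed
 * on the source pad and FALSE is returned; other recovered packets are put
 * back into storage. */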
static gboolean
gst_rtp_ulpfec_dec_handle_packet_loss (GstRtpUlpFecDec * self, guint16 seqnum,
    GstClockTime timestamp, GstClockTime duration)
{
  gint caps_pt = self->have_caps_pt ? self->caps_pt : -1;
  gboolean ret = TRUE;
  GstBufferList *buflist =
      rtp_storage_get_packets_for_recovery (self->storage, self->fec_pt,
      self->caps_ssrc, seqnum);

  if (buflist) {
    GstBuffer *recovered_buffer = NULL;
    guint16 recovered_seq = 0;
    guint8 recovered_pt = 0;

    gst_rtp_ulpfec_dec_start (self, buflist, self->fec_pt, seqnum);

    while (NULL != (recovered_buffer =
            gst_rtp_ulpfec_dec_recover (self, self->caps_ssrc, caps_pt,
                &recovered_pt, &recovered_seq))) {
      if (seqnum == recovered_seq) {
        GstBuffer *sent_buffer;
        GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

        recovered_buffer = gst_buffer_make_writable (recovered_buffer);
        GST_BUFFER_PTS (recovered_buffer) = timestamp;
        /* GST_BUFFER_DURATION (recovered_buffer) = duration;
         * JB does not set the duration, so we will not too */

        if (!self->lost_packet_from_storage)
          rtp_storage_put_recovered_packet (self->storage,
              recovered_buffer, recovered_pt, self->caps_ssrc, recovered_seq);

        GST_DEBUG_OBJECT (self,
            "Pushing recovered packet ssrc=0x%08x seq=%u %" GST_PTR_FORMAT,
            self->caps_ssrc, seqnum, recovered_buffer);

        sent_buffer = gst_buffer_copy_deep (recovered_buffer);

        gst_rtp_buffer_map (sent_buffer, GST_MAP_WRITE, &rtp);
        gst_rtp_buffer_set_seq (&rtp, self->next_seqnum++);
        gst_rtp_buffer_unmap (&rtp);

        ret = FALSE;
        self->unset_discont_flag = TRUE;
        self->chain_return_val = gst_pad_push (self->srcpad, sent_buffer);
        break;
      }

      rtp_storage_put_recovered_packet (self->storage,
          recovered_buffer, recovered_pt, self->caps_ssrc, recovered_seq);
    }

    gst_rtp_ulpfec_dec_stop (self);
    gst_buffer_list_unref (buflist);
  }

  GST_DEBUG_OBJECT (self, "Packet lost ssrc=0x%08x seq=%u", self->caps_ssrc,
      seqnum);

  return ret;
}
Example #16
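/* Chain function of the monoscope visualizer: accumulates audio samples in
 * an adapter and, for every spf samples, renders a video frame with
 * monoscope_update() and pushes it downstream with matching timestamps. */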
static GstFlowReturn
gst_monoscope_chain (GstPad * pad, GstBuffer * inbuf)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstMonoscope *monoscope;

  monoscope = GST_MONOSCOPE (GST_PAD_PARENT (pad));

  /* don't try to combine samples from discont buffer */
  if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (monoscope->adapter);
    monoscope->next_ts = GST_CLOCK_TIME_NONE;
  }

  /* Match timestamps from the incoming audio */
  if (GST_BUFFER_TIMESTAMP (inbuf) != GST_CLOCK_TIME_NONE)
    monoscope->next_ts = GST_BUFFER_TIMESTAMP (inbuf);

  GST_LOG_OBJECT (monoscope, "in buffer has %d samples, ts=%" GST_TIME_FORMAT,
      GST_BUFFER_SIZE (inbuf) / monoscope->bps,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf)));

  gst_adapter_push (monoscope->adapter, inbuf);
  inbuf = NULL;

  /* Collect samples until we have enough for an output frame */
  while (flow_ret == GST_FLOW_OK) {
    gint16 *samples;
    GstBuffer *outbuf = NULL;
    guint32 *pixels, avail, bytesperframe;

    avail = gst_adapter_available (monoscope->adapter);
    GST_LOG_OBJECT (monoscope, "bytes avail now %u", avail);

    /* do negotiation if not done yet, so ->spf etc. is set */
    if (GST_PAD_CAPS (monoscope->srcpad) == NULL) {
      flow_ret = get_buffer (monoscope, &outbuf);
      if (flow_ret != GST_FLOW_OK)
        goto out;
      gst_buffer_unref (outbuf);
      outbuf = NULL;
    }

    bytesperframe = monoscope->spf * monoscope->bps;
    if (avail < bytesperframe)
      break;

    /* FIXME: something is wrong with QoS, we are skipping way too much
     * stuff even with very low CPU loads */
#if 0
    if (monoscope->next_ts != -1) {
      gboolean need_skip;
      gint64 qostime;

      qostime = gst_segment_to_running_time (&monoscope->segment,
          GST_FORMAT_TIME, monoscope->next_ts);

      GST_OBJECT_LOCK (monoscope);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip =
          GST_CLOCK_TIME_IS_VALID (monoscope->earliest_time) &&
          qostime <= monoscope->earliest_time;
      GST_OBJECT_UNLOCK (monoscope);

      if (need_skip) {
        GST_WARNING_OBJECT (monoscope,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (monoscope->earliest_time));
        goto skip;
      }
    }
#endif

    samples = (gint16 *) gst_adapter_peek (monoscope->adapter, bytesperframe);

    if (monoscope->spf < 512) {
      gint16 in_data[512], i;

      for (i = 0; i < 512; ++i) {
        gdouble off;

        off = ((gdouble) i * (gdouble) monoscope->spf) / 512.0;
        in_data[i] = samples[MIN ((guint) off, monoscope->spf)];
      }
      pixels = monoscope_update (monoscope->visstate, in_data);
    } else {
      /* not really correct, but looks much prettier */
      pixels = monoscope_update (monoscope->visstate, samples);
    }

    flow_ret = get_buffer (monoscope, &outbuf);
    if (flow_ret != GST_FLOW_OK)
      goto out;

    memcpy (GST_BUFFER_DATA (outbuf), pixels, monoscope->outsize);

    GST_BUFFER_TIMESTAMP (outbuf) = monoscope->next_ts;
    GST_BUFFER_DURATION (outbuf) = monoscope->frame_duration;

    flow_ret = gst_pad_push (monoscope->srcpad, outbuf);

#if 0
  skip:
#endif

    if (GST_CLOCK_TIME_IS_VALID (monoscope->next_ts))
      monoscope->next_ts += monoscope->frame_duration;

    gst_adapter_flush (monoscope->adapter, bytesperframe);
  }

out:

  return flow_ret;
}
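Example #17
/* Extracts GstVideoCaptionMeta from a video buffer: creates the caption
 * source pad (with caps matching the caption type) on first use, copies the
 * caption bytes into a new buffer carrying the video buffer's timestamps
 * and pushes it on the caption pad. */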
static GstFlowReturn
gst_cc_extractor_handle_meta (GstCCExtractor * filter, GstBuffer * buf,
    GstVideoCaptionMeta * meta)
{
  GstBuffer *outbuf = NULL;
  GstEvent *event;
  gchar *captionid;
  GstFlowReturn flow;

  GST_DEBUG_OBJECT (filter, "Handling meta");

  /* Check if the meta type matches the configured one */
  if (filter->captionpad != NULL && meta->caption_type != filter->caption_type) {
    GST_ERROR_OBJECT (filter,
        "GstVideoCaptionMeta type changed, Not handled currently");
    flow = GST_FLOW_NOT_NEGOTIATED;
    goto out;
  }

  if (filter->captionpad == NULL) {
    GstCaps *caption_caps = NULL;
    GstEvent *stream_event;

    GST_DEBUG_OBJECT (filter, "Creating new caption pad");
    switch (meta->caption_type) {
      case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:
        caption_caps =
            gst_caps_from_string ("closedcaption/x-cea-608,format=(string)raw");
        break;
      case GST_VIDEO_CAPTION_TYPE_CEA608_IN_CEA708_RAW:
        caption_caps =
            gst_caps_from_string
            ("closedcaption/x-cea-608,format=(string)cc_data");
        break;
      case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:
        caption_caps =
            gst_caps_from_string
            ("closedcaption/x-cea-708,format=(string)cc_data");
        break;
      case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:
        caption_caps =
            gst_caps_from_string ("closedcaption/x-cea-708,format=(string)cdp");
        break;
      default:
        break;
    }
    if (caption_caps == NULL) {
      GST_ERROR_OBJECT (filter, "Unknown/invalid caption type");
      return GST_FLOW_NOT_NEGOTIATED;
    }

    /* Create the caption pad and set the caps */
    filter->captionpad =
        gst_pad_new_from_static_template (&captiontemplate, "caption");
    gst_pad_set_iterate_internal_links_function (filter->sinkpad,
        GST_DEBUG_FUNCPTR (gst_cc_extractor_iterate_internal_links));
    gst_pad_set_active (filter->captionpad, TRUE);
    gst_element_add_pad (GST_ELEMENT (filter), filter->captionpad);
    gst_flow_combiner_add_pad (filter->combiner, filter->captionpad);

    captionid =
        gst_pad_create_stream_id (filter->captionpad, (GstElement *) filter,
        "caption");
    stream_event = gst_event_new_stream_start (captionid);
    g_free (captionid);

    /* FIXME : Create a proper stream-id */
    if ((event =
            gst_pad_get_sticky_event (filter->srcpad, GST_EVENT_STREAM_START,
                0))) {
      guint group_id;
      if (gst_event_parse_group_id (event, &group_id))
        gst_event_set_group_id (stream_event, group_id);
      gst_event_unref (event);
    }
    gst_pad_push_event (filter->captionpad, stream_event);
    gst_pad_set_caps (filter->captionpad, caption_caps);
    gst_caps_unref (caption_caps);

    /* Carry over sticky events */
    if ((event =
            gst_pad_get_sticky_event (filter->srcpad, GST_EVENT_SEGMENT, 0)))
      gst_pad_push_event (filter->captionpad, event);
    if ((event = gst_pad_get_sticky_event (filter->srcpad, GST_EVENT_TAG, 0)))
      gst_pad_push_event (filter->captionpad, event);

    filter->caption_type = meta->caption_type;
  }

  GST_DEBUG_OBJECT (filter,
      "Creating new buffer of size %" G_GSIZE_FORMAT " bytes", meta->size);
  /* Extract caption data into new buffer with identical buffer timestamps */
  outbuf = gst_buffer_new_allocate (NULL, meta->size, NULL);
  gst_buffer_fill (outbuf, 0, meta->data, meta->size);
  GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buf);
  GST_BUFFER_DTS (outbuf) = GST_BUFFER_DTS (buf);
  GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf);

  /* push; the flow combiner below decides the overall return value */
  flow = gst_pad_push (filter->captionpad, outbuf);

out:
  /* Set flow return on pad and return combined value */
  return gst_flow_combiner_update_pad_flow (filter->combiner,
      filter->captionpad, flow);
}
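
For reference, the metadata this function consumes is attached upstream with gst_buffer_add_video_caption_meta() from the gst-plugins-base video library. A hedged sketch of that producing side (the cdp_data pointer and length are placeholders, the helper name is made up):

#include <gst/video/video.h>

static void
attach_cdp_caption (GstBuffer * buf, const guint8 * cdp_data, gsize cdp_len)
{
  /* tag the buffer with CEA-708 CDP caption bytes; the extractor above
   * will route them to its "caption" pad */
  gst_buffer_add_video_caption_meta (buf, GST_VIDEO_CAPTION_TYPE_CEA708_CDP,
      cdp_data, cdp_len);
}
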
static GstFlowReturn
gst_gdk_pixbuf_flush (GstGdkPixbuf * filter)
{
    GstBuffer *outbuf;
    GdkPixbuf *pixbuf;
    int y;
    guint8 *out_pix;
    guint8 *in_pix;
    int in_rowstride;
    GstFlowReturn ret;
    GstCaps *caps = NULL;
    gint n_channels;

    pixbuf = gdk_pixbuf_loader_get_pixbuf (filter->pixbuf_loader);
    if (pixbuf == NULL)
        goto no_pixbuf;

    if (filter->image_size == 0) {
        filter->width = gdk_pixbuf_get_width (pixbuf);
        filter->height = gdk_pixbuf_get_height (pixbuf);
        filter->rowstride = gdk_pixbuf_get_rowstride (pixbuf);
        filter->image_size = filter->rowstride * filter->height;

        n_channels = gdk_pixbuf_get_n_channels (pixbuf);
        switch (n_channels) {
        case 3:
            caps = gst_caps_from_string (GST_VIDEO_CAPS_RGB);
            break;
        case 4:
            caps = gst_caps_from_string (GST_VIDEO_CAPS_RGBA);
            break;
        default:
            goto channels_not_supported;
        }

        gst_caps_set_simple (caps,
                             "width", G_TYPE_INT, filter->width,
                             "height", G_TYPE_INT, filter->height,
                             "framerate", GST_TYPE_FRACTION, filter->framerate_numerator,
                             filter->framerate_denominator, NULL);

        GST_DEBUG ("Set size to %dx%d", filter->width, filter->height);
        gst_pad_set_caps (filter->srcpad, caps);
        gst_caps_unref (caps);
    }

    ret = gst_pad_alloc_buffer_and_set_caps (filter->srcpad,
            GST_BUFFER_OFFSET_NONE,
            filter->image_size, GST_PAD_CAPS (filter->srcpad), &outbuf);

    if (ret != GST_FLOW_OK)
        goto no_buffer;

    GST_BUFFER_TIMESTAMP (outbuf) = filter->last_timestamp;
    GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;

    in_pix = gdk_pixbuf_get_pixels (pixbuf);
    in_rowstride = gdk_pixbuf_get_rowstride (pixbuf);
    out_pix = GST_BUFFER_DATA (outbuf);

    /* FIXME, last line might not have rowstride pixels */
    for (y = 0; y < filter->height; y++) {
        memcpy (out_pix, in_pix, filter->rowstride);
        in_pix += in_rowstride;
        out_pix += filter->rowstride;
    }

    GST_DEBUG ("pushing... %d bytes", GST_BUFFER_SIZE (outbuf));
    ret = gst_pad_push (filter->srcpad, outbuf);

    if (ret != GST_FLOW_OK)
        GST_DEBUG_OBJECT (filter, "flow: %s", gst_flow_get_name (ret));

    return ret;

    /* ERRORS */
no_pixbuf:
    {
        GST_ELEMENT_ERROR (filter, STREAM, DECODE, (NULL), ("error geting pixbuf"));
        return GST_FLOW_ERROR;
    }
channels_not_supported:
    {
        GST_ELEMENT_ERROR (filter, STREAM, DECODE, (NULL),
                           ("%d channels not supported", n_channels));
        return GST_FLOW_ERROR;
    }
no_buffer:
    {
        GST_DEBUG ("Failed to create outbuffer - %s", gst_flow_get_name (ret));
        return ret;
    }
}
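
The flush function above only reads from filter->pixbuf_loader; a chain function elsewhere has to feed compressed image data into the loader first. A minimal, hedged sketch of that feeding side in the same 0.10 style (the function name is an assumption; GstGdkPixbuf and pixbuf_loader are taken from the function above):

static GstFlowReturn
gst_gdk_pixbuf_feed (GstGdkPixbuf * filter, GstBuffer * buf)
{
  GError *err = NULL;

  /* hand the encoded bytes to gdk-pixbuf; decoding happens incrementally */
  if (!gdk_pixbuf_loader_write (filter->pixbuf_loader,
          GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf), &err)) {
    GST_ELEMENT_ERROR (filter, STREAM, DECODE, (NULL),
        ("gdk_pixbuf_loader_write failed: %s", err->message));
    g_error_free (err);
    return GST_FLOW_ERROR;
  }

  return GST_FLOW_OK;
}
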
static GstFlowReturn
gst_wavpack_parse_push_buffer (GstWavpackParse * wvparse, GstBuffer * buf,
    WavpackHeader * header)
{
  GstFlowReturn ret;

  wvparse->current_offset += header->ckSize + 8;
  wvparse->segment.last_stop = header->block_index;

  if (wvparse->need_newsegment) {
    if (gst_wavpack_parse_send_newsegment (wvparse, FALSE))
      wvparse->need_newsegment = FALSE;
  }

  /* send any queued events */
  if (wvparse->queued_events) {
    GList *l;

    for (l = wvparse->queued_events; l != NULL; l = l->next) {
      gst_pad_push_event (wvparse->srcpad, GST_EVENT (l->data));
    }
    g_list_free (wvparse->queued_events);
    wvparse->queued_events = NULL;
  }

  if (wvparse->pending_buffer == NULL) {
    wvparse->pending_buffer = buf;
    wvparse->pending_offset = header->block_index;
  } else if (wvparse->pending_offset == header->block_index) {
    wvparse->pending_buffer = gst_buffer_join (wvparse->pending_buffer, buf);
  } else {
    GST_ERROR ("Got incomplete block, dropping");
    gst_buffer_unref (wvparse->pending_buffer);
    wvparse->pending_buffer = buf;
    wvparse->pending_offset = header->block_index;
  }

  if (!(header->flags & FINAL_BLOCK))
    return GST_FLOW_OK;

  buf = wvparse->pending_buffer;
  wvparse->pending_buffer = NULL;

  GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (header->block_index,
      GST_SECOND, wvparse->samplerate);
  GST_BUFFER_DURATION (buf) = gst_util_uint64_scale_int (header->block_samples,
      GST_SECOND, wvparse->samplerate);
  GST_BUFFER_OFFSET (buf) = header->block_index;
  GST_BUFFER_OFFSET_END (buf) = header->block_index + header->block_samples;

  if (wvparse->discont || wvparse->next_block_index != header->block_index) {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    wvparse->discont = FALSE;
  }

  wvparse->next_block_index = header->block_index + header->block_samples;

  gst_buffer_set_caps (buf, GST_PAD_CAPS (wvparse->srcpad));

  GST_LOG_OBJECT (wvparse, "Pushing buffer with time %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  ret = gst_pad_push (wvparse->srcpad, buf);

  wvparse->segment.last_stop = wvparse->next_block_index;

  return ret;
}
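
The timestamp and duration above rely on gst_util_uint64_scale_int(), which computes val * num / denom without intermediate 64-bit overflow. A quick illustration with round numbers:

/* 24000 samples at a 48000 Hz sample rate last exactly half a second */
GstClockTime t = gst_util_uint64_scale_int (24000, GST_SECOND, 48000);
/* t == GST_SECOND / 2 == 500 * GST_MSECOND */
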
static void
gst_cdxa_parse_loop (GstPad * sinkpad)
{
  GstFlowReturn flow_ret;
  GstCDXAParse *cdxa;
  GstBuffer *buf = NULL;
  gint sync_offset = -1;

  cdxa = GST_CDXA_PARSE (GST_PAD_PARENT (sinkpad));

  if (cdxa->datasize <= 0) {
    GstFormat format = GST_FORMAT_BYTES;
    GstPad *peer;

    if ((peer = gst_pad_get_peer (sinkpad))) {
      if (!gst_pad_query_duration (peer, &format, &cdxa->datasize)) {
        GST_DEBUG_OBJECT (cdxa, "Failed to query upstream size!");
        gst_object_unref (peer);
        goto pause;
      }
      gst_object_unref (peer);
    }
    GST_DEBUG_OBJECT (cdxa, "Upstream size: %" G_GINT64_FORMAT, cdxa->datasize);
  }

  do {
    guint req;

    req = 8 + GST_CDXA_SECTOR_SIZE;     /* riff chunk header = 8 bytes */

    buf = NULL;
    flow_ret = gst_pad_pull_range (cdxa->sinkpad, cdxa->offset, req, &buf);

    if (flow_ret != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (cdxa, "Pull flow: %s", gst_flow_get_name (flow_ret));
      goto pause;
    }

    if (GST_BUFFER_SIZE (buf) < req) {
      GST_DEBUG_OBJECT (cdxa, "Short read, only got %u/%u bytes",
          GST_BUFFER_SIZE (buf), req);
      goto eos;
    }

    sync_offset = gst_cdxa_parse_sync (buf);
    gst_buffer_unref (buf);
    buf = NULL;

    if (sync_offset >= 0)
      break;

    cdxa->offset += req;
    cdxa->bytes_skipped += req;
  } while (1);

  cdxa->offset += sync_offset;
  cdxa->bytes_skipped += sync_offset;

  /* first sync frame? */
  if (cdxa->datastart < 0) {
    GST_LOG_OBJECT (cdxa, "datastart=0x%" G_GINT64_MODIFIER "x", cdxa->offset);
    cdxa->datastart = cdxa->offset;
    cdxa->bytes_skipped = 0;
    cdxa->bytes_sent = 0;
  }

  GST_DEBUG_OBJECT (cdxa, "pulling buffer at offset 0x%" G_GINT64_MODIFIER "x",
      cdxa->offset);

  buf = NULL;
  flow_ret = gst_pad_pull_range (cdxa->sinkpad, cdxa->offset,
      GST_CDXA_SECTOR_SIZE, &buf);

  if (flow_ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (cdxa, "Flow: %s", gst_flow_get_name (flow_ret));
    goto pause;
  }

  if (GST_BUFFER_SIZE (buf) < GST_CDXA_SECTOR_SIZE) {
    GST_DEBUG_OBJECT (cdxa, "Short read, only got %u/%u bytes",
        GST_BUFFER_SIZE (buf), GST_CDXA_SECTOR_SIZE);
    goto eos;
  }

  buf = gst_cdxa_parse_strip (buf);

  GST_DEBUG_OBJECT (cdxa, "pushing buffer %p", buf);
  gst_buffer_set_caps (buf, GST_PAD_CAPS (cdxa->srcpad));

  cdxa->offset += GST_BUFFER_SIZE (buf);
  cdxa->bytes_sent += GST_BUFFER_SIZE (buf);

  flow_ret = gst_pad_push (cdxa->srcpad, buf);
  if (flow_ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (cdxa, "Push flow: %s", gst_flow_get_name (flow_ret));
    goto pause;
  }

  return;

eos:
  {
    GST_DEBUG_OBJECT (cdxa, "Sending EOS");
    if (buf)
      gst_buffer_unref (buf);
    buf = NULL;
    gst_pad_push_event (cdxa->srcpad, gst_event_new_eos ());
    /* fallthrough */
  }
pause:
  {
    GST_DEBUG_OBJECT (cdxa, "Pausing");
    gst_pad_pause_task (cdxa->sinkpad);
    return;
  }
}
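
A loop function like the one above is normally driven by a streaming task on the sink pad. A hedged sketch of the pull-mode activation that would start and stop it (0.10 API; the handler name is an assumption):

static gboolean
gst_cdxa_parse_sink_activate_pull (GstPad * sinkpad, gboolean active)
{
  if (active)
    return gst_pad_start_task (sinkpad,
        (GstTaskFunction) gst_cdxa_parse_loop, sinkpad);
  else
    return gst_pad_stop_task (sinkpad);
}
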
static void
src_task_loop (GstPad * pad)
{
  GstDtlsEnc *self = GST_DTLS_ENC (GST_PAD_PARENT (pad));
  GstFlowReturn ret;
  GstBuffer *buffer;
  gboolean check_connection_timeout = FALSE;

  GST_TRACE_OBJECT (self, "src loop: acquiring lock");
  g_mutex_lock (&self->queue_lock);
  GST_TRACE_OBJECT (self, "src loop: acquired lock");

  if (self->flushing) {
    GST_LOG_OBJECT (self, "src task loop entered on inactive pad");
    GST_TRACE_OBJECT (self, "src loop: releasing lock");
    g_mutex_unlock (&self->queue_lock);
    return;
  }

  while (g_queue_is_empty (&self->queue)) {
    GST_TRACE_OBJECT (self, "src loop: queue empty, waiting for add");
    g_cond_wait (&self->queue_cond_add, &self->queue_lock);
    GST_TRACE_OBJECT (self, "src loop: add signaled");

    if (self->flushing) {
      GST_LOG_OBJECT (self, "pad inactive, task returning");
      GST_TRACE_OBJECT (self, "src loop: releasing lock");
      g_mutex_unlock (&self->queue_lock);
      return;
    }
  }
  GST_TRACE_OBJECT (self, "src loop: queue has element");

  buffer = g_queue_pop_head (&self->queue);
  g_mutex_unlock (&self->queue_lock);

  if (self->send_initial_events) {
    GstSegment segment;
    gchar s_id[32];
    GstCaps *caps;

    self->send_initial_events = FALSE;

    g_snprintf (s_id, sizeof (s_id), "dtlsenc-%08x", g_random_int ());
    gst_pad_push_event (self->src, gst_event_new_stream_start (s_id));
    caps = gst_caps_new_empty_simple ("application/x-dtls");
    gst_pad_push_event (self->src, gst_event_new_caps (caps));
    gst_caps_unref (caps);
    gst_segment_init (&segment, GST_FORMAT_BYTES);
    gst_pad_push_event (self->src, gst_event_new_segment (&segment));
    check_connection_timeout = TRUE;
  }

  GST_TRACE_OBJECT (self, "src loop: releasing lock");

  ret = gst_pad_push (self->src, buffer);
  if (check_connection_timeout)
    gst_dtls_connection_check_timeout (self->connection);

  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_WARNING_OBJECT (self, "failed to push buffer on src pad: %s",
        gst_flow_get_name (ret));
  }
}
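
The loop above is the consumer half of a simple mutex/condition queue; the producer, wherever the encoded DTLS records come from, only has to append and signal. A minimal sketch using the same fields (the function name is an assumption):

static void
gst_dtls_enc_queue_buffer (GstDtlsEnc * self, GstBuffer * buffer)
{
  g_mutex_lock (&self->queue_lock);
  g_queue_push_tail (&self->queue, buffer);

  /* wake up src_task_loop() if it is waiting on an empty queue */
  g_cond_signal (&self->queue_cond_add);
  g_mutex_unlock (&self->queue_lock);
}
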
static void
gst_droid_cam_src_vfsrc_loop (gpointer data)
{
  GstPad *pad = (GstPad *) data;
  GstDroidCamSrc *src = GST_DROID_CAM_SRC (GST_OBJECT_PARENT (pad));
  GstDroidCamSrcClass *klass = GST_DROID_CAM_SRC_GET_CLASS (src);
  GstCameraBufferPool *pool = gst_camera_buffer_pool_ref (src->pool);
  GstNativeBuffer *buff;
  GstFlowReturn ret;
  GList *events = NULL;

  GST_LOG_OBJECT (src, "loop");

  GST_CAMERA_BUFFER_POOL_LOCK (pool);

  if (pool->flushing) {
    GST_CAMERA_BUFFER_POOL_UNLOCK (pool);
    goto pool_flushing;
  }

  GST_CAMERA_BUFFER_POOL_UNLOCK (pool);

  g_mutex_lock (&pool->app_lock);
  if (pool->app_queue->length > 0) {
    buff = g_queue_pop_head (pool->app_queue);

    g_mutex_unlock (&pool->app_lock);

    goto push_buffer;
  }

  GST_LOG_OBJECT (src, "empty app queue. waiting for buffer");
  g_cond_wait (&pool->app_cond, &pool->app_lock);
  GST_LOG_OBJECT (src, "done waiting for buffer");

  if (pool->app_queue->length == 0) {
    /* pool is flushing. */
    g_mutex_unlock (&pool->app_lock);

    goto pool_flushing;
  }

  buff = g_queue_pop_head (pool->app_queue);

  g_mutex_unlock (&pool->app_lock);
  goto push_buffer;

pool_flushing:
  /* pause task. */
  gst_camera_buffer_pool_unref (src->pool);

  GST_DEBUG_OBJECT (src, "pool is flushing. pausing task");
  gst_pad_pause_task (pad);
  return;

push_buffer:
  /* push buffer */
  gst_camera_buffer_pool_unref (src->pool);
  if (G_UNLIKELY (src->send_new_segment)) {
    GST_DEBUG_OBJECT (src, "sending new segment event");

    if (!klass->open_segment (src, src->vfsrc)) {
      GST_WARNING_OBJECT (src, "failed to push new segment");
    }

    src->send_new_segment = FALSE;
  }

  GST_OBJECT_LOCK (src);

  if (src->events) {
    events = src->events;
    src->events = NULL;
  }

  GST_OBJECT_UNLOCK (src);

  while (events) {
    GstEvent *ev = g_list_nth_data (events, 0);
    events = g_list_remove (events, ev);
    GST_DEBUG_OBJECT (src, "pushed event %" GST_PTR_FORMAT, ev);

    gst_pad_push_event (src->vfsrc, ev);
  }

  g_list_free (events);

  klass->update_segment (src, GST_BUFFER (buff));

  GST_LOG_OBJECT (src, "pushing buffer %p", buff);
  ret = gst_pad_push (pad, GST_BUFFER (buff));

  if (ret != GST_FLOW_OK) {
    goto pause;
  }

  return;

pause:
  GST_DEBUG_OBJECT (src, "pausing task. reason: %s", gst_flow_get_name (ret));
  gst_pad_pause_task (pad);

  if (ret == GST_FLOW_UNEXPECTED) {
    /* perform EOS */
    gst_pad_push_event (pad, gst_event_new_eos ());
  } else if (ret == GST_FLOW_NOT_LINKED || ret <= GST_FLOW_UNEXPECTED) {
    GST_ELEMENT_ERROR (src, STREAM, FAILED,
        ("Internal data flow error."),
        ("streaming task paused, reason %s (%d)", gst_flow_get_name (ret),
            ret));

    /* perform EOS */
    gst_pad_push_event (pad, gst_event_new_eos ());
  }
}
static GstFlowReturn
gst_tta_dec_chain (GstPad * pad, GstBuffer * in)
{
  GstTtaDec *ttadec;
  GstBuffer *outbuf, *buf = GST_BUFFER (in);
  guchar *data, *p;
  decoder *dec;
  unsigned long outsize;
  unsigned long size;
  guint32 frame_samples;
  long res;
  long *prev;

  ttadec = GST_TTA_DEC (GST_OBJECT_PARENT (pad));

  data = GST_BUFFER_DATA (buf);
  size = GST_BUFFER_SIZE (buf);

  ttadec->tta_buf.bit_count = 0;
  ttadec->tta_buf.bit_cache = 0;
  ttadec->tta_buf.bitpos = ttadec->tta_buf.buffer_end;
  ttadec->tta_buf.offset = 0;
  decoder_init (ttadec->tta, ttadec->channels, ttadec->bytes);

  if (GST_BUFFER_DURATION_IS_VALID (buf)) {
    frame_samples =
        ceil ((gdouble) (GST_BUFFER_DURATION (buf) * ttadec->samplerate) /
        (gdouble) GST_SECOND);
  } else {
    frame_samples = ttadec->samplerate * FRAME_TIME;
  }
  outsize = ttadec->channels * frame_samples * ttadec->bytes;

  dec = ttadec->tta;
  p = ttadec->decdata;
  prev = ttadec->cache;
  for (res = 0;
      p < ttadec->decdata + frame_samples * ttadec->channels * ttadec->bytes;) {
    unsigned long unary, binary, depth, k;
    long value, temp_value;
    fltst *fst = &dec->fst;
    adapt *rice = &dec->rice;
    long *last = &dec->last;

    // decode Rice unsigned
    get_unary (&ttadec->tta_buf, data, size, &unary);

    switch (unary) {
      case 0:
        depth = 0;
        k = rice->k0;
        break;
      default:
        depth = 1;
        k = rice->k1;
        unary--;
    }

    if (k) {
      get_binary (&ttadec->tta_buf, data, size, &binary, k);
      value = (unary << k) + binary;
    } else
      value = unary;

    switch (depth) {
      case 1:
        rice->sum1 += value - (rice->sum1 >> 4);
        if (rice->k1 > 0 && rice->sum1 < shift_16[rice->k1])
          rice->k1--;
        else if (rice->sum1 > shift_16[rice->k1 + 1])
          rice->k1++;
        value += bit_shift[rice->k0];
        /* fall through */
      default:
        rice->sum0 += value - (rice->sum0 >> 4);
        if (rice->k0 > 0 && rice->sum0 < shift_16[rice->k0])
          rice->k0--;
        else if (rice->sum0 > shift_16[rice->k0 + 1])
          rice->k0++;
    }

    /* this only uses a temporary variable to silence a gcc warning */
    temp_value = DEC (value);
    value = temp_value;

    // decompress stage 1: adaptive hybrid filter
    hybrid_filter (fst, &value);

    // decompress stage 2: fixed order 1 prediction
    switch (ttadec->bytes) {
      case 1:
        value += PREDICTOR1 (*last, 4);
        break;                  // bps 8
      case 2:
        value += PREDICTOR1 (*last, 5);
        break;                  // bps 16
      case 3:
        value += PREDICTOR1 (*last, 5);
        break;                  // bps 24
      case 4:
        value += *last;
        break;                  // bps 32
    }
    *last = value;

    if (dec < ttadec->tta + ttadec->channels - 1) {
      *prev++ = value;
      dec++;
    } else {
      *prev = value;
      if (ttadec->channels > 1) {
        long *r = prev - 1;

        for (*prev += *r / 2; r >= ttadec->cache; r--)
          *r = *(r + 1) - *r;
        for (r = ttadec->cache; r < prev; r++)
          WRITE_BUFFER (r, ttadec->bytes, p);
      }
      WRITE_BUFFER (prev, ttadec->bytes, p);
      prev = ttadec->cache;
      res++;
      dec = ttadec->tta;
    }
  }

  outbuf = gst_buffer_new_and_alloc (outsize);
  memcpy (GST_BUFFER_DATA (outbuf), ttadec->decdata, outsize);
  GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
  GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf);
  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (ttadec->srcpad));
  return gst_pad_push (ttadec->srcpad, outbuf);
}
static GstFlowReturn
gst_webvtt_enc_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstWebvttEnc *webvttenc = GST_WEBVTT_ENC (parent);
  GstClockTime ts, dur = GST_SECOND;
  GstBuffer *new_buffer;
  GstMapInfo map_info;
  GstFlowReturn ret;
  GString *s;
  gsize buf_size;

  if (!webvttenc->pushed_header) {
    const char *header = "WEBVTT\n\n";

    new_buffer = gst_buffer_new_wrapped (g_strdup (header), strlen (header));

    GST_BUFFER_PTS (new_buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DURATION (new_buffer) = GST_CLOCK_TIME_NONE;

    ret = gst_pad_push (webvttenc->srcpad, new_buffer);

    if (ret != GST_FLOW_OK)
      goto out;

    webvttenc->pushed_header = TRUE;
  }

  gst_object_sync_values (GST_OBJECT (webvttenc), GST_BUFFER_PTS (buf));

  ts = GST_BUFFER_PTS (buf) + webvttenc->timestamp;
  if (GST_BUFFER_DURATION_IS_VALID (buf))
    dur = GST_BUFFER_DURATION (buf) + webvttenc->duration;
  else if (webvttenc->duration > 0)
    dur = webvttenc->duration;
  else
    dur = GST_SECOND;

  buf_size = gst_buffer_get_size (buf);
  s = g_string_sized_new (50 + buf_size + 1 + 1);

  /* start_time --> end_time */
  gst_webvtt_enc_append_timestamp_to_string (ts, s);
  g_string_append_printf (s, " --> ");
  gst_webvtt_enc_append_timestamp_to_string (ts + dur, s);
  g_string_append_c (s, '\n');

  /* text */
  if (gst_buffer_map (buf, &map_info, GST_MAP_READ)) {
    g_string_append_len (s, (const gchar *) map_info.data, map_info.size);
    gst_buffer_unmap (buf, &map_info);
  }

  g_string_append_c (s, '\n');

  buf_size = s->len;
  new_buffer = gst_buffer_new_wrapped (g_string_free (s, FALSE), buf_size);

  GST_BUFFER_TIMESTAMP (new_buffer) = GST_BUFFER_TIMESTAMP (buf);
  GST_BUFFER_DURATION (new_buffer) = GST_BUFFER_DURATION (buf);

  ret = gst_pad_push (webvttenc->srcpad, new_buffer);

out:

  gst_buffer_unref (buf);

  return ret;
}
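
The helper gst_webvtt_enc_append_timestamp_to_string() is not shown in this excerpt. WebVTT cue times take the form HH:MM:SS.mmm, so a plausible (assumed, not the actual upstream) implementation matching the call sites above would be:

static void
gst_webvtt_enc_append_timestamp_to_string (GstClockTime ts, GString * s)
{
  guint h, m, sec, ms;

  /* split the nanosecond timestamp into hours, minutes, seconds, millis */
  h = (guint) (ts / (GST_SECOND * 60 * 60));
  m = (guint) ((ts / (GST_SECOND * 60)) % 60);
  sec = (guint) ((ts / GST_SECOND) % 60);
  ms = (guint) ((ts % GST_SECOND) / GST_MSECOND);

  g_string_append_printf (s, "%02u:%02u:%02u.%03u", h, m, sec, ms);
}
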
Example #25
0
static GstFlowReturn
gst_xvidenc_chain (GstPad * pad, GstBuffer * buf)
{
  GstXvidEnc *xvidenc = GST_XVIDENC (GST_PAD_PARENT (pad));
  GstBuffer *outbuf;
  xvid_enc_frame_t xframe;

  const gint motion_presets[] = {
    0, 0, 0, 0,
    XVID_ME_HALFPELREFINE16,
    XVID_ME_HALFPELREFINE16 | XVID_ME_ADVANCEDDIAMOND16,
    XVID_ME_HALFPELREFINE16 | XVID_ME_EXTSEARCH16
        | XVID_ME_HALFPELREFINE8 | XVID_ME_USESQUARES16
  };

  if (!xvidenc->handle) {
    GST_ELEMENT_ERROR (xvidenc, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before chain function"));
    gst_buffer_unref (buf);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  GST_DEBUG_OBJECT (xvidenc,
      "Received buffer of time %" GST_TIME_FORMAT ", size %d",
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_BUFFER_SIZE (buf));

  if (xvidenc->xframe_cache)
    memcpy (&xframe, xvidenc->xframe_cache, sizeof (xframe));
  else {                        /* need to build some initial xframe to be cached */
    /* encode and so ... */
    gst_xvid_init_struct (xframe);

    if (xvidenc->par_width == xvidenc->par_height)
      xframe.par = XVID_PAR_11_VGA;
    else {
      xframe.par = XVID_PAR_EXT;
      xframe.par_width = xvidenc->par_width;
      xframe.par_height = xvidenc->par_height;
    }

    /* handle options */
    xframe.vol_flags |= xvidenc->quant_type;
    xframe.vop_flags = XVID_VOP_HALFPEL;
    xframe.motion = motion_presets[xvidenc->motion];

    if (xvidenc->me_chroma) {
      xframe.motion |= XVID_ME_CHROMA_PVOP;
      xframe.motion |= XVID_ME_CHROMA_BVOP;
    }

    if (xvidenc->me_vhq >= 1) {
      xframe.vop_flags |= XVID_VOP_MODEDECISION_RD;
    }
    if (xvidenc->me_vhq >= 2) {
      xframe.motion |= XVID_ME_HALFPELREFINE16_RD;
      xframe.motion |= XVID_ME_QUARTERPELREFINE16_RD;
    }
    if (xvidenc->me_vhq >= 3) {
      xframe.motion |= XVID_ME_HALFPELREFINE8_RD;
      xframe.motion |= XVID_ME_QUARTERPELREFINE8_RD;
      xframe.motion |= XVID_ME_CHECKPREDICTION_RD;
    }
    if (xvidenc->me_vhq >= 4) {
      xframe.motion |= XVID_ME_EXTSEARCH_RD;
    }

    /* no motion estimation, then only intra */
    if (xvidenc->motion == 0) {
      xframe.type = XVID_TYPE_IVOP;
    } else {
      xframe.type = XVID_TYPE_AUTO;
    }

    if (xvidenc->motion > 4) {
      xframe.vop_flags |= XVID_VOP_INTER4V;
    }

    if (xvidenc->me_quarterpel) {
      xframe.vol_flags |= XVID_VOL_QUARTERPEL;
      xframe.motion |= XVID_ME_QUARTERPELREFINE16;
      xframe.motion |= XVID_ME_QUARTERPELREFINE8;
    }

    if (xvidenc->gmc) {
      xframe.vol_flags |= XVID_VOL_GMC;
      xframe.motion |= XVID_ME_GME_REFINE;
    }

    if (xvidenc->interlaced) {
      xframe.vol_flags |= XVID_VOL_INTERLACING;
    }

    if (xvidenc->trellis) {
      xframe.vop_flags |= XVID_VOP_TRELLISQUANT;
    }

    if (xvidenc->hqacpred) {
      xframe.vop_flags |= XVID_VOP_HQACPRED;
    }

    if (xvidenc->greyscale) {
      xframe.vop_flags |= XVID_VOP_GREYSCALE;
    }

    if (xvidenc->cartoon) {
      xframe.vop_flags |= XVID_VOP_CARTOON;
      xframe.motion |= XVID_ME_DETECT_STATIC_MOTION;
    }

    xframe.bframe_threshold = xvidenc->bframe_threshold;
    xframe.input.csp = xvidenc->csp;

    /* save in cache */
    xvidenc->xframe_cache = g_memdup (&xframe, sizeof (xframe));
  }

  outbuf = gst_xvidenc_encode (xvidenc, buf, xframe);

  if (!outbuf)                  /* error or no data yet */
    return GST_FLOW_OK;

  /* go out, multiply! */
  return gst_pad_push (xvidenc->srcpad, outbuf);
}
static GstFlowReturn
gst_rtp_ssrc_demux_rtcp_chain (GstPad * pad, GstBuffer * buf)
{
  GstFlowReturn ret;
  GstRtpSsrcDemux *demux;
  guint32 ssrc;
  GstRtpSsrcDemuxPad *dpad;
  GstRTCPPacket packet;

  demux = GST_RTP_SSRC_DEMUX (GST_OBJECT_PARENT (pad));

  if (!gst_rtcp_buffer_validate (buf))
    goto invalid_rtcp;

  if (!gst_rtcp_buffer_get_first_packet (buf, &packet))
    goto invalid_rtcp;

  /* first packet must be SR or RR or else the validate would have failed */
  switch (gst_rtcp_packet_get_type (&packet)) {
    case GST_RTCP_TYPE_SR:
      /* get the ssrc so that we can route it to the right source pad */
      gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, NULL, NULL, NULL,
          NULL);
      break;
    default:
      goto unexpected_rtcp;
  }

  GST_DEBUG_OBJECT (demux, "received RTCP of SSRC %08x", ssrc);

  GST_PAD_LOCK (demux);
  dpad = find_demux_pad_for_ssrc (demux, ssrc);
  if (dpad == NULL) {
    GST_DEBUG_OBJECT (demux, "creating pad for SSRC %08x", ssrc);
    if (!(dpad = create_demux_pad_for_ssrc (demux, ssrc, -1)))
      goto create_failed;
  }
  GST_PAD_UNLOCK (demux);

  /* push to srcpad */
  ret = gst_pad_push (dpad->rtcp_pad, buf);

  return ret;

  /* ERRORS */
invalid_rtcp:
  {
    /* this is fatal and should be filtered earlier */
    GST_ELEMENT_ERROR (demux, STREAM, DECODE, (NULL),
        ("Dropping invalid RTCP packet"));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
unexpected_rtcp:
  {
    GST_DEBUG_OBJECT (demux, "dropping unexpected RTCP packet");
    gst_buffer_unref (buf);
    return GST_FLOW_OK;
  }
create_failed:
  {
    GST_ELEMENT_ERROR (demux, STREAM, DECODE, (NULL),
        ("Could not create new pad"));
    GST_PAD_UNLOCK (demux);
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
static GstFlowReturn
gst_mulawenc_chain (GstPad * pad, GstBuffer * buffer)
{
  GstMuLawEnc *mulawenc;
  gint16 *linear_data;
  guint linear_size;
  guint8 *mulaw_data;
  guint mulaw_size;
  GstBuffer *outbuf;
  GstFlowReturn ret;
  GstClockTime timestamp, duration;

  mulawenc = GST_MULAWENC (gst_pad_get_parent (pad));

  if (!mulawenc->rate || !mulawenc->channels)
    goto not_negotiated;

  linear_data = (gint16 *) GST_BUFFER_DATA (buffer);
  linear_size = GST_BUFFER_SIZE (buffer);

  mulaw_size = linear_size / 2;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  ret = gst_pad_alloc_buffer_and_set_caps (mulawenc->srcpad,
      GST_BUFFER_OFFSET_NONE, mulaw_size, GST_PAD_CAPS (mulawenc->srcpad),
      &outbuf);
  if (ret != GST_FLOW_OK)
    goto alloc_failed;

  if (duration == -1) {
    duration = gst_util_uint64_scale_int (mulaw_size,
        GST_SECOND, mulawenc->rate * mulawenc->channels);
  }

  if (GST_BUFFER_SIZE (outbuf) < mulaw_size) {
    /* pad-alloc can suggest a smaller size */
    gst_buffer_unref (outbuf);
    outbuf = gst_buffer_new_and_alloc (mulaw_size);
  }

  mulaw_data = (guint8 *) GST_BUFFER_DATA (outbuf);

  /* copy discont flag */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = duration;

  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (mulawenc->srcpad));

  mulaw_encode (linear_data, mulaw_data, mulaw_size);

  gst_buffer_unref (buffer);

  ret = gst_pad_push (mulawenc->srcpad, outbuf);

done:
  gst_object_unref (mulawenc);

  return ret;

not_negotiated:
  {
    GST_DEBUG_OBJECT (mulawenc, "no format negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    gst_buffer_unref (buffer);
    goto done;
  }
alloc_failed:
  {
    GST_DEBUG_OBJECT (mulawenc, "pad alloc failed");
    gst_buffer_unref (buffer);
    goto done;
  }
}
Example #28
0
static void
spc_play (GstPad * pad)
{
  GstSpcDec *spc = GST_SPC_DEC (gst_pad_get_parent (pad));
  GstFlowReturn flow_return;
  GstBuffer *out;
  gboolean seeking = spc->seeking;
  gint64 duration, fade, end, position;

  if (!seeking) {
    out = gst_buffer_new_and_alloc (1600 * 4);
    gst_buffer_set_caps (out, GST_PAD_CAPS (pad));
    GST_BUFFER_TIMESTAMP (out) =
        (gint64) gst_util_uint64_scale ((guint64) spc->byte_pos, GST_SECOND,
        32000 * 2 * 2);
    spc->byte_pos += OSPC_Run (-1, (short *) GST_BUFFER_DATA (out), 1600 * 4);
  } else {
    if (spc->seekpoint < spc->byte_pos) {
      OSPC_Init (GST_BUFFER_DATA (spc->buf), GST_BUFFER_SIZE (spc->buf));
      spc->byte_pos = 0;
    }
    spc->byte_pos += OSPC_Run (-1, NULL, 1600 * 4);
    if (spc->byte_pos >= spc->seekpoint) {
      spc->seeking = FALSE;
    }
    out = gst_buffer_new ();
    gst_buffer_set_caps (out, GST_PAD_CAPS (pad));
  }

  duration = gst_spc_duration (spc);
  fade = gst_spc_fadeout (spc);
  end = duration + fade;
  position =
      (gint64) gst_util_uint64_scale ((guint64) spc->byte_pos, GST_SECOND,
      32000 * 2 * 2);

  if (position >= duration) {
    gint16 *data = (gint16 *) GST_BUFFER_DATA (out);
    guint32 size = GST_BUFFER_SIZE (out) / sizeof (gint16);
    unsigned int i;

    gint64 num = (fade - (position - duration));

    for (i = 0; i < size; i++) {
      /* Apply a parabolic volume envelope */
      data[i] = (gint16) (data[i] * num / fade * num / fade);
    }
  }

  if ((flow_return = gst_pad_push (spc->srcpad, out)) != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (spc, "pausing task, reason %s",
        gst_flow_get_name (flow_return));

    gst_pad_pause_task (pad);

    if (flow_return <= GST_FLOW_UNEXPECTED
        || flow_return == GST_FLOW_NOT_LINKED) {
      gst_pad_push_event (pad, gst_event_new_eos ());
    }
  }

  if (position >= end) {
    gst_pad_pause_task (pad);
    gst_pad_push_event (pad, gst_event_new_eos ());
  }

  gst_object_unref (spc);

  return;
}
Example #29
0
static GstFlowReturn
test_injector_chain (GstPad * pad, GstBuffer * buf)
{
  GstFlowReturn ret;
  GstPad *srcpad;

  srcpad = gst_element_get_pad (GST_ELEMENT (GST_PAD_PARENT (pad)), "src");

  /* since we're increasing timestamp/offsets, push this one first */
  GST_LOG (" passing buffer   [t=%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
      "], offset=%" G_GINT64_FORMAT ", offset_end=%" G_GINT64_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
      GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf));

  gst_buffer_ref (buf);

  ret = gst_pad_push (srcpad, buf);

  if (g_random_double () < injector_inject_probability) {
    GstBuffer *ibuf;

    ibuf = gst_buffer_copy (buf);

    if (GST_BUFFER_OFFSET_IS_VALID (buf) &&
        GST_BUFFER_OFFSET_END_IS_VALID (buf)) {
      guint64 delta;

      delta = GST_BUFFER_OFFSET_END (buf) - GST_BUFFER_OFFSET (buf);
      GST_BUFFER_OFFSET (ibuf) += delta / 4;
      GST_BUFFER_OFFSET_END (ibuf) += delta / 4;
    } else {
      GST_BUFFER_OFFSET (ibuf) = GST_BUFFER_OFFSET_NONE;
      GST_BUFFER_OFFSET_END (ibuf) = GST_BUFFER_OFFSET_NONE;
    }

    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf) &&
        GST_BUFFER_DURATION_IS_VALID (buf)) {
      GstClockTime delta;

      delta = GST_BUFFER_DURATION (buf);
      GST_BUFFER_TIMESTAMP (ibuf) += delta / 4;
    } else {
      GST_BUFFER_TIMESTAMP (ibuf) = GST_CLOCK_TIME_NONE;
      GST_BUFFER_DURATION (ibuf) = GST_CLOCK_TIME_NONE;
    }

    if (GST_BUFFER_TIMESTAMP_IS_VALID (ibuf) ||
        GST_BUFFER_OFFSET_IS_VALID (ibuf)) {
      GST_LOG ("injecting buffer [t=%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT
          "], offset=%" G_GINT64_FORMAT ", offset_end=%" G_GINT64_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (ibuf)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (ibuf) +
              GST_BUFFER_DURATION (ibuf)), GST_BUFFER_OFFSET (ibuf),
          GST_BUFFER_OFFSET_END (ibuf));

      if (gst_pad_push (srcpad, ibuf) != GST_FLOW_OK) {
        /* ignore return value */
      }
    } else {
      GST_WARNING ("couldn't inject buffer, no incoming timestamps or offsets");
      gst_buffer_unref (ibuf);
    }
  }

  gst_buffer_unref (buf);
  gst_object_unref (srcpad);

  return ret;
}
Example #30
0
static GstFlowReturn
gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstVP8Enc *encoder;
  GstBuffer *buf;
  const GstVideoState *state;
  GstFlowReturn ret;
  GstVP8EncCoderHook *hook = frame->coder_hook;
  GList *l;
  gint inv_count;

  GST_DEBUG_OBJECT (base_video_encoder, "shape_output");

  encoder = GST_VP8_ENC (base_video_encoder);

  state = gst_base_video_encoder_get_state (base_video_encoder);

  g_assert (hook != NULL);

  for (inv_count = 0, l = hook->invisible; l; inv_count++, l = l->next) {
    buf = l->data;
    l->data = NULL;

    if (l == hook->invisible && frame->is_sync_point) {
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance = 0;
    } else {
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      encoder->keyframe_distance++;
    }

    GST_BUFFER_OFFSET_END (buf) =
        _to_granulepos (frame->presentation_frame_number + 1,
        inv_count, encoder->keyframe_distance);
    GST_BUFFER_OFFSET (buf) =
        gst_util_uint64_scale (frame->presentation_frame_number + 1,
        GST_SECOND * state->fps_d, state->fps_n);

    gst_buffer_set_caps (buf, GST_BASE_VIDEO_CODEC (base_video_encoder)->caps);
    ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);

    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (encoder, "flow error %d", ret);
      goto done;
    }
  }

  buf = frame->src_buffer;
  frame->src_buffer = NULL;

  if (!hook->invisible && frame->is_sync_point) {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance = 0;
  } else {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
    encoder->keyframe_distance++;
  }

  GST_BUFFER_OFFSET_END (buf) =
      _to_granulepos (frame->presentation_frame_number + 1, 0,
      encoder->keyframe_distance);
  GST_BUFFER_OFFSET (buf) =
      gst_util_uint64_scale (frame->presentation_frame_number + 1,
      GST_SECOND * state->fps_d, state->fps_n);

  ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (encoder, "flow error %d", ret);
  }

done:
  if (hook) {
    g_list_foreach (hook->invisible, (GFunc) _gst_mini_object_unref0, NULL);
    g_list_free (hook->invisible);
    g_slice_free (GstVP8EncCoderHook, hook);
    frame->coder_hook = NULL;
  }

  return ret;
}