Example #1
static void
gst_deinterlace2_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstDeinterlace2 *self;

  g_return_if_fail (GST_IS_DEINTERLACE2 (object));
  self = GST_DEINTERLACE2 (object);

  switch (prop_id) {
    case PROP_METHOD:
      gst_deinterlace2_set_method (self, g_value_get_enum (value));
      break;
    case PROP_FIELDS:{
      gint oldfields;

      GST_OBJECT_LOCK (self);
      oldfields = self->fields;
      self->fields = g_value_get_enum (value);
      if (self->fields != oldfields && GST_PAD_CAPS (self->srcpad))
        gst_deinterlace2_setcaps (self->sinkpad, GST_PAD_CAPS (self->sinkpad));
      GST_OBJECT_UNLOCK (self);
      break;
    }
    case PROP_FIELD_LAYOUT:
      self->field_layout = g_value_get_enum (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
  }

}
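A recurring 0.10 idiom in setters like this one: only force a renegotiation when the source pad already carries negotiated caps, otherwise the pending setcaps call will pick up the new value anyway. A minimal sketch of that idiom, with GstMyElement and my_element_sink_setcaps as placeholder names:

static void
my_element_maybe_renegotiate (GstMyElement * self)
{
  /* srcpad has caps => a format was already negotiated, so re-run our own
   * sink setcaps handler with the unchanged sink caps to recompute the
   * output format using the new property value (placeholder names) */
  if (GST_PAD_CAPS (self->srcpad) != NULL)
    my_element_sink_setcaps (self->sinkpad, GST_PAD_CAPS (self->sinkpad));
}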
Example #2
/* allocate an output buffer, if no format was negotiated, this
 * function will negotiate one. After calling this function, a
 * reverse negotiation could have happened. */
static GstFlowReturn
get_buffer (GstVisual * visual, GstBuffer ** outbuf)
{
  GstFlowReturn ret;

  /* we don't know an output format yet, pick one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    if (!gst_vis_src_negotiate (visual))
      return GST_FLOW_NOT_NEGOTIATED;
  }

  GST_DEBUG_OBJECT (visual, "allocating output buffer with caps %"
      GST_PTR_FORMAT, GST_PAD_CAPS (visual->srcpad));

  /* now allocate a buffer with the last negotiated format. 
   * Downstream could renegotiate a new format, which will trigger
   * our setcaps function on the source pad. */
  ret =
      gst_pad_alloc_buffer_and_set_caps (visual->srcpad,
      GST_BUFFER_OFFSET_NONE, visual->outsize,
      GST_PAD_CAPS (visual->srcpad), outbuf);

  /* no buffer allocated, we don't care why. */
  if (ret != GST_FLOW_OK)
    return ret;

  return GST_FLOW_OK;
}
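For context, a sketch (not taken from gstvisual itself) of how a render/chain function typically drives such a helper: allocate via get_buffer(), fill the data, push downstream.

static GstFlowReturn
gst_visual_render_sketch (GstVisual * visual)
{
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret;

  /* negotiate if needed and allocate the output buffer */
  ret = get_buffer (visual, &outbuf);
  if (ret != GST_FLOW_OK)
    return ret;

  /* ... render the frame into GST_BUFFER_DATA (outbuf) here ... */

  return gst_pad_push (visual->srcpad, outbuf);
}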
Example #3
static void
gst_aspect_ratio_crop_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAspectRatioCrop *aspect_ratio_crop;
  gboolean recheck = FALSE;

  aspect_ratio_crop = GST_ASPECT_RATIO_CROP (object);

  GST_OBJECT_LOCK (aspect_ratio_crop);
  switch (prop_id) {
    case ARG_ASPECT_RATIO_CROP:
      if (GST_VALUE_HOLDS_FRACTION (value)) {
        aspect_ratio_crop->ar_num = gst_value_get_fraction_numerator (value);
        aspect_ratio_crop->ar_denom =
            gst_value_get_fraction_denominator (value);
        recheck = (GST_PAD_CAPS (aspect_ratio_crop->sink) != NULL);
      }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (aspect_ratio_crop);

  if (recheck) {
    gst_aspect_ratio_crop_set_caps (aspect_ratio_crop->sink,
        GST_PAD_CAPS (aspect_ratio_crop->sink));
  }
}
Example #4
static GstFlowReturn
get_buffer (GstMonoscope * monoscope, GstBuffer ** outbuf)
{
  GstFlowReturn ret;

  if (GST_PAD_CAPS (monoscope->srcpad) == NULL) {
    if (!gst_monoscope_src_negotiate (monoscope))
      return GST_FLOW_NOT_NEGOTIATED;
  }

  GST_LOG_OBJECT (monoscope, "allocating output buffer of size %d with caps %"
      GST_PTR_FORMAT, monoscope->outsize, GST_PAD_CAPS (monoscope->srcpad));

  ret =
      gst_pad_alloc_buffer_and_set_caps (monoscope->srcpad,
      GST_BUFFER_OFFSET_NONE, monoscope->outsize,
      GST_PAD_CAPS (monoscope->srcpad), outbuf);

  if (ret != GST_FLOW_OK)
    return ret;

  if (*outbuf == NULL)
    return GST_FLOW_ERROR;

  return GST_FLOW_OK;
}
Example #5
GstFlowReturn
gst_vdp_output_src_pad_push (GstVdpOutputSrcPad * vdp_pad,
    GstVdpOutputBuffer * output_buf, GError ** error)
{
  GstPad *pad;
  GstBuffer *outbuf;

  g_return_val_if_fail (GST_IS_VDP_OUTPUT_SRC_PAD (vdp_pad), GST_FLOW_ERROR);
  g_return_val_if_fail (GST_IS_VDP_OUTPUT_BUFFER (output_buf), GST_FLOW_ERROR);

  pad = (GstPad *) vdp_pad;

  if (G_UNLIKELY (!GST_PAD_CAPS (pad)))
    return GST_FLOW_NOT_NEGOTIATED;

  switch (vdp_pad->output_format) {
    case GST_VDP_OUTPUT_SRC_PAD_FORMAT_RGB:
    {
      GstFlowReturn ret;
      guint size;

      gst_vdp_output_buffer_calculate_size (output_buf, &size);

      vdp_pad->lock_caps = TRUE;
      ret = gst_pad_alloc_buffer (pad, 0, size, GST_PAD_CAPS (vdp_pad),
          &outbuf);
      vdp_pad->lock_caps = FALSE;

      if (ret != GST_FLOW_OK) {
        gst_buffer_unref (GST_BUFFER_CAST (output_buf));
        return ret;
      }

      if (!gst_vdp_output_buffer_download (output_buf, outbuf, error)) {
        gst_buffer_unref (GST_BUFFER_CAST (output_buf));
        gst_buffer_unref (outbuf);
        return GST_FLOW_ERROR;
      }

      gst_buffer_copy_metadata (outbuf, (const GstBuffer *) output_buf,
          GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);
      gst_buffer_unref (GST_BUFFER_CAST (output_buf));
      break;
    }
    case GST_VDP_OUTPUT_SRC_PAD_FORMAT_VDPAU:
    {
      outbuf = GST_BUFFER_CAST (output_buf);
      break;
    }

    default:
      g_assert_not_reached ();
      break;
  }

  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (vdp_pad));

  return gst_pad_push (pad, outbuf);
}
Example #6
/* Output buffer preparation... if the buffer has no caps, and
 * our allowed output caps is fixed, then give the caps to the
 * buffer.
 * This ensures that outgoing buffers have caps if we can, so
 * that pipelines like:
 *   gst-launch filesrc location=rawsamples.raw !
 *       audio/x-raw-int,width=16,depth=16,rate=48000,channels=2,
 *       endianness=4321,signed='(boolean)'true ! alsasink
 * will work.
 */
static GstFlowReturn
gst_capsfilter_prepare_buf (GstBaseTransform * trans, GstBuffer * input,
    gint size, GstCaps * caps, GstBuffer ** buf)
{
  if (GST_BUFFER_CAPS (input) != NULL) {
    /* Output buffer already has caps */
    GST_DEBUG_OBJECT (trans,
        "Input buffer already has caps (implicitely fixed)");
    /* FIXME : Move this behaviour to basetransform. The given caps are the ones
     * of the source pad, therefore our outgoing buffers should always have
     * those caps. */
    gst_buffer_set_caps (input, caps);
    gst_buffer_ref (input);
    *buf = input;
  } else {
    /* Buffer has no caps. See if the output pad only supports fixed caps */
    GstCaps *out_caps;

    out_caps = GST_PAD_CAPS (trans->srcpad);

    if (out_caps != NULL) {
      gst_caps_ref (out_caps);
    } else {
      out_caps = gst_pad_get_allowed_caps (trans->srcpad);
      g_return_val_if_fail (out_caps != NULL, GST_FLOW_ERROR);
    }

    out_caps = gst_caps_make_writable (out_caps);
    gst_caps_do_simplify (out_caps);

    if (gst_caps_is_fixed (out_caps) && !gst_caps_is_empty (out_caps)) {
      GST_DEBUG_OBJECT (trans, "Have fixed output caps %"
          GST_PTR_FORMAT " to apply to buffer with no caps", out_caps);
      if (gst_buffer_is_metadata_writable (input)) {
        gst_buffer_ref (input);
        *buf = input;
      } else {
        GST_DEBUG_OBJECT (trans, "Creating sub-buffer and setting caps");
        *buf = gst_buffer_create_sub (input, 0, GST_BUFFER_SIZE (input));
      }
      GST_BUFFER_CAPS (*buf) = out_caps;

      if (GST_PAD_CAPS (trans->srcpad) == NULL)
        gst_pad_set_caps (trans->srcpad, out_caps);
    } else {
      GST_DEBUG_OBJECT (trans, "Have unfixed output caps %" GST_PTR_FORMAT,
          out_caps);
      gst_caps_unref (out_caps);
    }
  }

  return GST_FLOW_OK;
}
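The core of the fixed-caps branch above, reduced to a standalone sketch (illustrative names, simplified reference handling): prefer the negotiated caps on the source pad, otherwise ask downstream, and only stamp the buffer when the result is a single fixed format.

static void
apply_fixed_downstream_caps_sketch (GstPad * srcpad, GstBuffer * buf)
{
  GstCaps *caps;

  /* prefer already-negotiated caps, otherwise ask what downstream allows */
  if ((caps = GST_PAD_CAPS (srcpad)))
    gst_caps_ref (caps);
  else
    caps = gst_pad_get_allowed_caps (srcpad);

  if (caps) {
    if (gst_caps_is_fixed (caps) && !gst_caps_is_empty (caps))
      gst_buffer_set_caps (buf, caps);  /* takes its own reference */
    gst_caps_unref (caps);
  }
}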
Example #7
static GstFlowReturn
gst_ffmpegdeinterlace_chain (GstPad * pad, GstBuffer * inbuf)
{
  GstFFMpegDeinterlace *deinterlace =
      GST_FFMPEGDEINTERLACE (gst_pad_get_parent (pad));
  GstBuffer *outbuf = NULL;
  GstFlowReturn result;

  GST_OBJECT_LOCK (deinterlace);
  if (deinterlace->reconfigure) {
    if (deinterlace->new_mode != -1)
      deinterlace->mode = deinterlace->new_mode;
    deinterlace->new_mode = -1;

    deinterlace->reconfigure = FALSE;
    GST_OBJECT_UNLOCK (deinterlace);
    if (GST_PAD_CAPS (deinterlace->srcpad))
      gst_ffmpegdeinterlace_sink_setcaps (deinterlace->sinkpad,
          GST_PAD_CAPS (deinterlace->sinkpad));
  } else {
    GST_OBJECT_UNLOCK (deinterlace);
  }

  if (deinterlace->passthrough)
    return gst_pad_push (deinterlace->srcpad, inbuf);

  result =
      gst_pad_alloc_buffer (deinterlace->srcpad, GST_BUFFER_OFFSET_NONE,
      deinterlace->to_size, GST_PAD_CAPS (deinterlace->srcpad), &outbuf);
  if (result == GST_FLOW_OK) {
    gst_ffmpeg_avpicture_fill (&deinterlace->from_frame,
        GST_BUFFER_DATA (inbuf), deinterlace->pixfmt, deinterlace->width,
        deinterlace->height);

    gst_ffmpeg_avpicture_fill (&deinterlace->to_frame, GST_BUFFER_DATA (outbuf),
        deinterlace->pixfmt, deinterlace->width, deinterlace->height);

    avpicture_deinterlace (&deinterlace->to_frame, &deinterlace->from_frame,
        deinterlace->pixfmt, deinterlace->width, deinterlace->height);

    gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS);

    result = gst_pad_push (deinterlace->srcpad, outbuf);
  }

  gst_buffer_unref (inbuf);

  return result;
}
Example #8
static GstCaps *
gst_xvidenc_getcaps (GstPad * pad)
{
  GstXvidEnc *xvidenc;
  GstPad *peer;
  GstCaps *caps;

  /* If we already have caps return them */
  if (GST_PAD_CAPS (pad))
    return gst_caps_ref (GST_PAD_CAPS (pad));

  xvidenc = GST_XVIDENC (gst_pad_get_parent (pad));
  if (!xvidenc)
    return gst_caps_new_empty ();

  peer = gst_pad_get_peer (xvidenc->srcpad);
  if (peer) {
    const GstCaps *templcaps;
    GstCaps *peercaps;
    guint i, n;

    peercaps = gst_pad_get_caps (peer);

    /* Translate peercaps to YUV */
    peercaps = gst_caps_make_writable (peercaps);
    n = gst_caps_get_size (peercaps);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (peercaps, i);

      gst_structure_set_name (s, "video/x-raw-yuv");
      gst_structure_remove_field (s, "mpegversion");
      gst_structure_remove_field (s, "systemstream");
    }

    templcaps = gst_pad_get_pad_template_caps (pad);

    caps = gst_caps_intersect (peercaps, templcaps);
    gst_caps_unref (peercaps);
    gst_object_unref (peer);
    peer = NULL;
  } else {
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
  }

  gst_object_unref (xvidenc);

  return caps;
}
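The first check is the common getcaps shortcut: if the pad already has negotiated caps, return a reference to them and skip peer inspection. As a minimal illustrative sketch:

static GstCaps *
getcaps_sketch (GstPad * pad)
{
  /* negotiated caps win; otherwise fall back to the pad template */
  if (GST_PAD_CAPS (pad))
    return gst_caps_ref (GST_PAD_CAPS (pad));

  return gst_caps_copy (gst_pad_get_pad_template_caps (pad));
}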
Example #9
static GstFlowReturn
gst_wildmidi_do_play (GstWildmidi * wildmidi)
{
  GstBuffer *out;
  GstFlowReturn ret;

  if (!(out = gst_wildmidi_get_buffer (wildmidi)))
    goto eos;

  if (wildmidi->discont) {
    GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);
    wildmidi->discont = FALSE;
  }

  gst_buffer_set_caps (out, GST_PAD_CAPS (wildmidi->srcpad));
  ret = gst_pad_push (wildmidi->srcpad, out);

  return ret;

  /* ERRORS */
eos:
  {
    GST_LOG_OBJECT (wildmidi, "Song ended");
    return GST_FLOW_UNEXPECTED;
  }
}
Example #10
static GstCaps *
gst_rtp_pt_demux_get_caps (GstRtpPtDemux * rtpdemux, guint pt)
{
  GstCaps *caps;
  GValue ret = { 0 };
  GValue args[2] = { {0}, {0} };

  /* figure out the caps */
  g_value_init (&args[0], GST_TYPE_ELEMENT);
  g_value_set_object (&args[0], rtpdemux);
  g_value_init (&args[1], G_TYPE_UINT);
  g_value_set_uint (&args[1], pt);

  g_value_init (&ret, GST_TYPE_CAPS);
  g_value_set_boxed (&ret, NULL);

  g_signal_emitv (args, gst_rtp_pt_demux_signals[SIGNAL_REQUEST_PT_MAP], 0,
      &ret);

  g_value_unset (&args[0]);
  g_value_unset (&args[1]);
  caps = g_value_dup_boxed (&ret);
  g_value_unset (&ret);
  if (caps == NULL) {
    caps = GST_PAD_CAPS (rtpdemux->sink);
    if (caps)
      gst_caps_ref (caps);
  }

  GST_DEBUG ("pt %d, got caps %" GST_PTR_FORMAT, pt, caps);

  return caps;
}
Example #11
/**
 * gst_video_get_size:
 * @pad: pointer to a #GstPad
 * @width: pointer to integer to hold pixel width of the video frames (output)
 * @height: pointer to integer to hold pixel height of the video frames (output)
 *
 * Inspect the caps of the provided pad and retrieve the width and height of
 * the video frames it is configured for.
 * 
 * The pad needs to have negotiated caps containing width and height properties.
 *
 * Returns: TRUE if the width and height could be retrieved.
 *
 */
gboolean
gst_video_get_size (GstPad * pad, gint * width, gint * height)
{
  const GstCaps *caps = NULL;
  GstStructure *structure;
  gboolean ret;

  g_return_val_if_fail (pad != NULL, FALSE);
  g_return_val_if_fail (width != NULL, FALSE);
  g_return_val_if_fail (height != NULL, FALSE);

  caps = GST_PAD_CAPS (pad);

  if (caps == NULL) {
    g_warning ("gstvideo: failed to get caps of pad %s:%s",
        GST_DEBUG_PAD_NAME (pad));
    return FALSE;
  }

  structure = gst_caps_get_structure (caps, 0);
  ret = gst_structure_get_int (structure, "width", width);
  ret &= gst_structure_get_int (structure, "height", height);

  if (!ret) {
    g_warning ("gstvideo: failed to get size properties on pad %s:%s",
        GST_DEBUG_PAD_NAME (pad));
    return FALSE;
  }

  GST_DEBUG ("size request on pad %s:%s: %dx%d",
      GST_DEBUG_PAD_NAME (pad), width ? *width : -1, height ? *height : -1);

  return TRUE;
}
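A hypothetical caller of gst_video_get_size(); the pad is assumed to already have negotiated video caps.

static void
print_video_size_sketch (GstPad * pad)
{
  gint width = 0, height = 0;

  if (gst_video_get_size (pad, &width, &height))
    g_print ("negotiated video size: %dx%d\n", width, height);
  else
    g_print ("pad %s:%s has no usable caps yet\n", GST_DEBUG_PAD_NAME (pad));
}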
Example #12
static GstFlowReturn
gst_kate_parse_push_buffer (GstKateParse * parse, GstBuffer * buf,
    gint64 granulepos)
{
  GST_LOG_OBJECT (parse, "granulepos %16" G_GINT64_MODIFIER "x", granulepos);
  if (granulepos < 0) {
    /* packets coming not from Ogg won't have a granpos in the offset end,
       so we have to synthesize one here - only problem is we don't know
       the backlink - pretend there's none for now */
    GST_INFO_OBJECT (parse, "No granulepos on buffer, synthesizing one");
    granulepos =
        kate_duration_granule (&parse->ki,
        GST_BUFFER_TIMESTAMP (buf) /
        (double) GST_SECOND) << kate_granule_shift (&parse->ki);
  }
  GST_BUFFER_OFFSET (buf) =
      kate_granule_time (&parse->ki, granulepos) * GST_SECOND;
  GST_BUFFER_OFFSET_END (buf) = granulepos;
  GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_OFFSET (buf);

  /* Hack to flush each packet on its own page - taken off the CMML encoder element */
  /* TODO: this is s***e and needs to go once I find a way to tell Ogg to flush
     as it messes up Matroska's track duration */
  GST_BUFFER_DURATION (buf) = G_MAXINT64;

  gst_buffer_set_caps (buf, GST_PAD_CAPS (parse->srcpad));

  return gst_pad_push (parse->srcpad, buf);
}
Example #13
static GstFlowReturn
gst_rdt_depay_push (GstRDTDepay * rdtdepay, GstBuffer * buffer)
{
  GstFlowReturn ret;

  if (rdtdepay->need_newsegment) {
    GstEvent *event;

    event = create_segment_event (rdtdepay, FALSE, 0);
    gst_pad_push_event (rdtdepay->srcpad, event);

    rdtdepay->need_newsegment = FALSE;
  }

  buffer = gst_buffer_make_metadata_writable (buffer);
  gst_buffer_set_caps (buffer, GST_PAD_CAPS (rdtdepay->srcpad));

  if (rdtdepay->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    rdtdepay->discont = FALSE;
  }
  ret = gst_pad_push (rdtdepay->srcpad, buffer);

  return ret;
}
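The push path here follows the standard 0.10 pattern: make the buffer metadata writable, stamp it with the source pad's negotiated caps, then push. A minimal generic sketch (names are illustrative):

static GstFlowReturn
push_with_pad_caps_sketch (GstPad * srcpad, GstBuffer * buffer)
{
  /* metadata must be writable before caps (or flags) can be changed */
  buffer = gst_buffer_make_metadata_writable (buffer);
  gst_buffer_set_caps (buffer, GST_PAD_CAPS (srcpad));

  return gst_pad_push (srcpad, buffer);
}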
Example #14
static void
new_packet_common_init (MpegTsMux * mux, GstBuffer * buf, guint8 * data,
    guint len)
{
  /* Packets should be at least 188 bytes, but check anyway */
  g_return_if_fail (len >= 2);

  if (!mux->streamheader_sent) {
    guint pid = ((data[1] & 0x1f) << 8) | data[2];
    /* if it's a PAT or a PMT */
    if (pid == 0x00 || (pid >= TSMUX_START_PMT_PID && pid < TSMUX_START_ES_PID)) {
      mux->streamheader =
          g_list_append (mux->streamheader, gst_buffer_copy (buf));
    } else if (mux->streamheader) {
      mpegtsdemux_set_header_on_caps (mux);
      mux->streamheader_sent = TRUE;
    }
  }

  /* Set the caps on the buffer only after possibly setting the stream headers
   * into the pad caps above */
  gst_buffer_set_caps (buf, GST_PAD_CAPS (mux->srcpad));

  if (mux->is_delta) {
    GST_LOG_OBJECT (mux, "marking as delta unit");
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_DEBUG_OBJECT (mux, "marking as non-delta unit");
    mux->is_delta = TRUE;
  }
}
Example #15
/*****************************************************************************
 * gst_tiprepencbuf_prepare_output_buffer
 *    Function is used to allocate output buffer
 *****************************************************************************/
static GstFlowReturn
gst_tiprepencbuf_prepare_output_buffer(GstBaseTransform * trans,
    GstBuffer * inBuf, gint size, GstCaps * caps, GstBuffer ** outBuf)
{
    GstTIPrepEncBuf *prepencbuf = GST_TIPREPENCBUF(trans);
    Buffer_Handle    hOutBuf;

    GST_LOG("begin prepare output buffer\n");

    /* Get free buffer from buftab */
    if (!(hOutBuf = gst_tidmaibuftab_get_buf(prepencbuf->hOutBufTab))) {
        GST_ELEMENT_ERROR(prepencbuf, RESOURCE, READ,
            ("failed to get free buffer\n"), (NULL));
        return GST_FLOW_ERROR;
    }

    /* Create a DMAI transport buffer object to carry a DMAI buffer to
     * the source pad.  The transport buffer knows how to release the
     * buffer for re-use in this element when the source pad calls
     * gst_buffer_unref().
     */
    GST_LOG("creating dmai transport buffer\n");
    *outBuf = gst_tidmaibuffertransport_new(hOutBuf, prepencbuf->hOutBufTab, NULL, NULL);
    gst_buffer_set_data(*outBuf, (guint8 *) Buffer_getUserPtr(hOutBuf),
        Buffer_getSize(hOutBuf));
    gst_buffer_set_caps(*outBuf, GST_PAD_CAPS(trans->srcpad));

    GST_LOG("end prepare output buffer\n");

    return GST_FLOW_OK;
}
Example #16
static void
_rtpbin_pad_added (GstElement *rtpbin, GstPad *new_pad,
  gpointer user_data)
{
  FsRtpConference *self = FS_RTP_CONFERENCE (user_data);
  gchar *name;

  GST_DEBUG_OBJECT (self, "pad %s added %" GST_PTR_FORMAT,
      GST_PAD_NAME (new_pad), GST_PAD_CAPS (new_pad));

  name = gst_pad_get_name (new_pad);

  if (g_str_has_prefix (name, "recv_rtp_src_"))
  {
    guint session_id, ssrc, pt;

    if (sscanf (name, "recv_rtp_src_%u_%u_%u",
            &session_id, &ssrc, &pt) == 3 && ssrc <= G_MAXUINT32)
    {
      FsRtpSession *session =
        fs_rtp_conference_get_session_by_id (self, session_id);

      if (session)
      {
        fs_rtp_session_new_recv_pad (session, new_pad, ssrc, pt);
        g_object_unref (session);
      }
    }
  }

  g_free (name);
}
Example #17
static void
user_info_callback (png_structp png_ptr, png_infop info)
{
  GstPngDec *pngdec = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  size_t buffer_size;
  GstBuffer *buffer = NULL;

  pngdec = GST_PNGDEC (png_ptr->io_ptr);

  GST_LOG ("info ready");

  /* Generate the caps and configure */
  ret = gst_pngdec_caps_create_and_set (pngdec);
  if (ret != GST_FLOW_OK) {
    goto beach;
  }

  /* Allocate output buffer */
  pngdec->rowbytes = png_get_rowbytes (pngdec->png, pngdec->info);
  buffer_size = pngdec->height * GST_ROUND_UP_4 (pngdec->rowbytes);
  ret =
      gst_pad_alloc_buffer_and_set_caps (pngdec->srcpad, GST_BUFFER_OFFSET_NONE,
      buffer_size, GST_PAD_CAPS (pngdec->srcpad), &buffer);
  if (ret != GST_FLOW_OK) {
    goto beach;
  }

  pngdec->buffer_out = buffer;

beach:
  pngdec->ret = ret;
}
Example #18
// Returns the size of the video
IntSize MediaPlayerPrivate::naturalSize() const
{
    if (!hasVideo())
        return IntSize();

    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326
    int width = 0, height = 0;
    if (GstPad* pad = gst_element_get_static_pad(m_videoSink, "sink")) {
        GstCaps* caps = GST_PAD_CAPS(pad);
        gfloat pixelAspectRatio;
        gint pixelAspectRatioNumerator, pixelAspectRatioDenominator;

        if (!GST_IS_CAPS(caps) || !gst_caps_is_fixed(caps) ||
            !gst_video_format_parse_caps(caps, NULL, &width, &height) ||
            !gst_video_parse_caps_pixel_aspect_ratio(caps, &pixelAspectRatioNumerator,
                                                           &pixelAspectRatioDenominator)) {
            gst_object_unref(GST_OBJECT(pad));
            return IntSize();
        }

        pixelAspectRatio = (gfloat) pixelAspectRatioNumerator / (gfloat) pixelAspectRatioDenominator;
        width *= pixelAspectRatio;
        height /= pixelAspectRatio;
        gst_object_unref(GST_OBJECT(pad));
    }

    return IntSize(width, height);
}
Example #19
static GstFlowReturn
gst_vdp_mpeg_dec_alloc_buffer (GstVdpMpegDec * mpeg_dec, GstBuffer ** outbuf)
{
    GstFlowReturn ret;

    ret = gst_pad_alloc_buffer_and_set_caps (mpeg_dec->src, 0, 0,
            GST_PAD_CAPS (mpeg_dec->src), outbuf);
    if (ret != GST_FLOW_OK)
        return ret;

    if (!mpeg_dec->device) {
        GstVdpDevice *device;
        VdpStatus status;

        device = mpeg_dec->device =
                     g_object_ref (GST_VDP_VIDEO_BUFFER (*outbuf)->device);

        status = device->vdp_decoder_create (device->device, mpeg_dec->profile,
                                             mpeg_dec->width, mpeg_dec->height, 2, &mpeg_dec->decoder);
        if (status != VDP_STATUS_OK) {
            GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
                               ("Could not create vdpau decoder"),
                               ("Error returned from vdpau was: %s",
                                device->vdp_get_error_string (status)));
            ret = GST_FLOW_ERROR;
        }
    }

    return ret;
}
Example #20
static void
mpegtsdemux_set_header_on_caps (MpegTsMux * mux)
{
  GstBuffer *buf;
  GstStructure *structure;
  GValue array = { 0 };
  GValue value = { 0 };
  GstCaps *caps = GST_PAD_CAPS (mux->srcpad);
  GList *sh;

  caps = gst_caps_make_writable (caps);
  structure = gst_caps_get_structure (caps, 0);

  g_value_init (&array, GST_TYPE_ARRAY);

  sh = mux->streamheader;
  while (sh) {
    buf = sh->data;
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_take_buffer (&value, buf);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
    sh = g_list_next (sh);
  }

  g_list_free (mux->streamheader);
  mux->streamheader = NULL;

  gst_structure_set_value (structure, "streamheader", &array);
  gst_pad_set_caps (mux->srcpad, caps);
  g_value_unset (&array);
  gst_caps_unref (caps);
}
Example #21
static GstFlowReturn
gst_ffmpegdeinterlace_chain (GstPad * pad, GstBuffer * inbuf)
{
  GstFFMpegDeinterlace *deinterlace =
      GST_FFMPEGDEINTERLACE (gst_pad_get_parent (pad));
  GstBuffer *outbuf = NULL;
  GstFlowReturn result;

  result =
      gst_pad_alloc_buffer (deinterlace->srcpad, GST_BUFFER_OFFSET_NONE,
      deinterlace->to_size, GST_PAD_CAPS (deinterlace->srcpad), &outbuf);
  if (result == GST_FLOW_OK) {
    gst_ffmpeg_avpicture_fill (&deinterlace->from_frame,
        GST_BUFFER_DATA (inbuf), deinterlace->pixfmt, deinterlace->width,
        deinterlace->height);

    gst_ffmpeg_avpicture_fill (&deinterlace->to_frame, GST_BUFFER_DATA (outbuf),
        deinterlace->pixfmt, deinterlace->width, deinterlace->height);

    avpicture_deinterlace (&deinterlace->to_frame, &deinterlace->from_frame,
        deinterlace->pixfmt, deinterlace->width, deinterlace->height);

    gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS);

    result = gst_pad_push (deinterlace->srcpad, outbuf);
  }

  gst_buffer_unref (inbuf);

  return result;
}
Example #22
EXPORT_C
#endif

int
gst_audio_frame_byte_size (GstPad * pad)
{
  /* FIXME: this should be moved closer to the gstreamer core
   * and be implemented for every mime type IMO
   */

  int width = 0;
  int channels = 0;
  const GstCaps *caps = NULL;
  GstStructure *structure;

  /* get caps of pad */
  caps = GST_PAD_CAPS (pad);

  if (caps == NULL) {
    /* ERROR: could not get caps of pad */
    g_warning ("gstaudio: could not get caps of pad %s:%s\n",
        GST_DEBUG_PAD_NAME (pad));
    return 0;
  }

  structure = gst_caps_get_structure (caps, 0);

  gst_structure_get_int (structure, "width", &width);
  gst_structure_get_int (structure, "channels", &channels);
  return (width / 8) * channels;
}
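A hypothetical caller that sizes an output buffer for a number of audio frames using the helper above; pad and num_frames are illustrative.

static GstBuffer *
alloc_frames_sketch (GstPad * pad, guint num_frames)
{
  int frame_size = gst_audio_frame_byte_size (pad);

  if (frame_size == 0)
    return NULL;                /* caps not negotiated yet */

  return gst_buffer_new_and_alloc (num_frames * frame_size);
}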
Example #23
static gboolean
new_packet_cb (guint8 * data, guint len, void *user_data)
{
  /* Called when the PsMux has prepared a packet for output. Return FALSE
   * on error */

  MpegPsMux *mux = (MpegPsMux *) user_data;
  GstBuffer *buf;
  GstFlowReturn ret;

  GST_LOG_OBJECT (mux, "Outputting a packet of length %d", len);
  buf = gst_buffer_new_and_alloc (len);
  if (G_UNLIKELY (buf == NULL)) {
    mux->last_flow_ret = GST_FLOW_ERROR;
    return FALSE;
  }
  gst_buffer_set_caps (buf, GST_PAD_CAPS (mux->srcpad));

  memcpy (GST_BUFFER_DATA (buf), data, len);
  GST_BUFFER_TIMESTAMP (buf) = mux->last_ts;
  ret = gst_pad_push (mux->srcpad, buf);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    mux->last_flow_ret = ret;
    return FALSE;
  }

  return TRUE;
}
Example #24
static boolean
gst_jpegenc_flush_destination (j_compress_ptr cinfo)
{
  GstBuffer *overflow_buffer;
  guint32 old_buffer_size;
  GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data);
  GST_DEBUG_OBJECT (jpegenc,
      "gst_jpegenc_chain: flush_destination: buffer too small");

  /* Our output buffer wasn't big enough.
   * Make a new buffer that's twice the size, */
  old_buffer_size = GST_BUFFER_SIZE (jpegenc->output_buffer);
  gst_pad_alloc_buffer_and_set_caps (jpegenc->srcpad,
      GST_BUFFER_OFFSET_NONE, old_buffer_size * 2,
      GST_PAD_CAPS (jpegenc->srcpad), &overflow_buffer);
  memcpy (GST_BUFFER_DATA (overflow_buffer),
      GST_BUFFER_DATA (jpegenc->output_buffer), old_buffer_size);

  gst_buffer_copy_metadata (overflow_buffer, jpegenc->output_buffer,
      GST_BUFFER_COPY_TIMESTAMPS);

  /* drop it into place, */
  gst_buffer_unref (jpegenc->output_buffer);
  jpegenc->output_buffer = overflow_buffer;

  /* and last, update libjpeg on where to work. */
  jpegenc->jdest.next_output_byte =
      GST_BUFFER_DATA (jpegenc->output_buffer) + old_buffer_size;
  jpegenc->jdest.free_in_buffer =
      GST_BUFFER_SIZE (jpegenc->output_buffer) - old_buffer_size;

  return TRUE;
}
Example #25
static void
gst_ffmpegdeinterlace_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstFFMpegDeinterlace *self;

  g_return_if_fail (GST_IS_FFMPEGDEINTERLACE (object));
  self = GST_FFMPEGDEINTERLACE (object);

  switch (prop_id) {
    case PROP_MODE:{
      gint new_mode;

      GST_OBJECT_LOCK (self);
      new_mode = g_value_get_enum (value);
      if (self->mode != new_mode && GST_PAD_CAPS (self->srcpad)) {
        self->reconfigure = TRUE;
        self->new_mode = new_mode;
      } else {
        self->mode = new_mode;
        gst_ffmpegdeinterlace_update_passthrough (self);
      }
      GST_OBJECT_UNLOCK (self);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
  }

}
Example #26
/**
 * hls_progress_buffer_loop()
 *
 * Primary function for push-mode.  Pulls data from progressbuffer's cache queue.
 */
static void hls_progress_buffer_loop(void *data)
{
    HLSProgressBuffer* element = HLS_PROGRESS_BUFFER(data);
    GstFlowReturn      result = GST_FLOW_OK;

    g_mutex_lock(element->lock);

    while (element->srcresult == GST_FLOW_OK && !cache_has_enough_data(element->cache[element->cache_read_index]))
    {
        if (element->is_eos)
        {
            gst_pad_push_event(element->srcpad, gst_event_new_eos());
            element->srcresult = GST_FLOW_WRONG_STATE;
            break;
        }

        if (!element->is_eos)
        {
            g_cond_wait(element->add_cond, element->lock);
        }
    }

    result = element->srcresult;

    if (result == GST_FLOW_OK)
    {
        GstBuffer *buffer = NULL;
        guint64 read_position = cache_read_buffer(element->cache[element->cache_read_index], &buffer);

        if (read_position == element->cache_size[element->cache_read_index])
        {
            element->cache_write_ready[element->cache_read_index] = TRUE;
            element->cache_read_index = (element->cache_read_index + 1) % NUM_OF_CACHED_SEGMENTS;
            send_hls_not_full_message(element);
            g_cond_signal(element->del_cond);
        }

        g_mutex_unlock(element->lock);

        gst_buffer_set_caps(buffer, GST_PAD_CAPS(element->sinkpad));

        // Send the data to the hls progressbuffer source pad
        result = gst_pad_push(element->srcpad, buffer);

        g_mutex_lock(element->lock);
        if (GST_FLOW_OK == element->srcresult || GST_FLOW_OK != result)
            element->srcresult = result;
        else
            result = element->srcresult;
        g_mutex_unlock(element->lock);
    }
    else
    {
        g_mutex_unlock(element->lock);
    }

    if (result != GST_FLOW_OK && !element->is_flushing)
        gst_pad_pause_task(element->srcpad);
}
Example #27
static void
gst_mikmod_loop (GstElement * element)
{
  GstMikMod *mikmod;
  GstBuffer *buffer_in;

  g_return_if_fail (element != NULL);
  g_return_if_fail (GST_IS_MIKMOD (element));

  mikmod = GST_MIKMOD (element);
  srcpad = mikmod->srcpad;
  mikmod->Buffer = NULL;

  if (!mikmod->initialized) {
    while ((buffer_in = GST_BUFFER (gst_pad_pull (mikmod->sinkpad)))) {
      if (GST_IS_EVENT (buffer_in)) {
        GstEvent *event = GST_EVENT (buffer_in);

        if (GST_EVENT_TYPE (event) == GST_EVENT_EOS)
          break;
      } else {
        if (mikmod->Buffer) {
          mikmod->Buffer = gst_buffer_append (mikmod->Buffer, buffer_in);
        } else {
          mikmod->Buffer = buffer_in;
        }
      }
    }

    if (!GST_PAD_CAPS (mikmod->srcpad)) {
      if (GST_PAD_LINK_SUCCESSFUL (gst_pad_renegotiate (mikmod->srcpad))) {
        GST_ELEMENT_ERROR (mikmod, CORE, NEGOTIATION, (NULL), (NULL));
        return;
      }
    }

    MikMod_RegisterDriver (&drv_gst);
    MikMod_RegisterAllLoaders ();

    MikMod_Init ("");
    reader = GST_READER_new (mikmod);
    module = Player_LoadGeneric (reader, 64, 0);

    gst_buffer_unref (mikmod->Buffer);

    if (!Player_Active ())
      Player_Start (module);

    mikmod->initialized = TRUE;
  }

  if (Player_Active ()) {
    timestamp = (module->sngtime / 1024.0) * GST_SECOND;
    drv_gst.Update ();
  } else {
    gst_element_set_eos (GST_ELEMENT (mikmod));
    gst_pad_push (mikmod->srcpad, GST_DATA (gst_event_new (GST_EVENT_EOS)));
  }
}
Example #28
static gboolean videodecoder_configure_sourcepad(VideoDecoder *decoder)
{
    BaseDecoder *base = BASEDECODER(decoder);

    if (GST_PAD_CAPS(base->srcpad) == NULL ||
        decoder->width != base->context->width ||
        decoder->height != base->context->height)
    {
        decoder->width = base->context->width;
        decoder->height = base->context->height;

        decoder->discont = (GST_PAD_CAPS(base->srcpad) != NULL);

        decoder->u_offset = base->frame->linesize[0] * decoder->height;
        decoder->uv_blocksize = base->frame->linesize[1] * decoder->height / 2;

        decoder->v_offset = decoder->u_offset + decoder->uv_blocksize;
        decoder->frame_size = (base->frame->linesize[0] + base->frame->linesize[1]) * decoder->height;

        GstCaps *src_caps = gst_caps_new_simple("video/x-raw-yuv",
                                                "format", GST_TYPE_FOURCC, GST_STR_FOURCC("YV12"),
                                                "width", G_TYPE_INT, decoder->width,
                                                "height", G_TYPE_INT, decoder->height,
                                                "stride-y", G_TYPE_INT, base->frame->linesize[0],
                                                "stride-u", G_TYPE_INT, base->frame->linesize[1],
                                                "stride-v", G_TYPE_INT, base->frame->linesize[2],
                                                "offset-y", G_TYPE_INT, 0,
                                                "offset-u", G_TYPE_INT, decoder->u_offset,
                                                "offset-v", G_TYPE_INT, decoder->v_offset,
                                                "framerate", GST_TYPE_FRACTION, 2997, 100,
                                                NULL);


        if (!gst_pad_set_caps (base->srcpad, src_caps))
        {
            gst_element_message_full(GST_ELEMENT(decoder), GST_MESSAGE_ERROR, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
                                     g_strdup("Failed to set caps on the sourcepad"), NULL,
                                     ("videodecoder.c"), ("videodecoder_configure"), 0);
            gst_caps_unref(src_caps);
            return FALSE;
        }
        gst_caps_unref(src_caps);
    }

    return TRUE;
}
Example #29
static gboolean
gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
    GstClockTime duration)
{
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (src_object);
  GstBuffer *buf = NULL;
  IPin *pPin = NULL;
  HRESULT hres = S_FALSE;
  AM_MEDIA_TYPE *pMediaType = NULL;

  if (!buffer || size == 0 || !src) {
    return FALSE;
  }

  /* create a new buffer assign to it the clock time as timestamp */
  buf = gst_buffer_new_and_alloc (size);

  GST_BUFFER_SIZE (buf) = size;

  GstClock *clock = gst_element_get_clock (GST_ELEMENT (src));
  GST_BUFFER_TIMESTAMP (buf) =
    GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)), gst_clock_get_time (clock));
  gst_object_unref (clock);

  GST_BUFFER_DURATION (buf) = duration;

  if (src->is_rgb) {
    /* FOR RGB directshow decoder will return bottom-up BITMAP
     * There is probably a way to get top-bottom video frames from
     * the decoder...
     */
    gint line = 0;
    gint stride = size / src->height;

    for (; line < src->height; line++) {
      memcpy (GST_BUFFER_DATA (buf) + (line * stride),
          buffer + (size - ((line + 1) * (stride))), stride);
    }
  } else {
    memcpy (GST_BUFFER_DATA (buf), buffer, size);
  }

  GST_DEBUG ("push_buffer => pts %" GST_TIME_FORMAT "duration %"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (duration));

  /* the negotiate() method already set caps on the source pad */
  gst_buffer_set_caps (buf, GST_PAD_CAPS (GST_BASE_SRC_PAD (src)));

  g_mutex_lock (src->buffer_mutex);
  if (src->buffer != NULL)
    gst_buffer_unref (src->buffer);
  src->buffer = buf;
  g_cond_signal (src->buffer_cond);
  g_mutex_unlock (src->buffer_mutex);

  return TRUE;
}
Example #30
static GstFlowReturn
gst_mms_create (GstPushSrc * psrc, GstBuffer ** buf)
{
  GstMMS *mmssrc;
  guint8 *data;
  guint blocksize;
  gint result;
  mms_off_t offset;

  *buf = NULL;

  mmssrc = GST_MMS (psrc);

  offset = mmsx_get_current_pos (mmssrc->connection);

  /* Check if a seek perhaps has wrecked our connection */
  if (offset == -1) {
    GST_DEBUG_OBJECT (mmssrc,
        "connection broken (probably an error during mmsx_seek_time during a convert query) returning FLOW_ERROR");
    return GST_FLOW_ERROR;
  }

  /* Choose blocksize best for optimum performance */
  if (offset == 0)
    blocksize = mmsx_get_asf_header_len (mmssrc->connection);
  else
    blocksize = mmsx_get_asf_packet_len (mmssrc->connection);

  *buf = gst_buffer_new_and_alloc (blocksize);

  data = GST_BUFFER_DATA (*buf);
  GST_BUFFER_SIZE (*buf) = 0;
  GST_LOG_OBJECT (mmssrc, "reading %d bytes", blocksize);
  result = mmsx_read (NULL, mmssrc->connection, (char *) data, blocksize);

  /* EOS? */
  if (result == 0)
    goto eos;

  GST_BUFFER_OFFSET (*buf) = offset;
  GST_BUFFER_SIZE (*buf) = result;

  GST_LOG_OBJECT (mmssrc, "Returning buffer with offset %" G_GINT64_FORMAT
      " and size %u", GST_BUFFER_OFFSET (*buf), GST_BUFFER_SIZE (*buf));

  gst_buffer_set_caps (*buf, GST_PAD_CAPS (GST_BASE_SRC_PAD (mmssrc)));

  return GST_FLOW_OK;

eos:
  {
    GST_DEBUG_OBJECT (mmssrc, "EOS");
    gst_buffer_unref (*buf);
    *buf = NULL;
    return GST_FLOW_UNEXPECTED;
  }
}