Example #1
0
static GstFlowReturn
audioresample_do_output (GstAudioresample * audioresample, GstBuffer * outbuf)
{
  int outsize;
  int outsamples;
  ResampleState *r;

  r = audioresample->resample;

  outsize = resample_get_output_size (r);
  GST_LOG_OBJECT (audioresample, "audioresample can give me %d bytes", outsize);

  /* protect against mem corruption */
  if (outsize > GST_BUFFER_SIZE (outbuf)) {
    GST_WARNING_OBJECT (audioresample,
        "overriding audioresample's outsize %d with outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
    outsize = GST_BUFFER_SIZE (outbuf);
  }
  /* catch possibly wrong size differences */
  if (GST_BUFFER_SIZE (outbuf) - outsize > r->sample_size) {
    GST_WARNING_OBJECT (audioresample,
        "audioresample's outsize %d too far from outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
  }

  outsize = resample_get_output_data (r, GST_BUFFER_DATA (outbuf), outsize);
  outsamples = outsize / r->sample_size;
  GST_LOG_OBJECT (audioresample, "resample gave me %d bytes or %d samples",
      outsize, outsamples);

  GST_BUFFER_OFFSET (outbuf) = audioresample->offset;
  GST_BUFFER_TIMESTAMP (outbuf) = audioresample->next_ts;

  if (audioresample->ts_offset != -1) {
    audioresample->offset += outsamples;
    audioresample->ts_offset += outsamples;
    audioresample->next_ts =
        gst_util_uint64_scale_int (audioresample->ts_offset, GST_SECOND,
        audioresample->o_rate);
    GST_BUFFER_OFFSET_END (outbuf) = audioresample->offset;

    /* we calculate DURATION as the difference between "next" timestamp
     * and current timestamp so we ensure a contiguous stream, instead of
     * having rounding errors. */
    GST_BUFFER_DURATION (outbuf) = audioresample->next_ts -
        GST_BUFFER_TIMESTAMP (outbuf);
  } else {
    /* no valid offset known, we can still sort of calculate the duration though */
    GST_BUFFER_DURATION (outbuf) =
        gst_util_uint64_scale_int (outsamples, GST_SECOND,
        audioresample->o_rate);
  }

  /* check for possible mem corruption */
  if (outsize > GST_BUFFER_SIZE (outbuf)) {
    /* this is an error that, when it happens, needs fixing in the
     * resample library; we told it we wanted only GST_BUFFER_SIZE (outbuf)
     * bytes, and it gave us more ! */
    GST_WARNING_OBJECT (audioresample,
        "audioresample, you memory corrupting bastard. "
        "you gave me outsize %d while my buffer was size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
    return GST_FLOW_ERROR;
  }
  /* catch possibly wrong size differences */
  if (GST_BUFFER_SIZE (outbuf) - outsize > r->sample_size) {
    GST_WARNING_OBJECT (audioresample,
        "audioresample's written outsize %d too far from outbuffer's size %d",
        outsize, GST_BUFFER_SIZE (outbuf));
  }
  GST_BUFFER_SIZE (outbuf) = outsize;

  if (G_UNLIKELY (audioresample->need_discont)) {
    GST_DEBUG_OBJECT (audioresample,
        "marking this buffer with the DISCONT flag");
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    audioresample->need_discont = FALSE;
  }

  GST_LOG_OBJECT (audioresample, "transformed to buffer of %d bytes, ts %"
      GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT ", offset %"
      G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT,
      outsize, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
      GST_BUFFER_OFFSET (outbuf), GST_BUFFER_OFFSET_END (outbuf));


  return GST_FLOW_OK;
}
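A minimal standalone sketch of the sample-count-to-timestamp conversion the function above relies on; the helper name and values are hypothetical, only gst_util_uint64_scale_int and the GST_TIME_* macros come from GStreamer.

#include <gst/gst.h>

/* Sketch: convert a running count of output samples into a clock time,
 * scaling by GST_SECOND / o_rate without 64-bit overflow. */
static GstClockTime
samples_to_time (guint64 samples, gint o_rate)
{
  return gst_util_uint64_scale_int (samples, GST_SECOND, o_rate);
}

/* e.g. 144000 samples at 48000 Hz -> 3 seconds:
 *   g_print ("%" GST_TIME_FORMAT "\n",
 *       GST_TIME_ARGS (samples_to_time (144000, 48000)));
 */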
static GstFlowReturn
gst_audio_fx_base_fir_filter_transform (GstBaseTransform * base,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
  GstClockTime timestamp, expected_timestamp;
  gint channels = GST_AUDIO_FILTER_CAST (self)->format.channels;
  gint rate = GST_AUDIO_FILTER_CAST (self)->format.rate;
  gint width = GST_AUDIO_FILTER_CAST (self)->format.width / 8;
  guint input_samples = (GST_BUFFER_SIZE (inbuf) / width) / channels;
  guint output_samples = (GST_BUFFER_SIZE (outbuf) / width) / channels;
  guint generated_samples;
  guint64 output_offset;
  gint64 diff = 0;
  GstClockTime stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (outbuf);

  if (!GST_CLOCK_TIME_IS_VALID (timestamp)
      && !GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    GST_ERROR_OBJECT (self, "Invalid timestamp");
    return GST_FLOW_ERROR;
  }

  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (G_OBJECT (self), stream_time);

  g_return_val_if_fail (self->kernel != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (channels != 0, GST_FLOW_ERROR);

  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    expected_timestamp =
        self->start_ts + gst_util_uint64_scale_int (self->nsamples_in,
        GST_SECOND, rate);
  else
    expected_timestamp = GST_CLOCK_TIME_NONE;

  /* Reset the residue if already existing on discont buffers */
  if (GST_BUFFER_IS_DISCONT (inbuf)
      || (GST_CLOCK_TIME_IS_VALID (expected_timestamp)
          && (ABS (GST_CLOCK_DIFF (timestamp,
                      expected_timestamp) > 5 * GST_MSECOND)))) {
    GST_DEBUG_OBJECT (self, "Discontinuity detected - flushing");
    if (GST_CLOCK_TIME_IS_VALID (expected_timestamp))
      gst_audio_fx_base_fir_filter_push_residue (self);
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
    self->nsamples_out = 0;
    self->nsamples_in = 0;
  } else if (!GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
  }

  self->nsamples_in += input_samples;

  generated_samples =
      self->process (self, GST_BUFFER_DATA (inbuf), GST_BUFFER_DATA (outbuf),
      input_samples);

  g_assert (generated_samples <= output_samples);
  self->nsamples_out += generated_samples;
  if (generated_samples == 0)
    return GST_BASE_TRANSFORM_FLOW_DROPPED;

  /* Calculate the number of samples we can push out now without outputting
   * latency zeros in the beginning */
  diff = ((gint64) self->nsamples_out) - ((gint64) self->latency);
  if (diff < 0) {
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  } else if (diff < generated_samples) {
    gint64 tmp = diff;
    diff = generated_samples - diff;
    generated_samples = tmp;
    GST_BUFFER_DATA (outbuf) += diff * width * channels;
  }
  GST_BUFFER_SIZE (outbuf) = generated_samples * width * channels;

  output_offset = self->nsamples_out - self->latency - generated_samples;
  GST_BUFFER_TIMESTAMP (outbuf) =
      self->start_ts + gst_util_uint64_scale_int (output_offset, GST_SECOND,
      rate);
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (output_samples, GST_SECOND, rate);
  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) = self->start_off + output_offset;
    GST_BUFFER_OFFSET_END (outbuf) =
        GST_BUFFER_OFFSET (outbuf) + generated_samples;
  } else {
    GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_NONE;
  }

  GST_DEBUG_OBJECT (self, "Pushing buffer of size %d with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
      GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
      GST_BUFFER_OFFSET_END (outbuf), generated_samples);

  return GST_FLOW_OK;
}
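The latency-trimming branch above is easy to misread; here is the same arithmetic as a small, self-contained sketch (plain GLib, hypothetical function name and messages).

#include <glib.h>

/* Sketch: decide how many of the freshly generated samples may be pushed,
 * once the filter's latency has been accounted for. */
static void
trim_latency (guint64 nsamples_out, guint64 latency, guint generated)
{
  gint64 diff = (gint64) nsamples_out - (gint64) latency;

  if (diff < 0) {
    g_print ("still inside the latency window: drop the whole buffer\n");
  } else if (diff < (gint64) generated) {
    guint skip = generated - (guint) diff;

    g_print ("skip the first %u samples, push the remaining %u\n",
        skip, (guint) diff);
  } else {
    g_print ("push all %u samples\n", generated);
  }
}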
Example #3
0
static GstFlowReturn
dvdspu_handle_vid_buffer (GstDVDSpu * dvdspu, GstBuffer * buf)
{
  GstClockTime new_ts;
  GstFlowReturn ret;
  gboolean using_ref = FALSE;

  DVD_SPU_LOCK (dvdspu);

  if (buf == NULL) {
    GstClockTime next_ts = dvdspu->video_seg.position;

    next_ts += gst_util_uint64_scale_int (GST_SECOND,
        dvdspu->spu_state.info.fps_d, dvdspu->spu_state.info.fps_n);

    /* A NULL buffer was passed - use the reference frame with an updated
     * timestamp, or, if there is no reference frame, there's nothing to
     * draw, so just return GST_FLOW_OK */
    if (dvdspu->ref_frame == NULL) {
      dvdspu->video_seg.position = next_ts;
      goto no_ref_frame;
    }

    buf = gst_buffer_copy (dvdspu->ref_frame);

#if 0
    g_print ("Duping frame %" GST_TIME_FORMAT " with new TS %" GST_TIME_FORMAT
        "\n", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (next_ts));
#endif

    GST_BUFFER_TIMESTAMP (buf) = next_ts;
    using_ref = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    dvdspu->video_seg.position = GST_BUFFER_TIMESTAMP (buf);
  }

  new_ts = gst_segment_to_running_time (&dvdspu->video_seg, GST_FORMAT_TIME,
      dvdspu->video_seg.position);

#if 0
  g_print ("TS %" GST_TIME_FORMAT " running: %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (dvdspu->video_seg.position), GST_TIME_ARGS (new_ts));
#endif

  gst_dvd_spu_advance_spu (dvdspu, new_ts);

  /* If we have an active SPU command set, we store a copy of the frame in case
   * we hit a still and need to draw on it. Otherwise, a reference is
   * sufficient in case we later encounter a still */
  if ((dvdspu->spu_state.flags & SPU_STATE_FORCED_DSP) ||
      ((dvdspu->spu_state.flags & SPU_STATE_FORCED_ONLY) == 0 &&
          (dvdspu->spu_state.flags & SPU_STATE_DISPLAY))) {
    if (using_ref == FALSE) {
      GstBuffer *copy;

      /* Take a copy in case we hit a still frame and need the pristine 
       * frame around */
      copy = gst_buffer_copy (buf);
      gst_buffer_replace (&dvdspu->ref_frame, copy);
      gst_buffer_unref (copy);
    }

    /* Render the SPU overlay onto the buffer */
    buf = gst_buffer_make_writable (buf);

    gstspu_render (dvdspu, buf);
  } else {
    if (using_ref == FALSE) {
      /* Not going to draw anything on this frame, just store a reference
       * in case we hit a still frame and need it */
      gst_buffer_replace (&dvdspu->ref_frame, buf);
    }
  }

  if (dvdspu->spu_state.flags & SPU_STATE_STILL_FRAME) {
    GST_DEBUG_OBJECT (dvdspu, "Outputting buffer with TS %" GST_TIME_FORMAT
        " from chain while in still",
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
  }

  DVD_SPU_UNLOCK (dvdspu);

  /* just push out the incoming buffer without touching it */
  ret = gst_pad_push (dvdspu->srcpad, buf);

  return ret;

no_ref_frame:

  DVD_SPU_UNLOCK (dvdspu);

  return GST_FLOW_OK;
}
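For reference, the frame-period computation used in the NULL-buffer path above, as an isolated sketch (assumed helper name, real gst_util_uint64_scale_int).

#include <gst/gst.h>

/* Sketch: one video frame period from an fps_n/fps_d pair,
 * e.g. 25/1 -> 40 ms, 30000/1001 -> ~33.37 ms. */
static GstClockTime
frame_period (gint fps_n, gint fps_d)
{
  return gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
}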
Example #4
0
void test_reuse()
{
  GstElement *audioresample;
  GstEvent *newseg;
  GstBuffer *inbuffer;
  GstCaps *caps;
  xmlfile = "test_reuse";
  std_log(LOG_FILENAME_LINE, "Test Started test_reuse");
  audioresample = setup_audioresample (1, 9343, 48000, 16, FALSE);
  caps = gst_pad_get_negotiated_caps (mysrcpad);
  fail_unless (gst_caps_is_fixed (caps));

  fail_unless (gst_element_set_state (audioresample,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");

  newseg = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0);
  fail_unless (gst_pad_push_event (mysrcpad, newseg) != FALSE);

  inbuffer = gst_buffer_new_and_alloc (9343 * 4);
  memset (GST_BUFFER_DATA (inbuffer), 0, GST_BUFFER_SIZE (inbuffer));
  GST_BUFFER_DURATION (inbuffer) = GST_SECOND;
  GST_BUFFER_TIMESTAMP (inbuffer) = 0;
  GST_BUFFER_OFFSET (inbuffer) = 0;
  gst_buffer_set_caps (inbuffer, caps);

  /* pushing gives away my reference ... */
  fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);

  /* ... but it ends up being collected on the global buffer list */
  fail_unless_equals_int (g_list_length (buffers), 1);

  /* now reset and try again ... */
  fail_unless (gst_element_set_state (audioresample,
          GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS, "could not set to NULL");

  fail_unless (gst_element_set_state (audioresample,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");

  newseg = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0);
  fail_unless (gst_pad_push_event (mysrcpad, newseg) != FALSE);

  inbuffer = gst_buffer_new_and_alloc (9343 * 4);
  memset (GST_BUFFER_DATA (inbuffer), 0, GST_BUFFER_SIZE (inbuffer));
  GST_BUFFER_DURATION (inbuffer) = GST_SECOND;
  GST_BUFFER_TIMESTAMP (inbuffer) = 0;
  GST_BUFFER_OFFSET (inbuffer) = 0;
  gst_buffer_set_caps (inbuffer, caps);

  fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);

  /* ... it also ends up being collected on the global buffer list. If we
   * now have more than 2 buffers, then audioresample probably didn't clean
   * up its internal buffer properly and tried to push the remaining samples
   * when it got the second NEWSEGMENT event */
  fail_unless_equals_int (g_list_length (buffers), 2);

  cleanup_audioresample (audioresample);
  gst_caps_unref (caps);
  
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
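A small sketch of the rate conversion the test exercises: estimating how many output samples a given input should yield (hypothetical helper, same scaling primitive the element uses).

#include <gst/gst.h>

/* Sketch: expected output sample count when resampling in_samples
 * from in_rate to out_rate. */
static guint64
expected_output_samples (guint64 in_samples, gint in_rate, gint out_rate)
{
  return gst_util_uint64_scale_int (in_samples, out_rate, in_rate);
}

/* e.g. 9343 samples at 9343 Hz resampled to 48000 Hz -> 48000 samples,
 * i.e. exactly one second of audio, matching the buffer duration above. */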
Example #5
0
static GstFlowReturn
gst_ffmpegmux_collected (GstCollectPads * pads, gpointer user_data)
{
  GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) user_data;
  GSList *collected;
  GstFFMpegMuxPad *best_pad;
  GstClockTime best_time;
#if 0
  /* Re-enable once converted to new AVMetaData API
   * See #566605
   */
  const GstTagList *tags;
#endif

  /* open "file" (gstreamer protocol to next element) */
  if (!ffmpegmux->opened) {
    int open_flags = AVIO_FLAG_WRITE;

    /* we do need all streams to have started capsnego,
     * or things will go horribly wrong */
    for (collected = ffmpegmux->collect->data; collected;
        collected = g_slist_next (collected)) {
      GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data;
      AVStream *st = ffmpegmux->context->streams[collect_pad->padnum];

      /* check whether the pad has successfully completed capsnego */
      if (st->codec->codec_id == AV_CODEC_ID_NONE) {
        GST_ELEMENT_ERROR (ffmpegmux, CORE, NEGOTIATION, (NULL),
            ("no caps set on stream %d (%s)", collect_pad->padnum,
                (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) ?
                "video" : "audio"));
        return GST_FLOW_ERROR;
      }
      /* set framerate for audio */
      if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
        switch (st->codec->codec_id) {
          case AV_CODEC_ID_PCM_S16LE:
          case AV_CODEC_ID_PCM_S16BE:
          case AV_CODEC_ID_PCM_U16LE:
          case AV_CODEC_ID_PCM_U16BE:
          case AV_CODEC_ID_PCM_S8:
          case AV_CODEC_ID_PCM_U8:
            st->codec->frame_size = 1;
            break;
          default:
          {
            GstBuffer *buffer;

            /* FIXME : This doesn't work for RAW AUDIO...
             * in fact I'm wondering if it even works for any kind of audio... */
            buffer = gst_collect_pads_peek (ffmpegmux->collect,
                (GstCollectData *) collect_pad);
            if (buffer) {
              st->codec->frame_size =
                  st->codec->sample_rate *
                  GST_BUFFER_DURATION (buffer) / GST_SECOND;
              gst_buffer_unref (buffer);
            }
          }
        }
      }
    }

#if 0
    /* Re-enable once converted to new AVMetaData API
     * See #566605
     */

    /* tags */
    tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (ffmpegmux));
    if (tags) {
      gint i;
      gchar *s;

      /* get the interesting ones */
      if (gst_tag_list_get_string (tags, GST_TAG_TITLE, &s)) {
        strncpy (ffmpegmux->context->title, s,
            sizeof (ffmpegmux->context->title));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_ARTIST, &s)) {
        strncpy (ffmpegmux->context->author, s,
            sizeof (ffmpegmux->context->author));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_COPYRIGHT, &s)) {
        strncpy (ffmpegmux->context->copyright, s,
            sizeof (ffmpegmux->context->copyright));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_COMMENT, &s)) {
        strncpy (ffmpegmux->context->comment, s,
            sizeof (ffmpegmux->context->comment));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_ALBUM, &s)) {
        strncpy (ffmpegmux->context->album, s,
            sizeof (ffmpegmux->context->album));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_GENRE, &s)) {
        strncpy (ffmpegmux->context->genre, s,
            sizeof (ffmpegmux->context->genre));
      }
      if (gst_tag_list_get_int (tags, GST_TAG_TRACK_NUMBER, &i)) {
        ffmpegmux->context->track = i;
      }
    }
#endif

    /* set the streamheader flag for gstffmpegprotocol if codec supports it */
    if (!strcmp (ffmpegmux->context->oformat->name, "flv")) {
      open_flags |= GST_FFMPEG_URL_STREAMHEADER;
    }

    /* some house-keeping for downstream before starting data flow */
    /* stream-start (FIXME: create id based on input ids) */
    {
      gchar s_id[32];

      g_snprintf (s_id, sizeof (s_id), "avmux-%08x", g_random_int ());
      gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_stream_start (s_id));
    }
    /* segment */
    {
      GstSegment segment;

      /* let downstream know we think in BYTES and expect to do seeking later on */
      gst_segment_init (&segment, GST_FORMAT_BYTES);
      gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_segment (&segment));
    }

    if (gst_ffmpegdata_open (ffmpegmux->srcpad, open_flags,
            &ffmpegmux->context->pb) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, TOO_LAZY, (NULL),
          ("Failed to open stream context in avmux"));
      return GST_FLOW_ERROR;
    }

    /* now open the mux format */
    if (avformat_write_header (ffmpegmux->context, NULL) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, SETTINGS, (NULL),
          ("Failed to write file header - check codec settings"));
      return GST_FLOW_ERROR;
    }

    /* we're now opened */
    ffmpegmux->opened = TRUE;

    /* flush the header so it will be used as streamheader */
    avio_flush (ffmpegmux->context->pb);
  }

  /* take the one with earliest timestamp,
   * and push it forward */
  best_pad = NULL;
  best_time = GST_CLOCK_TIME_NONE;
  for (collected = ffmpegmux->collect->data; collected;
      collected = g_slist_next (collected)) {
    GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data;
    GstBuffer *buffer = gst_collect_pads_peek (ffmpegmux->collect,
        (GstCollectData *) collect_pad);

    /* if there's no buffer, just continue */
    if (buffer == NULL) {
      continue;
    }

    /* if we have no buffer yet, just use the first one */
    if (best_pad == NULL) {
      best_pad = collect_pad;
      best_time = GST_BUFFER_TIMESTAMP (buffer);
      goto next_pad;
    }

    /* if we do have one, only use this one if it's older */
    if (GST_BUFFER_TIMESTAMP (buffer) < best_time) {
      best_time = GST_BUFFER_TIMESTAMP (buffer);
      best_pad = collect_pad;
    }

  next_pad:
    gst_buffer_unref (buffer);

    /* Mux buffers with invalid timestamp first */
    if (!GST_CLOCK_TIME_IS_VALID (best_time))
      break;
  }

  /* now handle the buffer, or signal EOS if we have
   * no buffers left */
  if (best_pad != NULL) {
    GstBuffer *buf;
    AVPacket pkt;
    gboolean need_free = FALSE;
    GstMapInfo map;

    /* push out current buffer */
    buf =
        gst_collect_pads_pop (ffmpegmux->collect, (GstCollectData *) best_pad);

    ffmpegmux->context->streams[best_pad->padnum]->codec->frame_number++;

    /* set time */
    pkt.pts = gst_ffmpeg_time_gst_to_ff (GST_BUFFER_TIMESTAMP (buf),
        ffmpegmux->context->streams[best_pad->padnum]->time_base);
    pkt.dts = pkt.pts;

    if (strcmp (ffmpegmux->context->oformat->name, "gif") == 0) {
      AVStream *st = ffmpegmux->context->streams[best_pad->padnum];
      AVPicture src, dst;

      need_free = TRUE;
      pkt.size = st->codec->width * st->codec->height * 3;
      pkt.data = g_malloc (pkt.size);

      dst.data[0] = pkt.data;
      dst.data[1] = NULL;
      dst.data[2] = NULL;
      dst.linesize[0] = st->codec->width * 3;

      gst_buffer_map (buf, &map, GST_MAP_READ);
      gst_ffmpeg_avpicture_fill (&src, map.data,
          AV_PIX_FMT_RGB24, st->codec->width, st->codec->height);

      av_picture_copy (&dst, &src, AV_PIX_FMT_RGB24,
          st->codec->width, st->codec->height);
      gst_buffer_unmap (buf, &map);
    } else {
      gst_buffer_map (buf, &map, GST_MAP_READ);
      pkt.data = map.data;
      pkt.size = map.size;
    }

    pkt.stream_index = best_pad->padnum;
    pkt.flags = 0;

    if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT))
      pkt.flags |= AV_PKT_FLAG_KEY;

    if (GST_BUFFER_DURATION_IS_VALID (buf))
      pkt.duration =
          gst_ffmpeg_time_gst_to_ff (GST_BUFFER_DURATION (buf),
          ffmpegmux->context->streams[best_pad->padnum]->time_base);
    else
      pkt.duration = 0;
    av_write_frame (ffmpegmux->context, &pkt);
    if (need_free) {
      g_free (pkt.data);
    } else {
      gst_buffer_unmap (buf, &map);
    }
    gst_buffer_unref (buf);
  } else {
    /* close down */
    av_write_trailer (ffmpegmux->context);
    ffmpegmux->opened = FALSE;
    avio_flush (ffmpegmux->context->pb);
    gst_ffmpegdata_close (ffmpegmux->context->pb);
    gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_eos ());
    return GST_FLOW_EOS;
  }

  return GST_FLOW_OK;
}
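The pad-selection loop above reduces to "earliest valid timestamp wins, invalid timestamps win immediately"; a compact sketch of just that decision (hypothetical helper operating on an array of timestamps).

#include <gst/gst.h>

/* Sketch: pick the index of the buffer to mux next. */
static gint
pick_best_index (const GstClockTime * ts, gint n)
{
  GstClockTime best_time = GST_CLOCK_TIME_NONE;
  gint best = -1, i;

  for (i = 0; i < n; i++) {
    if (!GST_CLOCK_TIME_IS_VALID (ts[i]))
      return i;                 /* buffers without timestamps are muxed first */
    if (best == -1 || ts[i] < best_time) {
      best = i;
      best_time = ts[i];
    }
  }
  return best;
}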
Example #6
0
static GstFlowReturn
gst_bml_transform_transform_mono_to_stereo (GstBaseTransform * base,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstMapInfo infoi, infoo;
  GstBMLTransform *bml_transform = GST_BML_TRANSFORM (base);
  GstBMLTransformClass *klass = GST_BML_TRANSFORM_GET_CLASS (bml_transform);
  GstBML *bml = GST_BML (bml_transform);
  GstBMLClass *bml_class = GST_BML_CLASS (klass);
  BMLData *datai, *datao, *seg_datai, *seg_datao;
  gpointer bm = bml->bm;
  guint todo, seg_size, samples_per_buffer;
  gboolean has_data;
  guint mode = 3;               /*WM_READWRITE */

  bml->running_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (inbuf));

  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_DISCONT)) {
    bml->subtick_count = (!bml->reverse) ? bml->subticks_per_tick : 1;
  }

  if (bml->subtick_count >= bml->subticks_per_tick) {
    bml (gstbml_reset_triggers (bml, bml_class));
    bml (gstbml_sync_values (bml, bml_class, GST_BUFFER_TIMESTAMP (outbuf)));
    bml (tick (bm));
    bml->subtick_count = 1;
  } else {
    bml->subtick_count++;
  }

  /* don't process data in passthrough-mode */
  if (gst_base_transform_is_passthrough (base)) {
    // we would actually need to convert mono to stereo here
    // but this is not even called
    GST_WARNING_OBJECT (bml_transform, "m2s in passthrough mode");
    //return GST_FLOW_OK;
  }

  if (!gst_buffer_map (inbuf, &infoi, GST_MAP_READ)) {
    GST_WARNING_OBJECT (base, "unable to map input buffer for read");
    return GST_FLOW_ERROR;
  }
  datai = (BMLData *) infoi.data;
  samples_per_buffer = infoi.size / sizeof (BMLData);
  if (!gst_buffer_map (outbuf, &infoo, GST_MAP_READ | GST_MAP_WRITE)) {
    GST_WARNING_OBJECT (base, "unable to map output buffer for read & write");
    gst_buffer_unmap (inbuf, &infoi);
    return GST_FLOW_ERROR;
  }
  datao = (BMLData *) infoo.data;

  // some buzzmachines expect a cleared buffer
  //for(i=0;i<samples_per_buffer*2;i++) datao[i]=0.0f;
  memset (datao, 0, samples_per_buffer * 2 * sizeof (BMLData));

  /* if buffer has only silence process with different mode */
  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_GAP)) {
    mode = 2;                   /* WM_WRITE */
  } else {
    gfloat fc = 32768.0;
    orc_scalarmultiply_f32_ns (datai, datai, fc, samples_per_buffer);
  }

  GST_DEBUG_OBJECT (bml_transform, "  calling work_m2s(%d,%d)",
      samples_per_buffer, mode);
  todo = samples_per_buffer;
  seg_datai = datai;
  seg_datao = datao;
  has_data = FALSE;
  while (todo) {
    // 256 is MachineInterface.h::MAX_BUFFER_LENGTH
    seg_size = (todo > 256) ? 256 : todo;
    has_data |= bml (work_m2s (bm, seg_datai, seg_datao, (int) seg_size, mode));
    seg_datai = &seg_datai[seg_size];
    seg_datao = &seg_datao[seg_size * 2];
    todo -= seg_size;
  }
  if (gstbml_fix_data ((GstElement *) bml_transform, &infoo, has_data)) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
  } else {
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_GAP);
  }

  gst_buffer_unmap (inbuf, &infoi);
  gst_buffer_unmap (outbuf, &infoo);
  return GST_FLOW_OK;
}
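The work_m2s loop above processes audio in fixed 256-sample segments; a stripped-down sketch of that chunking for a mono-in/stereo-out machine (process_segment is a hypothetical stand-in for the buzzmachine call).

#include <glib.h>

/* Sketch: feed a buffer through a callback in segments of at most 256
 * samples; the stereo output pointer advances twice as fast as the mono
 * input pointer. */
static void
process_in_segments (gfloat * in, gfloat * out, guint samples,
    void (*process_segment) (gfloat * in, gfloat * out, guint len))
{
  guint todo = samples;

  while (todo) {
    guint seg = MIN (todo, 256);

    process_segment (in, out, seg);
    in += seg;
    out += seg * 2;
    todo -= seg;
  }
}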
Example #7
0
static GstFlowReturn
gst_rtp_asf_pay_handle_packet (GstRtpAsfPay * rtpasfpay, GstBuffer * buffer)
{
    GstRTPBasePayload *rtppay;
    GstAsfPacketInfo *packetinfo;
    guint8 flags;
    guint8 *data;
    guint32 packet_util_size;
    guint32 packet_offset;
    guint32 size_left;
    GstFlowReturn ret = GST_FLOW_OK;

    rtppay = GST_RTP_BASE_PAYLOAD (rtpasfpay);
    packetinfo = &rtpasfpay->packetinfo;

    if (!gst_asf_parse_packet (buffer, packetinfo, TRUE,
                               rtpasfpay->asfinfo.packet_size)) {
        GST_ERROR_OBJECT (rtpasfpay, "Error while parsing asf packet");
        gst_buffer_unref (buffer);
        return GST_FLOW_ERROR;
    }

    if (packetinfo->packet_size == 0)
        packetinfo->packet_size = rtpasfpay->asfinfo.packet_size;

    GST_LOG_OBJECT (rtpasfpay, "Packet size: %" G_GUINT32_FORMAT
                    ", padding: %" G_GUINT32_FORMAT, packetinfo->packet_size,
                    packetinfo->padding);

    /* update padding field to 0 */
    if (packetinfo->padding > 0) {
        GstAsfPacketInfo info;
        /* find padding field offset */
        guint offset = packetinfo->err_cor_len + 2 +
                       gst_asf_get_var_size_field_len (packetinfo->packet_field_type) +
                       gst_asf_get_var_size_field_len (packetinfo->seq_field_type);
        buffer = gst_buffer_make_writable (buffer);
        switch (packetinfo->padd_field_type) {
        case ASF_FIELD_TYPE_DWORD:
            gst_buffer_memset (buffer, offset, 0, 4);
            break;
        case ASF_FIELD_TYPE_WORD:
            gst_buffer_memset (buffer, offset, 0, 2);
            break;
        case ASF_FIELD_TYPE_BYTE:
            gst_buffer_memset (buffer, offset, 0, 1);
            break;
        case ASF_FIELD_TYPE_NONE:
        default:
            break;
        }
        gst_asf_parse_packet (buffer, &info, FALSE, 0);
    }

    if (packetinfo->padding != 0)
        packet_util_size = rtpasfpay->asfinfo.packet_size - packetinfo->padding;
    else
        packet_util_size = packetinfo->packet_size;
    packet_offset = 0;
    while (packet_util_size > 0) {
        /* Even if we don't completely fill an output buffer, we push it
         * whenever we add a fragment, because it does not seem possible to
         * determine where an asf packet fragment ends inside an rtp packet
         * payload.
         * This flag tells us to push the packet.
         */
        gboolean force_push = FALSE;
        GstRTPBuffer rtp;

        /* we have no output buffer pending, create one */
        if (rtpasfpay->current == NULL) {
            GST_LOG_OBJECT (rtpasfpay, "Creating new output buffer");
            rtpasfpay->current =
                gst_rtp_buffer_new_allocate_len (GST_RTP_BASE_PAYLOAD_MTU (rtpasfpay),
                                                 0, 0);
            rtpasfpay->cur_off = 0;
            rtpasfpay->has_ts = FALSE;
            rtpasfpay->marker = FALSE;
        }
        gst_rtp_buffer_map (rtpasfpay->current, GST_MAP_READWRITE, &rtp);
        data = gst_rtp_buffer_get_payload (&rtp);
        data += rtpasfpay->cur_off;
        size_left = gst_rtp_buffer_get_payload_len (&rtp) - rtpasfpay->cur_off;

        GST_DEBUG_OBJECT (rtpasfpay, "Input buffer bytes consumed: %"
                          G_GUINT32_FORMAT "/%" G_GSIZE_FORMAT, packet_offset,
                          gst_buffer_get_size (buffer));

        GST_DEBUG_OBJECT (rtpasfpay, "Output rtpbuffer status");
        GST_DEBUG_OBJECT (rtpasfpay, "Current offset: %" G_GUINT32_FORMAT,
                          rtpasfpay->cur_off);
        GST_DEBUG_OBJECT (rtpasfpay, "Size left: %" G_GUINT32_FORMAT, size_left);
        GST_DEBUG_OBJECT (rtpasfpay, "Has ts: %s",
                          rtpasfpay->has_ts ? "yes" : "no");
        if (rtpasfpay->has_ts) {
            GST_DEBUG_OBJECT (rtpasfpay, "Ts: %" G_GUINT32_FORMAT, rtpasfpay->ts);
        }

        flags = 0;
        if (packetinfo->has_keyframe) {
            flags = flags | 0x80;
        }
        flags = flags | 0x20;       /* Relative timestamp is present */

        if (!rtpasfpay->has_ts) {
            /* this is the first asf packet, its send time is the
             * rtp packet timestamp */
            rtpasfpay->has_ts = TRUE;
            rtpasfpay->ts = packetinfo->send_time;
        }

        if (size_left >= packet_util_size + 8) {
            /* enough space for the rest of the packet */
            if (packet_offset == 0) {
                flags = flags | 0x40;
                GST_WRITE_UINT24_BE (data + 1, packet_util_size);
            } else {
                GST_WRITE_UINT24_BE (data + 1, packet_offset);
                force_push = TRUE;
            }
            data[0] = flags;
            GST_WRITE_UINT32_BE (data + 4,
                                 (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
            gst_buffer_extract (buffer, packet_offset, data + 8, packet_util_size);

            /* updating status variables */
            rtpasfpay->cur_off += 8 + packet_util_size;
            size_left -= packet_util_size + 8;
            packet_offset += packet_util_size;
            packet_util_size = 0;
            rtpasfpay->marker = TRUE;
        } else {
            /* fragment packet */
            data[0] = flags;
            GST_WRITE_UINT24_BE (data + 1, packet_offset);
            GST_WRITE_UINT32_BE (data + 4,
                                 (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
            gst_buffer_extract (buffer, packet_offset, data + 8, size_left - 8);

            /* updating status variables */
            rtpasfpay->cur_off += size_left;
            packet_offset += size_left - 8;
            packet_util_size -= size_left - 8;
            size_left = 0;
            force_push = TRUE;
        }

        /* there is not enough room for any more buffers */
        if (force_push || size_left <= 8) {

            gst_rtp_buffer_set_ssrc (&rtp, rtppay->current_ssrc);
            gst_rtp_buffer_set_marker (&rtp, rtpasfpay->marker);
            gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (rtppay));
            gst_rtp_buffer_set_seq (&rtp, rtppay->seqnum + 1);
            gst_rtp_buffer_set_timestamp (&rtp, packetinfo->send_time);
            gst_rtp_buffer_unmap (&rtp);

            /* trim remaining bytes not used */
            if (size_left != 0) {
                gst_buffer_set_size (rtpasfpay->current,
                                     gst_buffer_get_size (rtpasfpay->current) - size_left);
            }

            GST_BUFFER_TIMESTAMP (rtpasfpay->current) = GST_BUFFER_TIMESTAMP (buffer);

            rtppay->seqnum++;
            rtppay->timestamp = packetinfo->send_time;

            GST_DEBUG_OBJECT (rtpasfpay, "Pushing rtp buffer");
            ret =
                gst_pad_push (GST_RTP_BASE_PAYLOAD_SRCPAD (rtppay),
                              rtpasfpay->current);
            rtpasfpay->current = NULL;
            if (ret != GST_FLOW_OK) {
                gst_buffer_unref (buffer);
                return ret;
            }
        }
    }
    gst_buffer_unref (buffer);
    return ret;
}
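The payloader above always prepends an 8-byte fragment header; a sketch of just that header layout (flags byte, 24-bit length or offset, 32-bit relative send time), using the same GST_WRITE_* macros.

#include <gst/gst.h>

/* Sketch: write the ASF-in-RTP fragment header. */
static void
write_asf_fragment_header (guint8 * data, gboolean keyframe,
    gboolean whole_packet, guint32 len_or_offset, gint32 rel_send_time)
{
  guint8 flags = 0x20;          /* relative timestamp present */

  if (keyframe)
    flags |= 0x80;
  if (whole_packet)
    flags |= 0x40;              /* field holds the length, else the offset */

  data[0] = flags;
  GST_WRITE_UINT24_BE (data + 1, len_or_offset);
  GST_WRITE_UINT32_BE (data + 4, rel_send_time);
}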
GstPadProbeReturn GstEnginePipeline::HandoffCallback(GstPad*,
                                                     GstPadProbeInfo* info,
                                                     gpointer self) {
  GstEnginePipeline* instance = reinterpret_cast<GstEnginePipeline*>(self);
  GstBuffer* buf = gst_pad_probe_info_get_buffer(info);

  QList<BufferConsumer*> consumers;
  {
    QMutexLocker l(&instance->buffer_consumers_mutex_);
    consumers = instance->buffer_consumers_;
  }

  for (BufferConsumer* consumer : consumers) {
    gst_buffer_ref(buf);
    consumer->ConsumeBuffer(buf, instance->id());
  }

  // Calculate the end time of this buffer so we can stop playback if it's
  // after the end time of this song.
  if (instance->end_offset_nanosec_ > 0) {
    quint64 start_time = GST_BUFFER_TIMESTAMP(buf) - instance->segment_start_;
    quint64 duration = GST_BUFFER_DURATION(buf);
    quint64 end_time = start_time + duration;

    if (end_time > instance->end_offset_nanosec_) {
      if (instance->has_next_valid_url()) {
        if (instance->next_url_ == instance->url_ &&
            instance->next_beginning_offset_nanosec_ ==
                instance->end_offset_nanosec_) {
          // The "next" song is actually the next segment of this file - so
          // cheat and keep on playing, but just tell the Engine we've moved on.
          instance->end_offset_nanosec_ = instance->next_end_offset_nanosec_;
          instance->next_url_ = QUrl();
          instance->next_beginning_offset_nanosec_ = 0;
          instance->next_end_offset_nanosec_ = 0;

          // GstEngine will try to seek to the start of the new section, but
          // we're already there so ignore it.
          instance->ignore_next_seek_ = true;
          emit instance->EndOfStreamReached(instance->id(), true);
        } else {
          // We have a next song but we can't cheat, so move to it normally.
          instance->TransitionToNext();
        }
      } else {
        // There's no next song
        emit instance->EndOfStreamReached(instance->id(), false);
      }
    }
  }

  if (instance->emit_track_ended_on_time_discontinuity_) {
    if (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT) ||
        GST_BUFFER_OFFSET(buf) < instance->last_buffer_offset_) {
      qLog(Debug) << "Buffer discontinuity - emitting EOS";
      instance->emit_track_ended_on_time_discontinuity_ = false;
      emit instance->EndOfStreamReached(instance->id(), true);
    }
  }

  instance->last_buffer_offset_ = GST_BUFFER_OFFSET(buf);

  return GST_PAD_PROBE_OK;
}
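The end-of-song check above is just clock-time arithmetic; here it is in isolation (hypothetical helper, written in C for consistency with the rest of the examples).

#include <gst/gst.h>

/* Sketch: does this buffer end after the configured end offset? */
static gboolean
buffer_passes_end (GstClockTime buf_ts, GstClockTime buf_duration,
    GstClockTime segment_start, guint64 end_offset_nanosec)
{
  GstClockTime start_time = buf_ts - segment_start;

  return end_offset_nanosec > 0 &&
      start_time + buf_duration > end_offset_nanosec;
}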
static GstBuffer *
gst_rtp_qdm2_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
  GstRtpQDM2Depay *rtpqdm2depay;
  GstBuffer *outbuf = NULL;
  guint16 seq;
  GstRTPBuffer rtp = { NULL };

  rtpqdm2depay = GST_RTP_QDM2_DEPAY (depayload);

  {
    gint payload_len;
    guint8 *payload;
    guint avail;
    guint pos = 0;

    gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
    payload_len = gst_rtp_buffer_get_payload_len (&rtp);
    if (payload_len < 3)
      goto bad_packet;

    payload = gst_rtp_buffer_get_payload (&rtp);
    seq = gst_rtp_buffer_get_seq (&rtp);
    if (G_UNLIKELY (seq != rtpqdm2depay->nextseq)) {
      GST_DEBUG ("GAP in sequence number, Resetting data !");
      /* Flush previous data */
      flush_data (rtpqdm2depay);
      /* And store new timestamp */
      rtpqdm2depay->ptimestamp = rtpqdm2depay->timestamp;
      rtpqdm2depay->timestamp = GST_BUFFER_TIMESTAMP (buf);
      /* And that previous data will be pushed at the bottom */
    }
    rtpqdm2depay->nextseq = seq + 1;

    GST_DEBUG ("Payload size %d 0x%x sequence:%d", payload_len, payload_len,
        seq);

    GST_MEMDUMP ("Incoming payload", payload, payload_len);

    while (pos < payload_len) {
      switch (payload[pos]) {
        case 0x80:{
          GST_DEBUG ("Unrecognized 0x80 marker, skipping 12 bytes");
          pos += 12;
        }
          break;
        case 0xff:
          /* HEADERS */
          GST_DEBUG ("Headers");
          /* Store the incoming timestamp */
          rtpqdm2depay->ptimestamp = rtpqdm2depay->timestamp;
          rtpqdm2depay->timestamp = GST_BUFFER_TIMESTAMP (buf);
          /* flush the internal data if needed */
          flush_data (rtpqdm2depay);
          if (G_UNLIKELY (!rtpqdm2depay->configured)) {
            guint8 *ourdata;
            GstBuffer *codecdata;
            GstMapInfo cmap;
            GstCaps *caps;

            /* First bytes are unknown */
            GST_MEMDUMP ("Header", payload + pos, 32);
            ourdata = payload + pos + 10;
            pos += 10;
            rtpqdm2depay->channs = GST_READ_UINT32_BE (payload + pos + 4);
            rtpqdm2depay->samplerate = GST_READ_UINT32_BE (payload + pos + 8);
            rtpqdm2depay->bitrate = GST_READ_UINT32_BE (payload + pos + 12);
            rtpqdm2depay->blocksize = GST_READ_UINT32_BE (payload + pos + 16);
            rtpqdm2depay->framesize = GST_READ_UINT32_BE (payload + pos + 20);
            rtpqdm2depay->packetsize = GST_READ_UINT32_BE (payload + pos + 24);
            /* 16 bit empty block (0x02 0x00) */
            pos += 30;
            GST_DEBUG
                ("channs:%d, samplerate:%d, bitrate:%d, blocksize:%d, framesize:%d, packetsize:%d",
                rtpqdm2depay->channs, rtpqdm2depay->samplerate,
                rtpqdm2depay->bitrate, rtpqdm2depay->blocksize,
                rtpqdm2depay->framesize, rtpqdm2depay->packetsize);

            /* Caps */
            codecdata = gst_buffer_new_and_alloc (48);
            gst_buffer_map (codecdata, &cmap, GST_MAP_WRITE);
            memcpy (cmap.data, headheader, 20);
            memcpy (cmap.data + 20, ourdata, 28);
            gst_buffer_unmap (codecdata, &cmap);

            caps = gst_caps_new_simple ("audio/x-qdm2",
                "samplesize", G_TYPE_INT, 16,
                "rate", G_TYPE_INT, rtpqdm2depay->samplerate,
                "channels", G_TYPE_INT, rtpqdm2depay->channs,
                "codec_data", GST_TYPE_BUFFER, codecdata, NULL);
            gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), caps);
            gst_caps_unref (caps);
            rtpqdm2depay->configured = TRUE;
          } else {
            GST_DEBUG ("Already configured, skipping headers");
            pos += 40;
          }
          break;
        default:{
          /* Shuffled packet contents */
          guint packetid = payload[pos++];
          guint packettype = payload[pos++];
          guint packlen = payload[pos++];
          guint hsize = 2;

          GST_DEBUG ("Packet id:%d, type:0x%x, len:%d",
              packetid, packettype, packlen);

          /* Packets bigger than 0xff bytes have a type with the high bit set */
          if (G_UNLIKELY (packettype & 0x80)) {
            packettype &= 0x7f;
            packlen <<= 8;
            packlen |= payload[pos++];
            hsize = 3;
            GST_DEBUG ("Packet id:%d, type:0x%x, len:%d",
                packetid, packettype, packlen);
          }

          if (packettype > 0x7f) {
            GST_ERROR ("HOUSTON WE HAVE A PROBLEM !!!!");
          }
          add_packet (rtpqdm2depay, packetid, packlen + hsize,
              payload + pos - hsize);
          pos += packlen;
        }
      }
    }

    GST_DEBUG ("final pos %d", pos);

    avail = gst_adapter_available (rtpqdm2depay->adapter);
    if (G_UNLIKELY (avail)) {
      GST_DEBUG ("Pushing out %d bytes of collected data", avail);
      outbuf = gst_adapter_take_buffer (rtpqdm2depay->adapter, avail);
      GST_BUFFER_TIMESTAMP (outbuf) = rtpqdm2depay->ptimestamp;
      GST_DEBUG ("Outgoing buffer timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (rtpqdm2depay->ptimestamp));
    }
  }

  gst_rtp_buffer_unmap (&rtp);
  return outbuf;

  /* ERRORS */
bad_packet:
  {
    GST_ELEMENT_WARNING (rtpqdm2depay, STREAM, DECODE,
        (NULL), ("Packet was too short"));
    gst_rtp_buffer_unmap (&rtp);
    return NULL;
  }
}
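The default: branch above parses a small variable-length packet header; the same parse as a standalone sketch (as in the element, hsize counts only the type and length bytes).

#include <glib.h>

/* Sketch: parse a shuffled-packet header: id, type, length, where a type
 * with the high bit set means the length continues into the next byte. */
static guint
parse_qdm2_packet_header (const guint8 * p, guint * id, guint * type,
    guint * len)
{
  guint hsize = 2;

  *id = p[0];
  *type = p[1];
  *len = p[2];

  if (*type & 0x80) {
    *type &= 0x7f;
    *len = (*len << 8) | p[3];
    hsize = 3;
  }
  return hsize;
}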
Example #10
0
static GstBuffer *
gst_rtp_dtmf_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{

  GstRtpDTMFDepay *rtpdtmfdepay = NULL;
  GstBuffer *outbuf = NULL;
  gint payload_len;
  guint8 *payload = NULL;
  guint32 timestamp;
  GstRTPDTMFPayload dtmf_payload;
  gboolean marker;
  GstStructure *structure = NULL;
  GstMessage *dtmf_message = NULL;

  rtpdtmfdepay = GST_RTP_DTMF_DEPAY (depayload);

  if (!gst_rtp_buffer_validate (buf))
    goto bad_packet;

  payload_len = gst_rtp_buffer_get_payload_len (buf);
  payload = gst_rtp_buffer_get_payload (buf);

  if (payload_len != sizeof (GstRTPDTMFPayload))
    goto bad_packet;

  memcpy (&dtmf_payload, payload, sizeof (GstRTPDTMFPayload));

  if (dtmf_payload.event > MAX_EVENT)
    goto bad_packet;


  marker = gst_rtp_buffer_get_marker (buf);

  timestamp = gst_rtp_buffer_get_timestamp (buf);

  dtmf_payload.duration = g_ntohs (dtmf_payload.duration);

  /* clip to whole units of unit_time */
  if (rtpdtmfdepay->unit_time) {
    guint unit_time_clock =
        (rtpdtmfdepay->unit_time * depayload->clock_rate) / 1000;
    if (dtmf_payload.duration % unit_time_clock) {
      /* Make sure we don't overflow the duration */
      if (dtmf_payload.duration < G_MAXUINT16 - unit_time_clock)
        dtmf_payload.duration += unit_time_clock -
            (dtmf_payload.duration % unit_time_clock);
      else
        dtmf_payload.duration -= dtmf_payload.duration % unit_time_clock;
    }
  }

  /* clip to max duration */
  if (rtpdtmfdepay->max_duration) {
    guint max_duration_clock =
        (rtpdtmfdepay->max_duration * depayload->clock_rate) / 1000;

    if (max_duration_clock < G_MAXUINT16 &&
        dtmf_payload.duration > max_duration_clock)
      dtmf_payload.duration = max_duration_clock;
  }

  GST_DEBUG_OBJECT (depayload, "Received new RTP DTMF packet : "
      "marker=%d - timestamp=%u - event=%d - duration=%d",
      marker, timestamp, dtmf_payload.event, dtmf_payload.duration);

  GST_DEBUG_OBJECT (depayload,
      "Previous information : timestamp=%u - duration=%d",
      rtpdtmfdepay->previous_ts, rtpdtmfdepay->previous_duration);

  /* First packet */
  if (marker || rtpdtmfdepay->previous_ts != timestamp) {
    rtpdtmfdepay->sample = 0;
    rtpdtmfdepay->previous_ts = timestamp;
    rtpdtmfdepay->previous_duration = dtmf_payload.duration;
    rtpdtmfdepay->first_gst_ts = GST_BUFFER_TIMESTAMP (buf);

    structure = gst_structure_new ("dtmf-event",
        "number", G_TYPE_INT, dtmf_payload.event,
        "volume", G_TYPE_INT, dtmf_payload.volume,
        "type", G_TYPE_INT, 1, "method", G_TYPE_INT, 1, NULL);
    if (structure) {
      dtmf_message =
          gst_message_new_element (GST_OBJECT (depayload), structure);
      if (dtmf_message) {
        if (!gst_element_post_message (GST_ELEMENT (depayload), dtmf_message)) {
          GST_ERROR_OBJECT (depayload,
              "Unable to send dtmf-event message to bus");
        }
      } else {
        GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event message");
      }
    } else {
      GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event structure");
    }
  } else {
    guint16 duration = dtmf_payload.duration;
    dtmf_payload.duration -= rtpdtmfdepay->previous_duration;
    /* If late buffer, ignore */
    if (duration > rtpdtmfdepay->previous_duration)
      rtpdtmfdepay->previous_duration = duration;
  }

  GST_DEBUG_OBJECT (depayload, "new previous duration : %d - new duration : %d"
      " - diff  : %d - clock rate : %d - timestamp : %" G_GUINT64_FORMAT,
      rtpdtmfdepay->previous_duration, dtmf_payload.duration,
      (rtpdtmfdepay->previous_duration - dtmf_payload.duration),
      depayload->clock_rate, GST_BUFFER_TIMESTAMP (buf));

  /* If late or duplicate packet (like the redundant end packet). Ignore */
  if (dtmf_payload.duration > 0) {
    outbuf = gst_buffer_new ();
    gst_dtmf_src_generate_tone (rtpdtmfdepay, dtmf_payload, outbuf);


    GST_BUFFER_TIMESTAMP (outbuf) = rtpdtmfdepay->first_gst_ts +
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET (outbuf) =
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET_END (outbuf) = rtpdtmfdepay->previous_duration *
        GST_SECOND / depayload->clock_rate;

    GST_DEBUG_OBJECT (depayload,
        "timestamp : %" G_GUINT64_FORMAT " - time %" GST_TIME_FORMAT,
        GST_BUFFER_TIMESTAMP (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  }

  return outbuf;


bad_packet:
  GST_ELEMENT_WARNING (rtpdtmfdepay, STREAM, DECODE,
      ("Packet did not validate"), (NULL));
  return NULL;
}
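The duration clipping above has two separate rules (round up to whole unit_time units, then clamp to max_duration); a sketch of the first rule with the same guint16 overflow guard.

#include <glib.h>

/* Sketch: round a DTMF duration (in clock-rate ticks) up to a whole number
 * of unit_time units, falling back to rounding down near G_MAXUINT16. */
static guint16
clip_to_unit_time (guint16 duration, guint unit_time_ms, gint clock_rate)
{
  guint unit_clock = (unit_time_ms * clock_rate) / 1000;

  if (unit_clock == 0 || duration % unit_clock == 0)
    return duration;

  if (duration < G_MAXUINT16 - unit_clock)
    return duration + (unit_clock - duration % unit_clock);

  return duration - duration % unit_clock;
}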
Example #11
0
static MpegPsPadData *
mpegpsmux_choose_best_stream (MpegPsMux * mux)
{
  /* Choose from which stream to mux with */

  MpegPsPadData *best = NULL;
  GstCollectData *c_best = NULL;
  GSList *walk;

  for (walk = mux->collect->data; walk != NULL; walk = g_slist_next (walk)) {
    GstCollectData *c_data = (GstCollectData *) walk->data;
    MpegPsPadData *ps_data = (MpegPsPadData *) walk->data;

    if (ps_data->eos == FALSE) {
      if (ps_data->queued_buf == NULL) {
        GstBuffer *buf;

        ps_data->queued_buf = buf =
            gst_collect_pads_peek (mux->collect, c_data);

        if (buf != NULL) {
          if (ps_data->prepare_func) {
            buf = ps_data->prepare_func (buf, ps_data, mux);
            if (buf) {          /* Take the prepared buffer instead */
              gst_buffer_unref (ps_data->queued_buf);
              ps_data->queued_buf = buf;
            } else {            /* If data preparation returned NULL, use unprepared one */
              buf = ps_data->queued_buf;
            }
          }
          if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
            /* Ignore timestamps that go backward for now. FIXME: Handle all
             * incoming PTS */
            if (ps_data->last_ts == GST_CLOCK_TIME_NONE ||
                ps_data->last_ts < GST_BUFFER_TIMESTAMP (buf)) {
              ps_data->cur_ts = ps_data->last_ts =
                  gst_segment_to_running_time (&c_data->segment,
                  GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buf));
            } else {
              GST_DEBUG_OBJECT (mux, "Ignoring PTS that has gone backward");
            }
          } else
            ps_data->cur_ts = GST_CLOCK_TIME_NONE;

          GST_DEBUG_OBJECT (mux, "Pulled buffer with ts %" GST_TIME_FORMAT
              " (uncorrected ts %" GST_TIME_FORMAT " %" G_GUINT64_FORMAT
              ") for PID 0x%04x",
              GST_TIME_ARGS (ps_data->cur_ts),
              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
              GST_BUFFER_TIMESTAMP (buf), ps_data->stream_id);

          /* Choose a stream we've never seen a timestamp for to ensure
           * we push enough buffers from it to reach a timestamp */
          if (ps_data->last_ts == GST_CLOCK_TIME_NONE) {
            best = ps_data;
            c_best = c_data;
          }
        } else {
          ps_data->eos = TRUE;
          continue;
        }
      }

      /* If we don't yet have a best pad, take this one, otherwise take
       * whichever has the oldest timestamp */
      if (best != NULL) {
        if (ps_data->last_ts != GST_CLOCK_TIME_NONE &&
            best->last_ts != GST_CLOCK_TIME_NONE &&
            ps_data->last_ts < best->last_ts) {
          best = ps_data;
          c_best = c_data;
        }
      } else {
        best = ps_data;
        c_best = c_data;
      }
    }
  }
  if (c_best) {
    gst_buffer_unref (gst_collect_pads_pop (mux->collect, c_best));
  }

  return best;
}
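For clarity, the timestamp bookkeeping from the loop above in isolation: map a valid buffer PTS into running time, but never let the stored value move backwards (hypothetical helper).

#include <gst/gst.h>

/* Sketch: update last_ts from a buffer timestamp, ignoring backwards PTS. */
static GstClockTime
update_last_ts (GstSegment * segment, GstClockTime buf_ts,
    GstClockTime last_ts)
{
  if (!GST_CLOCK_TIME_IS_VALID (buf_ts))
    return last_ts;
  if (GST_CLOCK_TIME_IS_VALID (last_ts) && buf_ts <= last_ts)
    return last_ts;             /* PTS went backwards: keep the old value */
  return gst_segment_to_running_time (segment, GST_FORMAT_TIME, buf_ts);
}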
Example #12
0
static GstFlowReturn
gst_vader_chain(GstPad * pad, GstBuffer * buf)
{
    GstVader *filter;
    gint16 *in_data;
    guint num_samples;
    gint i, vote;
    guint power, rms;

    g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
    g_return_val_if_fail(GST_IS_PAD(pad), GST_FLOW_ERROR);
    g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);

    filter = GST_VADER(GST_OBJECT_PARENT(pad));
    g_return_val_if_fail(filter != NULL, GST_FLOW_ERROR);
    g_return_val_if_fail(GST_IS_VADER(filter), GST_FLOW_ERROR);

    in_data = (gint16 *) GST_BUFFER_DATA(buf);
    num_samples = GST_BUFFER_SIZE(buf) / 2;

    /* Enter a critical section. */
    g_static_rec_mutex_lock(&filter->mtx);
    filter->silent_prev = filter->silent;
    /* If we are in auto-threshold mode, check to see if we have
     * enough data to estimate a threshold.  (FIXME: we should be
     * estimating at the sample level rather than the frame level,
     * probably) */
    if (filter->threshold_level == -1) {
        if (filter->silence_frames > 5) {
            filter->silence_mean /= filter->silence_frames;
            filter->silence_stddev /= filter->silence_frames;
            filter->silence_stddev -= filter->silence_mean * filter->silence_mean;
            filter->silence_stddev = fixpoint_bogus_sqrt(filter->silence_stddev);
            /* Set threshold three standard deviations from the mean. */
            filter->threshold_level = filter->silence_mean + 3 * filter->silence_stddev;
            GST_DEBUG_OBJECT(filter, "silence_mean %d stddev %d auto_threshold %d\n",
                             filter->silence_mean, filter->silence_stddev,
                             filter->threshold_level);
        }
    }

    /* Divide buffer into reasonably sized parts. */
    for (i = 0; i < num_samples; i += VADER_FRAME) {
        gint frame_len, j;

        frame_len = MIN(num_samples - i, VADER_FRAME);
        power = compute_normed_power(in_data + i, frame_len, &filter->prior_sample);
        rms = fixpoint_sqrt_q15(power);

        /* If we are in auto-threshold mode, don't do any voting etc. */
        if (filter->threshold_level == -1) {
            filter->silence_mean += rms;
            filter->silence_stddev += rms * rms;
            filter->silence_frames += 1;
            GST_DEBUG_OBJECT(filter, "silence_mean_acc %d silence_stddev_acc %d frames %d\n",
                             filter->silence_mean, filter->silence_stddev, filter->silence_frames);
            continue;
        }
        /* Shift back window values. */
        memmove(filter->window, filter->window + 1,
                (VADER_WINDOW - 1) * sizeof(*filter->window));

        /* Decide if this buffer is silence or not. */
        if (rms > filter->threshold_level)
            filter->window[VADER_WINDOW-1] = TRUE;
        else
            filter->window[VADER_WINDOW-1] = FALSE;

        /* Vote on whether we have entered a region of non-silence. */
        vote = 0;
        for (j = 0; j < VADER_WINDOW; ++j)
            vote += filter->window[j];

        GST_DEBUG_OBJECT(filter, "frame_len %d rms power %d threshold %d vote %d\n",
                         frame_len, rms, filter->threshold_level, vote);

        if (vote > VADER_WINDOW / 2) {
            filter->silent_run_length = 0;
            filter->silent = FALSE;
        }
        else {
            filter->silent_run_length
                += gst_audio_duration_from_pad_buffer(filter->sinkpad, buf);
        }

        if (filter->silent_run_length > filter->threshold_length)
            /* it has been silent long enough, flag it */
            filter->silent = TRUE;
    }

    /* Handle transitions between silence and non-silence. */
    if (filter->silent != filter->silent_prev) {
        gst_vader_transition(filter, GST_BUFFER_TIMESTAMP(buf));
    }
    /* Handling of silence detection is done. */
    g_static_rec_mutex_unlock(&filter->mtx);

    /* now check if we have to send the new buffer to the internal buffer cache
     * or to the srcpad */
    if (filter->silent) {
        /* Claim the lock while manipulating the queue. */
        g_static_rec_mutex_lock(&filter->mtx);
        filter->pre_buffer = g_list_append(filter->pre_buffer, buf);
        filter->pre_run_length +=
            gst_audio_duration_from_pad_buffer(filter->sinkpad, buf);
        while (filter->pre_run_length > filter->pre_length) {
            GstBuffer *prebuf;

            prebuf = (g_list_first(filter->pre_buffer))->data;
            g_assert(GST_IS_BUFFER(prebuf));
            filter->pre_buffer = g_list_remove(filter->pre_buffer, prebuf);
            filter->pre_run_length -=
                gst_audio_duration_from_pad_buffer(filter->sinkpad, prebuf);
            gst_buffer_unref(prebuf);
        }
        g_static_rec_mutex_unlock(&filter->mtx);
    } else {
        if (filter->dumpfile)
            fwrite(GST_BUFFER_DATA(buf), 1, GST_BUFFER_SIZE(buf),
                   filter->dumpfile);
        gst_pad_push(filter->srcpad, buf);
    }

    return GST_FLOW_OK;
}
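The silence/speech decision above comes from a majority vote over a short sliding window; a self-contained sketch of that vote (the window length here is an assumption standing in for the element's VADER_WINDOW constant).

#include <glib.h>
#include <string.h>

#define WINDOW_LEN 5            /* assumption: stands in for VADER_WINDOW */

/* Sketch: push the newest per-frame decision into the window and return
 * whether the majority currently says "speech". */
static gboolean
window_says_speech (gboolean window[WINDOW_LEN], gboolean frame_is_loud)
{
  gint j, vote = 0;

  memmove (window, window + 1, (WINDOW_LEN - 1) * sizeof (window[0]));
  window[WINDOW_LEN - 1] = frame_is_loud;

  for (j = 0; j < WINDOW_LEN; ++j)
    vote += window[j];

  return vote > WINDOW_LEN / 2;
}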
Example #13
0
static void
gst_vader_transition(GstVader *filter, GstClockTime ts)
{
    /* NOTE: This function MUST be called with filter->mtx held! */
    /* Has the silent status changed? If so, send the right signal and,
     * when going from silent to non-silent, flush the pre_record buffer.
     */
    if (filter->silent) {
        /* Sound to silence transition. */
        GstMessage *m =
            gst_vader_message_new(filter, FALSE, ts);
        GstEvent *e =
            gst_vader_event_new(filter, GST_EVENT_VADER_STOP, ts);
        GST_DEBUG_OBJECT(filter, "signaling CUT_STOP");
        gst_element_post_message(GST_ELEMENT(filter), m);
        /* Insert a custom event in the stream to mark the end of a cut. */
        /* This will block if the pipeline is paused so we have to unlock. */
        g_static_rec_mutex_unlock(&filter->mtx);
        gst_pad_push_event(filter->srcpad, e);
        g_static_rec_mutex_lock(&filter->mtx);
        /* FIXME: That event's timestamp is wrong... as is this one. */
        g_signal_emit(filter, gst_vader_signals[SIGNAL_VADER_STOP], 0, ts);
        /* Stop dumping audio */
        if (filter->dumpfile) {
            fclose(filter->dumpfile);
            filter->dumpfile = NULL;
            ++filter->dumpidx;
        }
    } else {
        /* Silence to sound transition. */
        gint count = 0;
        GstMessage *m;
        GstEvent *e;

        GST_DEBUG_OBJECT(filter, "signaling CUT_START");
        /* Use the first pre_buffer's timestamp for the signal if possible. */
        if (filter->pre_buffer) {
            GstBuffer *prebuf;

            prebuf = (g_list_first(filter->pre_buffer))->data;
            ts = GST_BUFFER_TIMESTAMP(prebuf);
        }

        g_signal_emit(filter, gst_vader_signals[SIGNAL_VADER_START],
                      0, ts);
        m = gst_vader_message_new(filter, TRUE, ts);
        e = gst_vader_event_new(filter, GST_EVENT_VADER_START, ts);
        gst_element_post_message(GST_ELEMENT(filter), m);

        /* Insert a custom event in the stream to mark the beginning of a cut. */
        /* This will block if the pipeline is paused so we have to unlock. */
        g_static_rec_mutex_unlock(&filter->mtx);
        gst_pad_push_event(filter->srcpad, e);
        g_static_rec_mutex_lock(&filter->mtx);

        /* Start dumping audio */
        if (filter->dumpdir) {
            gchar *filename = g_strdup_printf("%s/%08d.raw", filter->dumpdir,
                                              filter->dumpidx);
            filter->dumpfile = fopen(filename, "wb");
            g_free(filename);
        }

        /* first of all, flush current buffer */
        GST_DEBUG_OBJECT(filter, "flushing buffer of length %" GST_TIME_FORMAT,
                         GST_TIME_ARGS(filter->pre_run_length));
        while (filter->pre_buffer) {
            GstBuffer *prebuf;

            prebuf = (g_list_first(filter->pre_buffer))->data;
            filter->pre_buffer = g_list_remove(filter->pre_buffer, prebuf);
            if (filter->dumpfile)
                fwrite(GST_BUFFER_DATA(prebuf), 1, GST_BUFFER_SIZE(prebuf),
                       filter->dumpfile);
            /* This will block if the pipeline is paused so we have to unlock. */
            g_static_rec_mutex_unlock(&filter->mtx);
            gst_pad_push(filter->srcpad, prebuf);
            g_static_rec_mutex_lock(&filter->mtx);
            ++count;
        }
        GST_DEBUG_OBJECT(filter, "flushed %d buffers", count);
        filter->pre_run_length = 0;
    }
}
Example #14
0
static GstFlowReturn
audioresample_transform (GstBaseTransform * base, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstAudioresample *audioresample;
  ResampleState *r;
  guchar *data, *datacopy;
  gulong size;
  GstClockTime timestamp;

  audioresample = GST_AUDIORESAMPLE (base);
  r = audioresample->resample;

  data = GST_BUFFER_DATA (inbuf);
  size = GST_BUFFER_SIZE (inbuf);
  timestamp = GST_BUFFER_TIMESTAMP (inbuf);

  GST_LOG_OBJECT (audioresample, "transforming buffer of %ld bytes, ts %"
      GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT ", offset %"
      G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT,
      size, GST_TIME_ARGS (timestamp),
      GST_TIME_ARGS (GST_BUFFER_DURATION (inbuf)),
      GST_BUFFER_OFFSET (inbuf), GST_BUFFER_OFFSET_END (inbuf));

  /* check for timestamp discontinuities and flush/reset if needed */
  if (G_UNLIKELY (audioresample_check_discont (audioresample, timestamp))) {
    /* Flush internal samples */
    audioresample_pushthrough (audioresample);
    /* Inform downstream element about discontinuity */
    audioresample->need_discont = TRUE;
    /* We want to recalculate the offset */
    audioresample->ts_offset = -1;
  }

  if (audioresample->ts_offset == -1) {
    /* if we don't know the initial offset yet, calculate it based on the 
     * input timestamp. */
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      GstClockTime stime;

      /* offset used to calculate the timestamps. We use the sample offset for
       * this to make it more accurate. We want the first buffer to have the
       * same timestamp as the incoming timestamp. */
      audioresample->next_ts = timestamp;
      audioresample->ts_offset =
          gst_util_uint64_scale_int (timestamp, r->o_rate, GST_SECOND);
      /* offset used as the buffer offset; this offset is always
       * relative to the stream time, note that the timestamp is not... */
      stime = (timestamp - base->segment.start) + base->segment.time;
      audioresample->offset =
          gst_util_uint64_scale_int (stime, r->o_rate, GST_SECOND);
    }
  }
  audioresample->prev_ts = timestamp;
  audioresample->prev_duration = GST_BUFFER_DURATION (inbuf);

  /* need to memdup, resample takes ownership. */
  datacopy = g_memdup (data, size);
  resample_add_input_data (r, datacopy, size, g_free, datacopy);

  return audioresample_do_output (audioresample, outbuf);
}
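The timestamp bookkeeping above hinges on converting between nanosecond timestamps and sample offsets at the output rate via gst_util_uint64_scale_int(). A standalone sketch of that arithmetic in plain C (assuming a compiler with unsigned __int128 instead of GStreamer's overflow-safe helper):

#include <stdint.h>

#define NSEC_PER_SEC 1000000000ULL

/* timestamp (ns) -> sample offset at 'rate' Hz, equivalent to
 * gst_util_uint64_scale_int (ts, rate, GST_SECOND) */
static uint64_t
ns_to_samples (uint64_t ts_ns, uint32_t rate)
{
  return (uint64_t) (((unsigned __int128) ts_ns * rate) / NSEC_PER_SEC);
}

/* sample offset -> timestamp (ns), the reverse mapping used when
 * producing output timestamps */
static uint64_t
samples_to_ns (uint64_t samples, uint32_t rate)
{
  return (uint64_t) (((unsigned __int128) samples * NSEC_PER_SEC) / rate);
}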
示例#15
0
static GstFlowReturn
gst_a52dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstA52Dec *a52dec = GST_A52DEC (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  gint first_access;

  if (a52dec->dvdmode) {
    gsize size;
    guint8 data[2];
    gint offset;
    gint len;
    GstBuffer *subbuf;

    size = gst_buffer_get_size (buf);
    if (size < 2)
      goto not_enough_data;

    gst_buffer_extract (buf, 0, data, 2);
    first_access = (data[0] << 8) | data[1];

    /* Skip the first_access header */
    offset = 2;

    if (first_access > 1) {
      /* Length of data before first_access */
      len = first_access - 1;

      if (len <= 0 || offset + len > size)
        goto bad_first_access_parameter;

      subbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, len);
      GST_BUFFER_TIMESTAMP (subbuf) = GST_CLOCK_TIME_NONE;
      ret = a52dec->base_chain (pad, parent, subbuf);
      if (ret != GST_FLOW_OK) {
        gst_buffer_unref (buf);
        goto done;
      }

      offset += len;
      len = size - offset;

      if (len > 0) {
        subbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, len);
        GST_BUFFER_TIMESTAMP (subbuf) = GST_BUFFER_TIMESTAMP (buf);

        ret = a52dec->base_chain (pad, parent, subbuf);
      }
      gst_buffer_unref (buf);
    } else {
      /* first_access = 0 or 1, so if there's a timestamp it applies to the first byte */
      subbuf =
          gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset,
          size - offset);
      GST_BUFFER_TIMESTAMP (subbuf) = GST_BUFFER_TIMESTAMP (buf);
      gst_buffer_unref (buf);
      ret = a52dec->base_chain (pad, parent, subbuf);
    }
  } else {
    ret = a52dec->base_chain (pad, parent, buf);
  }

done:
  return ret;

/* ERRORS */
not_enough_data:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (a52dec), STREAM, DECODE, (NULL),
        ("Insufficient data in buffer. Can't determine first_acess"));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
bad_first_access_parameter:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (a52dec), STREAM, DECODE, (NULL),
        ("Bad first_access parameter (%d) in buffer", first_access));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
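A standalone sketch of the DVD first_access split used above (values are illustrative): the two header bytes are followed by (first_access - 1) bytes that belong to the previous access unit and are pushed without a timestamp; the buffer's timestamp applies from the split point onwards.

/* Illustrative helper: where does the timestamped data start? */
static gint
first_access_split_point (const guint8 * hdr)
{
  gint first_access = (hdr[0] << 8) | hdr[1];

  /* e.g. first_access == 5: bytes 2..5 are pushed untimed, the
   * timestamped data starts at offset 2 + (5 - 1) == 6 */
  return 2 + (first_access - 1);
}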
示例#16
0
static GstFlowReturn
gst_rdt_manager_chain_rdt (GstPad * pad, GstBuffer * buffer)
{
  GstFlowReturn res;
  GstRDTManager *rdtmanager;
  GstRDTManagerSession *session;
  GstClockTime timestamp;
  GstRDTPacket packet;
  guint32 ssrc;
  guint8 pt;
  gboolean more;

  rdtmanager = GST_RDT_MANAGER (GST_PAD_PARENT (pad));

  GST_DEBUG_OBJECT (rdtmanager, "got RDT packet");

  ssrc = 0;
  pt = 0;

  GST_DEBUG_OBJECT (rdtmanager, "SSRC %08x, PT %d", ssrc, pt);

  /* find session */
  session = gst_pad_get_element_private (pad);

  /* see if we have the pad */
  if (!session->active) {
    activate_session (rdtmanager, session, ssrc, pt);
    session->active = TRUE;
  }

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG_OBJECT (rdtmanager, "received discont");
    session->discont = TRUE;
  }

  res = GST_FLOW_OK;

  /* take the timestamp of the buffer. This is the time when the packet was
   * received and is used to calculate jitter and clock skew. We will adjust
   * this timestamp with the smoothed value after processing it in the
   * jitterbuffer. */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  /* bring to running time */
  timestamp = gst_segment_to_running_time (&session->segment, GST_FORMAT_TIME,
      timestamp);

  more = gst_rdt_buffer_get_first_packet (buffer, &packet);
  while (more) {
    GstRDTType type;

    type = gst_rdt_packet_get_type (&packet);
    GST_DEBUG_OBJECT (rdtmanager, "Have packet of type %04x", type);

    if (GST_RDT_IS_DATA_TYPE (type)) {
      GST_DEBUG_OBJECT (rdtmanager, "We have a data packet");
      res = gst_rdt_manager_handle_data_packet (session, timestamp, &packet);
    } else {
      switch (type) {
        default:
          GST_DEBUG_OBJECT (rdtmanager, "Ignoring packet");
          break;
      }
    }
    if (res != GST_FLOW_OK)
      break;

    more = gst_rdt_packet_move_to_next (&packet);
  }

  gst_buffer_unref (buffer);

  return res;
}
示例#17
0
static GstFlowReturn
gst_bml_transform_transform_ip_stereo (GstBaseTransform * base,
    GstBuffer * outbuf)
{
  GstMapInfo info;
  GstBMLTransform *bml_transform = GST_BML_TRANSFORM (base);
  GstBMLTransformClass *klass = GST_BML_TRANSFORM_GET_CLASS (bml_transform);
  GstBML *bml = GST_BML (bml_transform);
  GstBMLClass *bml_class = GST_BML_CLASS (klass);
  BMLData *data, *seg_data;
  gpointer bm = bml->bm;
  guint todo, seg_size, samples_per_buffer;
  gboolean has_data;
  guint mode = 3;               /*WM_READWRITE */

  bml->running_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (outbuf));

  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_DISCONT)) {
    bml->subtick_count = (!bml->reverse) ? bml->subticks_per_tick : 1;
  }

  /* TODO(ensonic): sync on subticks ? */
  if (bml->subtick_count >= bml->subticks_per_tick) {
    bml (gstbml_reset_triggers (bml, bml_class));
    bml (gstbml_sync_values (bml, bml_class, GST_BUFFER_TIMESTAMP (outbuf)));
    bml (tick (bm));
    bml->subtick_count = 1;
  } else {
    bml->subtick_count++;
  }

  /* don't process data in passthrough-mode */
  if (gst_base_transform_is_passthrough (base))
    return GST_FLOW_OK;

  if (!gst_buffer_map (outbuf, &info, GST_MAP_READ | GST_MAP_WRITE)) {
    GST_WARNING_OBJECT (base, "unable to map buffer for read & write");
    return GST_FLOW_ERROR;
  }
  data = (BMLData *) info.data;
  samples_per_buffer = info.size / (sizeof (BMLData) * 2);

  /* if buffer has only silence process with different mode */
  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_GAP)) {
    mode = 2;                   /* WM_WRITE */
  } else {
    gfloat fc = 32768.0;
    orc_scalarmultiply_f32_ns (data, data, fc, samples_per_buffer * 2);
  }

  GST_DEBUG_OBJECT (bml_transform, "  calling work_m2s(%d,%d)",
      samples_per_buffer, mode);
  todo = samples_per_buffer;
  seg_data = data;
  has_data = FALSE;
  while (todo) {
    // 256 is MachineInterface.h::MAX_BUFFER_LENGTH
    seg_size = (todo > 256) ? 256 : todo;
    // the first seg_data argument can be NULL, it's ignored
    has_data |= bml (work_m2s (bm, seg_data, seg_data, (int) seg_size, mode));
    seg_data = &seg_data[seg_size * 2];
    todo -= seg_size;
  }
  if (gstbml_fix_data ((GstElement *) bml_transform, &info, has_data)) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
  } else {
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_GAP);
  }

  gst_buffer_unmap (outbuf, &info);

  return GST_FLOW_OK;
}
/**
 * gst_basertppayload_push:
 * @payload: a #GstBaseRTPPayload
 * @buffer: a #GstBuffer
 *
 * Push @buffer to the peer element of the payloader. The SSRC, payload type,
 * seqnum and timestamp of the RTP buffer will be updated first.
 * 
 * This function takes ownership of @buffer.
 *
 * Returns: a #GstFlowReturn.
 */
GstFlowReturn
gst_basertppayload_push (GstBaseRTPPayload * payload, GstBuffer * buffer)
{
  GstFlowReturn res;
  GstClockTime timestamp;
  guint32 rtptime;
  GstBaseRTPPayloadPrivate *priv;

  if (payload->clock_rate == 0)
    goto no_rate;

  priv = payload->priv;

  gst_rtp_buffer_set_ssrc (buffer, payload->current_ssrc);

  gst_rtp_buffer_set_payload_type (buffer, payload->pt);

  /* update first, so that the property is set to the last
   * seqnum pushed */
  payload->seqnum = priv->next_seqnum;
  gst_rtp_buffer_set_seq (buffer, payload->seqnum);

  /* can wrap around, which is perfectly fine */
  priv->next_seqnum++;

  /* add our random offset to the timestamp */
  rtptime = payload->ts_base;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
    gint64 rtime;

    rtime = gst_segment_to_running_time (&payload->segment, GST_FORMAT_TIME,
        timestamp);

    rtime = gst_util_uint64_scale_int (rtime, payload->clock_rate, GST_SECOND);

    /* add running_time in clock-rate units to the base timestamp */
    rtptime += rtime;
  } else {
    /* no timestamp to convert, take previous timestamp */
    rtptime = payload->timestamp;
  }
  gst_rtp_buffer_set_timestamp (buffer, rtptime);

  payload->timestamp = rtptime;

  /* set caps */
  gst_buffer_set_caps (buffer, GST_PAD_CAPS (payload->srcpad));

  GST_LOG_OBJECT (payload,
      "Pushing packet size %d, seq=%d, rtptime=%u, timestamp %" GST_TIME_FORMAT,
      GST_BUFFER_SIZE (buffer), payload->seqnum, rtptime,
      GST_TIME_ARGS (timestamp));

  res = gst_pad_push (payload->srcpad, buffer);

  return res;

  /* ERRORS */
no_rate:
  {
    GST_ELEMENT_ERROR (payload, STREAM, NOT_IMPLEMENTED, (NULL),
        ("subclass did not specify clock-rate"));
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
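A hypothetical caller illustrates the contract: a payloader subclass only fills an RTP buffer with payload data and a media timestamp, and gst_basertppayload_push() then stamps SSRC, payload type, seqnum and RTP time before pushing. A minimal sketch (the handler name is made up and error handling is omitted):

/* Hypothetical handle_buffer() of a trivial payloader that puts the
 * whole input buffer into a single RTP packet. */
static GstFlowReturn
my_pay_handle_buffer (GstBaseRTPPayload * basepayload, GstBuffer * buffer)
{
  GstBuffer *outbuf;
  guint size = GST_BUFFER_SIZE (buffer);

  /* payload of 'size' bytes, no padding, no CSRCs */
  outbuf = gst_rtp_buffer_new_allocate (size, 0, 0);
  memcpy (gst_rtp_buffer_get_payload (outbuf), GST_BUFFER_DATA (buffer), size);

  /* keep the media timestamp so the running-time -> RTP time mapping
   * in gst_basertppayload_push() works */
  GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);

  gst_buffer_unref (buffer);

  return gst_basertppayload_push (basepayload, outbuf);
}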
示例#19
0
static GstFlowReturn
gst_mulawdec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstMuLawDec *mulawdec;
  gint16 *linear_data;
  guint8 *mulaw_data;
  guint mulaw_size;
  GstBuffer *outbuf;
  GstFlowReturn ret;

  mulawdec = GST_MULAWDEC (GST_PAD_PARENT (pad));

  if (G_UNLIKELY (mulawdec->rate == 0))
    goto not_negotiated;

  mulaw_data = (guint8 *) GST_BUFFER_DATA (buffer);
  mulaw_size = GST_BUFFER_SIZE (buffer);

  ret =
      gst_pad_alloc_buffer_and_set_caps (mulawdec->srcpad,
      GST_BUFFER_OFFSET_NONE, mulaw_size * 2, GST_PAD_CAPS (mulawdec->srcpad),
      &outbuf);
  if (ret != GST_FLOW_OK)
    goto alloc_failed;

  linear_data = (gint16 *) GST_BUFFER_DATA (outbuf);

  /* copy discont flag */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

  GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
  if (GST_BUFFER_DURATION (buffer) == GST_CLOCK_TIME_NONE)
    GST_BUFFER_DURATION (outbuf) = gst_util_uint64_scale_int (GST_SECOND,
        mulaw_size * 2, 2 * mulawdec->rate * mulawdec->channels);
  else
    GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buffer);
  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (mulawdec->srcpad));

  mulaw_decode (mulaw_data, linear_data, mulaw_size);

  gst_buffer_unref (buffer);

  ret = gst_pad_push (mulawdec->srcpad, outbuf);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (mulawdec, "no input format set: not-negotiated");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }
alloc_failed:
  {
    GST_DEBUG_OBJECT (mulawdec, "pad alloc failed, flow: %s",
        gst_flow_get_name (ret));
    gst_buffer_unref (buffer);
    return ret;
  }
}
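mulaw_decode() expands each 8-bit µ-law byte into one 16-bit linear sample, which is why the output buffer is allocated at twice the input size. For reference, a sketch of the classic G.711 µ-law expansion in the style of the well-known g711.c (not necessarily the element's own, likely table-based, implementation):

/* Classic G.711 mu-law byte -> 16-bit linear PCM expansion (sketch). */
static gint16
mulaw_byte_to_linear (guint8 u_val)
{
  gint t;

  u_val = ~u_val;                       /* stored in complemented form */
  t = ((u_val & 0x0f) << 3) + 0x84;     /* mantissa plus bias */
  t <<= (u_val & 0x70) >> 4;            /* apply the segment (exponent) */

  return (u_val & 0x80) ? (0x84 - t) : (t - 0x84);
}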
示例#20
0
static GstFlowReturn
gst_audio_segment_clip_clip_buffer (GstSegmentClip * base, GstBuffer * buffer,
    GstBuffer ** outbuf)
{
  GstAudioSegmentClip *self = GST_AUDIO_SEGMENT_CLIP (base);
  GstSegment *segment = &base->segment;
  GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
  GstClockTime duration = GST_BUFFER_DURATION (buffer);
  guint64 offset = GST_BUFFER_OFFSET (buffer);
  guint64 offset_end = GST_BUFFER_OFFSET_END (buffer);
  guint size = GST_BUFFER_SIZE (buffer);

  if (!self->rate || !self->framesize) {
    GST_ERROR_OBJECT (self, "Not negotiated yet");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (segment->format != GST_FORMAT_DEFAULT &&
      segment->format != GST_FORMAT_TIME) {
    GST_DEBUG_OBJECT (self, "Unsupported segment format %s",
        gst_format_get_name (segment->format));
    *outbuf = buffer;
    return GST_FLOW_OK;
  }

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    GST_WARNING_OBJECT (self, "Buffer without valid timestamp");
    *outbuf = buffer;
    return GST_FLOW_OK;
  }

  *outbuf =
      gst_audio_buffer_clip (buffer, segment, self->rate, self->framesize);

  if (!*outbuf) {
    GST_DEBUG_OBJECT (self, "Buffer outside the configured segment");

    /* Now return UNEXPECTED if we're past the end (or before the start
     * when playing backwards) */
    if (segment->format == GST_FORMAT_TIME) {
      if (segment->rate >= 0) {
        if (segment->stop != -1 && timestamp >= segment->stop)
          return GST_FLOW_UNEXPECTED;
      } else {
        if (!GST_CLOCK_TIME_IS_VALID (duration))
          duration =
              gst_util_uint64_scale_int (size, GST_SECOND,
              self->framesize * self->rate);

        if (segment->start != -1 && timestamp + duration <= segment->start)
          return GST_FLOW_UNEXPECTED;
      }
    } else {
      if (segment->rate >= 0) {
        if (segment->stop != -1 && offset != -1 && offset >= segment->stop)
          return GST_FLOW_UNEXPECTED;
      } else if (offset != -1 || offset_end != -1) {
        if (offset_end == -1)
          offset_end = offset + size / self->framesize;

        if (segment->start != -1 && offset_end <= segment->start)
          return GST_FLOW_UNEXPECTED;
      }
    }
  }

  return GST_FLOW_OK;
}
示例#21
0
/* this tests that the output is a correct discontinuous stream
 * if the input is, i.e. drops in time on the input come out the same way */
static void
test_discont_stream_instance (int inrate, int outrate, int samples,
    int numbuffers)
{
  GstElement *audioresample;
  GstBuffer *inbuffer, *outbuffer;
  GstCaps *caps;
  GstClockTime ints;

  int i, j;
  gint16 *p;

  GST_DEBUG ("inrate:%d outrate:%d samples:%d numbuffers:%d",
      inrate, outrate, samples, numbuffers);

  audioresample = setup_audioresample (2, inrate, outrate, 16, FALSE);
  caps = gst_pad_get_negotiated_caps (mysrcpad);
  fail_unless (gst_caps_is_fixed (caps));

  fail_unless (gst_element_set_state (audioresample,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");

  for (j = 1; j <= numbuffers; ++j) {

    inbuffer = gst_buffer_new_and_alloc (samples * 4);
    GST_BUFFER_DURATION (inbuffer) = samples * GST_SECOND / inrate;
    /* "drop" half the buffers */
    ints = GST_BUFFER_DURATION (inbuffer) * 2 * (j - 1);
    GST_BUFFER_TIMESTAMP (inbuffer) = ints;
    GST_BUFFER_OFFSET (inbuffer) = (j - 1) * 2 * samples;
    GST_BUFFER_OFFSET_END (inbuffer) = j * 2 * samples + samples;

    gst_buffer_set_caps (inbuffer, caps);

    p = (gint16 *) GST_BUFFER_DATA (inbuffer);

    /* create a 16 bit signed ramp */
    for (i = 0; i < samples; ++i) {
      *p = -32767 + i * (65535 / samples);
      ++p;
      *p = -32767 + i * (65535 / samples);
      ++p;
    }

    GST_DEBUG ("Sending Buffer time:%" G_GUINT64_FORMAT " duration:%"
        G_GINT64_FORMAT " discont:%d offset:%" G_GUINT64_FORMAT " offset_end:%"
        G_GUINT64_FORMAT, GST_BUFFER_TIMESTAMP (inbuffer),
        GST_BUFFER_DURATION (inbuffer), GST_BUFFER_IS_DISCONT (inbuffer),
        GST_BUFFER_OFFSET (inbuffer), GST_BUFFER_OFFSET_END (inbuffer));
    /* pushing gives away my reference ... */
    fail_unless (gst_pad_push (mysrcpad, inbuffer) == GST_FLOW_OK);

    /* check if the timestamp of the pushed buffer matches the incoming one */
    outbuffer = g_list_nth_data (buffers, g_list_length (buffers) - 1);
    fail_if (outbuffer == NULL);
    fail_unless_equals_uint64 (ints, GST_BUFFER_TIMESTAMP (outbuffer));
    GST_DEBUG ("Got Buffer time:%" G_GUINT64_FORMAT " duration:%"
        G_GINT64_FORMAT " discont:%d offset:%" G_GUINT64_FORMAT " offset_end:%"
        G_GUINT64_FORMAT, GST_BUFFER_TIMESTAMP (outbuffer),
        GST_BUFFER_DURATION (outbuffer), GST_BUFFER_IS_DISCONT (outbuffer),
        GST_BUFFER_OFFSET (outbuffer), GST_BUFFER_OFFSET_END (outbuffer));
    if (j > 1) {
      fail_unless (GST_BUFFER_IS_DISCONT (outbuffer),
          "expected discont for buffer #%d", j);
    }
  }

  /* cleanup */
  gst_caps_unref (caps);
  cleanup_audioresample (audioresample);
}
示例#22
0
static GstFlowReturn
gst_monoscope_chain (GstPad * pad, GstBuffer * inbuf)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstMonoscope *monoscope;

  monoscope = GST_MONOSCOPE (GST_PAD_PARENT (pad));

  /* don't try to combine samples from discont buffer */
  if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (monoscope->adapter);
    monoscope->next_ts = GST_CLOCK_TIME_NONE;
  }

  /* Match timestamps from the incoming audio */
  if (GST_BUFFER_TIMESTAMP (inbuf) != GST_CLOCK_TIME_NONE)
    monoscope->next_ts = GST_BUFFER_TIMESTAMP (inbuf);

  GST_LOG_OBJECT (monoscope, "in buffer has %d samples, ts=%" GST_TIME_FORMAT,
      GST_BUFFER_SIZE (inbuf) / monoscope->bps,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf)));

  gst_adapter_push (monoscope->adapter, inbuf);
  inbuf = NULL;

  /* Collect samples until we have enough for an output frame */
  while (flow_ret == GST_FLOW_OK) {
    gint16 *samples;
    GstBuffer *outbuf = NULL;
    guint32 *pixels, avail, bytesperframe;

    avail = gst_adapter_available (monoscope->adapter);
    GST_LOG_OBJECT (monoscope, "bytes avail now %u", avail);

    /* do negotiation if not done yet, so ->spf etc. is set */
    if (GST_PAD_CAPS (monoscope->srcpad) == NULL) {
      flow_ret = get_buffer (monoscope, &outbuf);
      if (flow_ret != GST_FLOW_OK)
        goto out;
      gst_buffer_unref (outbuf);
      outbuf = NULL;
    }

    bytesperframe = monoscope->spf * monoscope->bps;
    if (avail < bytesperframe)
      break;

    /* FIXME: something is wrong with QoS, we are skipping way too much
     * stuff even with very low CPU loads */
#if 0
    if (monoscope->next_ts != -1) {
      gboolean need_skip;
      gint64 qostime;

      qostime = gst_segment_to_running_time (&monoscope->segment,
          GST_FORMAT_TIME, monoscope->next_ts);

      GST_OBJECT_LOCK (monoscope);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip =
          GST_CLOCK_TIME_IS_VALID (monoscope->earliest_time) &&
          qostime <= monoscope->earliest_time;
      GST_OBJECT_UNLOCK (monoscope);

      if (need_skip) {
        GST_WARNING_OBJECT (monoscope,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (monoscope->earliest_time));
        goto skip;
      }
    }
#endif

    samples = (gint16 *) gst_adapter_peek (monoscope->adapter, bytesperframe);

    if (monoscope->spf < 512) {
      gint16 in_data[512], i;

      for (i = 0; i < 512; ++i) {
        gdouble off;

        off = ((gdouble) i * (gdouble) monoscope->spf) / 512.0;
        in_data[i] = samples[MIN ((guint) off, monoscope->spf)];
      }
      pixels = monoscope_update (monoscope->visstate, in_data);
    } else {
      /* not really correct, but looks much prettier */
      pixels = monoscope_update (monoscope->visstate, samples);
    }

    flow_ret = get_buffer (monoscope, &outbuf);
    if (flow_ret != GST_FLOW_OK)
      goto out;

    memcpy (GST_BUFFER_DATA (outbuf), pixels, monoscope->outsize);

    GST_BUFFER_TIMESTAMP (outbuf) = monoscope->next_ts;
    GST_BUFFER_DURATION (outbuf) = monoscope->frame_duration;

    flow_ret = gst_pad_push (monoscope->srcpad, outbuf);

#if 0
  skip:
#endif

    if (GST_CLOCK_TIME_IS_VALID (monoscope->next_ts))
      monoscope->next_ts += monoscope->frame_duration;

    gst_adapter_flush (monoscope->adapter, bytesperframe);
  }

out:

  return flow_ret;
}
示例#23
0
static GstFlowReturn
gst_dvdec_chain (GstPad * pad, GstBuffer * buf)
{
  GstDVDec *dvdec;
  guint8 *inframe;
  guint8 *outframe;
  guint8 *outframe_ptrs[3];
  gint outframe_pitches[3];
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  guint length;
  gint64 cstart, cstop;
  gboolean PAL, wide;

  dvdec = GST_DVDEC (gst_pad_get_parent (pad));
  inframe = GST_BUFFER_DATA (buf);

  /* buffer should be at least the size of one NTSC frame; this should
   * be enough to decode the header. */
  if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < NTSC_BUFFER))
    goto wrong_size;

  /* preliminary dropping. unref and return if outside of configured segment */
  if ((dvdec->segment->format == GST_FORMAT_TIME) &&
      (!(gst_segment_clip (dvdec->segment, GST_FORMAT_TIME,
                  GST_BUFFER_TIMESTAMP (buf),
                  GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf),
                  &cstart, &cstop))))
    goto dropping;

  if (G_UNLIKELY (dv_parse_header (dvdec->decoder, inframe) < 0))
    goto parse_header_error;

  /* get size */
  PAL = dv_system_50_fields (dvdec->decoder);
  wide = dv_format_wide (dvdec->decoder);

  /* check that the buffer is the right size once we know whether we
   * are dealing with PAL or NTSC */
  length = (PAL ? PAL_BUFFER : NTSC_BUFFER);
  if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < length))
    goto wrong_size;

  dv_parse_packs (dvdec->decoder, inframe);

  if (dvdec->video_offset % dvdec->drop_factor != 0)
    goto skip;

  /* renegotiate on change */
  if (PAL != dvdec->PAL || wide != dvdec->wide) {
    dvdec->src_negotiated = FALSE;
    dvdec->PAL = PAL;
    dvdec->wide = wide;
  }

  dvdec->height = (dvdec->PAL ? PAL_HEIGHT : NTSC_HEIGHT);


  /* negotiate if not done yet */
  if (!dvdec->src_negotiated) {
    if (!gst_dvdec_src_negotiate (dvdec))
      goto not_negotiated;
  }

  ret =
      gst_pad_alloc_buffer_and_set_caps (dvdec->srcpad, 0,
      (720 * dvdec->height) * dvdec->bpp,
      GST_PAD_CAPS (dvdec->srcpad), &outbuf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto no_buffer;

  outframe = GST_BUFFER_DATA (outbuf);

  outframe_ptrs[0] = outframe;
  outframe_pitches[0] = 720 * dvdec->bpp;

  /* the rest only matters for YUY2 */
  if (dvdec->bpp < 3) {
    outframe_ptrs[1] = outframe_ptrs[0] + 720 * dvdec->height;
    outframe_ptrs[2] = outframe_ptrs[1] + 360 * dvdec->height;

    outframe_pitches[1] = dvdec->height / 2;
    outframe_pitches[2] = outframe_pitches[1];
  }

  GST_DEBUG_OBJECT (dvdec, "decoding and pushing buffer");
  dv_decode_full_frame (dvdec->decoder, inframe,
      e_dv_color_yuv, outframe_ptrs, outframe_pitches);

  GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET (buf);
  GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_END (buf);
  GST_BUFFER_TIMESTAMP (outbuf) = cstart;
  GST_BUFFER_DURATION (outbuf) = cstop - cstart;

  ret = gst_pad_push (dvdec->srcpad, outbuf);

skip:
  dvdec->video_offset++;

done:
  gst_buffer_unref (buf);
  gst_object_unref (dvdec);

  return ret;

  /* ERRORS */
wrong_size:
  {
    GST_ELEMENT_ERROR (dvdec, STREAM, DECODE,
        (NULL), ("Input buffer too small"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
parse_header_error:
  {
    GST_ELEMENT_ERROR (dvdec, STREAM, DECODE,
        (NULL), ("Error parsing DV header"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
not_negotiated:
  {
    GST_DEBUG_OBJECT (dvdec, "could not negotiate output");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
no_buffer:
  {
    GST_DEBUG_OBJECT (dvdec, "could not allocate buffer");
    goto done;
  }

dropping:
  {
    GST_DEBUG_OBJECT (dvdec,
        "dropping buffer since it's out of the configured segment");
    goto done;
  }
}
示例#24
0
static GstFlowReturn
gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
{
  guint avail;
  GstBuffer *outbuf;
  GstFlowReturn ret;
  gboolean fragmented;

  avail = gst_adapter_available (rtph263ppay->adapter);
  if (avail == 0)
    return GST_FLOW_OK;

  fragmented = FALSE;
  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each
   * buffer */
  /* With Fragmentation Mode at GST_FRAGMENTATION_MODE_NORMAL:
   *  This algorithm implements the Follow-on packets method for packetization.
   *  This assumes a low packet-loss network.
   * With Fragmentation Mode at GST_FRAGMENTATION_MODE_SYNC:
   *  This algorithm separates large frames at synchronisation points (Segments)
   *  (See RFC 4629 section 6). It would be interesting to have a property such as network
   *  quality to select between both packetization methods */
  /* TODO Add VRC support (See RFC 4629 section 5.2) */

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    guint payload_len;
    gint header_len;
    guint next_gop = 0;
    gboolean found_gob = FALSE;
    GstRTPBuffer rtp = { NULL };

    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
      /* start after 1st gop possible */
      guint parsed_len = 3;
      const guint8 *parse_data = NULL;

      parse_data = gst_adapter_map (rtph263ppay->adapter, avail);

      /* Check if we have a GOB, EOS or EOSSBS header */
      /* FIXME EOS and EOSSBS packets should never contain any gobs and vice-versa */
      if (avail >= 3 && *parse_data == 0 && *(parse_data + 1) == 0
          && *(parse_data + 2) >= 0x80) {
        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
        found_gob = TRUE;
      }
      /* Find next and cut the packet accordingly */
      /* TODO we should get as many gobs as possible until MTU is reached, this
       * code seems to just get one GOB per packet */
      while (parsed_len + 2 < avail) {
        if (parse_data[parsed_len] == 0 && parse_data[parsed_len + 1] == 0
            && parse_data[parsed_len + 2] >= 0x80) {
          next_gop = parsed_len;
          GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at :  %d",
              next_gop);
          break;
        }
        parsed_len++;
      }
      gst_adapter_unmap (rtph263ppay->adapter);
    }

    /* for picture start frames (non-fragmented), we need to remove the first
     * two 0x00 bytes and set P=1 */
    header_len = (fragmented && !found_gob) ? 2 : 0;

    towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
        (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));

    if (next_gop > 0)
      towrite = MIN (next_gop, towrite);

    payload_len = header_len + towrite;

    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    /* last fragment gets the marker bit set */
    gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);

    payload = gst_rtp_buffer_get_payload (&rtp);

    gst_adapter_copy (rtph263ppay->adapter, &payload[header_len], 0, towrite);

    /*  0                   1
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |   RR    |P|V|   PLEN    |PEBIT|
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    /* if fragmented or gop header , write p bit =1 */
    payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
    payload[1] = 0;

    GST_BUFFER_TIMESTAMP (outbuf) = rtph263ppay->first_timestamp;
    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
    gst_rtp_buffer_unmap (&rtp);

    gst_adapter_flush (rtph263ppay->adapter, towrite);

    ret =
        gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263ppay), outbuf);

    avail -= towrite;
    fragmented = TRUE;
  }

  return ret;
}
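The two payload bytes written above follow the RFC 4629 header sketched in the comment; with RR, V, PLEN and PEBIT all zero, only the P bit remains, and it sits in bit 2 of the first byte, hence the 0x04. A tiny sketch of that packing (assuming the same minimal header as above):

/* Pack the minimal RFC 4629 H.263-1998 payload header used above:
 * RR = 0, V = 0, PLEN = 0, PEBIT = 0, only P varies. */
static void
write_h263p_minimal_header (guint8 * hdr, gboolean p_bit)
{
  hdr[0] = p_bit ? 0x04 : 0x00;   /* |R R R R R|P|V|PLEN..| */
  hdr[1] = 0x00;                  /* |..PLEN|PEBIT|        */
}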
void
gst_audio_fx_base_fir_filter_push_residue (GstAudioFXBaseFIRFilter * self)
{
  GstBuffer *outbuf;
  GstFlowReturn res;
  gint rate = GST_AUDIO_FILTER_CAST (self)->format.rate;
  gint channels = GST_AUDIO_FILTER_CAST (self)->format.channels;
  gint width = GST_AUDIO_FILTER_CAST (self)->format.width / 8;
  gint outsize, outsamples;
  guint8 *in, *out;

  if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }

  /* Calculate the number of samples and their memory size that
   * should be pushed from the residue */
  outsamples = self->nsamples_in - (self->nsamples_out - self->latency);
  if (outsamples <= 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }
  outsize = outsamples * channels * width;

  if (!self->fft || self->low_latency) {
    gint64 diffsize, diffsamples;

    /* Process the difference between latency and residue length samples
     * to start at the actual data instead of starting at the zeros before
     * when we only got one buffer smaller than latency */
    diffsamples =
        ((gint64) self->latency) - ((gint64) self->buffer_fill) / channels;
    if (diffsamples > 0) {
      diffsize = diffsamples * channels * width;
      in = g_new0 (guint8, diffsize);
      out = g_new0 (guint8, diffsize);
      self->nsamples_out += self->process (self, in, out, diffsamples);
      g_free (in);
      g_free (out);
    }

    res = gst_pad_alloc_buffer (GST_BASE_TRANSFORM_CAST (self)->srcpad,
        GST_BUFFER_OFFSET_NONE, outsize,
        GST_PAD_CAPS (GST_BASE_TRANSFORM_CAST (self)->srcpad), &outbuf);

    if (G_UNLIKELY (res != GST_FLOW_OK)) {
      GST_WARNING_OBJECT (self, "failed allocating buffer of %d bytes",
          outsize);
      self->buffer_fill = 0;
      return;
    }

    /* Convolve the residue with zeros to get the actual remaining data */
    in = g_new0 (guint8, outsize);
    self->nsamples_out +=
        self->process (self, in, GST_BUFFER_DATA (outbuf), outsamples);
    g_free (in);
  } else {
    guint gensamples = 0;
    guint8 *data;

    outbuf = gst_buffer_new_and_alloc (outsize);
    data = GST_BUFFER_DATA (outbuf);

    while (gensamples < outsamples) {
      guint step_insamples = self->block_length - self->buffer_fill;
      guint8 *zeroes = g_new0 (guint8, step_insamples * channels * width);
      guint8 *out = g_new (guint8, self->block_length * channels * width);
      guint step_gensamples;

      step_gensamples = self->process (self, zeroes, out, step_insamples);
      g_free (zeroes);

      memcpy (data + gensamples * channels * width, out,
          MIN (step_gensamples, outsamples - gensamples) * channels * width);
      gensamples += MIN (step_gensamples, outsamples - gensamples);

      g_free (out);
    }
    self->nsamples_out += gensamples;
  }

  /* Set timestamp, offset, etc from the values we
   * saved when processing the regular buffers */
  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    GST_BUFFER_TIMESTAMP (outbuf) = self->start_ts;
  else
    GST_BUFFER_TIMESTAMP (outbuf) = 0;
  GST_BUFFER_TIMESTAMP (outbuf) +=
      gst_util_uint64_scale_int (self->nsamples_out - outsamples -
      self->latency, GST_SECOND, rate);

  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (outsamples, GST_SECOND, rate);

  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) =
        self->start_off + self->nsamples_out - outsamples - self->latency;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET (outbuf) + outsamples;
  }

  GST_DEBUG_OBJECT (self, "Pushing residue buffer of size %d with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
      GST_BUFFER_SIZE (outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
      GST_BUFFER_OFFSET_END (outbuf), outsamples);

  res = gst_pad_push (GST_BASE_TRANSFORM_CAST (self)->srcpad, outbuf);

  if (G_UNLIKELY (res != GST_FLOW_OK)) {
    GST_WARNING_OBJECT (self, "failed to push residue");
  }

  self->buffer_fill = 0;
}
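As a worked example of the residue arithmetic above (illustrative numbers): with nsamples_in = 48000, nsamples_out = 47000 and a filter latency of 2000 samples, the residue still owed downstream is outsamples = 48000 - (47000 - 2000) = 3000 samples, i.e. outsize = 3000 * channels * width bytes.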
示例#26
0
static GstFlowReturn
gst_aiffparse_stream_data (AIFFParse * aiff)
{
  GstBuffer *buf = NULL;
  GstFlowReturn res = GST_FLOW_OK;
  guint64 desired, obtained;
  GstClockTime timestamp, next_timestamp, duration;
  guint64 pos, nextpos;

iterate_adapter:
  GST_LOG_OBJECT (aiff,
      "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT " , dataleft: %"
      G_GINT64_FORMAT, aiff->offset, aiff->end_offset, aiff->dataleft);

  /* Get the next n bytes and output them */
  if (aiff->dataleft == 0 || aiff->dataleft < aiff->bytes_per_sample)
    goto found_eos;

  /* scale the amount of data by the segment rate so we get equal
   * amounts of data regardless of the playback rate */
  desired =
      MIN (gst_guint64_to_gdouble (aiff->dataleft),
      MAX_BUFFER_SIZE * aiff->segment.abs_rate);

  if (desired >= aiff->bytes_per_sample && aiff->bytes_per_sample > 0)
    desired -= (desired % aiff->bytes_per_sample);

  GST_LOG_OBJECT (aiff, "Fetching %" G_GINT64_FORMAT " bytes of data "
      "from the sinkpad", desired);

  if (aiff->streaming) {
    guint avail = gst_adapter_available (aiff->adapter);

    if (avail < desired) {
      GST_LOG_OBJECT (aiff, "Got only %d bytes of data from the sinkpad",
          avail);
      return GST_FLOW_OK;
    }

    buf = gst_adapter_take_buffer (aiff->adapter, desired);
  } else {
    if ((res = gst_pad_pull_range (aiff->sinkpad, aiff->offset,
                desired, &buf)) != GST_FLOW_OK)
      goto pull_error;
  }

  /* If we have a pending close/start segment, send it now. */
  if (G_UNLIKELY (aiff->close_segment != NULL)) {
    gst_pad_push_event (aiff->srcpad, aiff->close_segment);
    aiff->close_segment = NULL;
  }
  if (G_UNLIKELY (aiff->start_segment != NULL)) {
    gst_pad_push_event (aiff->srcpad, aiff->start_segment);
    aiff->start_segment = NULL;
  }

  obtained = GST_BUFFER_SIZE (buf);

  /* our positions in bytes */
  pos = aiff->offset - aiff->datastart;
  nextpos = pos + obtained;

  /* update offsets, does not overflow. */
  GST_BUFFER_OFFSET (buf) = pos / aiff->bytes_per_sample;
  GST_BUFFER_OFFSET_END (buf) = nextpos / aiff->bytes_per_sample;

  if (aiff->bps > 0) {
    /* and timestamps if we have a bitrate, be careful for overflows */
    timestamp = uint64_ceiling_scale (pos, GST_SECOND, (guint64) aiff->bps);
    next_timestamp =
        uint64_ceiling_scale (nextpos, GST_SECOND, (guint64) aiff->bps);
    duration = next_timestamp - timestamp;

    /* update current running segment position */
    gst_segment_set_last_stop (&aiff->segment, GST_FORMAT_TIME, next_timestamp);
  } else {
    /* no bitrate, all we know is that the first sample has timestamp 0, all
     * other positions and durations have unknown timestamp. */
    if (pos == 0)
      timestamp = 0;
    else
      timestamp = GST_CLOCK_TIME_NONE;
    duration = GST_CLOCK_TIME_NONE;
    /* update current running segment position with byte offset */
    gst_segment_set_last_stop (&aiff->segment, GST_FORMAT_BYTES, nextpos);
  }
  if (aiff->discont) {
    GST_DEBUG_OBJECT (aiff, "marking DISCONT");
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    aiff->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;
  gst_buffer_set_caps (buf, aiff->caps);

  GST_LOG_OBJECT (aiff,
      "Got buffer. timestamp:%" GST_TIME_FORMAT " , duration:%" GST_TIME_FORMAT
      ", size:%u", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration),
      GST_BUFFER_SIZE (buf));

  if ((res = gst_pad_push (aiff->srcpad, buf)) != GST_FLOW_OK)
    goto push_error;

  if (obtained < aiff->dataleft) {
    aiff->offset += obtained;
    aiff->dataleft -= obtained;
  } else {
    aiff->offset += aiff->dataleft;
    aiff->dataleft = 0;
  }

  /* Iterate until we need more data, so the adapter size won't grow */
  if (aiff->streaming) {
    GST_LOG_OBJECT (aiff,
        "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT, aiff->offset,
        aiff->end_offset);
    goto iterate_adapter;
  }
  return res;

  /* ERROR */
found_eos:
  {
    GST_DEBUG_OBJECT (aiff, "found EOS");
    return GST_FLOW_UNEXPECTED;
  }
pull_error:
  {
    /* check if we got EOS */
    if (res == GST_FLOW_UNEXPECTED)
      goto found_eos;

    GST_WARNING_OBJECT (aiff,
        "Error getting %" G_GINT64_FORMAT " bytes from the "
        "sinkpad (dataleft = %" G_GINT64_FORMAT ")", desired, aiff->dataleft);
    return res;
  }
push_error:
  {
    GST_INFO_OBJECT (aiff,
        "Error pushing on srcpad %s:%s, reason %s, is linked? = %d",
        GST_DEBUG_PAD_NAME (aiff->srcpad), gst_flow_get_name (res),
        gst_pad_is_linked (aiff->srcpad));
    return res;
  }
}
示例#27
0
// chain function - this function does the actual processing
static GstFlowReturn
gst_bgfg_acmmm2003_chain(GstPad *pad, GstBuffer *buf)
{
    GstBgFgACMMM2003 *filter;

    // sanity checks
    g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
    g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);

    filter = GST_BGFG_ACMMM2003(GST_OBJECT_PARENT(pad));

    filter->image->imageData = (gchar*) GST_BUFFER_DATA(buf);

    // the bg model must be initialized with a valid image; thus we delay its
    // creation until the chain function
    if (filter->model == NULL) {
        filter->model = cvCreateFGDStatModel(filter->image, NULL);

        ((CvFGDStatModel*)filter->model)->params.minArea           = filter->min_area;
        ((CvFGDStatModel*)filter->model)->params.erode_iterations  = filter->n_erode_iterations;
        ((CvFGDStatModel*)filter->model)->params.dilate_iterations = filter->n_dilate_iterations;

        return gst_pad_push(filter->srcpad, buf);
    }

    cvUpdateBGStatModel(filter->image, filter->model, -1);

    // send mask event, if requested
    if (filter->send_mask_events) {
        GstStructure *structure;
        GstEvent     *event;
        GArray       *data_array;
        IplImage     *mask;

        // prepare and send custom event with the mask surface
        mask = filter->model->foreground;
        data_array = g_array_sized_new(FALSE, FALSE, sizeof(mask->imageData[0]), mask->imageSize);
        g_array_append_vals(data_array, mask->imageData, mask->imageSize);

        structure = gst_structure_new("bgfg-mask",
                                      "data",      G_TYPE_POINTER, data_array,
                                      "width",     G_TYPE_UINT,    mask->width,
                                      "height",    G_TYPE_UINT,    mask->height,
                                      "depth",     G_TYPE_UINT,    mask->depth,
                                      "channels",  G_TYPE_UINT,    mask->nChannels,
                                      "timestamp", G_TYPE_UINT64,  GST_BUFFER_TIMESTAMP(buf),
                                      NULL);

        event = gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM, structure);
        gst_pad_push_event(filter->srcpad, event);
        g_array_unref(data_array);

        if (filter->display) {
            // shade the regions not selected by the acmmm2003 algorithm
            cvXorS(mask,          CV_RGB(255, 255, 255), mask,          NULL);
            cvSubS(filter->image, CV_RGB(191, 191, 191), filter->image, mask);
            cvXorS(mask,          CV_RGB(255, 255, 255), mask,          NULL);
        }
    }

    if (filter->send_roi_events) {
        CvSeq        *contour;
        CvRect       *bounding_rects;
        guint         i, j, n_rects;

        // count # of contours, allocate array to store the bounding rectangles
        for (contour = filter->model->foreground_regions, n_rects = 0;
             contour != NULL;
             contour = contour->h_next, ++n_rects);

        bounding_rects = g_new(CvRect, n_rects);

        for (contour = filter->model->foreground_regions, i = 0; contour != NULL; contour = contour->h_next, ++i)
            bounding_rects[i] = cvBoundingRect(contour, 0);

        for (i = 0; i < n_rects; ++i) {
            // skip collapsed rectangles
            if ((bounding_rects[i].width == 0) || (bounding_rects[i].height == 0)) continue;

            for (j = (i + 1); j < n_rects; ++j) {
                // skip collapsed rectangles
                if ((bounding_rects[j].width == 0) || (bounding_rects[j].height == 0)) continue;

                if (rect_overlap(bounding_rects[i], bounding_rects[j])) {
                    bounding_rects[i] = rect_collapse(bounding_rects[i], bounding_rects[j]);
                    bounding_rects[j] = NULL_RECT;
                }
            }
        }

        for (i = 0; i < n_rects; ++i) {
            GstEvent     *event;
            GstStructure *structure;
            CvRect        r;

            // skip collapsed rectangles
            r = bounding_rects[i];
            if ((r.width == 0) || (r.height == 0)) continue;

            structure = gst_structure_new("bgfg-roi",
                                          "x",         G_TYPE_UINT,   r.x,
                                          "y",         G_TYPE_UINT,   r.y,
                                          "width",     G_TYPE_UINT,   r.width,
                                          "height",    G_TYPE_UINT,   r.height,
                                          "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP(buf),
                                          NULL);

            event = gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM, structure);
            gst_pad_send_event(filter->sinkpad, event);

            if (filter->verbose)
                GST_INFO("[roi] x: %d, y: %d, width: %d, height: %d\n",
                         r.x, r.y, r.width, r.height);

            if (filter->display)
                cvRectangle(filter->image, cvPoint(r.x, r.y), cvPoint(r.x + r.width, r.y + r.height),
                            CV_RGB(0, 0, 255), 1, 0, 0);
        }

        g_free(bounding_rects);
    }

    if (filter->display)
        gst_buffer_set_data(buf, (guchar*) filter->image->imageData, filter->image->imageSize);

    return gst_pad_push(filter->srcpad, buf);
}
示例#28
0
/* Get a DV frame, chop it up in pieces, and push the pieces to the RTP layer.
 */
static GstFlowReturn
gst_rtp_dv_pay_handle_buffer (GstBaseRTPPayload * basepayload,
    GstBuffer * buffer)
{
  GstRTPDVPay *rtpdvpay;
  guint max_payload_size;
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  gint hdrlen;
  guint size;
  guint8 *data;
  guint8 *dest;
  guint filled;

  rtpdvpay = GST_RTP_DV_PAY (basepayload);

  hdrlen = gst_rtp_buffer_calc_header_len (0);
  /* DV frames are made up from a bunch of DIF blocks. DIF blocks are 80 bytes
   * each, and we should put an integral number of them in each RTP packet.
   * Therefore, we round the available room down to the nearest multiple of 80.
   *
   * The available room is just the packet MTU, minus the RTP header length. */
  max_payload_size = ((GST_BASE_RTP_PAYLOAD_MTU (rtpdvpay) - hdrlen) / 80) * 80;

  /* The length of the buffer to transmit. */
  size = GST_BUFFER_SIZE (buffer);
  data = GST_BUFFER_DATA (buffer);

  GST_DEBUG_OBJECT (rtpdvpay,
      "DV RTP payloader got buffer of %u bytes, splitting in %u byte "
      "payload fragments, at time %" GST_TIME_FORMAT, size, max_payload_size,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));

  if (!rtpdvpay->negotiated) {
    gst_dv_pay_negotiate (rtpdvpay, data, size);
    /* if we have not yet scanned the stream for its type, do so now */
    rtpdvpay->negotiated = TRUE;
  }

  outbuf = NULL;
  dest = NULL;
  filled = 0;

  /* while we have a complete DIF chunks left */
  while (size >= 80) {
    /* Allocate a new buffer, set the timestamp */
    if (outbuf == NULL) {
      outbuf = gst_rtp_buffer_new_allocate (max_payload_size, 0, 0);
      GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
      dest = gst_rtp_buffer_get_payload (outbuf);
      filled = 0;
    }

    /* inspect the DIF chunk, if we don't need to include it, skip to the next one. */
    if (include_dif (rtpdvpay, data)) {
      /* copy data in packet */
      memcpy (dest, data, 80);

      dest += 80;
      filled += 80;
    }

    /* go to next dif chunk */
    size -= 80;
    data += 80;

    /* push out the buffer if the next one would exceed the max packet size or
     * when we are pushing the last packet */
    if (filled + 80 > max_payload_size || size < 80) {
      if (size < 160) {
        guint hlen;

        /* set marker */
        gst_rtp_buffer_set_marker (outbuf, TRUE);

        /* shrink buffer to last packet */
        hlen = gst_rtp_buffer_get_header_len (outbuf);
        gst_rtp_buffer_set_packet_len (outbuf, hlen + filled);
      }
      /* Push out the created piece, and check for errors. */
      ret = gst_basertppayload_push (basepayload, outbuf);
      if (ret != GST_FLOW_OK)
        break;

      outbuf = NULL;
    }
  }
  gst_buffer_unref (buffer);

  return ret;
}
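The payload sizing above rounds the room left after the RTP header down to a whole number of 80-byte DIF blocks. With illustrative numbers (a 1400-byte MTU and the 12-byte header that gst_rtp_buffer_calc_header_len (0) yields; neither value is taken from the element's configuration):

guint mtu = 1400;
guint hdrlen = 12;                                      /* RTP header, no CSRCs */
guint max_payload_size = ((mtu - hdrlen) / 80) * 80;    /* = 1360, i.e. 17 DIF blocks */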
示例#29
0
static GstFlowReturn
gst_dvd_spu_subpic_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstDVDSpu *dvdspu = (GstDVDSpu *) parent;
  GstFlowReturn ret = GST_FLOW_OK;
  gsize size;

  g_return_val_if_fail (dvdspu != NULL, GST_FLOW_ERROR);

  GST_INFO_OBJECT (dvdspu, "Have subpicture buffer with timestamp %"
      GST_TIME_FORMAT " and size %" G_GSIZE_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), gst_buffer_get_size (buf));

  DVD_SPU_LOCK (dvdspu);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    dvdspu->subp_seg.position = GST_BUFFER_TIMESTAMP (buf);
  }

  if (GST_BUFFER_IS_DISCONT (buf) && dvdspu->partial_spu) {
    gst_buffer_unref (dvdspu->partial_spu);
    dvdspu->partial_spu = NULL;
  }

  if (dvdspu->partial_spu != NULL) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf))
      GST_WARNING_OBJECT (dvdspu,
          "Joining subpicture buffer with timestamp to previous");
    dvdspu->partial_spu = gst_buffer_append (dvdspu->partial_spu, buf);
  } else {
    /* If we don't yet have a buffer, wait for one with a timestamp,
     * since that will avoid collecting the 2nd half of a partial buf */
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf))
      dvdspu->partial_spu = buf;
    else
      gst_buffer_unref (buf);
  }

  if (dvdspu->partial_spu == NULL)
    goto done;

  size = gst_buffer_get_size (dvdspu->partial_spu);

  switch (dvdspu->spu_input_type) {
    case SPU_INPUT_TYPE_VOBSUB:
      if (size > 4) {
        guint8 header[2];
        guint16 packet_size;

        gst_buffer_extract (dvdspu->partial_spu, 0, header, 2);
        packet_size = GST_READ_UINT16_BE (header);
        if (packet_size == size) {
          submit_new_spu_packet (dvdspu, dvdspu->partial_spu);
          dvdspu->partial_spu = NULL;
        } else if (packet_size < size) {
          /* Somehow we collected too much - something is wrong. Drop the
           * packet entirely and wait for a new one */
          GST_DEBUG_OBJECT (dvdspu,
              "Discarding invalid SPU buffer of size %" G_GSIZE_FORMAT, size);

          gst_buffer_unref (dvdspu->partial_spu);
          dvdspu->partial_spu = NULL;
        } else {
          GST_LOG_OBJECT (dvdspu,
              "SPU buffer claims to be of size %u. Collected %" G_GSIZE_FORMAT
              " so far.", packet_size, size);
        }
      }
      break;
    case SPU_INPUT_TYPE_PGS:{
      /* Collect until we have a command buffer that ends exactly at the size
       * we've collected */
      guint8 packet_type;
      guint16 packet_size;
      GstMapInfo map;
      guint8 *ptr, *end;
      gboolean invalid = FALSE;

      gst_buffer_map (dvdspu->partial_spu, &map, GST_MAP_READ);

      ptr = map.data;
      end = ptr + map.size;

      /* FIXME: There's no need to walk the command set each time. We can set a
       * marker and resume where we left off next time */
      /* FIXME: Move the packet parsing and sanity checking into the format-specific modules */
      while (ptr != end) {
        if (ptr + 3 > end)
          break;
        packet_type = *ptr++;
        packet_size = GST_READ_UINT16_BE (ptr);
        ptr += 2;
        if (ptr + packet_size > end)
          break;
        ptr += packet_size;
        /* 0x80 is the END command for PGS packets */
        if (packet_type == 0x80 && ptr != end) {
          /* Extra cruft on the end of the packet -> assume invalid */
          invalid = TRUE;
          break;
        }
      }
      gst_buffer_unmap (dvdspu->partial_spu, &map);

      if (invalid) {
        gst_buffer_unref (dvdspu->partial_spu);
        dvdspu->partial_spu = NULL;
      } else if (ptr == end) {
        GST_DEBUG_OBJECT (dvdspu,
            "Have complete PGS packet of size %" G_GSIZE_FORMAT ". Enqueueing.",
            map.size);
        submit_new_spu_packet (dvdspu, dvdspu->partial_spu);
        dvdspu->partial_spu = NULL;
      }
      break;
    }
    default:
      GST_ERROR_OBJECT (dvdspu, "Input type not configured before SPU passing");
      goto caps_not_set;
  }

done:
  DVD_SPU_UNLOCK (dvdspu);

  return ret;

  /* ERRORS */
caps_not_set:
  {
    GST_ELEMENT_ERROR (dvdspu, RESOURCE, NO_SPACE_LEFT,
        (_("Subpicture format was not configured before data flow")), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
示例#30
0
HRESULT VideoFakeSink::DoRenderSample(IMediaSample *pMediaSample)
{
  gboolean in_seg = FALSE;
  gint64 clip_start = 0, clip_stop = 0;
  GstDshowVideoDecClass *klass =
      (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (mDec);
  GstBuffer *buf = NULL;
  GstClockTime start, stop;

  if(pMediaSample)
  {
    BYTE *pBuffer = NULL;
    LONGLONG lStart = 0, lStop = 0;
    long size = pMediaSample->GetActualDataLength();

    pMediaSample->GetPointer(&pBuffer);
    pMediaSample->GetTime(&lStart, &lStop);

    start = lStart * 100;
    stop = lStop * 100;
    /* check if this buffer is in our current segment */
    in_seg = gst_segment_clip (mDec->segment, GST_FORMAT_TIME,
        start, stop, &clip_start, &clip_stop);

    /* if the buffer is out of segment do not push it downstream */
    if (!in_seg) {
      GST_DEBUG_OBJECT (mDec,
        "buffer is out of segment, start %" GST_TIME_FORMAT " stop %"
        GST_TIME_FORMAT, GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
      goto done;
    }

    /* buffer is in our segment, allocate a new out buffer and clip its
     * timestamps */
    mDec->last_ret = gst_pad_alloc_buffer (mDec->srcpad, 
        GST_BUFFER_OFFSET_NONE,
        size, 
        GST_PAD_CAPS (mDec->srcpad), &buf);
    if (!buf) {
      GST_WARNING_OBJECT (mDec,
          "cannot allocate a new GstBuffer");
      goto done;
    }

    /* set buffer properties */
    GST_BUFFER_TIMESTAMP (buf) = clip_start;
    GST_BUFFER_DURATION (buf) = clip_stop - clip_start;

    if (strstr (klass->entry->srccaps, "rgb")) {
      /* For RGB the DirectShow decoder returns a bottom-up BITMAP,
       * so copy it line by line in reverse. There is probably a way
       * to get top-down video frames from the decoder...
       */
      gint line = 0;
      guint stride = mDec->width * 4;

      for (; line < mDec->height; line++) {
        memcpy (GST_BUFFER_DATA (buf) + (line * stride),
            pBuffer + (size - ((line + 1) * (stride))), stride);
      }
    } else {
      memcpy (GST_BUFFER_DATA (buf), pBuffer, MIN ((unsigned int)size, GST_BUFFER_SIZE (buf)));
    }

    GST_LOG_OBJECT (mDec,
        "push_buffer (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
        " duration %" GST_TIME_FORMAT, size,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

    /* push the buffer downstream */
    mDec->last_ret = gst_pad_push (mDec->srcpad, buf);
  }
done:

  return S_OK;
}