bool GstEnginePipeline::HandoffCallback(GstPad*, GstBuffer* buf,
                                        gpointer self) {
  GstEnginePipeline* instance = reinterpret_cast<GstEnginePipeline*>(self);

  QList<BufferConsumer*> consumers;
  {
    QMutexLocker l(&instance->buffer_consumers_mutex_);
    consumers = instance->buffer_consumers_;
  }

  for (BufferConsumer* consumer : consumers) {
    gst_buffer_ref(buf);
    consumer->ConsumeBuffer(buf, instance->id());
  }

  // Calculate the end time of this buffer so we can stop playback if it's
  // after the end time of this song.
  if (instance->end_offset_nanosec_ > 0) {
    quint64 start_time = GST_BUFFER_TIMESTAMP(buf) - instance->segment_start_;
    quint64 duration = GST_BUFFER_DURATION(buf);
    quint64 end_time = start_time + duration;

    if (end_time > instance->end_offset_nanosec_) {
      if (instance->has_next_valid_url()) {
        if (instance->next_url_ == instance->url_ &&
            instance->next_beginning_offset_nanosec_ ==
                instance->end_offset_nanosec_) {
          // The "next" song is actually the next segment of this file - so
          // cheat and keep on playing, but just tell the Engine we've moved on.
          instance->end_offset_nanosec_ = instance->next_end_offset_nanosec_;
          instance->next_url_ = QUrl();
          instance->next_beginning_offset_nanosec_ = 0;
          instance->next_end_offset_nanosec_ = 0;

          // GstEngine will try to seek to the start of the new section, but
          // we're already there so ignore it.
          instance->ignore_next_seek_ = true;
          emit instance->EndOfStreamReached(instance->id(), true);
        } else {
          // We have a next song but we can't cheat, so move to it normally.
          instance->TransitionToNext();
        }
      } else {
        // There's no next song
        emit instance->EndOfStreamReached(instance->id(), false);
      }
    }
  }

  if (instance->emit_track_ended_on_time_discontinuity_) {
    if (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT) ||
        GST_BUFFER_OFFSET(buf) < instance->last_buffer_offset_) {
      qLog(Debug) << "Buffer discontinuity - emitting EOS";
      instance->emit_track_ended_on_time_discontinuity_ = false;
      emit instance->EndOfStreamReached(instance->id(), true);
    }
  }

  instance->last_buffer_offset_ = GST_BUFFER_OFFSET(buf);

  return true;
}
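The signature above (pad, buffer, user data, boolean result) matches a GStreamer 0.10 buffer probe, so the callback is presumably installed during pipeline setup. A minimal sketch, where probe_pad_ is a hypothetical pad kept by the pipeline (e.g. the src pad of an element placed just before the audio sink):

// Sketch only: attaching HandoffCallback as a 0.10 buffer probe.
// "probe_pad_" is an assumption, not part of the code above.
gst_pad_add_buffer_probe(probe_pad_,
                         G_CALLBACK(&GstEnginePipeline::HandoffCallback),
                         this);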
Example #2
static GstFlowReturn
gst_ffmpegauddec_handle_frame (GstAudioDecoder * decoder, GstBuffer * inbuf)
{
  GstFFMpegAudDec *ffmpegdec;
  GstFFMpegAudDecClass *oclass;
  guint8 *data, *bdata;
  GstMapInfo map;
  gint size, bsize, len, have_data;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean do_padding, is_header;

  ffmpegdec = (GstFFMpegAudDec *) decoder;

  if (G_UNLIKELY (!ffmpegdec->opened))
    goto not_negotiated;

  if (inbuf == NULL) {
    gst_ffmpegauddec_drain (ffmpegdec);
    return GST_FLOW_OK;
  }

  inbuf = gst_buffer_ref (inbuf);
  is_header = GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_HEADER);

  oclass = (GstFFMpegAudDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));

  GST_LOG_OBJECT (ffmpegdec,
      "Received new data of size %" G_GSIZE_FORMAT ", offset:%" G_GUINT64_FORMAT
      ", ts:%" GST_TIME_FORMAT ", dur:%" GST_TIME_FORMAT,
      gst_buffer_get_size (inbuf), GST_BUFFER_OFFSET (inbuf),
      GST_TIME_ARGS (GST_BUFFER_PTS (inbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (inbuf)));

  /* workarounds, functions write to buffers:
   *  libavcodec/svq1.c:svq1_decode_frame writes to the given buffer.
   *  libavcodec/svq3.c:svq3_decode_slice_header too.
   * ffmpeg devs know about it and will fix it (they said). */
  if (oclass->in_plugin->id == AV_CODEC_ID_SVQ1 ||
      oclass->in_plugin->id == AV_CODEC_ID_SVQ3) {
    inbuf = gst_buffer_make_writable (inbuf);
  }

  gst_buffer_map (inbuf, &map, GST_MAP_READ);

  bdata = map.data;
  bsize = map.size;

  if (bsize > 0 && (!GST_MEMORY_IS_ZERO_PADDED (map.memory)
          || (map.maxsize - map.size) < FF_INPUT_BUFFER_PADDING_SIZE)) {
    /* add padding */
    if (ffmpegdec->padded_size < bsize + FF_INPUT_BUFFER_PADDING_SIZE) {
      ffmpegdec->padded_size = bsize + FF_INPUT_BUFFER_PADDING_SIZE;
      ffmpegdec->padded = g_realloc (ffmpegdec->padded, ffmpegdec->padded_size);
      GST_LOG_OBJECT (ffmpegdec, "resized padding buffer to %d",
          ffmpegdec->padded_size);
    }
    GST_CAT_TRACE_OBJECT (CAT_PERFORMANCE, ffmpegdec,
        "Copy input to add padding");
    memcpy (ffmpegdec->padded, bdata, bsize);
    memset (ffmpegdec->padded + bsize, 0, FF_INPUT_BUFFER_PADDING_SIZE);

    bdata = ffmpegdec->padded;
    do_padding = TRUE;
  } else {
    do_padding = FALSE;
  }

  do {
    guint8 tmp_padding[FF_INPUT_BUFFER_PADDING_SIZE];

    data = bdata;
    size = bsize;

    if (do_padding) {
      /* add temporary padding */
      GST_CAT_TRACE_OBJECT (CAT_PERFORMANCE, ffmpegdec,
          "Add temporary input padding");
      memcpy (tmp_padding, data + size, FF_INPUT_BUFFER_PADDING_SIZE);
      memset (data + size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
    }

    /* decode a frame of audio now */
    len = gst_ffmpegauddec_frame (ffmpegdec, data, size, &have_data, &ret);

    if (do_padding) {
      memcpy (data + size, tmp_padding, FF_INPUT_BUFFER_PADDING_SIZE);
    }

    if (ret != GST_FLOW_OK) {
      GST_LOG_OBJECT (ffmpegdec, "breaking because of flow ret %s",
          gst_flow_get_name (ret));
      /* bad flow return, make sure we discard all data and exit */
      bsize = 0;
      break;
    }

    if (len == 0 && have_data == 0) {
      /* nothing was decoded, this could be because no data was available or
       * because we were skipping frames.
       * If we have no context we must exit and wait for more data; we keep
       * the data we tried. */
      GST_LOG_OBJECT (ffmpegdec, "Decoding didn't return any data, breaking");
      break;
    } else if (len < 0) {
      /* a decoding error happened, we must break and try again with next data. */
      GST_LOG_OBJECT (ffmpegdec, "Decoding error, breaking");
      bsize = 0;
      break;
    }
    /* prepare for the next round, for codecs with a context we did this
     * already when using the parser. */
    bsize -= len;
    bdata += len;

    do_padding = TRUE;

    GST_LOG_OBJECT (ffmpegdec, "Before (while bsize>0).  bsize:%d , bdata:%p",
        bsize, bdata);
  } while (bsize > 0);

  gst_buffer_unmap (inbuf, &map);
  gst_buffer_unref (inbuf);

  if (ffmpegdec->outbuf)
    ret =
        gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (ffmpegdec),
        ffmpegdec->outbuf, 1);
  else if (len < 0 || is_header)
    ret =
        gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (ffmpegdec), NULL, 1);
  ffmpegdec->outbuf = NULL;

  if (bsize > 0) {
    GST_DEBUG_OBJECT (ffmpegdec, "Dropping %d bytes of data", bsize);
  }

  return ret;

  /* ERRORS */
not_negotiated:
  {
    oclass = (GstFFMpegAudDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
    GST_ELEMENT_ERROR (ffmpegdec, CORE, NEGOTIATION, (NULL),
        ("avdec_%s: input format was not set before data start",
            oclass->in_plugin->name));
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
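The copy into ffmpegdec->padded above exists because FFmpeg decoders of this era may read a few bytes past the end of their input, so callers must guarantee FF_INPUT_BUFFER_PADDING_SIZE zeroed trailing bytes. A standalone sketch of that contract (the helper name is ours, not the element's):

/* Sketch: return a copy of "data" that satisfies avcodec's padding
 * requirement; free the result with g_free (). */
static guint8 *
copy_with_avcodec_padding (const guint8 * data, gsize size)
{
  guint8 *padded = g_malloc (size + FF_INPUT_BUFFER_PADDING_SIZE);

  memcpy (padded, data, size);
  memset (padded + size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
  return padded;
}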
Example #3
static GstFlowReturn
gst_ffmpegmux_collected (GstCollectPads * pads, gpointer user_data)
{
  GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) user_data;
  GSList *collected;
  GstFFMpegMuxPad *best_pad;
  GstClockTime best_time;
#if 0
  /* Re-enable once converted to new AVMetaData API
   * See #566605
   */
  const GstTagList *tags;
#endif

  /* open "file" (gstreamer protocol to next element) */
  if (!ffmpegmux->opened) {
    int open_flags = AVIO_FLAG_WRITE;

    /* we do need all streams to have started capsnego,
     * or things will go horribly wrong */
    for (collected = ffmpegmux->collect->data; collected;
        collected = g_slist_next (collected)) {
      GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data;
      AVStream *st = ffmpegmux->context->streams[collect_pad->padnum];

      /* check whether the pad has successfully completed capsnego */
      if (st->codec->codec_id == AV_CODEC_ID_NONE) {
        GST_ELEMENT_ERROR (ffmpegmux, CORE, NEGOTIATION, (NULL),
            ("no caps set on stream %d (%s)", collect_pad->padnum,
                (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) ?
                "video" : "audio"));
        return GST_FLOW_ERROR;
      }
      /* set framerate for audio */
      if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
        switch (st->codec->codec_id) {
          case AV_CODEC_ID_PCM_S16LE:
          case AV_CODEC_ID_PCM_S16BE:
          case AV_CODEC_ID_PCM_U16LE:
          case AV_CODEC_ID_PCM_U16BE:
          case AV_CODEC_ID_PCM_S8:
          case AV_CODEC_ID_PCM_U8:
            st->codec->frame_size = 1;
            break;
          default:
          {
            GstBuffer *buffer;

            /* FIXME : This doesn't work for RAW AUDIO...
             * in fact I'm wondering if it even works for any kind of audio... */
            buffer = gst_collect_pads_peek (ffmpegmux->collect,
                (GstCollectData *) collect_pad);
            if (buffer) {
              st->codec->frame_size =
                  st->codec->sample_rate *
                  GST_BUFFER_DURATION (buffer) / GST_SECOND;
              gst_buffer_unref (buffer);
            }
          }
        }
      }
    }

#if 0
    /* Re-enable once converted to new AVMetaData API
     * See #566605
     */

    /* tags */
    tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (ffmpegmux));
    if (tags) {
      gint i;
      gchar *s;

      /* get the interesting ones */
      if (gst_tag_list_get_string (tags, GST_TAG_TITLE, &s)) {
        strncpy (ffmpegmux->context->title, s,
            sizeof (ffmpegmux->context->title));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_ARTIST, &s)) {
        strncpy (ffmpegmux->context->author, s,
            sizeof (ffmpegmux->context->author));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_COPYRIGHT, &s)) {
        strncpy (ffmpegmux->context->copyright, s,
            sizeof (ffmpegmux->context->copyright));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_COMMENT, &s)) {
        strncpy (ffmpegmux->context->comment, s,
            sizeof (ffmpegmux->context->comment));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_ALBUM, &s)) {
        strncpy (ffmpegmux->context->album, s,
            sizeof (ffmpegmux->context->album));
      }
      if (gst_tag_list_get_string (tags, GST_TAG_GENRE, &s)) {
        strncpy (ffmpegmux->context->genre, s,
            sizeof (ffmpegmux->context->genre));
      }
      if (gst_tag_list_get_int (tags, GST_TAG_TRACK_NUMBER, &i)) {
        ffmpegmux->context->track = i;
      }
    }
#endif

    /* set the streamheader flag for gstffmpegprotocol if codec supports it */
    if (!strcmp (ffmpegmux->context->oformat->name, "flv")) {
      open_flags |= GST_FFMPEG_URL_STREAMHEADER;
    }

    /* some house-keeping for downstream before starting data flow */
    /* stream-start (FIXME: create id based on input ids) */
    {
      gchar s_id[32];

      g_snprintf (s_id, sizeof (s_id), "avmux-%08x", g_random_int ());
      gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_stream_start (s_id));
    }
    /* segment */
    {
      GstSegment segment;

      /* let downstream know we think in BYTES and expect to do seeking later on */
      gst_segment_init (&segment, GST_FORMAT_BYTES);
      gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_segment (&segment));
    }

    if (gst_ffmpegdata_open (ffmpegmux->srcpad, open_flags,
            &ffmpegmux->context->pb) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, TOO_LAZY, (NULL),
          ("Failed to open stream context in avmux"));
      return GST_FLOW_ERROR;
    }

    /* now open the mux format */
    if (avformat_write_header (ffmpegmux->context, NULL) < 0) {
      GST_ELEMENT_ERROR (ffmpegmux, LIBRARY, SETTINGS, (NULL),
          ("Failed to write file header - check codec settings"));
      return GST_FLOW_ERROR;
    }

    /* we're now opened */
    ffmpegmux->opened = TRUE;

    /* flush the header so it will be used as streamheader */
    avio_flush (ffmpegmux->context->pb);
  }

  /* take the one with earliest timestamp,
   * and push it forward */
  best_pad = NULL;
  best_time = GST_CLOCK_TIME_NONE;
  for (collected = ffmpegmux->collect->data; collected;
      collected = g_slist_next (collected)) {
    GstFFMpegMuxPad *collect_pad = (GstFFMpegMuxPad *) collected->data;
    GstBuffer *buffer = gst_collect_pads_peek (ffmpegmux->collect,
        (GstCollectData *) collect_pad);

    /* if there's no buffer, just continue */
    if (buffer == NULL) {
      continue;
    }

    /* if we have no buffer yet, just use the first one */
    if (best_pad == NULL) {
      best_pad = collect_pad;
      best_time = GST_BUFFER_TIMESTAMP (buffer);
      goto next_pad;
    }

    /* if we do have one, only use this one if it's older */
    if (GST_BUFFER_TIMESTAMP (buffer) < best_time) {
      best_time = GST_BUFFER_TIMESTAMP (buffer);
      best_pad = collect_pad;
    }

  next_pad:
    gst_buffer_unref (buffer);

    /* Mux buffers with invalid timestamp first */
    if (!GST_CLOCK_TIME_IS_VALID (best_time))
      break;
  }

  /* now handle the buffer, or signal EOS if we have
   * no buffers left */
  if (best_pad != NULL) {
    GstBuffer *buf;
    AVPacket pkt;
    gboolean need_free = FALSE;
    GstMapInfo map;

    /* push out current buffer */
    buf =
        gst_collect_pads_pop (ffmpegmux->collect, (GstCollectData *) best_pad);

    ffmpegmux->context->streams[best_pad->padnum]->codec->frame_number++;

    /* set time */
    pkt.pts = gst_ffmpeg_time_gst_to_ff (GST_BUFFER_TIMESTAMP (buf),
        ffmpegmux->context->streams[best_pad->padnum]->time_base);
    pkt.dts = pkt.pts;

    if (strcmp (ffmpegmux->context->oformat->name, "gif") == 0) {
      AVStream *st = ffmpegmux->context->streams[best_pad->padnum];
      AVPicture src, dst;

      need_free = TRUE;
      pkt.size = st->codec->width * st->codec->height * 3;
      pkt.data = g_malloc (pkt.size);

      dst.data[0] = pkt.data;
      dst.data[1] = NULL;
      dst.data[2] = NULL;
      dst.linesize[0] = st->codec->width * 3;

      gst_buffer_map (buf, &map, GST_MAP_READ);
      gst_ffmpeg_avpicture_fill (&src, map.data,
          AV_PIX_FMT_RGB24, st->codec->width, st->codec->height);

      av_picture_copy (&dst, &src, AV_PIX_FMT_RGB24,
          st->codec->width, st->codec->height);
      gst_buffer_unmap (buf, &map);
    } else {
      gst_buffer_map (buf, &map, GST_MAP_READ);
      pkt.data = map.data;
      pkt.size = map.size;
    }

    pkt.stream_index = best_pad->padnum;
    pkt.flags = 0;

    if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT))
      pkt.flags |= AV_PKT_FLAG_KEY;

    if (GST_BUFFER_DURATION_IS_VALID (buf))
      pkt.duration =
          gst_ffmpeg_time_gst_to_ff (GST_BUFFER_DURATION (buf),
          ffmpegmux->context->streams[best_pad->padnum]->time_base);
    else
      pkt.duration = 0;
    av_write_frame (ffmpegmux->context, &pkt);
    if (need_free) {
      g_free (pkt.data);
    } else {
      gst_buffer_unmap (buf, &map);
    }
    gst_buffer_unref (buf);
  } else {
    /* close down */
    av_write_trailer (ffmpegmux->context);
    ffmpegmux->opened = FALSE;
    avio_flush (ffmpegmux->context->pb);
    gst_ffmpegdata_close (ffmpegmux->context->pb);
    gst_pad_push_event (ffmpegmux->srcpad, gst_event_new_eos ());
    return GST_FLOW_EOS;
  }

  return GST_FLOW_OK;
}
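gst_ffmpegmux_collected has the GstCollectPadsFunction shape and receives the muxer as user data, so it is presumably registered on the element's GstCollectPads instance at init time, along the lines of:

/* Sketch: registering the collected callback, typically in the
 * element's instance-init function. */
ffmpegmux->collect = gst_collect_pads_new ();
gst_collect_pads_set_function (ffmpegmux->collect,
    (GstCollectPadsFunction) gst_ffmpegmux_collected, ffmpegmux);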
Example #4
static GstFlowReturn
gst_vdp_vpp_drain (GstVdpVideoPostProcess * vpp)
{
  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  GstFlowReturn ret = GST_FLOW_OK;      /* in case no picture is pending */

  while (gst_vdp_vpp_get_next_picture (vpp,
          &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GError *err;
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, }, dest_r = { 0, };
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    err = NULL;
    ret =
        gst_vdp_output_src_pad_alloc_buffer ((GstVdpOutputSrcPad *) vpp->srcpad,
        &outbuf, &err);
    if (ret != GST_FLOW_OK)
      goto output_pad_error;

    src_r.w = vpp->width;
    src_r.h = vpp->height;
    if (vpp->got_par) {
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, vpp->par_n, vpp->par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status =
        device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL,
        current_pic.structure, video_surfaces_past_count, video_surfaces_past,
        current_pic.buf->surface, video_surfaces_future_count,
        video_surfaces_future, NULL, outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK)
      goto render_error;

    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    err = NULL;
    ret =
        gst_vdp_output_src_pad_push ((GstVdpOutputSrcPad *) vpp->srcpad,
        outbuf, &err);
    if (ret != GST_FLOW_OK)
      goto output_pad_error;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    GST_ELEMENT_ERROR (vpp, STREAM, FAILED, ("Invalid output caps"), (NULL));
    ret = GST_FLOW_ERROR;
    break;

  render_error:
    gst_buffer_unref (GST_BUFFER (outbuf));
    GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
        ("Could not postprocess frame"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
    ret = GST_FLOW_ERROR;
    break;

  output_pad_error:
    if (ret == GST_FLOW_ERROR && err != NULL)
      gst_vdp_vpp_post_error (vpp, err);
    break;
  }

  return ret;
}
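The force_aspect_ratio branch relies on gst_video_sink_center_rect () to letterbox the source rectangle into the destination. A small worked example of what it computes (values approximate, integer math):

/* Sketch: scale-and-center with scaling = TRUE, for a 1920x800
 * source shown in a 1280x720 window. */
GstVideoRectangle src = { 0, 0, 1920, 800 };
GstVideoRectangle dst = { 0, 0, 1280, 720 };
GstVideoRectangle res;

gst_video_sink_center_rect (src, dst, &res, TRUE);
/* res is now roughly { x = 0, y = 93, w = 1280, h = 533 }:
 * scaled to fit the width, centered vertically. */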
Example #5
static GstFlowReturn
gst_level_transform_ip (GstBaseTransform * trans, GstBuffer * in)
{
  GstLevel *filter;
  GstMapInfo map;
  guint8 *in_data;
  gsize in_size;
  gdouble CS;
  guint i;
  guint num_frames;
  guint num_int_samples = 0;    /* number of interleaved samples
                                 * ie. total count for all channels combined */
  guint block_size, block_int_size;     /* we subdivide buffers to not skip message
                                         * intervals */
  GstClockTimeDiff falloff_time;
  gint channels, rate, bps;

  filter = GST_LEVEL (trans);

  channels = GST_AUDIO_INFO_CHANNELS (&filter->info);
  bps = GST_AUDIO_INFO_BPS (&filter->info);
  rate = GST_AUDIO_INFO_RATE (&filter->info);

  gst_buffer_map (in, &map, GST_MAP_READ);
  in_data = map.data;
  in_size = map.size;

  num_int_samples = in_size / bps;

  GST_LOG_OBJECT (filter, "analyzing %u sample frames at ts %" GST_TIME_FORMAT,
      num_int_samples, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (in)));

  g_return_val_if_fail (num_int_samples % channels == 0, GST_FLOW_ERROR);

  if (GST_BUFFER_FLAG_IS_SET (in, GST_BUFFER_FLAG_DISCONT)) {
    filter->message_ts = GST_BUFFER_TIMESTAMP (in);
    filter->num_frames = 0;
  }
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (filter->message_ts))) {
    filter->message_ts = GST_BUFFER_TIMESTAMP (in);
  }

  num_frames = num_int_samples / channels;
  while (num_frames > 0) {
    block_size = filter->interval_frames - filter->num_frames;
    block_size = MIN (block_size, num_frames);
    block_int_size = block_size * channels;

    for (i = 0; i < channels; ++i) {
      if (!GST_BUFFER_FLAG_IS_SET (in, GST_BUFFER_FLAG_GAP)) {
        filter->process (in_data, block_int_size, channels, &CS,
            &filter->peak[i]);
        GST_LOG_OBJECT (filter,
            "[%d]: cumulative squares %lf, over %d samples/%d channels",
            i, CS, block_int_size, channels);
        filter->CS[i] += CS;
      } else {
        filter->peak[i] = 0.0;
      }
      in_data += bps;

      filter->decay_peak_age[i] += GST_FRAMES_TO_CLOCK_TIME (num_frames, rate);
      GST_LOG_OBJECT (filter,
          "[%d]: peak %f, last peak %f, decay peak %f, age %" GST_TIME_FORMAT,
          i, filter->peak[i], filter->last_peak[i], filter->decay_peak[i],
          GST_TIME_ARGS (filter->decay_peak_age[i]));

      /* update running peak */
      if (filter->peak[i] > filter->last_peak[i])
        filter->last_peak[i] = filter->peak[i];

      /* make decay peak fall off if too old */
      falloff_time =
          GST_CLOCK_DIFF (gst_gdouble_to_guint64 (filter->decay_peak_ttl),
          filter->decay_peak_age[i]);
      if (falloff_time > 0) {
        gdouble falloff_dB;
        gdouble falloff;
        gdouble length;         /* length of falloff time in seconds */

        length = (gdouble) falloff_time / (gdouble) GST_SECOND;
        falloff_dB = filter->decay_peak_falloff * length;
        falloff = pow (10, falloff_dB / -20.0);

        GST_LOG_OBJECT (filter,
            "falloff: current %f, base %f, interval %" GST_TIME_FORMAT
            ", dB falloff %f, factor %e",
            filter->decay_peak[i], filter->decay_peak_base[i],
            GST_TIME_ARGS (falloff_time), falloff_dB, falloff);
        filter->decay_peak[i] = filter->decay_peak_base[i] * falloff;
        GST_LOG_OBJECT (filter,
            "peak is %" GST_TIME_FORMAT " old, decayed with factor %e to %f",
            GST_TIME_ARGS (filter->decay_peak_age[i]), falloff,
            filter->decay_peak[i]);
      } else {
        GST_LOG_OBJECT (filter, "peak not old enough, not decaying");
      }

      /* if the peak of this run is higher, the decay peak gets reset */
      if (filter->peak[i] >= filter->decay_peak[i]) {
        GST_LOG_OBJECT (filter, "new peak, %f", filter->peak[i]);
        filter->decay_peak[i] = filter->peak[i];
        filter->decay_peak_base[i] = filter->peak[i];
        filter->decay_peak_age[i] = G_GINT64_CONSTANT (0);
      }
    }
    in_data += block_size * bps;

    filter->num_frames += block_size;
    num_frames -= block_size;

    /* do we need to message ? */
    if (filter->num_frames >= filter->interval_frames) {
      gst_level_post_message (filter);
    }
  }

  gst_buffer_unmap (in, &map);

  return GST_FLOW_OK;
}
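interval_frames, which drives the buffer subdivision above, is presumably derived from the element's nanosecond interval property via the standard audio conversion macro; a sketch:

#include <gst/audio/audio.h>

/* Sketch: convert a GstClockTime message interval into the frame
 * count used as filter->interval_frames above. */
static guint
interval_to_frames (GstClockTime interval, gint rate)
{
  return (guint) GST_CLOCK_TIME_TO_FRAMES (interval, rate);
}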
Example #6
static GstFlowReturn
gst_visual_gl_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGLBuffer *outbuf = NULL;
  GstVisualGL *visual = GST_VISUAL_GL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
  }

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    guint64 dist, timestamp;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least VISUAL_SAMPLES samples */
    if (avail < VISUAL_SAMPLES * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    /* get timestamp of the current adapter byte */
    timestamp = gst_adapter_prev_timestamp (visual->adapter, &dist);
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      /* convert bytes to time */
      dist /= visual->bps;
      timestamp += gst_util_uint64_scale_int (dist, GST_SECOND, visual->rate);
    }

    if (timestamp != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment, GST_FORMAT_TIME,
          timestamp);
      qostime += visual->duration;

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }

    /* render libvisual plugin to our target */
    gst_gl_display_use_fbo_v2 (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        visual->midtexture, (GLCB_V2) render_frame, (gpointer *) visual);

    /* gst video is top-down whereas opengl plan is bottom up */
    gst_gl_display_use_fbo (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        outbuf->texture, (GLCB) bottom_up_to_top_down,
        visual->width, visual->height, visual->midtexture,
        0, visual->width, 0, visual->height, GST_GL_DISPLAY_PROJECTION_ORTHO2D,
        (gpointer *) visual);

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    ret = gst_pad_push (visual->srcpad, GST_BUFFER (outbuf));
    outbuf = NULL;

  skip:
    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:

  if (outbuf != NULL)
    gst_gl_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
Example #7
static GstFlowReturn new_sample_callback (GstAppSink * sink, gpointer user_data)
{
    GstBuffer *buffer;
    GstSample *sample;
    Encoder *encoder = (Encoder *)user_data;

    *(encoder->output->heartbeat) = gst_clock_get_time (encoder->system_clock);
    sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));
    buffer = gst_sample_get_buffer (sample);
    if (sem_wait (encoder->output->semaphore) == -1) {
        GST_ERROR ("new_sample_callback sem_wait failure: %s", g_strerror (errno));
        gst_sample_unref (sample);
        return GST_FLOW_OK;
    }

    (*(encoder->output->total_count)) += gst_buffer_get_size (buffer);

    /* update head_addr, free enough memory for current buffer. */
    while (cache_free (encoder) <= gst_buffer_get_size (buffer) + 12) { /* timestamp + gop size = 12 */
        move_head (encoder);
    }

    if (encoder->has_tssegment && encoder->has_m3u8_output) { 
        if ((encoder->duration_accumulation >= encoder->segment_duration) ||
                ((encoder->segment_duration - encoder->duration_accumulation) < 500000000)) {
            encoder->last_segment_duration = encoder->duration_accumulation;
            encoder->last_running_time = GST_BUFFER_PTS (buffer);
            encoder->duration_accumulation = 0;

        }
        encoder->duration_accumulation += GST_BUFFER_DURATION (buffer);
    }

    /*
     * Random access point found if:
     * 1. there is a video encoder and an IDR frame was found;
     * 2. the encoder is audio-only and current pts >= last_running_time;
     * 3. tssegment is in use, which emits every buffer at a random access point.
     */
    if ((encoder->has_video && !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) ||
            (encoder->has_audio_only && (GST_BUFFER_PTS (buffer) >= encoder->last_running_time)) ||
            (encoder->has_tssegment && (GST_BUFFER_PTS (buffer) >= encoder->last_running_time))) {
        if (encoder->has_m3u8_output == FALSE) {
            /* no m3u8 output */
            move_last_rap (encoder, buffer);

        } else if (GST_BUFFER_PTS (buffer) >= encoder->last_running_time) {
            move_last_rap (encoder, buffer);
            send_msg (encoder);

        } else if (encoder->is_first_key) {
            /* move_last_rap if its first key even if has m3u8 output */
            move_last_rap (encoder, buffer);
            send_msg (encoder);
            encoder->is_first_key = FALSE;
        }
    }

    /* udpstreaming? */
    if (encoder->udpstreaming) {
        udp_streaming (encoder, buffer);
    }

    /*
     * copy buffer to cache.
     * update tail_addr
     */
    copy_buffer (encoder, buffer);

    sem_post (encoder->output->semaphore);
    gst_sample_unref (sample);

    return GST_FLOW_OK;
}
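The callback matches the new_sample member of GstAppSinkCallbacks, so it is presumably installed with the callback API (which avoids the overhead of the "new-sample" signal); a sketch, where appsink and encoder come from pipeline setup:

/* Sketch: installing new_sample_callback on the encoder's appsink. */
GstAppSinkCallbacks callbacks = { NULL, NULL, new_sample_callback };

gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &callbacks,
    encoder, NULL);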
Example #8
static GstFlowReturn
gst_visual_chain (GstPad * pad, GstBuffer * buffer)
{
  GstBuffer *outbuf = NULL;
  guint i;
  GstVisual *visual = GST_VISUAL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
    visual->next_ts = -1;
  }

  /* Match timestamps from the incoming audio */
  if (GST_BUFFER_TIMESTAMP (buffer) != GST_CLOCK_TIME_NONE)
    visual->next_ts = GST_BUFFER_TIMESTAMP (buffer);

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    const guint16 *data;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least 512 samples */
    if (avail < 512 * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    if (visual->next_ts != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment, GST_FORMAT_TIME,
          visual->next_ts);

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* Read 512 samples per channel */
    data =
        (const guint16 *) gst_adapter_peek (visual->adapter, 512 * visual->bps);

#if defined(VISUAL_API_VERSION) && VISUAL_API_VERSION >= 4000 && VISUAL_API_VERSION < 5000
    {
      VisBuffer *lbuf, *rbuf;
      guint16 ldata[512], rdata[512];
      VisAudioSampleRateType rate;

      lbuf = visual_buffer_new_with_buffer (ldata, sizeof (ldata), NULL);
      rbuf = visual_buffer_new_with_buffer (rdata, sizeof (rdata), NULL);

      if (visual->channels == 2) {
        for (i = 0; i < 512; i++) {
          ldata[i] = *data++;
          rdata[i] = *data++;
        }
      } else {
        for (i = 0; i < 512; i++) {
          ldata[i] = *data;
          rdata[i] = *data++;
        }
      }

      switch (visual->rate) {
        case 8000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_8000;
          break;
        case 11250:
          rate = VISUAL_AUDIO_SAMPLE_RATE_11250;
          break;
        case 22500:
          rate = VISUAL_AUDIO_SAMPLE_RATE_22500;
          break;
        case 32000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_32000;
          break;
        case 44100:
          rate = VISUAL_AUDIO_SAMPLE_RATE_44100;
          break;
        case 48000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_48000;
          break;
        case 96000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_96000;
          break;
        default:
          visual_object_unref (VISUAL_OBJECT (lbuf));
          visual_object_unref (VISUAL_OBJECT (rbuf));
          GST_ERROR_OBJECT (visual, "unsupported rate %d", visual->rate);
          ret = GST_FLOW_ERROR;
          goto beach;
          break;
      }

      visual_audio_samplepool_input_channel (visual->audio->samplepool,
          lbuf,
          rate, VISUAL_AUDIO_SAMPLE_FORMAT_S16, VISUAL_AUDIO_CHANNEL_LEFT);
      visual_audio_samplepool_input_channel (visual->audio->samplepool,
          rbuf,
          rate, VISUAL_AUDIO_SAMPLE_FORMAT_S16, VISUAL_AUDIO_CHANNEL_RIGHT);

      visual_object_unref (VISUAL_OBJECT (lbuf));
      visual_object_unref (VISUAL_OBJECT (rbuf));

    }
#else
    if (visual->channels == 2) {
      for (i = 0; i < 512; i++) {
        visual->audio->plugpcm[0][i] = *data++;
        visual->audio->plugpcm[1][i] = *data++;
      }
    } else {
      for (i = 0; i < 512; i++) {
        visual->audio->plugpcm[0][i] = *data;
        visual->audio->plugpcm[1][i] = *data++;
      }
    }
#endif

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }
    visual_video_set_buffer (visual->video, GST_BUFFER_DATA (outbuf));
    visual_audio_analyze (visual->audio);
    visual_actor_run (visual->actor, visual->audio);
    visual_video_set_buffer (visual->video, NULL);
    GST_DEBUG_OBJECT (visual, "rendered one frame");

    GST_BUFFER_TIMESTAMP (outbuf) = visual->next_ts;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    ret = gst_pad_push (visual->srcpad, outbuf);
    outbuf = NULL;

  skip:
    /* interpolate next timestamp */
    if (visual->next_ts != -1)
      visual->next_ts += visual->duration;

    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:

  if (outbuf != NULL)
    gst_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
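Both visualizer chains follow the same GstAdapter pattern: push every input buffer, then repeatedly consume fixed-size frames once enough bytes have accumulated. Reduced to its skeleton (0.10-era API, as in the examples; frame_bytes stands for spf * bps):

/* Sketch: the accumulate-then-consume adapter loop. */
gst_adapter_push (adapter, buffer);   /* adapter takes ownership */
while (gst_adapter_available (adapter) >= frame_bytes) {
  const guint8 *data = gst_adapter_peek (adapter, frame_bytes);

  /* ... render one frame from data ... */
  gst_adapter_flush (adapter, frame_bytes);   /* drop consumed bytes */
}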
Example #9
static GstFlowReturn
gst_goo_encarmaac_chain (GstPad* pad, GstBuffer* buffer)
{
	GstGooEncArmAac* self = GST_GOO_ENCARMAAC (gst_pad_get_parent (pad));
	guint omxbufsiz = GOO_PORT_GET_DEFINITION (self->inport)->nBufferSize;
	GstFlowReturn result;

	if (self->component->cur_state != OMX_StateExecuting)
	{
		goto not_negotiated;
	}

	if (goo_port_is_tunneled (self->inport))
	{
		GST_DEBUG_OBJECT (self, "DASF Source");
		OMX_BUFFERHEADERTYPE* omxbuf = NULL;
		omxbuf = goo_port_grab_buffer (self->outport);
		result = process_output_buffer (self, omxbuf);
		gst_buffer_unref (buffer); /* input buffer is unused in DASF mode */
		goto done; /* releases the parent reference taken above */
	}

	/* discontinuity clears adapter, FIXME, maybe we can set some
	 * encoder flag to mask the discont. */
	if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
	{
		gst_goo_adapter_clear (self->adapter);
		self->ts = 0;
	}

	/* take the latest timestamp; FIXME: the timestamp should be that of
	 * the first buffer in the adapter. */
	if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
	{
		self->ts = GST_BUFFER_TIMESTAMP (buffer);
	}

	result = GST_FLOW_OK;
	GST_DEBUG_OBJECT (self, "Pushing a GST buffer to adapter (%d)",
			  GST_BUFFER_SIZE (buffer));
	gst_goo_adapter_push (self->adapter, buffer);

	/* Collect samples until we have enough for an output frame */
	while (gst_goo_adapter_available (self->adapter) >= omxbufsiz)
	{
		GST_DEBUG_OBJECT (self, "Popping an OMX buffer");
		OMX_BUFFERHEADERTYPE* omxbuf;
		omxbuf = goo_port_grab_buffer (self->inport);
		gst_goo_adapter_peek (self->adapter, omxbufsiz, omxbuf);
		omxbuf->nFilledLen = omxbufsiz;
		gst_goo_adapter_flush (self->adapter, omxbufsiz);
		goo_component_release_buffer (self->component, omxbuf);
	}

done:
	GST_DEBUG_OBJECT (self, "");
	gst_object_unref (self);

	return result;

	/* ERRORS */
not_negotiated:
	{
		GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
				   ("failed to negotiate MPEG/AAC format with next element"));
		gst_buffer_unref (buffer);
		result = GST_FLOW_ERROR;
		goto done;
	}
}
Example #10
static GstFlowReturn
gst_goo_encjpeg_chain (GstPad* pad, GstBuffer* buffer)
{
	GST_LOG ("");

	GstGooEncJpeg* self = GST_GOO_ENCJPEG (gst_pad_get_parent (pad));
	GstGooEncJpegPrivate* priv = GST_GOO_ENCJPEG_GET_PRIVATE (self);
	GstFlowReturn ret = GST_FLOW_OK;
	GstGooAdapter* adapter = self->adapter;
	OMX_BUFFERHEADERTYPE* omx_buffer = NULL;

	GstClockTime timestamp, duration;
	guint64 offset, offsetend;
	GstBuffer* outbuf = NULL;

	if (goo_port_is_tunneled (self->inport))
	{
		GST_INFO ("Inport is tunneled");
		ret = GST_FLOW_OK;
		priv->incount++;
		goto process_output;
	}

	if (goo_port_is_eos (self->inport))
	{
		GST_INFO ("port is eos");
		ret = GST_FLOW_UNEXPECTED;
		goto fail;
	}

	if (self->component->cur_state != OMX_StateExecuting)
	{
		goto fail;
	}

	/* let's copy the timestamp meta data */
	timestamp = GST_BUFFER_TIMESTAMP (buffer);
	duration  = GST_BUFFER_DURATION (buffer);
	offset	  = GST_BUFFER_OFFSET (buffer);
	offsetend = GST_BUFFER_OFFSET_END (buffer);

	if (GST_IS_GOO_BUFFER (buffer) &&
	    goo_port_is_my_buffer (self->inport,
				   GST_GOO_BUFFER (buffer)->omx_buffer))
	{
		GST_INFO ("My own OMX buffer");
		priv->incount++;
		gst_buffer_unref (buffer); /* let's push the buffer to omx */
		ret = GST_FLOW_OK;
	}
	else if (GST_IS_GOO_BUFFER (buffer) &&
		 !goo_port_is_my_buffer (self->inport,
					 GST_GOO_BUFFER (buffer)->omx_buffer))
	{
		GST_INFO ("Other OMX buffer");

		if (GST_BUFFER_SIZE (buffer) != priv->omxbufsiz)
		{
			GST_ELEMENT_ERROR (self, STREAM, FORMAT,
					   ("Frame is incomplete (%u!=%u)",
					    GST_BUFFER_SIZE (buffer),
					    priv->omxbufsiz),
					   ("Frame is incomplete (%u!=%u)",
					    GST_BUFFER_SIZE (buffer),
					    priv->omxbufsiz));
			ret = GST_FLOW_ERROR;
			goto fail; /* don't copy from an incomplete frame */
		}

		omx_buffer = goo_port_grab_buffer (self->inport);
		memcpy (omx_buffer->pBuffer, GST_BUFFER_DATA (buffer),
			priv->omxbufsiz);
		omx_buffer->nFilledLen = priv->omxbufsiz;
		priv->incount++;
		goo_component_release_buffer (self->component, omx_buffer);
		gst_buffer_unref (buffer);
		ret = GST_FLOW_OK;
	}
	else
	{
		if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
		{
			gst_goo_adapter_clear (adapter);
		}

		GST_LOG ("size = %d bytes", GST_BUFFER_SIZE (buffer));
		gst_goo_adapter_push (adapter, buffer);

		guint tmp = priv->incount;
		while (gst_goo_adapter_available (adapter) >= priv->omxbufsiz &&
		       ret == GST_FLOW_OK)
		{
			GST_DEBUG ("Pushing data to OMX");
			OMX_BUFFERHEADERTYPE* omx_buffer;
			omx_buffer = goo_port_grab_buffer (self->inport);
			gst_goo_adapter_peek (adapter, priv->omxbufsiz,
					      omx_buffer);
			omx_buffer->nFilledLen = priv->omxbufsiz;
			gst_goo_adapter_flush (adapter, priv->omxbufsiz);
			priv->incount++;
			goo_component_release_buffer (self->component,
						      omx_buffer);
			ret = GST_FLOW_OK;
		}

		if (tmp == priv->incount)
		{
			goto done;
		}
	}

process_output:
	if (goo_port_is_tunneled (self->outport))
	{
		outbuf = gst_ghost_buffer_new ();
		GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_READONLY);
		GST_BUFFER_DATA (outbuf)       = NULL;
		GST_BUFFER_SIZE (outbuf)       = 0;
		GST_BUFFER_TIMESTAMP (outbuf)  = timestamp;
		GST_BUFFER_DURATION (outbuf)   = duration;
		GST_BUFFER_OFFSET (outbuf)     = offset;
		GST_BUFFER_OFFSET_END (outbuf) = offsetend;
		gst_buffer_set_caps (outbuf, GST_PAD_CAPS (self->srcpad));
		gst_pad_push (self->srcpad, outbuf);
		goto done;
	}

	GST_DEBUG ("Poping out buffer from OMX");
	omx_buffer = goo_port_grab_buffer (self->outport);

	if (omx_buffer->nFilledLen <= 0)
	{
		ret = GST_FLOW_ERROR;
		goto done;
	}

	if (gst_pad_alloc_buffer (self->srcpad, priv->outcount,
				  omx_buffer->nFilledLen,
				  GST_PAD_CAPS (self->srcpad),
				  &outbuf) == GST_FLOW_OK)
	{
		priv->outcount++;

		/* if the buffer is a goo buffer of the peer element */
		if (GST_IS_GOO_BUFFER (outbuf))
		{
			GST_INFO ("It is a OMX buffer!");
			memcpy (GST_GOO_BUFFER (outbuf)->omx_buffer->pBuffer,
				omx_buffer->pBuffer, omx_buffer->nFilledLen);
			GST_GOO_BUFFER (outbuf)->omx_buffer->nFilledLen =
				omx_buffer->nFilledLen;
			GST_GOO_BUFFER (outbuf)->omx_buffer->nFlags =
				omx_buffer->nFlags;
			GST_GOO_BUFFER (outbuf)->omx_buffer->nTimeStamp =
				GST2OMX_TIMESTAMP (timestamp);
			goo_component_release_buffer (self->component,
						      omx_buffer);
		}
		else
		{
			/* @fixme! */
			/* we do this copy because buffers get exhausted
			 * when a filesink is used.
			 * Maybe it could be solved by using multiple buffers.
			 */
			memcpy (GST_BUFFER_DATA (outbuf),
				omx_buffer->pBuffer, omx_buffer->nFilledLen);
			goo_component_release_buffer (self->component,
						      omx_buffer);

/* 			gst_buffer_unref (outbuf); */
/* 			outbuf = GST_BUFFER (gst_goo_buffer_new ()); */
/* 			gst_goo_buffer_set_data (outbuf, */
/* 						 self->component, */
/* 						 omx_buffer); */
		}

		GST_BUFFER_TIMESTAMP (outbuf)  = timestamp;
		GST_BUFFER_DURATION (outbuf)   = duration;
		GST_BUFFER_OFFSET (outbuf)     = offset;
		GST_BUFFER_OFFSET_END (outbuf) = offsetend;

		gst_buffer_set_caps (outbuf, GST_PAD_CAPS (self->srcpad));
		g_signal_emit (G_OBJECT (self),
			       gst_goo_encjpeg_signals[FRAME_ENCODED], 0);
	
		ret = gst_pad_push (self->srcpad, outbuf);
		if (omx_buffer->nFlags & OMX_BUFFERFLAG_EOS ||
		   	goo_port_is_eos (self->outport))
		{
			GST_INFO ("EOS flag found in output buffer (%d)",
			  	omx_buffer->nFilledLen);
			goo_component_set_done (self->component);

		}

		goto done;
	}
	else
	{
		ret = GST_FLOW_ERROR;
		goto done;
	}

fail:
	gst_buffer_unref (buffer);
	gst_goo_adapter_clear (adapter);

done:
	gst_object_unref (self);
	return ret;

}
Example #11
static void
send_message_locked (GstSoupHttpClientSink * souphttpsink)
{
  GList *g;
  guint64 n;

  if (souphttpsink->queued_buffers == NULL || souphttpsink->message) {
    return;
  }

  /* If the URI went away, drop all these buffers */
  if (souphttpsink->location == NULL) {
    GST_DEBUG_OBJECT (souphttpsink, "URI went away, dropping queued buffers");
    g_list_free_full (souphttpsink->queued_buffers,
        (GDestroyNotify) gst_buffer_unref);
    souphttpsink->queued_buffers = NULL;
    return;
  }

  souphttpsink->message = soup_message_new ("PUT", souphttpsink->location);
  if (souphttpsink->message == NULL) {
    GST_WARNING_OBJECT (souphttpsink,
        "URI could not be parsed while creating message.");
    g_list_free_full (souphttpsink->queued_buffers,
        (GDestroyNotify) gst_buffer_unref);
    souphttpsink->queued_buffers = NULL;
    return;
  }

  soup_message_set_flags (souphttpsink->message,
      (souphttpsink->automatic_redirect ? 0 : SOUP_MESSAGE_NO_REDIRECT));

  if (souphttpsink->cookies) {
    gchar **cookie;

    for (cookie = souphttpsink->cookies; *cookie != NULL; cookie++) {
      soup_message_headers_append (souphttpsink->message->request_headers,
          "Cookie", *cookie);
    }
  }

  n = 0;
  if (souphttpsink->offset == 0) {
    for (g = souphttpsink->streamheader_buffers; g; g = g_list_next (g)) {
      GstBuffer *buffer = g->data;
      GstMapInfo map;

      GST_DEBUG_OBJECT (souphttpsink, "queueing stream headers");
      gst_buffer_map (buffer, &map, GST_MAP_READ);
      /* Stream headers are updated whenever ::set_caps is called, so there
       * are no guarantees about their lifetime and we ask libsoup to copy
       * them into the message body with SOUP_MEMORY_COPY. */
      soup_message_body_append (souphttpsink->message->request_body,
          SOUP_MEMORY_COPY, map.data, map.size);
      n += map.size;
      gst_buffer_unmap (buffer, &map);
    }
  }

  for (g = souphttpsink->queued_buffers; g; g = g_list_next (g)) {
    GstBuffer *buffer = g->data;
    if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER)) {
      GstMapInfo map;

      gst_buffer_map (buffer, &map, GST_MAP_READ);
      /* Queued buffers are only freed in the next iteration of the mainloop
       * after the message body has been written out, so we don't need libsoup
       * to copy those while appending to the body. However, if the buffer is
       * used elsewhere, it should be copied. Hence, SOUP_MEMORY_TEMPORARY. */
      soup_message_body_append (souphttpsink->message->request_body,
          SOUP_MEMORY_TEMPORARY, map.data, map.size);
      n += map.size;
      gst_buffer_unmap (buffer, &map);
    }
  }

  if (souphttpsink->offset != 0) {
    char *s;
    s = g_strdup_printf ("bytes %" G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT "/*",
        souphttpsink->offset, souphttpsink->offset + n - 1);
    soup_message_headers_append (souphttpsink->message->request_headers,
        "Content-Range", s);
    g_free (s);
  }

  if (n == 0) {
    GST_DEBUG_OBJECT (souphttpsink,
        "total size of buffers queued is 0, freeing everything");
    g_list_free_full (souphttpsink->queued_buffers,
        (GDestroyNotify) gst_buffer_unref);
    souphttpsink->queued_buffers = NULL;
    g_object_unref (souphttpsink->message);
    souphttpsink->message = NULL;
    return;
  }

  souphttpsink->sent_buffers = souphttpsink->queued_buffers;
  souphttpsink->queued_buffers = NULL;

  GST_DEBUG_OBJECT (souphttpsink,
      "queue message %" G_GUINT64_FORMAT " %" G_GUINT64_FORMAT,
      souphttpsink->offset, n);
  soup_session_queue_message (souphttpsink->session, souphttpsink->message,
      callback, souphttpsink);

  souphttpsink->offset += n;
}
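For reference, the Content-Range value built above uses the bytes-unit syntax with an unknown complete length: appending n = 500 bytes at offset 1000 yields "Content-Range: bytes 1000-1499/*".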
Example #12
static GstFlowReturn
gst_mulawenc_chain (GstPad * pad, GstBuffer * buffer)
{
  GstMuLawEnc *mulawenc;
  gint16 *linear_data;
  guint linear_size;
  guint8 *mulaw_data;
  guint mulaw_size;
  GstBuffer *outbuf;
  GstFlowReturn ret;
  GstClockTime timestamp, duration;

  mulawenc = GST_MULAWENC (gst_pad_get_parent (pad));

  if (!mulawenc->rate || !mulawenc->channels)
    goto not_negotiated;

  linear_data = (gint16 *) GST_BUFFER_DATA (buffer);
  linear_size = GST_BUFFER_SIZE (buffer);

  mulaw_size = linear_size / 2;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  ret = gst_pad_alloc_buffer_and_set_caps (mulawenc->srcpad,
      GST_BUFFER_OFFSET_NONE, mulaw_size, GST_PAD_CAPS (mulawenc->srcpad),
      &outbuf);
  if (ret != GST_FLOW_OK)
    goto alloc_failed;

  if (duration == -1) {
    duration = gst_util_uint64_scale_int (mulaw_size,
        GST_SECOND, mulawenc->rate * mulawenc->channels);
  }

  if (GST_BUFFER_SIZE (outbuf) < mulaw_size) {
    /* pad-alloc can suggest a smaller size */
    gst_buffer_unref (outbuf);
    outbuf = gst_buffer_new_and_alloc (mulaw_size);
  }

  mulaw_data = (guint8 *) GST_BUFFER_DATA (outbuf);

  /* copy discont flag */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = duration;

  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (mulawenc->srcpad));

  mulaw_encode (linear_data, mulaw_data, mulaw_size);

  gst_buffer_unref (buffer);

  ret = gst_pad_push (mulawenc->srcpad, outbuf);

done:
  gst_object_unref (mulawenc);

  return ret;

not_negotiated:
  {
    GST_DEBUG_OBJECT (mulawenc, "no format negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    gst_buffer_unref (buffer);
    goto done;
  }
alloc_failed:
  {
    GST_DEBUG_OBJECT (mulawenc, "pad alloc failed");
    gst_buffer_unref (buffer);
    goto done;
  }
}
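Note that mulaw_size is linear_size / 2 because each 16-bit PCM sample encodes to exactly one 8-bit µ-law byte; for the same reason the duration fallback divides the µ-law byte count by rate * channels.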
Example #13
static GstFlowReturn
gst_multi_file_sink_render (GstBaseSink * sink, GstBuffer * buffer)
{
  GstMultiFileSink *multifilesink;
  guint size;
  guint8 *data;
  gchar *filename;
  gboolean ret;
  GError *error = NULL;

  size = GST_BUFFER_SIZE (buffer);
  data = GST_BUFFER_DATA (buffer);

  multifilesink = GST_MULTI_FILE_SINK (sink);

  switch (multifilesink->next_file) {
    case GST_MULTI_FILE_SINK_NEXT_BUFFER:
      filename = g_strdup_printf (multifilesink->filename,
          multifilesink->index);

      ret = g_file_set_contents (filename, (char *) data, size, &error);
      if (!ret)
        goto write_error;

      gst_multi_file_sink_post_message (multifilesink, buffer, filename);
      multifilesink->index++;

      g_free (filename);
      break;
    case GST_MULTI_FILE_SINK_NEXT_DISCONT:
      if (GST_BUFFER_IS_DISCONT (buffer)) {
        if (multifilesink->file) {
          fclose (multifilesink->file);
          multifilesink->file = NULL;

          filename = g_strdup_printf (multifilesink->filename,
              multifilesink->index);
          gst_multi_file_sink_post_message (multifilesink, buffer, filename);
          g_free (filename);
          multifilesink->index++;
        }
      }

      if (multifilesink->file == NULL) {
        filename = g_strdup_printf (multifilesink->filename,
            multifilesink->index);
        multifilesink->file = g_fopen (filename, "wb");
        g_free (filename);

        if (multifilesink->file == NULL)
          goto stdio_write_error;
      }

      ret = fwrite (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 1,
          multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;

      break;
    case GST_MULTI_FILE_SINK_NEXT_KEY_FRAME:
      if (multifilesink->next_segment == GST_CLOCK_TIME_NONE) {
        if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
          multifilesink->next_segment = GST_BUFFER_TIMESTAMP (buffer) +
              10 * GST_SECOND;
        }
      }

      if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer) &&
          GST_BUFFER_TIMESTAMP (buffer) >= multifilesink->next_segment &&
          !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
        if (multifilesink->file) {
          fclose (multifilesink->file);
          multifilesink->file = NULL;

          filename = g_strdup_printf (multifilesink->filename,
              multifilesink->index);
          gst_multi_file_sink_post_message (multifilesink, buffer, filename);
          g_free (filename);
          multifilesink->index++;
        }

        multifilesink->next_segment += 10 * GST_SECOND;
      }

      if (multifilesink->file == NULL) {
        filename = g_strdup_printf (multifilesink->filename,
            multifilesink->index);
        multifilesink->file = g_fopen (filename, "wb");
        g_free (filename);

        if (multifilesink->file == NULL)
          goto stdio_write_error;
      }

      ret = fwrite (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer), 1,
          multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;

      break;
    default:
      g_assert_not_reached ();
  }

  return GST_FLOW_OK;

  /* ERRORS */
write_error:
  {
    switch (error->code) {
      case G_FILE_ERROR_NOSPC:{
        GST_ELEMENT_ERROR (multifilesink, RESOURCE, NO_SPACE_LEFT, (NULL),
            (NULL));
        break;
      }
      default:{
        GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
            ("Error while writing to file \"%s\".", filename),
            ("%s", g_strerror (errno)));
      }
    }
    g_error_free (error);
    g_free (filename);

    return GST_FLOW_ERROR;
  }
stdio_write_error:
  GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
      ("Error while writing to file."), (NULL));
  return GST_FLOW_ERROR;
}
Example #14
static GstFlowReturn
recv_sample (GstElement * appsink, gpointer user_data)
{
  KmsRecorderEndpoint *self =
      KMS_RECORDER_ENDPOINT (GST_OBJECT_PARENT (appsink));
  GstElement *appsrc = GST_ELEMENT (user_data);
  KmsUriEndpointState state;
  GstFlowReturn ret;
  GstSample *sample;
  GstBuffer *buffer;
  GstCaps *caps;
  BaseTimeType *base_time;

  g_signal_emit_by_name (appsink, "pull-sample", &sample);
  if (sample == NULL)
    return GST_FLOW_OK;

  g_object_get (G_OBJECT (appsrc), "caps", &caps, NULL);
  if (caps == NULL) {
    /* Appsrc has not yet caps defined */
    GstPad *sink_pad = gst_element_get_static_pad (appsink, "sink");

    if (sink_pad != NULL) {
      caps = gst_pad_get_current_caps (sink_pad);
      g_object_unref (sink_pad);
    }

    if (caps == NULL) {
      GST_ELEMENT_ERROR (self, CORE, CAPS, ("No caps found for %s",
              GST_ELEMENT_NAME (appsrc)), GST_ERROR_SYSTEM);
      ret = GST_FLOW_ERROR;
      goto end;
    }

    g_object_set (appsrc, "caps", caps, NULL);
  }

  gst_caps_unref (caps);

  buffer = gst_sample_get_buffer (sample);
  if (buffer == NULL) {
    ret = GST_FLOW_OK;
    goto end;
  }

  g_object_get (G_OBJECT (self), "state", &state, NULL);
  if (state != KMS_URI_ENDPOINT_STATE_START) {
    GST_DEBUG ("Dropping buffer %" GST_PTR_FORMAT, buffer);
    ret = GST_FLOW_OK;
    goto end;
  }

  gst_buffer_ref (buffer);
  buffer = gst_buffer_make_writable (buffer);

  KMS_ELEMENT_LOCK (GST_OBJECT_PARENT (appsink));

  base_time = g_object_get_data (G_OBJECT (appsrc), BASE_TIME_DATA);

  if (base_time == NULL) {
    base_time = g_slice_new0 (BaseTimeType);
    base_time->pts = GST_CLOCK_TIME_NONE;
    base_time->dts = GST_CLOCK_TIME_NONE;
    g_object_set_data_full (G_OBJECT (appsrc), BASE_TIME_DATA, base_time,
        release_base_time_type);
  }

  if (!GST_CLOCK_TIME_IS_VALID (base_time->pts)
      && GST_BUFFER_PTS_IS_VALID (buffer)) {
    base_time->pts = buffer->pts;
    GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT,
        base_time->pts);
  }

  if (!GST_CLOCK_TIME_IS_VALID (base_time->dts)
      && GST_BUFFER_DTS_IS_VALID (buffer)) {
    base_time->dts = buffer->dts;
    GST_DEBUG_OBJECT (appsrc, "Setting dts base time to: %" G_GUINT64_FORMAT,
        base_time->dts);
  }

  if (GST_CLOCK_TIME_IS_VALID (base_time->pts)
      && GST_BUFFER_PTS_IS_VALID (buffer)) {
    buffer->pts -= base_time->pts + self->priv->paused_time;
    buffer->dts = buffer->pts;
  } else if (GST_CLOCK_TIME_IS_VALID (base_time->dts)
      && GST_BUFFER_DTS_IS_VALID (buffer)) {
    buffer->dts -= base_time->dts + self->priv->paused_time;
    buffer->pts = buffer->dts;
  }

  g_object_set (self->priv->controller, "has_data", TRUE, NULL);

  KMS_ELEMENT_UNLOCK (GST_OBJECT_PARENT (appsink));

  GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_LIVE);

  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER))
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);

  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);

  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* something wrong */
    GST_ERROR ("Could not send buffer to appsrc %s. Cause: %s",
        GST_ELEMENT_NAME (appsrc), gst_flow_get_name (ret));
  }

end:
  if (sample != NULL)
    gst_sample_unref (sample);

  return ret;
}
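recv_sample hands the buffer to the appsrc through the "push-buffer" action signal, which does not take ownership (hence the unref that follows). The direct-call equivalent, assuming gstreamer-app is linked, transfers ownership instead:

/* Sketch: gst_app_src_push_buffer () takes ownership, so take an
 * extra ref first if the buffer is still needed afterwards. */
GstFlowReturn ret = gst_app_src_push_buffer (GST_APP_SRC (appsrc),
    gst_buffer_ref (buffer));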
Example #15
static GstFlowReturn
gst_live_live_adder_chain (GstPad * pad, GstBuffer * buffer)
{
    GstLiveAdder *adder = GST_LIVE_ADDER (gst_pad_get_parent_element (pad));
    GstLiveAdderPadPrivate *padprivate = NULL;
    GstFlowReturn ret = GST_FLOW_OK;
    GList *item = NULL;
    GstClockTime skip = 0;
    gint64 drift = 0;             /* Positive if new buffer after old buffer */

    GST_OBJECT_LOCK (adder);

    ret = adder->srcresult;

    GST_DEBUG ("Incoming buffer time:%" GST_TIME_FORMAT " duration:%"
               GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
               GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));

    if (ret != GST_FLOW_OK) {
        GST_DEBUG_OBJECT (adder, "Passing non-ok result from src: %s",
                          gst_flow_get_name (ret));
        gst_buffer_unref (buffer);
        goto out;
    }

    padprivate = gst_pad_get_element_private (pad);

    if (!padprivate) {
        ret = GST_FLOW_NOT_LINKED;
        gst_buffer_unref (buffer);
        goto out;
    }

    if (padprivate->eos) {
        GST_DEBUG_OBJECT (adder, "Received buffer after EOS");
        ret = GST_FLOW_UNEXPECTED;
        gst_buffer_unref (buffer);
        goto out;
    }

    if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
        goto invalid_timestamp;

    if (padprivate->segment.format == GST_FORMAT_UNDEFINED) {
        GST_WARNING_OBJECT (adder, "No new-segment received,"
                            " initializing segment with time 0..-1");
        gst_segment_init (&padprivate->segment, GST_FORMAT_TIME);
        gst_segment_set_newsegment (&padprivate->segment,
                                    FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0);
    }

    if (padprivate->segment.format != GST_FORMAT_TIME)
        goto invalid_segment;

    buffer = gst_buffer_make_metadata_writable (buffer);

    drift = GST_BUFFER_TIMESTAMP (buffer) - padprivate->expected_timestamp;

    /* Just see if we receive invalid timestamp/durations */
    if (GST_CLOCK_TIME_IS_VALID (padprivate->expected_timestamp) &&
            !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT) &&
            (drift != 0)) {
        GST_LOG_OBJECT (adder,
                        "Timestamp discontinuity without the DISCONT flag set"
                        " (expected %" GST_TIME_FORMAT ", got %" GST_TIME_FORMAT
                        " drift:%" G_GINT64_FORMAT "ms)",
                        GST_TIME_ARGS (padprivate->expected_timestamp),
                        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)), drift / GST_MSECOND);

        /* We accept drifts of 10ms */
        if (ABS (drift) < (10 * GST_MSECOND)) {
            GST_DEBUG ("Correcting minor drift");
            GST_BUFFER_TIMESTAMP (buffer) = padprivate->expected_timestamp;
        }
    }


    /* If there is no duration, let's set one */
    if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
        GST_BUFFER_DURATION (buffer) =
            gst_audio_duration_from_pad_buffer (pad, buffer);
        padprivate->expected_timestamp = GST_CLOCK_TIME_NONE;
    } else {
        padprivate->expected_timestamp = GST_BUFFER_TIMESTAMP (buffer) +
                                         GST_BUFFER_DURATION (buffer);
    }


    /*
     * Let's clip the buffer to the segment (so we don't have to worry about
     * clipping afterwards).
     * This should also guarantee us that we'll have valid timestamps and
     * durations afterwards
     */

    buffer = gst_audio_buffer_clip (buffer, &padprivate->segment, adder->rate,
                                    adder->bps);

    /* buffer can be NULL if it's completely outside of the segment */
    if (!buffer) {
        GST_DEBUG ("Buffer completely outside of configured segment, dropping it");
        goto out;
    }

    /*
     * Make sure all incoming buffers share the same timestamping
     */
    GST_BUFFER_TIMESTAMP (buffer) =
        gst_segment_to_running_time (&padprivate->segment,
                                     padprivate->segment.format, GST_BUFFER_TIMESTAMP (buffer));


    if (GST_CLOCK_TIME_IS_VALID (adder->next_timestamp) &&
            GST_BUFFER_TIMESTAMP (buffer) < adder->next_timestamp) {
        if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <
                adder->next_timestamp) {
            GST_DEBUG_OBJECT (adder, "Buffer is late, dropping (ts: %" GST_TIME_FORMAT
                              " duration: %" GST_TIME_FORMAT ")",
                              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
                              GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
            gst_buffer_unref (buffer);
            goto out;
        } else {
            skip = adder->next_timestamp - GST_BUFFER_TIMESTAMP (buffer);
            GST_DEBUG_OBJECT (adder, "Buffer is partially late, skipping %"
                              GST_TIME_FORMAT, GST_TIME_ARGS (skip));
        }
    }

    /* If our new buffer's head is higher than the queue's head, let's wake up;
     * we may not have to wait as long
     */
    if (adder->clock_id &&
            g_queue_peek_head (adder->buffers) != NULL &&
            GST_BUFFER_TIMESTAMP (buffer) + skip <
            GST_BUFFER_TIMESTAMP (g_queue_peek_head (adder->buffers)))
        gst_clock_id_unschedule (adder->clock_id);

    for (item = g_queue_peek_head_link (adder->buffers);
            item; item = g_list_next (item)) {
        GstBuffer *oldbuffer = item->data;
        GstClockTime old_skip = 0;
        GstClockTime mix_duration = 0;
        GstClockTime mix_start = 0;
        GstClockTime mix_end = 0;

        /* We haven't reached our place yet */
        if (GST_BUFFER_TIMESTAMP (buffer) + skip >=
                GST_BUFFER_TIMESTAMP (oldbuffer) + GST_BUFFER_DURATION (oldbuffer))
            continue;

        /* We're past our place, let's insert ourselves here */
        if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <=
                GST_BUFFER_TIMESTAMP (oldbuffer))
            break;

        /* if we reach this spot, we have overlap, so we must mix */

        /* First make a subbuffer with the non-overlapping part */
        if (GST_BUFFER_TIMESTAMP (buffer) + skip < GST_BUFFER_TIMESTAMP (oldbuffer)) {
            GstBuffer *subbuffer = NULL;
            GstClockTime subbuffer_duration = GST_BUFFER_TIMESTAMP (oldbuffer) -
                                              (GST_BUFFER_TIMESTAMP (buffer) + skip);

            subbuffer = gst_buffer_create_sub (buffer,
                                               gst_live_adder_length_from_duration (adder, skip),
                                               gst_live_adder_length_from_duration (adder, subbuffer_duration));

            GST_BUFFER_TIMESTAMP (subbuffer) = GST_BUFFER_TIMESTAMP (buffer) + skip;
            GST_BUFFER_DURATION (subbuffer) = subbuffer_duration;

            skip += subbuffer_duration;

            g_queue_insert_before (adder->buffers, item, subbuffer);
        }

        /* Now we are on the overlapping part */
        oldbuffer = gst_buffer_make_writable (oldbuffer);
        item->data = oldbuffer;

        old_skip = GST_BUFFER_TIMESTAMP (buffer) + skip -
                   GST_BUFFER_TIMESTAMP (oldbuffer);

        mix_start = GST_BUFFER_TIMESTAMP (oldbuffer) + old_skip;

        if (GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer) <
                GST_BUFFER_TIMESTAMP (oldbuffer) + GST_BUFFER_DURATION (oldbuffer))
            mix_end = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
        else
            mix_end = GST_BUFFER_TIMESTAMP (oldbuffer) +
                      GST_BUFFER_DURATION (oldbuffer);

        mix_duration = mix_end - mix_start;

        adder->func (GST_BUFFER_DATA (oldbuffer) +
                     gst_live_adder_length_from_duration (adder, old_skip),
                     GST_BUFFER_DATA (buffer) +
                     gst_live_adder_length_from_duration (adder, skip),
                     gst_live_adder_length_from_duration (adder, mix_duration));

        skip += mix_duration;
    }

    g_cond_broadcast (adder->not_empty_cond);

    if (skip == GST_BUFFER_DURATION (buffer)) {
        gst_buffer_unref (buffer);
    } else {
        if (skip) {
            GstClockTime subbuffer_duration = GST_BUFFER_DURATION (buffer) - skip;
            GstClockTime subbuffer_ts = GST_BUFFER_TIMESTAMP (buffer) + skip;
            GstBuffer *new_buffer = gst_buffer_create_sub (buffer,
                                    gst_live_adder_length_from_duration (adder, skip),
                                    gst_live_adder_length_from_duration (adder, subbuffer_duration));
            gst_buffer_unref (buffer);
            buffer = new_buffer;
            GST_BUFFER_TIMESTAMP (buffer) = subbuffer_ts;
            GST_BUFFER_DURATION (buffer) = subbuffer_duration;
        }

        if (item)
            g_queue_insert_before (adder->buffers, item, buffer);
        else
            g_queue_push_tail (adder->buffers, buffer);
    }

out:

    GST_OBJECT_UNLOCK (adder);
    gst_object_unref (adder);

    return ret;

invalid_timestamp:

    GST_OBJECT_UNLOCK (adder);
    gst_buffer_unref (buffer);
    GST_ELEMENT_ERROR (adder, STREAM, FAILED,
                       ("Buffer without a valid timestamp received"),
                       ("Invalid timestamp received on buffer"));

    return GST_FLOW_ERROR;

invalid_segment:
    {
        const gchar *format = gst_format_get_name (padprivate->segment.format);
        GST_OBJECT_UNLOCK (adder);
        gst_buffer_unref (buffer);
        GST_ELEMENT_ERROR (adder, STREAM, FAILED,
                           ("This element only supports TIME segments, received other type"),
                           ("Received a segment of type %s, only support time segment", format));

        return GST_FLOW_ERROR;
    }

}
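
The drift handling above can be read as one small rule: a timestamp whose drift from the expected value stays under 10 ms is treated as jitter and snapped back, while anything larger is kept as a real discontinuity. A sketch of that rule in isolation, not code from the element itself:

#include <gst/gst.h>

/* Sketch of the live-adder drift rule: snap timestamps whose drift
 * from the expected value is under 10 ms (matching the code above). */
static GstClockTime
correct_minor_drift (GstClockTime expected, GstClockTime received)
{
  gint64 drift;

  if (!GST_CLOCK_TIME_IS_VALID (expected))
    return received;

  drift = (gint64) received - (gint64) expected;

  if (ABS (drift) < 10 * GST_MSECOND)
    return expected;            /* minor drift: snap to expected */

  return received;              /* real discontinuity: keep as-is */
}
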
Example #16
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstBuffer * buf)
{
  GstMultipartDemux *multipart;
  GstAdapter *adapter;
  GstClockTime timestamp;
  gint size = 1;
  GstFlowReturn res;

  multipart = GST_MULTIPART_DEMUX (gst_pad_get_parent (pad));
  adapter = multipart->adapter;

  res = GST_FLOW_OK;

  timestamp = GST_BUFFER_TIMESTAMP (buf);

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (adapter);
  }
  gst_adapter_push (adapter, buf);

  while (gst_adapter_available (adapter) > 0) {
    GstMultipartPad *srcpad;
    GstBuffer *outbuf;
    gboolean created;
    gint datalen;

    if (G_UNLIKELY (!multipart->header_completed)) {
      if ((size = multipart_parse_header (multipart)) < 0) {
        goto nodata;
      } else {
        gst_adapter_flush (adapter, size);
        multipart->header_completed = TRUE;
      }
    }
    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
      goto nodata;
    }

    /* Invalidate header info */
    multipart->header_completed = FALSE;
    multipart->content_length = -1;

    if (G_UNLIKELY (datalen <= 0)) {
      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
      gst_adapter_flush (adapter, size - datalen);
    } else {
      srcpad =
          gst_multipart_find_pad_by_mime (multipart,
          multipart->mime_type, &created);
      outbuf = gst_adapter_take_buffer (adapter, datalen);
      gst_adapter_flush (adapter, size - datalen);

      gst_buffer_set_caps (outbuf, GST_PAD_CAPS (srcpad->pad));
      if (created) {
        GstTagList *tags;

        /* Push new segment, first buffer has 0 timestamp */
        gst_pad_push_event (srcpad->pad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));

        tags =
            gst_tag_list_new_full (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));

        GST_BUFFER_TIMESTAMP (outbuf) = 0;
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
      }
      GST_DEBUG_OBJECT (multipart,
          "pushing buffer with timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
      GST_DEBUG_OBJECT (multipart, "buffer has caps %" GST_PTR_FORMAT,
          GST_BUFFER_CAPS (outbuf));
      res = gst_pad_push (srcpad->pad, outbuf);
      res = gst_multipart_combine_flows (multipart, srcpad, res);
      if (res != GST_FLOW_OK)
        break;
    }
  }

nodata:
  gst_object_unref (multipart);

  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
    return GST_FLOW_ERROR;
  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
    return GST_FLOW_UNEXPECTED;

  return res;
}
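
The chain function above is the usual GstAdapter accumulate-then-slice pattern: push every input buffer, then repeatedly take complete parts off the front. A stripped-down sketch of the pattern; the fixed chunk size is illustrative, since the demuxer derives sizes from the multipart boundary instead:

#include <gst/base/gstadapter.h>

/* Sketch of the accumulate-then-slice pattern used above. */
static void
consume_chunks (GstAdapter * adapter, GstBuffer * inbuf, guint chunk_size)
{
  gst_adapter_push (adapter, inbuf);    /* adapter takes ownership */

  while (gst_adapter_available (adapter) >= chunk_size) {
    GstBuffer *chunk = gst_adapter_take_buffer (adapter, chunk_size);

    /* ... push `chunk` downstream here, then drop our reference ... */
    gst_buffer_unref (chunk);
  }
}
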
Example #17
bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
                                       int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedVideoEos && !mVideoSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mVideoSinkBufferCount) {
      if (!mAudioSinkBufferCount) {
        /* We have nothing decoded, so there is no point in returning to the
         * state machine: it would call us back immediately, we would return
         * again, and so on, wasting CPU cycles. Block here until either video
         * or audio data is available.
         */
        mon.Wait();
        if (!mVideoSinkBufferCount) {
          /* There is still no video data available, so either there is audio data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    mDecoder->NotifyDecodedFrames(0, 1);

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
#endif
    mVideoSinkBufferCount--;
  }

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if ((aKeyFrameSkip && !isKeyframe)) {
    gst_buffer_unref(buffer);
    return true;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mVideoSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
               "frame has invalid timestamp");

  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
  else if (fpsNum && fpsDen)
    /* add 1-frame duration */
    duration = gst_util_uint64_scale(GST_USECOND, fpsDen, fpsNum);

  if (timestamp < aTimeThreshold) {
    LOG(PR_LOG_DEBUG, "skipping frame %" GST_TIME_FORMAT
                      " threshold %" GST_TIME_FORMAT,
                      GST_TIME_ARGS(timestamp * 1000),
                      GST_TIME_ARGS(aTimeThreshold * 1000));
    gst_buffer_unref(buffer);
    return true;
  }

  if (!buffer)
    /* no more frames */
    return true;

#if GST_VERSION_MAJOR >= 1
  if (mConfigureAlignment && buffer->pool) {
    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
    GstVideoAlignment align;
    if (gst_buffer_pool_config_get_video_alignment(config, &align))
      gst_video_info_align(&mVideoInfo, &align);
    gst_structure_free(config);
    mConfigureAlignment = false;
  }
#endif

  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    CopyIntoImageBuffer(buffer, &tmp, image);
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  int64_t offset = mDecoder->GetResource()->Tell(); // Estimate location in media.
  VideoData* video = VideoData::CreateFromImage(mInfo.mVideo,
                                                mDecoder->GetImageContainer(),
                                                offset, timestamp, duration,
                                                static_cast<Image*>(image.get()),
                                                isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);

  gst_buffer_unref(buffer);

  return true;
}
Example #18
static GstFlowReturn
mpegpsmux_collected (GstCollectPads * pads, MpegPsMux * mux)
{
  /* main muxing function */

  GstFlowReturn ret = GST_FLOW_OK;
  MpegPsPadData *best = NULL;
  gboolean keyunit;

  GST_DEBUG_OBJECT (mux, "Pads collected");

  if (mux->first) {             /* process block for the first mux */
    /* iterate through the collect pads and add streams to @mux */
    ret = mpegpsmux_create_streams (mux);
    /* Assumption : all pads are already added at this time */

    if (G_UNLIKELY (ret != GST_FLOW_OK))
      return ret;

    best = mpegpsmux_choose_best_stream (mux);

    /* prepare the src pad (output), return if failed */
    if (!mpegpsdemux_prepare_srcpad (mux)) {
      GST_DEBUG_OBJECT (mux, "Failed to send new segment");
      goto new_seg_fail;
    }

    mux->first = FALSE;
  } else {
    best = mpegpsmux_choose_best_stream (mux);
  }

  if (best != NULL) {
    GstBuffer *buf = best->queued.buf;
    gint64 pts, dts;

    g_assert (buf != NULL);

    GST_LOG_OBJECT (mux,
        "Chose stream from pad %" GST_PTR_FORMAT " for output (PID: 0x%04x): "
        "adjusted pts: %" GST_TIME_FORMAT ", dts: %" GST_TIME_FORMAT,
        best->collect.pad, best->stream_id,
        GST_TIME_ARGS (best->queued.pts), GST_TIME_ARGS (best->queued.dts));

    /* and convert to mpeg time stamps */
    pts = GSTTIME_TO_MPEGTIME (best->queued.pts);
    dts = GSTTIME_TO_MPEGTIME (best->queued.dts);

    /* start of new GOP? */
    keyunit = !GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);

    if (keyunit && best->stream_id == mux->video_stream_id
        && mux->gop_list != NULL) {
      ret = mpegpsmux_push_gop_list (mux);
      if (ret != GST_FLOW_OK)
        goto done;
    }

    /* give the buffer to libpsmux for processing */
    psmux_stream_add_data (best->stream, buf, pts, dts, keyunit);

    best->queued.buf = NULL;

    /* write the data from libpsmux to stream */
    while (psmux_stream_bytes_in_buffer (best->stream) > 0) {
      GST_LOG_OBJECT (mux, "Before @psmux_write_stream_packet");
      if (!psmux_write_stream_packet (mux->psmux, best->stream)) {
        GST_DEBUG_OBJECT (mux, "Failed to write data packet");
        goto write_fail;
      }
    }
    mux->last_ts = best->last_ts;
  } else {
    /* FIXME: Drain all remaining streams */
    /* At EOS */
    if (mux->gop_list != NULL)
      mpegpsmux_push_gop_list (mux);

    if (!psmux_write_end_code (mux->psmux)) {
      GST_WARNING_OBJECT (mux, "Writing MPEG PS Program end code failed.");
    }
    gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
  }

done:

  return ret;
new_seg_fail:
  return GST_FLOW_ERROR;
write_fail:
  /* FIXME: Failed writing data for some reason. Should set appropriate error */
  return mux->last_flow_ret;
}
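
GSTTIME_TO_MPEGTIME above rescales GStreamer nanoseconds to the 90 kHz MPEG system clock. A plausible stand-in definition, offered as an assumption rather than the libpsmux macro verbatim:

#include <gst/gst.h>

/* Assumed equivalent of GSTTIME_TO_MPEGTIME: 90000 MPEG ticks per
 * second versus GST_SECOND nanoseconds per second. */
static gint64
gsttime_to_mpegtime (GstClockTime time)
{
  return (gint64) gst_util_uint64_scale (time, 90000, GST_SECOND);
}
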
Example #19
static GstFlowReturn
gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoEncoder *base_video_encoder;
  GstBaseVideoEncoderClass *klass;
  GstVideoFrame *frame;
  GstFlowReturn ret = GST_FLOW_OK;

  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);

  GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);

  if (!GST_PAD_CAPS (pad)) {
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

  GST_LOG_OBJECT (base_video_encoder,
      "received buffer of size %d with ts %" GST_TIME_FORMAT
      ", duration %" GST_TIME_FORMAT, GST_BUFFER_SIZE (buf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

  if (base_video_encoder->a.at_eos) {
    ret = GST_FLOW_UNEXPECTED;
    goto done;
  }

  if (base_video_encoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (buf);
    gint64 stop = start + GST_BUFFER_DURATION (buf);
    gint64 clip_start;
    gint64 clip_stop;

    if (!gst_segment_clip (&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
            GST_FORMAT_TIME, start, stop, &clip_start, &clip_stop)) {
      GST_DEBUG_OBJECT (base_video_encoder,
          "clipping to segment dropped frame");
      goto done;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_LOG_OBJECT (base_video_encoder, "marked discont");
    GST_BASE_VIDEO_CODEC (base_video_encoder)->discont = TRUE;
  }

  frame =
      gst_base_video_codec_new_frame (GST_BASE_VIDEO_CODEC
      (base_video_encoder));
  frame->events = base_video_encoder->current_frame_events;
  base_video_encoder->current_frame_events = NULL;
  frame->sink_buffer = buf;
  frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (buf);
  frame->presentation_duration = GST_BUFFER_DURATION (buf);
  frame->presentation_frame_number =
      base_video_encoder->presentation_frame_number;
  base_video_encoder->presentation_frame_number++;
  frame->force_keyframe = base_video_encoder->force_keyframe;
  base_video_encoder->force_keyframe = FALSE;

  GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
      g_list_append (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);

  /* new data, more finish needed */
  base_video_encoder->drained = FALSE;

  GST_LOG_OBJECT (base_video_encoder, "passing frame pfn %d to subclass",
      frame->presentation_frame_number);

  ret = klass->handle_frame (base_video_encoder, frame);

done:
  GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);

  g_object_unref (base_video_encoder);

  return ret;
}
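
The sink_clipping block above only asks whether the buffer overlaps the configured segment; frames with no overlap are dropped. The same test as a standalone predicate, using the 0.10-era gst_segment_clip() with gint64 bounds:

#include <gst/gst.h>

/* Sketch of the clipping test: returns FALSE when the buffer's
 * [start, stop) interval lies entirely outside the segment, which is
 * exactly when the encoder above drops the frame. */
static gboolean
buffer_overlaps_segment (GstSegment * segment, GstBuffer * buf)
{
  gint64 start = GST_BUFFER_TIMESTAMP (buf);
  gint64 stop = start + GST_BUFFER_DURATION (buf);
  gint64 clip_start, clip_stop;

  return gst_segment_clip (segment, GST_FORMAT_TIME, start, stop,
      &clip_start, &clip_stop);
}
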
Example #20
static GstFlowReturn
gst_jasper_dec_chain (GstPad * pad, GstBuffer * buf)
{
  GstJasperDec *dec;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime ts;
  GstBuffer *outbuf = NULL;
  guint8 *data;
  guint size;
  gboolean decode;

  dec = GST_JASPER_DEC (GST_PAD_PARENT (pad));

  if (dec->fmt < 0)
    goto not_negotiated;

  ts = GST_BUFFER_TIMESTAMP (buf);

  GST_LOG_OBJECT (dec, "buffer with ts: %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))
    dec->discont = TRUE;

  decode = gst_jasper_dec_do_qos (dec, ts);

  /* FIXME: do clipping */

  if (G_UNLIKELY (!decode)) {
    dec->discont = TRUE;
    goto done;
  }

  /* strip possible prefix */
  if (dec->strip) {
    GstBuffer *tmp;

    tmp = gst_buffer_create_sub (buf, dec->strip,
        GST_BUFFER_SIZE (buf) - dec->strip);
    gst_buffer_copy_metadata (tmp, buf, GST_BUFFER_COPY_TIMESTAMPS);
    gst_buffer_unref (buf);
    buf = tmp;
  }
  /* prepend possible codec_data */
  if (dec->codec_data) {
    GstBuffer *tmp;

    tmp = gst_buffer_merge (dec->codec_data, buf);
    gst_buffer_copy_metadata (tmp, buf, GST_BUFFER_COPY_TIMESTAMPS);
    gst_buffer_unref (buf);
    buf = tmp;
  }

  /* now really feed the data to decoder */
  data = GST_BUFFER_DATA (buf);
  size = GST_BUFFER_SIZE (buf);

  ret = gst_jasper_dec_get_picture (dec, data, size, &outbuf);

  if (outbuf) {
    gst_buffer_copy_metadata (outbuf, buf, GST_BUFFER_COPY_TIMESTAMPS);
    if (dec->discont) {
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
      dec->discont = FALSE;
    }

    if (ret == GST_FLOW_OK)
      ret = gst_pad_push (dec->srcpad, outbuf);
    else
      gst_buffer_unref (outbuf);
  }

done:
  gst_buffer_unref (buf);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ELEMENT_ERROR (dec, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before chain function"));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
}
Example #21
static GstFlowReturn
gst_mulawdec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstMuLawDec *mulawdec;
  gint16 *linear_data;
  guint8 *mulaw_data;
  guint mulaw_size;
  GstBuffer *outbuf;
  GstFlowReturn ret;

  mulawdec = GST_MULAWDEC (GST_PAD_PARENT (pad));

  if (G_UNLIKELY (mulawdec->rate == 0))
    goto not_negotiated;

  mulaw_data = (guint8 *) GST_BUFFER_DATA (buffer);
  mulaw_size = GST_BUFFER_SIZE (buffer);

  ret =
      gst_pad_alloc_buffer_and_set_caps (mulawdec->srcpad,
      GST_BUFFER_OFFSET_NONE, mulaw_size * 2, GST_PAD_CAPS (mulawdec->srcpad),
      &outbuf);
  if (ret != GST_FLOW_OK)
    goto alloc_failed;

  linear_data = (gint16 *) GST_BUFFER_DATA (outbuf);

  /* copy discont flag */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

  GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
  if (GST_BUFFER_DURATION (buffer) == GST_CLOCK_TIME_NONE)
    GST_BUFFER_DURATION (outbuf) = gst_util_uint64_scale_int (GST_SECOND,
        mulaw_size * 2, 2 * mulawdec->rate * mulawdec->channels);
  else
    GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buffer);
  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (mulawdec->srcpad));

  mulaw_decode (mulaw_data, linear_data, mulaw_size);

  gst_buffer_unref (buffer);

  ret = gst_pad_push (mulawdec->srcpad, outbuf);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (mulawdec, "no input format set: not-negotiated");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }
alloc_failed:
  {
    GST_DEBUG_OBJECT (mulawdec, "pad alloc failed, flow: %s",
        gst_flow_get_name (ret));
    gst_buffer_unref (buffer);
    return ret;
  }
}
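
The fallback duration above simplifies to samples / (rate * channels): each mu-law byte decodes to exactly one 16-bit sample, so the two factors of 2 cancel. A sketch of the same computation:

#include <gst/gst.h>

/* Sketch of the mu-law duration fallback: `mulaw_size` input bytes are
 * `mulaw_size` samples, spread across `channels`, at `rate` Hz. */
static GstClockTime
mulaw_duration (guint mulaw_size, gint rate, gint channels)
{
  return gst_util_uint64_scale_int (GST_SECOND, mulaw_size,
      rate * channels);
}
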
Example #22
static GstFlowReturn
gst_amc_audio_dec_handle_frame (GstAudioDecoder * decoder, GstBuffer * inbuf)
{
  GstAmcAudioDec *self;
  gint idx;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  guint offset = 0;
  GstClockTime timestamp, duration, timestamp_offset = 0;
  GstMapInfo minfo;

  memset (&minfo, 0, sizeof (minfo));

  self = GST_AMC_AUDIO_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Handling frame");

  /* Make sure to keep a reference to the input here,
   * it can be unreffed from the other thread if
   * finish_frame() is called */
  if (inbuf)
    inbuf = gst_buffer_ref (inbuf);

  if (!self->started) {
    GST_ERROR_OBJECT (self, "Codec not started yet");
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (self->eos) {
    GST_WARNING_OBJECT (self, "Got frame after EOS");
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_EOS;
  }

  if (self->flushing)
    goto flushing;

  if (self->downstream_flow_ret != GST_FLOW_OK)
    goto downstream_error;

  if (!inbuf)
    return gst_amc_audio_dec_drain (self);

  timestamp = GST_BUFFER_PTS (inbuf);
  duration = GST_BUFFER_DURATION (inbuf);

  gst_buffer_map (inbuf, &minfo, GST_MAP_READ);

  while (offset < minfo.size) {
    /* Make sure to release the base class stream lock, otherwise
     * _loop() can't call _finish_frame() and we might block forever
     * because no input buffers are released */
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    /* Wait at most 100ms here, some codecs don't fail dequeueing if
     * the codec is flushing, causing deadlocks during shutdown */
    idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000);
    GST_AUDIO_DECODER_STREAM_LOCK (self);

    if (idx < 0) {
      if (self->flushing)
        goto flushing;
      switch (idx) {
        case INFO_TRY_AGAIN_LATER:
          GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
          continue;             /* next try */
          break;
        case G_MININT:
          GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
          goto dequeue_error;
        default:
          g_assert_not_reached ();
          break;
      }

      continue;
    }

    if (idx >= self->n_input_buffers)
      goto invalid_buffer_index;

    if (self->flushing)
      goto flushing;

    if (self->downstream_flow_ret != GST_FLOW_OK) {
      memset (&buffer_info, 0, sizeof (buffer_info));
      gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info);
      goto downstream_error;
    }

    /* Now handle the frame */

    /* Copy the buffer content in chunks of size as requested
     * by the port */
    buf = &self->input_buffers[idx];

    memset (&buffer_info, 0, sizeof (buffer_info));
    buffer_info.offset = 0;
    buffer_info.size = MIN (minfo.size - offset, buf->size);

    orc_memcpy (buf->data, minfo.data + offset, buffer_info.size);

    /* Interpolate timestamps if we're passing the buffer
     * in multiple chunks */
    if (offset != 0 && duration != GST_CLOCK_TIME_NONE) {
      timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size);
    }

    if (timestamp != GST_CLOCK_TIME_NONE) {
      buffer_info.presentation_time_us =
          gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND);
      self->last_upstream_ts = timestamp + timestamp_offset;
    }
    if (duration != GST_CLOCK_TIME_NONE)
      self->last_upstream_ts += duration;

    if (offset == 0) {
      if (!GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DELTA_UNIT))
        buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME;
    }

    offset += buffer_info.size;
    GST_DEBUG_OBJECT (self,
        "Queueing buffer %d: size %d time %" G_GINT64_FORMAT " flags 0x%08x",
        idx, buffer_info.size, buffer_info.presentation_time_us,
        buffer_info.flags);
    if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info))
      goto queue_error;
  }
  gst_buffer_unmap (inbuf, &minfo);
  gst_buffer_unref (inbuf);

  return self->downstream_flow_ret;

downstream_error:
  {
    GST_ERROR_OBJECT (self, "Downstream returned %s",
        gst_flow_get_name (self->downstream_flow_ret));
    if (minfo.data)
      gst_buffer_unmap (inbuf, &minfo);
    if (inbuf)
      gst_buffer_unref (inbuf);
    return self->downstream_flow_ret;
  }
invalid_buffer_index:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
        ("Invalid input buffer index %d of %d", idx, self->n_input_buffers));
    if (minfo.data)
      gst_buffer_unmap (inbuf, &minfo);
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_ERROR;
  }
dequeue_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
        ("Failed to dequeue input buffer"));
    if (minfo.data)
      gst_buffer_unmap (inbuf, &minfo);
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_ERROR;
  }
queue_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
        ("Failed to queue input buffer"));
    if (minfo.data)
      gst_buffer_unmap (inbuf, &minfo);
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_ERROR;
  }
flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING");
    if (minfo.data)
      gst_buffer_unmap (inbuf, &minfo);
    if (inbuf)
      gst_buffer_unref (inbuf);
    return GST_FLOW_FLUSHING;
  }
}
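
When one input buffer is fed to the codec in several chunks, the code above gives each chunk a timestamp offset proportional to its byte offset within the buffer. A sketch of that interpolation with the validity guards made explicit:

#include <gst/gst.h>

/* Sketch of the per-chunk timestamp interpolation: a chunk starting
 * `offset` bytes into a buffer of `total_size` bytes is shifted by the
 * proportional share of the buffer's duration. */
static GstClockTime
chunk_timestamp (GstClockTime timestamp, GstClockTime duration,
    gsize offset, gsize total_size)
{
  if (timestamp == GST_CLOCK_TIME_NONE || duration == GST_CLOCK_TIME_NONE
      || offset == 0)
    return timestamp;

  return timestamp + gst_util_uint64_scale (offset, duration, total_size);
}
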
Example #23
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstClockTime qostime;
  GstFlowReturn ret = GST_FLOW_OK;
  GError *err;

  GST_DEBUG ("chain");

  /* can only do QoS if the segment is in TIME */
  if (vpp->segment.format != GST_FORMAT_TIME)
    goto no_qos;

  /* QOS is done on the running time of the buffer, get it now */
  qostime = gst_segment_to_running_time (&vpp->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buffer));

  if (qostime != -1) {
    gboolean need_skip;
    GstClockTime earliest_time;

    /* lock for getting the QoS parameters that are set (in a different thread)
     * with the QOS events */
    GST_OBJECT_LOCK (vpp);
    earliest_time = vpp->earliest_time;
    /* check for QoS, don't perform conversion for buffers
     * that are known to be late. */
    need_skip = GST_CLOCK_TIME_IS_VALID (earliest_time) && qostime != -1 &&
        qostime <= earliest_time;

    GST_OBJECT_UNLOCK (vpp);

    if (need_skip) {
      GST_DEBUG_OBJECT (vpp, "skipping transform: qostime %"
          GST_TIME_FORMAT " <= %" GST_TIME_FORMAT,
          GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
      /* mark discont for next buffer */
      vpp->discont = TRUE;
      gst_buffer_unref (buffer);
      return GST_FLOW_OK;
    }
  }

no_qos:

  if (vpp->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    vpp->discont = FALSE;
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  if (!vpp->native_input) {
    GstVdpVideoBuffer *video_buf;

    err = NULL;
    video_buf =
        (GstVdpVideoBuffer *) gst_vdp_buffer_pool_get_buffer (vpp->vpool, &err);
    if (G_UNLIKELY (!video_buf))
      goto video_buf_error;

    if (!gst_vdp_video_buffer_upload (video_buf, buffer, vpp->fourcc,
            vpp->width, vpp->height)) {
      gst_buffer_unref (GST_BUFFER (video_buf));
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Couldn't upload YUV data to vdpau"), (NULL));
      ret = GST_FLOW_ERROR;
      goto error;
    }

    gst_buffer_copy_metadata (GST_BUFFER (video_buf), buffer,
        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);

    gst_buffer_unref (buffer);
    buffer = GST_BUFFER (video_buf);
  }

  if (G_UNLIKELY (vpp->mixer == VDP_INVALID_HANDLE)) {
    ret = gst_vdp_vpp_create_mixer (vpp);
    if (ret != GST_FLOW_OK)
      goto error;
  }

  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  ret = gst_vdp_vpp_drain (vpp);

done:
  gst_object_unref (vpp);

  return ret;

error:
  gst_buffer_unref (buffer);
  goto done;

video_buf_error:
  gst_buffer_unref (GST_BUFFER (buffer));
  gst_vdp_vpp_post_error (vpp, err);
  ret = GST_FLOW_ERROR;
  goto done;
}
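
The QoS test above reduces to: convert the buffer timestamp to running time and skip buffers that are no later than the earliest time reported by QoS events. As a standalone sketch with 0.10 signatures:

#include <gst/gst.h>

/* Sketch of the QoS skip decision used in the chain function above. */
static gboolean
qos_should_skip (GstSegment * segment, GstClockTime timestamp,
    GstClockTime earliest_time)
{
  gint64 qostime;

  if (segment->format != GST_FORMAT_TIME)
    return FALSE;               /* QoS only works on TIME segments */

  qostime = gst_segment_to_running_time (segment, GST_FORMAT_TIME, timestamp);

  return qostime != -1 && GST_CLOCK_TIME_IS_VALID (earliest_time) &&
      qostime <= earliest_time;
}
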
Example #24
static GstFlowReturn
gst_xviddec_chain (GstPad * pad, GstBuffer * buf)
{
    GstXvidDec *dec;
    GstBuffer *outbuf = NULL;
    xvid_dec_frame_t xframe;
    xvid_dec_stats_t xstats;
    gint ret;
    guint8 *data, *dupe = NULL;
    guint size;
    GstFlowReturn fret;

    dec = GST_XVIDDEC (GST_OBJECT_PARENT (pad));

    if (!dec->handle)
        goto not_negotiated;

    fret = GST_FLOW_OK;

    GST_LOG_OBJECT (dec, "Received buffer of time %" GST_TIME_FORMAT
                    " duration %" GST_TIME_FORMAT ", size %d",
                    GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
                    GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_BUFFER_SIZE (buf));

    if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
        /* FIXME: should we do anything here, like flush the decoder? */
    }

    data = GST_BUFFER_DATA (buf);
    size = GST_BUFFER_SIZE (buf);

    /* xvidcore overreads the input buffer, so we need to allocate some extra
     * padding to make things work reliably */
#define EXTRA_PADDING 16
    if (EXTRA_PADDING > 0) {
        dupe = g_malloc (size + EXTRA_PADDING);
        memcpy (dupe, data, size);
        memset (dupe + size, 0, EXTRA_PADDING);
        data = dupe;
    }

    do {                          /* loop needed because xvidcore may return vol information */
        /* decode and so ... */
        gst_xvid_init_struct (xframe);
        xframe.general = XVID_LOWDELAY;
        xframe.bitstream = (void *) data;
        xframe.length = size;

        gst_xvid_init_struct (xstats);

        if (outbuf == NULL) {
            fret = gst_pad_alloc_buffer (dec->srcpad, GST_BUFFER_OFFSET_NONE,
                                         dec->outbuf_size, GST_PAD_CAPS (dec->srcpad), &outbuf);
            if (fret != GST_FLOW_OK)
                goto done;
        }

        gst_xvid_image_fill (&xframe.output, GST_BUFFER_DATA (outbuf),
                             dec->csp, dec->width, dec->height);

        ret = xvid_decore (dec->handle, XVID_DEC_DECODE, &xframe, &xstats);
        if (ret < 0)
            goto decode_error;

        GST_LOG_OBJECT (dec, "xvid produced output, type %d, consumed %d",
                        xstats.type, ret);

        if (xstats.type == XVID_TYPE_VOL)
            gst_xviddec_negotiate (dec, &xstats);

        data += ret;
        size -= ret;
    } while (xstats.type <= 0 && size > 0);

    /* 1 byte is frequently left over */
    if (size > 1) {
        GST_WARNING_OBJECT (dec, "decoder did not consume all input");
    }

    /* FIXME, reflow the multiple return exit points */
    if (xstats.type > 0) {        /* some real output was produced */
        if (G_UNLIKELY (dec->waiting_for_key)) {
            if (xstats.type != XVID_TYPE_IVOP)
                goto dropping;

            dec->waiting_for_key = FALSE;
        }
        /* bframes can cause a delay in frames being returned
           non keyframe timestamps can permute a bit between
           encode and display order, but should match for keyframes */
        if (dec->have_ts) {
            GST_BUFFER_TIMESTAMP (outbuf) = dec->next_ts;
            GST_BUFFER_DURATION (outbuf) = dec->next_dur;
            dec->next_ts = GST_BUFFER_TIMESTAMP (buf);
            dec->next_dur = GST_BUFFER_DURATION (buf);
        } else {
            GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
            GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf);
        }
        gst_buffer_set_caps (outbuf, GST_PAD_CAPS (dec->srcpad));
        GST_LOG_OBJECT (dec, "pushing buffer with pts %" GST_TIME_FORMAT
                        " duration %" GST_TIME_FORMAT,
                        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
                        GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
        fret = gst_pad_push (dec->srcpad, outbuf);

    } else {                      /* no real output yet, delay in frames being returned */
        if (G_UNLIKELY (dec->have_ts)) {
            GST_WARNING_OBJECT (dec,
                                "xvid decoder produced no output, but timestamp %" GST_TIME_FORMAT
                                " already queued", GST_TIME_ARGS (dec->next_ts));
        } else {
            dec->have_ts = TRUE;
            dec->next_ts = GST_BUFFER_TIMESTAMP (buf);
            dec->next_dur = GST_BUFFER_DURATION (buf);
        }
        gst_buffer_unref (outbuf);
    }

done:
    g_free (dupe);
    gst_buffer_unref (buf);

    return fret;

    /* ERRORS */
not_negotiated:
    {
        GST_ELEMENT_ERROR (dec, CORE, NEGOTIATION, (NULL),
                           ("format wasn't negotiated before chain function"));
        fret = GST_FLOW_NOT_NEGOTIATED;
        goto done;
    }
decode_error:
    {
        /* FIXME: shouldn't error out fatally/properly after N decoding errors? */
        GST_ELEMENT_WARNING (dec, STREAM, DECODE, (NULL),
                             ("Error decoding xvid frame: %s (%d)", gst_xvid_error (ret), ret));
        if (outbuf)
            gst_buffer_unref (outbuf);
        goto done;
    }
dropping:
    {
        GST_WARNING_OBJECT (dec, "Dropping non-keyframe (seek/init)");
        if (outbuf)
            gst_buffer_unref (outbuf);
        goto done;
    }
}
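
The EXTRA_PADDING workaround above can be factored into a helper; this sketch keeps the same 16-byte zero padding:

#include <glib.h>
#include <string.h>

#define EXTRA_PADDING 16

/* Sketch of the over-read workaround: copy the bitstream into a
 * slightly larger, zero-padded allocation before handing it to
 * xvidcore.  The caller frees the result with g_free(). */
static guint8 *
make_padded_copy (const guint8 * data, guint size)
{
  guint8 *dupe = g_malloc (size + EXTRA_PADDING);

  memcpy (dupe, data, size);
  memset (dupe + size, 0, EXTRA_PADDING);

  return dupe;
}
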
Example #25
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *klass;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (!base_video_decoder->have_segment) {
    GstEvent *event;
    gboolean ret;

    GST_WARNING
        ("Received buffer without a new-segment. Assuming timestamps start from 0.");

    gst_segment_set_newsegment_full (&base_video_decoder->segment,
        FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
    base_video_decoder->have_segment = TRUE;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);

    ret =
        gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
        event);
    if (!ret) {
      GST_ERROR ("new segment event ret=%d", ret);
      return GST_FLOW_ERROR;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    gst_base_video_decoder_reset (base_video_decoder);
  }

  if (!base_video_decoder->started) {
    klass->start (base_video_decoder);
    base_video_decoder->started = TRUE;
  }

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
  }
  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);

#if 0
  if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("got new offset %" GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
    base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
  }
#endif

  if (base_video_decoder->packetized) {
    base_video_decoder->current_frame->sink_buffer = buf;

    ret = gst_base_video_decoder_have_frame_2 (base_video_decoder);
  } else {

    gst_adapter_push (base_video_decoder->input_adapter, buf);

    if (!base_video_decoder->have_sync) {
      int n, m;

      GST_DEBUG ("no sync, scanning");

      n = gst_adapter_available (base_video_decoder->input_adapter);
      m = klass->scan_for_sync (base_video_decoder, FALSE, 0, n);
      if (m == -1) {
        gst_object_unref (base_video_decoder);
        return GST_FLOW_OK;
      }

      if (m < 0) {
        g_warning ("subclass returned negative scan %d", m);
      }

      if (m >= n) {
        GST_ERROR ("subclass scanned past end %d >= %d", m, n);
      }

      gst_adapter_flush (base_video_decoder->input_adapter, m);

      if (m < n) {
        GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

        /* this is only "maybe" sync */
        base_video_decoder->have_sync = TRUE;
      }

      if (!base_video_decoder->have_sync) {
        gst_object_unref (base_video_decoder);
        return GST_FLOW_OK;
      }
    }

    do {
      ret = klass->parse_data (base_video_decoder, FALSE);
    } while (ret == GST_FLOW_OK);

    if (ret == GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA) {
      gst_object_unref (base_video_decoder);
      return GST_FLOW_OK;
    }
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
Example #26
static void
output_loop (gpointer data)
{
    GstPad *pad;
    GOmxCore *gomx;
    GOmxPort *out_port;
    GstOmxBaseFilter21 *self;
    GstFlowReturn ret = GST_FLOW_OK;
    GstOmxBaseFilter21Class *bclass;
    
    pad = data;
    self = GST_OMX_BASE_FILTER21 (gst_pad_get_parent (pad));
    gomx = self->gomx;

    bclass = GST_OMX_BASE_FILTER21_GET_CLASS (self);

    if (!self->ready)
    {
        g_error ("not ready");
        return;
    }

    out_port = (GOmxPort *)gst_pad_get_element_private(pad);

    if (G_LIKELY (out_port->enabled))
    {
        gpointer obj = g_omx_port_recv (out_port);
        if (G_UNLIKELY (!obj))
        {
            GST_WARNING_OBJECT (self, "null buffer: leaving");
            ret = GST_FLOW_WRONG_STATE;
            goto leave;
        }

        if (G_LIKELY (GST_IS_BUFFER (obj)))
        {
            if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (obj, GST_BUFFER_FLAG_IN_CAPS)))
            {
                GstCaps *caps = NULL;
                GstStructure *structure;
                GValue value = { 0 };

                caps = gst_pad_get_negotiated_caps (pad);
                caps = gst_caps_make_writable (caps);
                structure = gst_caps_get_structure (caps, 0);

                g_value_init (&value, GST_TYPE_BUFFER);
                gst_value_set_buffer (&value, obj);
                gst_buffer_unref (obj);
                gst_structure_set_value (structure, "codec_data", &value);
                g_value_unset (&value);

                gst_pad_set_caps (pad, caps);
            }
            else
            {
                GstBuffer *buf = GST_BUFFER (obj);
                ret = bclass->push_buffer (self, buf);
                GST_DEBUG_OBJECT (self, "ret=%s", gst_flow_get_name (ret));
                // HACK: don't care if one of the output pads is not connected
                ret = GST_FLOW_OK;
            }
        }
        else if (GST_IS_EVENT (obj))
        {
            GST_INFO_OBJECT (self, "got eos");
            gst_pad_push_event (pad, obj);
            ret = GST_FLOW_UNEXPECTED;
            goto leave;
        }

    }

leave:

    self->last_pad_push_return = ret;

    if (gomx->omx_error != OMX_ErrorNone)
    {
        GST_INFO_OBJECT (self, "omx_error=%s", g_omx_error_to_str (gomx->omx_error));
        ret = GST_FLOW_ERROR;
    }

    if (ret != GST_FLOW_OK)
    {
        GST_INFO_OBJECT (self, "pause task, reason:  %s",
                         gst_flow_get_name (ret));
        gst_pad_pause_task (pad);
    }

    gst_object_unref (self);
    
}
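
The IN_CAPS branch above stores header buffers in the negotiated caps as codec_data instead of pushing them downstream. A condensed sketch of that path (GStreamer 0.10 API, mirroring the calls used above):

#include <gst/gst.h>

/* Sketch of the codec_data path: attach a header buffer to the pad
 * caps under "codec_data".  The caller keeps its own reference to
 * `codec_data`; the GValue holds and releases a separate one. */
static void
set_codec_data (GstPad * pad, GstBuffer * codec_data)
{
  GstCaps *caps;
  GstStructure *structure;
  GValue value = { 0 };

  caps = gst_caps_make_writable (gst_pad_get_negotiated_caps (pad));
  structure = gst_caps_get_structure (caps, 0);

  g_value_init (&value, GST_TYPE_BUFFER);
  gst_value_set_buffer (&value, codec_data);  /* value takes its own ref */
  gst_structure_set_value (structure, "codec_data", &value);
  g_value_unset (&value);

  gst_pad_set_caps (pad, caps);       /* pad takes its own ref to caps */
  gst_caps_unref (caps);
}
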
Example #27
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  MOZ_ASSERT(OnTaskQueue());
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  LOG(LogLevel::Debug, "content-type: %s %s",
      mDecoder->GetResource()->GetContentType().get(),
      mDecoder->GetResource()->GetContentURL().get());
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodeable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(LogLevel::Debug, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(LogLevel::Debug, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(LogLevel::Debug, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(LogLevel::Debug, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(LogLevel::Error, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mInfo.mMetadataDuration.emplace(TimeUnit::FromMicroseconds(mLastParserDuration));
  } else {
    LOG(LogLevel::Debug, "querying duration");
    // Otherwise use the gstreamer duration.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      LOG(LogLevel::Debug, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mInfo.mMetadataDuration.emplace(TimeUnit::FromMicroseconds(duration));
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);

  if (!n_video) {
    mInfo.mVideo = VideoInfo();
  }
  if (!n_audio) {
    mInfo.mAudio = AudioInfo();
  }
  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}
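
The preroll wait in ReadMetadata boils down to a single blocking bus call; a sketch, assuming only that the caller owns and unrefs the returned message:

#include <gst/gst.h>

/* Sketch of the preroll wait: block until the pipeline either finishes
 * its async state change (preroll complete) or posts an error/EOS.
 * Returns TRUE only on a clean ASYNC_DONE. */
static gboolean
wait_for_preroll (GstBus * bus)
{
  GstMessage *msg;
  gboolean ok;

  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      (GstMessageType) (GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR |
          GST_MESSAGE_EOS));
  if (msg == NULL)              /* bus was set to flushing */
    return FALSE;

  ok = (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ASYNC_DONE);
  gst_message_unref (msg);

  return ok;
}
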

bool
GStreamerReader::IsMediaSeekable()
{
  if (mUseParserDuration) {
    return true;
  }

  gint64 duration;
#if GST_VERSION_MAJOR >= 1
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), GST_FORMAT_TIME,
                                 &duration)) {
#else
  GstFormat format = GST_FORMAT_TIME;
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), &format, &duration) &&
      format == GST_FORMAT_TIME) {
#endif
    return true;
  }

  return false;
}

nsresult GStreamerReader::CheckSupportedFormats()
{
  bool done = false;
  bool unsupported = false;

  GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
  while (!done) {
    GstIteratorResult res;
    GstElement* element;

#if GST_VERSION_MAJOR >= 1
    GValue value = {0,};
    res = gst_iterator_next(it, &value);
#else
    res = gst_iterator_next(it, (void **) &element);
#endif
    switch(res) {
      case GST_ITERATOR_OK:
      {
#if GST_VERSION_MAJOR >= 1
        element = GST_ELEMENT (g_value_get_object (&value));
#endif
        GstElementFactory* factory = gst_element_get_factory(element);
        if (factory) {
          const char* klass = gst_element_factory_get_klass(factory);
          GstPad* pad = gst_element_get_static_pad(element, "sink");
          if (pad) {
            GstCaps* caps;

#if GST_VERSION_MAJOR >= 1
            caps = gst_pad_get_current_caps(pad);
#else
            caps = gst_pad_get_negotiated_caps(pad);
#endif

            if (caps) {
              /* check for demuxers but ignore elements like id3demux */
              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
              else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);

              gst_caps_unref(caps);
            }
            gst_object_unref(pad);
          }
        }

#if GST_VERSION_MAJOR >= 1
        g_value_unset (&value);
#else
        gst_object_unref(element);
#endif
        done = unsupported;
        break;
      }
      case GST_ITERATOR_RESYNC:
        unsupported = false;
        break;
      case GST_ITERATOR_ERROR:
        done = true;
        break;
      case GST_ITERATOR_DONE:
        done = true;
        break;
    }
  }

  gst_iterator_free(it);

  return unsupported ? NS_ERROR_FAILURE : NS_OK;
}

nsresult GStreamerReader::ResetDecode()
{
  nsresult res = NS_OK;

  LOG(LogLevel::Debug, "reset decode");

  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
    res = NS_ERROR_FAILURE;
  }

  mVideoQueue.Reset();
  mAudioQueue.Reset();

  mVideoSinkBufferCount = 0;
  mAudioSinkBufferCount = 0;
  mReachedAudioEos = false;
  mReachedVideoEos = false;
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment = true;
#endif

  LOG(LogLevel::Debug, "reset decode done");

  return res;
}

bool GStreamerReader::DecodeAudioData()
{
  MOZ_ASSERT(OnTaskQueue());

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mAudioSinkBufferCount) {
      if (!mVideoSinkBufferCount) {
        /* We have nothing decoded, so there is no point in returning to the
         * state machine: it would call us back immediately, we would return
         * again, and so on, wasting CPU cycles. Block here until either video
         * or audio data is available.
         */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mAudioSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}
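
The frame count above is plain arithmetic over the mapped buffer: total samples divided by the channel count, where the sample size in the original is sizeof(AudioDataValue). A sketch with the sample size passed in explicitly:

#include <glib.h>

/* Sketch of the frame arithmetic: an interleaved PCM buffer of `size`
 * bytes holds size / sample_size samples in total, and one frame is
 * one sample per channel. */
static gint
frames_in_buffer (gsize size, gsize sample_size, gint channels)
{
  return (gint) ((size / sample_size) / channels);
}
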

bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
                                       int64_t aTimeThreshold)
{
  MOZ_ASSERT(OnTaskQueue());

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedVideoEos && !mVideoSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mVideoSinkBufferCount) {
      if (!mAudioSinkBufferCount) {
        /* We have nothing decoded, so it makes no sense to return to the state
         * machine: it would call us back immediately, we'd return again, and
         * so on, wasting CPU cycles for no work done. Block here until either
         * video or audio data is available.
         */
        mon.Wait();
        if (!mVideoSinkBufferCount) {
          /* There is still no video data available, so either there is audio
           * data or something else has happened (EOS, etc.). Return to the
           * state machine to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    mDecoder->NotifyDecodedFrames(0, 1, 0);

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
#endif
    mVideoSinkBufferCount--;
  }

  if (!buffer) {
    /* the app sink can return no buffer on flush or EOS; check before the
     * first dereference rather than after */
    return true;
  }

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (aKeyFrameSkip && !isKeyframe) {
    mDecoder->NotifyDecodedFrames(0, 0, 1);
    gst_buffer_unref(buffer);
    return true;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mVideoSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
               "frame has invalid timestamp");

  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer))) {
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
  } else if (fpsNum && fpsDen) {
    /* fall back to a 1-frame duration, converted to microseconds so the
     * units match the timestamp above */
    duration = GST_TIME_AS_USECONDS(
        gst_util_uint64_scale(GST_SECOND, fpsDen, fpsNum));
  }

  if (timestamp < aTimeThreshold) {
    LOG(LogLevel::Debug, "skipping frame %" GST_TIME_FORMAT
                      " threshold %" GST_TIME_FORMAT,
                      GST_TIME_ARGS(timestamp * 1000),
                      GST_TIME_ARGS(aTimeThreshold * 1000));
    gst_buffer_unref(buffer);
    return true;
  }

#if GST_VERSION_MAJOR >= 1
  if (mConfigureAlignment && buffer->pool) {
    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
    GstVideoAlignment align;
    if (gst_buffer_pool_config_get_video_alignment(config, &align))
      gst_video_info_align(&mVideoInfo, &align);
    gst_structure_free(config);
    mConfigureAlignment = false;
  }
#endif

  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    CopyIntoImageBuffer(buffer, &tmp, image);
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  int64_t offset = mResource.Tell(); // Estimate location in media.
  nsRefPtr<VideoData> video = VideoData::CreateFromImage(mInfo.mVideo,
                                                         mDecoder->GetImageContainer(),
                                                         offset, timestamp, duration,
                                                         static_cast<Image*>(image.get()),
                                                         isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);

  gst_buffer_unref(buffer);

  return true;
}
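
/* Both decode paths above convert a raw buffer timestamp into stream time
 * and then into microseconds. A standalone sketch of that conversion, under
 * the assumption that the segment is in GST_FORMAT_TIME as in the reader
 * (the helper name is illustrative): */
static int64_t
BufferTimeToUsecs(GstSegment* segment, GstBuffer* buffer)
{
  GstClockTime pts = GST_BUFFER_TIMESTAMP(buffer);
  if (!GST_CLOCK_TIME_IS_VALID(pts)) {
    return -1;
  }
  guint64 streamTime =
      gst_segment_to_stream_time(segment, GST_FORMAT_TIME, pts);
  if (streamTime == GST_CLOCK_TIME_NONE) {
    return -1; /* position falls outside the configured segment */
  }
  return GST_TIME_AS_USECONDS(streamTime);
}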
Example #28
/* Called with the object lock for both the element and pad held,
 * as well as the aagg lock
 */
static gboolean
gst_audio_aggregator_fill_buffer (GstAudioAggregator * aagg,
    GstAudioAggregatorPad * pad, GstBuffer * inbuf)
{
  GstClockTime start_time, end_time;
  gboolean discont = FALSE;
  guint64 start_offset, end_offset;
  gint rate, bpf;

  GstAggregator *agg = GST_AGGREGATOR (aagg);
  GstAggregatorPad *aggpad = GST_AGGREGATOR_PAD (pad);

  g_assert (pad->priv->buffer == NULL);

  rate = GST_AUDIO_INFO_RATE (&pad->info);
  bpf = GST_AUDIO_INFO_BPF (&pad->info);

  pad->priv->position = 0;
  pad->priv->size = gst_buffer_get_size (inbuf) / bpf;

  if (!GST_BUFFER_PTS_IS_VALID (inbuf)) {
    if (pad->priv->output_offset == -1)
      pad->priv->output_offset = aagg->priv->offset;
    if (pad->priv->next_offset == -1)
      pad->priv->next_offset = pad->priv->size;
    else
      pad->priv->next_offset += pad->priv->size;
    goto done;
  }

  start_time = GST_BUFFER_PTS (inbuf);
  end_time =
      start_time + gst_util_uint64_scale_ceil (pad->priv->size, GST_SECOND,
      rate);

  /* Clipping should've ensured this */
  g_assert (start_time >= aggpad->segment.start);

  start_offset =
      gst_util_uint64_scale (start_time - aggpad->segment.start, rate,
      GST_SECOND);
  end_offset = start_offset + pad->priv->size;

  if (GST_BUFFER_IS_DISCONT (inbuf)
      || GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_RESYNC)
      || pad->priv->new_segment || pad->priv->next_offset == -1) {
    discont = TRUE;
    pad->priv->new_segment = FALSE;
  } else {
    guint64 diff, max_sample_diff;

    /* Check discont, based on audiobasesink */
    if (start_offset <= pad->priv->next_offset)
      diff = pad->priv->next_offset - start_offset;
    else
      diff = start_offset - pad->priv->next_offset;

    max_sample_diff =
        gst_util_uint64_scale_int (aagg->priv->alignment_threshold, rate,
        GST_SECOND);

    /* Discont! */
    if (G_UNLIKELY (diff >= max_sample_diff)) {
      if (aagg->priv->discont_wait > 0) {
        if (pad->priv->discont_time == GST_CLOCK_TIME_NONE) {
          pad->priv->discont_time = start_time;
        } else if (start_time - pad->priv->discont_time >=
            aagg->priv->discont_wait) {
          discont = TRUE;
          pad->priv->discont_time = GST_CLOCK_TIME_NONE;
        }
      } else {
        discont = TRUE;
      }
    } else if (G_UNLIKELY (pad->priv->discont_time != GST_CLOCK_TIME_NONE)) {
      /* we have had a discont, but are now back on track! */
      pad->priv->discont_time = GST_CLOCK_TIME_NONE;
    }
  }

  if (discont) {
    /* Have discont, need resync */
    if (pad->priv->next_offset != -1)
      GST_INFO_OBJECT (pad, "Have discont. Expected %"
          G_GUINT64_FORMAT ", got %" G_GUINT64_FORMAT,
          pad->priv->next_offset, start_offset);
    pad->priv->output_offset = -1;
    pad->priv->next_offset = end_offset;
  } else {
    pad->priv->next_offset += pad->priv->size;
  }

  if (pad->priv->output_offset == -1) {
    GstClockTime start_running_time;
    GstClockTime end_running_time;
    guint64 start_output_offset;
    guint64 end_output_offset;

    start_running_time =
        gst_segment_to_running_time (&aggpad->segment,
        GST_FORMAT_TIME, start_time);
    end_running_time =
        gst_segment_to_running_time (&aggpad->segment,
        GST_FORMAT_TIME, end_time);

    /* Convert to position in the output segment */
    start_output_offset =
        gst_segment_to_position (&agg->segment, GST_FORMAT_TIME,
        start_running_time);
    if (start_output_offset != -1)
      start_output_offset =
          gst_util_uint64_scale (start_output_offset - agg->segment.start, rate,
          GST_SECOND);

    end_output_offset =
        gst_segment_to_position (&agg->segment, GST_FORMAT_TIME,
        end_running_time);
    if (end_output_offset != -1)
      end_output_offset =
          gst_util_uint64_scale (end_output_offset - agg->segment.start, rate,
          GST_SECOND);

    if (start_output_offset == -1 && end_output_offset == -1) {
      /* Outside output segment, drop */
      gst_buffer_unref (inbuf);
      pad->priv->buffer = NULL;
      pad->priv->position = 0;
      pad->priv->size = 0;
      pad->priv->output_offset = -1;
      GST_DEBUG_OBJECT (pad, "Buffer outside output segment");
      return FALSE;
    }

    /* Calculate end_output_offset if it was outside the output segment */
    if (end_output_offset == -1)
      end_output_offset = start_output_offset + pad->priv->size;

    if (end_output_offset < aagg->priv->offset) {
      /* Before output segment, drop */
      gst_buffer_unref (inbuf);
      pad->priv->buffer = NULL;
      pad->priv->position = 0;
      pad->priv->size = 0;
      pad->priv->output_offset = -1;
      GST_DEBUG_OBJECT (pad,
          "Buffer before segment or current position: %" G_GUINT64_FORMAT " < %"
          G_GINT64_FORMAT, end_output_offset, aagg->priv->offset);
      return FALSE;
    }

    if (start_output_offset == -1 || start_output_offset < aagg->priv->offset) {
      guint diff;

      if (start_output_offset == -1 && end_output_offset < pad->priv->size) {
        diff = pad->priv->size - end_output_offset + aagg->priv->offset;
      } else if (start_output_offset == -1) {
        start_output_offset = end_output_offset - pad->priv->size;

        if (start_output_offset < aagg->priv->offset)
          diff = aagg->priv->offset - start_output_offset;
        else
          diff = 0;
      } else {
        diff = aagg->priv->offset - start_output_offset;
      }

      pad->priv->position += diff;
      if (pad->priv->position >= pad->priv->size) {
        /* Empty buffer, drop */
        gst_buffer_unref (inbuf);
        pad->priv->buffer = NULL;
        pad->priv->position = 0;
        pad->priv->size = 0;
        pad->priv->output_offset = -1;
        GST_DEBUG_OBJECT (pad,
            "Buffer before segment or current position: %" G_GUINT64_FORMAT
            " < %" G_GINT64_FORMAT, end_output_offset, aagg->priv->offset);
        return FALSE;
      }
    }

    if (start_output_offset == -1 || start_output_offset < aagg->priv->offset)
      pad->priv->output_offset = aagg->priv->offset;
    else
      pad->priv->output_offset = start_output_offset;

    GST_DEBUG_OBJECT (pad,
        "Buffer resynced: Pad offset %" G_GUINT64_FORMAT
        ", current audio aggregator offset %" G_GINT64_FORMAT,
        pad->priv->output_offset, aagg->priv->offset);
  }

done:

  GST_LOG_OBJECT (pad,
      "Queued new buffer at offset %" G_GUINT64_FORMAT,
      pad->priv->output_offset);
  pad->priv->buffer = inbuf;

  return TRUE;
}
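
/* A condensed sketch of the drift check performed in
 * gst_audio_aggregator_fill_buffer() above: compare the expected and actual
 * sample offsets and flag a discontinuity once the difference exceeds the
 * alignment threshold, converted from time to samples. Names are local to
 * this sketch. */
static gboolean
sample_offsets_discont (guint64 expected_offset, guint64 actual_offset,
    GstClockTime alignment_threshold, gint rate)
{
  guint64 diff = (actual_offset >= expected_offset)
      ? actual_offset - expected_offset : expected_offset - actual_offset;
  guint64 max_sample_diff =
      gst_util_uint64_scale_int (alignment_threshold, rate, GST_SECOND);

  return diff >= max_sample_diff;
}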
Example #29
static GstFlowReturn
gst_amrwbenc_chain (GstPad * pad, GstBuffer * buffer)
{
  GstAmrwbEnc *amrwbenc;
  GstFlowReturn ret = GST_FLOW_OK;
  const int buffer_size = sizeof (Word16) * L_FRAME16k;

  amrwbenc = GST_AMRWBENC (gst_pad_get_parent (pad));

  g_return_val_if_fail (amrwbenc->handle, GST_FLOW_WRONG_STATE);

  if (amrwbenc->rate == 0 || amrwbenc->channels == 0) {
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

  /* discontinuity clears adapter, FIXME, maybe we can set some
   * encoder flag to mask the discont. */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (amrwbenc->adapter);
    amrwbenc->ts = 0;
    amrwbenc->discont = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    amrwbenc->ts = GST_BUFFER_TIMESTAMP (buffer);

  ret = GST_FLOW_OK;
  gst_adapter_push (amrwbenc->adapter, buffer);

  /* Collect samples until we have enough for an output frame */
  while (gst_adapter_available (amrwbenc->adapter) >= buffer_size) {
    GstBuffer *out;
    guint8 *data;
    gint outsize;

    out = gst_buffer_new_and_alloc (buffer_size);
    GST_BUFFER_DURATION (out) = GST_SECOND * L_FRAME16k /
        (amrwbenc->rate * amrwbenc->channels);
    GST_BUFFER_TIMESTAMP (out) = amrwbenc->ts;
    if (amrwbenc->ts != -1) {
      amrwbenc->ts += GST_BUFFER_DURATION (out);
    }
    if (amrwbenc->discont) {
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);
      amrwbenc->discont = FALSE;
    }
    /* GST_PAD_CAPS() does not take a reference; gst_pad_get_caps() here
     * would leak one caps ref per output frame */
    gst_buffer_set_caps (out, GST_PAD_CAPS (amrwbenc->srcpad));

    data = (guint8 *) gst_adapter_peek (amrwbenc->adapter, buffer_size);

    /* encode */
    outsize =
        E_IF_encode (amrwbenc->handle, amrwbenc->bandmode, (Word16 *) data,
        (UWord8 *) GST_BUFFER_DATA (out), 0);

    gst_adapter_flush (amrwbenc->adapter, buffer_size);
    GST_BUFFER_SIZE (out) = outsize;

    /* play */
    if ((ret = gst_pad_push (amrwbenc->srcpad, out)) != GST_FLOW_OK)
      break;
  }

done:

  gst_object_unref (amrwbenc);
  return ret;
}
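
/* The heart of the chain function above is the standard 0.10-era GstAdapter
 * accumulate-and-drain loop: push each input buffer, then peek, encode, and
 * flush fixed-size frames while enough data is queued. A minimal sketch with
 * a hypothetical encode_frame() callback: */
static void
drain_fixed_frames (GstAdapter * adapter, guint frame_size,
    void (*encode_frame) (const guint8 * data, guint size))
{
  while (gst_adapter_available (adapter) >= frame_size) {
    const guint8 *data = gst_adapter_peek (adapter, frame_size);

    encode_frame (data, frame_size);
    /* flush only after the frame has been consumed; peek does not copy */
    gst_adapter_flush (adapter, frame_size);
  }
}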
Example #30
static GstFlowReturn
gst_base_video_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoParse *base_video_parse;
  GstBaseVideoParseClass *klass;
  GstBuffer *buffer;
  GstFlowReturn ret;

  GST_DEBUG ("chain with %d bytes", GST_BUFFER_SIZE (buf));

  base_video_parse = GST_BASE_VIDEO_PARSE (GST_PAD_PARENT (pad));
  klass = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);

  if (!base_video_parse->started) {
    klass->start (base_video_parse);
    base_video_parse->started = TRUE;
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_parse, "received DISCONT buffer");
    gst_base_video_parse_reset (base_video_parse);
    base_video_parse->discont = TRUE;
    base_video_parse->have_sync = FALSE;
  }

  if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    base_video_parse->last_timestamp = GST_BUFFER_TIMESTAMP (buf);
  }
  gst_adapter_push (base_video_parse->input_adapter, buf);

  if (!base_video_parse->have_sync) {
    int n, m;

    GST_DEBUG ("no sync, scanning");

    n = gst_adapter_available (base_video_parse->input_adapter);
    m = klass->scan_for_sync (base_video_parse->input_adapter, FALSE, 0, n);

    gst_adapter_flush (base_video_parse->input_adapter, m);

    if (m < n) {
      GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

      /* this is only "maybe" sync */
      base_video_parse->have_sync = TRUE;
    }

    if (!base_video_parse->have_sync) {
      return GST_FLOW_OK;
    }
  }

  /* FIXME: this get/unref pair is effectively a no-op as written; use
   * gst_adapter_prev_timestamp() to recover the timestamp instead? */
  buffer = gst_adapter_get_buffer (base_video_parse->input_adapter);

  gst_buffer_unref (buffer);

  /* FIXME check klass->parse_data */

  do {
    ret = klass->parse_data (base_video_parse, FALSE);
  } while (ret == GST_FLOW_OK);

  if (ret == GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA) {
    return GST_FLOW_OK;
  }
  return ret;
}
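
/* gst_base_video_parse_chain() above delegates sync detection to
 * klass->scan_for_sync(), which returns how many bytes can safely be flushed
 * before a possible sync point (m == n meaning none found). A minimal sketch
 * of such a scanner for a hypothetical 0x00 0x00 0x01 start code; the
 * pattern is illustrative, real subclasses define their own: */
static int
scan_for_start_code (GstAdapter * adapter, int n)
{
  const guint8 *data;
  int i;

  if (n < 3)
    return n;

  data = gst_adapter_peek (adapter, n);
  for (i = 0; i + 2 < n; i++) {
    if (data[i] == 0x00 && data[i + 1] == 0x00 && data[i + 2] == 0x01)
      return i; /* flushable bytes before the candidate sync point */
  }
  /* no sync found; returning n matches the m == n convention above (a real
   * scanner would keep the last two bytes in case a start code straddles
   * the next buffer) */
  return n;
}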