Example #1
static GstFlowReturn
gst_jpeg_parse_push_buffer (GstJpegParse * parse, guint len)
{
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean header_ok;

  /* reset the offset (only when we flushed) */
  parse->priv->last_offset = 0;
  parse->priv->last_entropy_len = 0;

  outbuf = gst_adapter_take_buffer (parse->priv->adapter, len);
  if (outbuf == NULL) {
    GST_ELEMENT_ERROR (parse, STREAM, DECODE,
        ("Failed to take buffer of size %u", len),
        ("Failed to take buffer of size %u", len));
    return GST_FLOW_ERROR;
  }

  header_ok = gst_jpeg_parse_read_header (parse, outbuf);

  if (parse->priv->new_segment == TRUE
      || parse->priv->width != parse->priv->caps_width
      || parse->priv->height != parse->priv->caps_height
      || parse->priv->framerate_numerator !=
      parse->priv->caps_framerate_numerator
      || parse->priv->framerate_denominator !=
      parse->priv->caps_framerate_denominator) {
    if (!gst_jpeg_parse_set_new_caps (parse, header_ok)) {
      GST_ELEMENT_ERROR (parse, CORE, NEGOTIATION,
          ("Can't set caps to the src pad"), ("Can't set caps to the src pad"));
      return GST_FLOW_ERROR;
    }

    if (parse->priv->tags) {
      GST_DEBUG_OBJECT (parse, "Pushing tags: %" GST_PTR_FORMAT,
          parse->priv->tags);
      gst_element_found_tags_for_pad (GST_ELEMENT_CAST (parse),
          parse->priv->srcpad, parse->priv->tags);
      parse->priv->tags = NULL;
    }

    parse->priv->new_segment = FALSE;
    parse->priv->caps_width = parse->priv->width;
    parse->priv->caps_height = parse->priv->height;
    parse->priv->caps_framerate_numerator = parse->priv->framerate_numerator;
    parse->priv->caps_framerate_denominator =
        parse->priv->framerate_denominator;
  }

  GST_BUFFER_TIMESTAMP (outbuf) = parse->priv->next_ts;

  if (parse->priv->has_fps && GST_CLOCK_TIME_IS_VALID (parse->priv->next_ts)
      && GST_CLOCK_TIME_IS_VALID (parse->priv->duration)) {
    parse->priv->next_ts += parse->priv->duration;
  } else {
    parse->priv->duration = GST_CLOCK_TIME_NONE;
    parse->priv->next_ts = GST_CLOCK_TIME_NONE;
  }

  GST_BUFFER_DURATION (outbuf) = parse->priv->duration;

  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (parse->priv->srcpad));

  GST_LOG_OBJECT (parse, "pushing buffer (ts=%" GST_TIME_FORMAT ", len=%u)",
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)), len);

  ret = gst_pad_push (parse->priv->srcpad, outbuf);

  return ret;
}
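
Both parsers in this section follow the same GstAdapter pattern: push incoming buffers into the adapter, wait until enough bytes are available, then take or flush them. Below is a minimal 0.10-style sketch of that pattern; the 4-byte little-endian length prefix is an invented framing, purely for illustration.

#include <gst/gst.h>
#include <gst/base/gstadapter.h>

/* Sketch only: accumulate input in an adapter and push complete,
 * length-prefixed units downstream. The framing is hypothetical. */
static GstFlowReturn
example_chain (GstAdapter * adapter, GstPad * srcpad, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;

  gst_adapter_push (adapter, buf);      /* adapter takes ownership of buf */

  while (gst_adapter_available (adapter) >= 4) {
    const guint8 *data = gst_adapter_peek (adapter, 4);
    guint len = GST_READ_UINT32_LE (data);

    /* wait until the whole unit is in the adapter */
    if (gst_adapter_available (adapter) < 4 + len)
      break;

    gst_adapter_flush (adapter, 4);     /* drop the length prefix */
    if (len == 0)
      continue;

    ret = gst_pad_push (srcpad, gst_adapter_take_buffer (adapter, len));
    if (ret != GST_FLOW_OK)
      break;
  }
  return ret;
}
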
/* chain function
 * this function does the actual processing
 */
static GstFlowReturn
gst_ivf_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstIvfParse *ivf = GST_IVF_PARSE (GST_OBJECT_PARENT (pad));
  gboolean res;

  /* lazy creation of the adapter */
  if (G_UNLIKELY (ivf->adapter == NULL)) {
    ivf->adapter = gst_adapter_new ();
  }

  GST_LOG_OBJECT (ivf, "Pushing buffer of size %u to adapter",
      GST_BUFFER_SIZE (buf));

  gst_adapter_push (ivf->adapter, buf); /* adapter takes ownership of buf */

  res = GST_FLOW_OK;

  switch (ivf->state) {
    case GST_IVF_PARSE_START:
      if (gst_adapter_available (ivf->adapter) >= 32) {
        GstCaps *caps;

        const guint8 *data = gst_adapter_peek (ivf->adapter, 32);
        guint32 magic = GST_READ_UINT32_LE (data);
        guint16 version = GST_READ_UINT16_LE (data + 4);
        guint16 header_size = GST_READ_UINT16_LE (data + 6);
        guint32 fourcc = GST_READ_UINT32_LE (data + 8);
        guint16 width = GST_READ_UINT16_LE (data + 12);
        guint16 height = GST_READ_UINT16_LE (data + 14);
        guint32 rate_num = GST_READ_UINT32_LE (data + 16);
        guint32 rate_den = GST_READ_UINT32_LE (data + 20);
#ifndef GST_DISABLE_GST_DEBUG
        guint32 num_frames = GST_READ_UINT32_LE (data + 24);
#endif

        /* last 4 bytes unused */
        gst_adapter_flush (ivf->adapter, 32);

        if (magic != GST_MAKE_FOURCC ('D', 'K', 'I', 'F') ||
            version != 0 || header_size != 32 ||
            fourcc != GST_MAKE_FOURCC ('V', 'P', '8', '0')) {
          GST_ELEMENT_ERROR (ivf, STREAM, WRONG_TYPE, (NULL), (NULL));
          return GST_FLOW_ERROR;
        }

        /* create src pad caps */
        caps = gst_caps_new_simple ("video/x-vp8",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, rate_num, rate_den, NULL);

        GST_INFO_OBJECT (ivf, "Found stream: %" GST_PTR_FORMAT, caps);

        GST_LOG_OBJECT (ivf, "Stream has %d frames", num_frames);

        gst_pad_set_caps (ivf->srcpad, caps);
        gst_caps_unref (caps);

        /* keep framerate in instance for convenience */
        ivf->rate_num = rate_num;
        ivf->rate_den = rate_den;

        gst_pad_push_event (ivf->srcpad, gst_event_new_new_segment (FALSE, 1.0,
                GST_FORMAT_TIME, 0, -1, 0));

        /* move along */
        ivf->state = GST_IVF_PARSE_DATA;
      } else {
        GST_LOG_OBJECT (ivf, "Header data not yet available.");
        break;
      }

      /* fall through */

    case GST_IVF_PARSE_DATA:
      while (gst_adapter_available (ivf->adapter) > 12) {
        const guint8 *data = gst_adapter_peek (ivf->adapter, 12);
        guint32 frame_size = GST_READ_UINT32_LE (data);
        guint64 frame_pts = GST_READ_UINT64_LE (data + 4);

        GST_LOG_OBJECT (ivf,
            "Read frame header: size %u, pts %" G_GUINT64_FORMAT, frame_size,
            frame_pts);

        if (gst_adapter_available (ivf->adapter) >= 12 + frame_size) {
          GstBuffer *frame;

          gst_adapter_flush (ivf->adapter, 12);

          frame = gst_adapter_take_buffer (ivf->adapter, frame_size);
          gst_buffer_set_caps (frame, GST_PAD_CAPS (ivf->srcpad));
          GST_BUFFER_TIMESTAMP (frame) =
              gst_util_uint64_scale_int (GST_SECOND * frame_pts, ivf->rate_den,
              ivf->rate_num);
          GST_BUFFER_DURATION (frame) =
              gst_util_uint64_scale_int (GST_SECOND, ivf->rate_den,
              ivf->rate_num);

          GST_DEBUG_OBJECT (ivf, "Pushing frame of size %u, ts %"
              GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %"
              G_GUINT64_FORMAT ", off_end %" G_GUINT64_FORMAT,
              GST_BUFFER_SIZE (frame),
              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (frame)),
              GST_TIME_ARGS (GST_BUFFER_DURATION (frame)),
              GST_BUFFER_OFFSET (frame), GST_BUFFER_OFFSET_END (frame));

          res = gst_pad_push (ivf->srcpad, frame);
          if (res != GST_FLOW_OK)
            break;
        } else {
          GST_LOG_OBJECT (ivf, "Frame data not yet available.");
          break;
        }
      }
      break;

    default:
      g_return_val_if_reached (GST_FLOW_ERROR);
  }

  return res;
}
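
The 32-byte IVF header parsed above has a fixed little-endian layout. For reference, here is a self-contained sketch of the same parse without the GStreamer adapter; field offsets mirror the GST_READ_UINT*_LE calls in the element.

#include <stdint.h>
#include <string.h>

typedef struct {
  uint16_t version, header_size, width, height;
  uint32_t fourcc, rate_num, rate_den, num_frames;
} IvfHeader;

static uint16_t
rd16le (const uint8_t * p)
{
  return (uint16_t) (p[0] | (p[1] << 8));
}

static uint32_t
rd32le (const uint8_t * p)
{
  return (uint32_t) p[0] | ((uint32_t) p[1] << 8) |
      ((uint32_t) p[2] << 16) | ((uint32_t) p[3] << 24);
}

/* Returns non-zero if the 32 bytes look like a valid IVF "DKIF" header. */
static int
ivf_parse_header (const uint8_t data[32], IvfHeader * h)
{
  if (memcmp (data, "DKIF", 4) != 0)    /* same check as the LE fourcc above */
    return 0;
  h->version = rd16le (data + 4);
  h->header_size = rd16le (data + 6);
  h->fourcc = rd32le (data + 8);        /* "VP80" for VP8 payload */
  h->width = rd16le (data + 12);
  h->height = rd16le (data + 14);
  h->rate_num = rd32le (data + 16);
  h->rate_den = rd32le (data + 20);
  h->num_frames = rd32le (data + 24);   /* last 4 bytes unused */
  return h->version == 0 && h->header_size == 32;
}
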
Example #3
static GstFlowReturn
gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
{
  guint avail;
  GstBufferList *list = NULL;
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret;
  gboolean fragmented = FALSE;

  avail = gst_adapter_available (rtph263ppay->adapter);
  if (avail == 0)
    return GST_FLOW_OK;

  fragmented = FALSE;
  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each
   * buffer */
  /* With fragmentation mode GST_FRAGMENTATION_MODE_NORMAL:
   *  This algorithm implements the Follow-on packets method for packetization.
   *  It assumes a low packet-loss network.
   * With fragmentation mode GST_FRAGMENTATION_MODE_SYNC:
   *  This algorithm splits large frames at synchronisation points (Segments)
   *  (see RFC 4629 section 6). It would be interesting to have a property such
   *  as network quality to select between the two packetization methods */
  /* TODO Add VRC support (see RFC 4629 section 5.2) */

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    gint header_len;
    guint next_gop = 0;
    gboolean found_gob = FALSE;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *payload_buf;

    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
      /* start after 1st gop possible */

      /* Check if we have a GOB, EOS or EOSSBS header */
      /* FIXME EOS and EOSSBS packets should never contain any GOBs and
       * vice versa */
      next_gop =
          gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
          0x00008000, 0, avail);
      if (next_gop == 0) {
        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
        found_gob = TRUE;
      }

      /* Find next and cut the packet accordingly */
      /* TODO we should get as many gobs as possible until MTU is reached, this
       * code seems to just get one GOB per packet */
      if (next_gop == 0 && avail > 3)
        next_gop =
            gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
            0x00008000, 3, avail - 3);
      GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at :  %d", next_gop);
      if (next_gop == -1)
        next_gop = 0;
    }

    /* for picture start frames (non-fragmented), we need to remove the first
     * two 0x00 bytes and set P=1 */
    if (!fragmented || found_gob) {
      gst_adapter_flush (rtph263ppay->adapter, 2);
      avail -= 2;
    }
    header_len = 2;

    towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
        (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));

    if (next_gop > 0)
      towrite = MIN (next_gop, towrite);

    outbuf = gst_rtp_buffer_new_allocate (header_len, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    /* last fragment gets the marker bit set */
    gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);

    payload = gst_rtp_buffer_get_payload (&rtp);

    /*  0                   1
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |   RR    |P|V|   PLEN    |PEBIT|
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    /* set P=1 (0x04) for picture starts and GOB starts; continuation
     * fragments without a GOB header get P=0 */
    payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
    payload[1] = 0;

    GST_BUFFER_PTS (outbuf) = rtph263ppay->first_timestamp;
    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
    gst_rtp_buffer_unmap (&rtp);

    payload_buf = gst_adapter_take_buffer_fast (rtph263ppay->adapter, towrite);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtph263ppay), outbuf, payload_buf,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
    outbuf = gst_buffer_append (outbuf, payload_buf);
    avail -= towrite;

    /* If more data is available and this is our first iteration,
     * we create a buffer list and remember that we're fragmented.
     *
     * If we're fragmented already, add buffers to the previously
     * created buffer list.
     *
     * Otherwise fragmented will be FALSE and we just push the single output
     * buffer, and no list is allocated.
     */
    if (avail && !fragmented) {
      fragmented = TRUE;
      list = gst_buffer_list_new ();
      gst_buffer_list_add (list, outbuf);
    } else if (fragmented) {
      gst_buffer_list_add (list, outbuf);
    }
  }

  if (fragmented) {
    ret =
        gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtph263ppay),
        list);
  } else {
    ret =
        gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263ppay), outbuf);
  }

  return ret;
}
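
The two payload bytes written above are the RFC 4629 H.263+ header shown in the ASCII diagram (RR|P|V|PLEN|PEBIT). A small sketch of the same bit layout, packing each field explicitly:

#include <stdint.h>

/* RR(5) | P(1) | V(1) | PLEN(6) | PEBIT(3); RR is reserved and zero.
 * With p=1 and all other fields zero this yields the 0x04 0x00 pair the
 * element writes for picture/GOB starts. */
static void
h263p_write_header (uint8_t hdr[2], int p, int v, unsigned plen,
    unsigned pebit)
{
  hdr[0] = (uint8_t) (((p & 1) << 2) | ((v & 1) << 1) | ((plen >> 5) & 1));
  hdr[1] = (uint8_t) (((plen & 0x1f) << 3) | (pebit & 7));
}
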
Example #4
static GstFlowReturn
speex_dec_chain_parse_data (GstSpeexDec * dec, GstBuffer * buf,
    GstClockTime timestamp, GstClockTime duration)
{
  GstFlowReturn res = GST_FLOW_OK;
  gint i, fpp;
  guint size;
  guint8 *data;
  SpeexBits *bits;

  if (!dec->frame_duration)
    goto not_negotiated;

  if (timestamp != -1) {
    dec->segment.last_stop = timestamp;
  } else {
    timestamp = dec->segment.last_stop;
  }

  if (buf) {
    data = GST_BUFFER_DATA (buf);
    size = GST_BUFFER_SIZE (buf);

    /* send data to the bitstream */
    speex_bits_read_from (&dec->bits, (char *) data, size);

    fpp = 0;
    bits = &dec->bits;

    GST_DEBUG_OBJECT (dec, "received buffer of size %u, fpp %d", size, fpp);
  } else {
    /* concealment data, pass NULL as the bits parameter */
    GST_DEBUG_OBJECT (dec, "creating concealment data");
    fpp = dec->header->frames_per_packet;
    bits = NULL;
  }


  /* now decode each frame, catering for unknown number of them (e.g. rtp) */
  for (i = 0; (!fpp || i < fpp) && (!bits || speex_bits_remaining (bits) > 0);
      i++) {
    GstBuffer *outbuf;
    gint16 *out_data;
    gint ret;

    GST_LOG_OBJECT (dec, "decoding frame %d/%d", i, fpp);

    res = gst_pad_alloc_buffer_and_set_caps (dec->srcpad,
        GST_BUFFER_OFFSET_NONE, dec->frame_size * dec->header->nb_channels * 2,
        GST_PAD_CAPS (dec->srcpad), &outbuf);

    if (res != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));
      return res;
    }

    out_data = (gint16 *) GST_BUFFER_DATA (outbuf);

    ret = speex_decode_int (dec->state, bits, out_data);
    if (ret == -1) {
      /* uh? end of stream */
      GST_WARNING_OBJECT (dec, "Unexpected end of stream found");
      gst_buffer_unref (outbuf);
      outbuf = NULL;
      break;
    } else if (ret == -2) {
      GST_WARNING_OBJECT (dec, "Decoding error: corrupted stream?");
      gst_buffer_unref (outbuf);
      outbuf = NULL;
      break;
    }

    if (bits && speex_bits_remaining (bits) < 0) {
      GST_WARNING_OBJECT (dec, "Decoding overflow: corrupted stream?");
      gst_buffer_unref (outbuf);
      outbuf = NULL;
      break;
    }
    if (dec->header->nb_channels == 2)
      speex_decode_stereo_int (out_data, dec->frame_size, dec->stereo);

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = dec->frame_duration;

    dec->segment.last_stop += dec->frame_duration;
    timestamp = dec->segment.last_stop;

    GST_LOG_OBJECT (dec, "pushing buffer with ts=%" GST_TIME_FORMAT ", dur=%"
        GST_TIME_FORMAT, GST_TIME_ARGS (timestamp),
        GST_TIME_ARGS (dec->frame_duration));

    res = gst_pad_push (dec->srcpad, outbuf);

    if (res != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));
      break;
    }
  }

  return res;

  /* ERRORS */
not_negotiated:
  {
    GST_ELEMENT_ERROR (dec, CORE, NEGOTIATION, (NULL),
        ("decoder not initialized"));
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
static GstFlowReturn
gst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf)
{
  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);
  GstBuffer *new_buf;
  gint new_buf_size;
  GstClock *clock;
  GstClockTime buf_time, buf_dur;
  guint64 frame_number;

  if (G_UNLIKELY (!src->info.bmiHeader.biWidth ||
          !src->info.bmiHeader.biHeight)) {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before create function"));
    return GST_FLOW_NOT_NEGOTIATED;
  }

  new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *
      (-src->info.bmiHeader.biHeight);

  GST_LOG_OBJECT (src,
      "creating buffer of %d bytes with %dx%d image",
      new_buf_size, (gint) src->info.bmiHeader.biWidth,
      (gint) (-src->info.bmiHeader.biHeight));

  new_buf = gst_buffer_new_and_alloc (new_buf_size);

  clock = gst_element_get_clock (GST_ELEMENT (src));
  if (clock != NULL) {
    GstClockTime time, base_time;

    /* Calculate sync time. */

    time = gst_clock_get_time (clock);
    base_time = gst_element_get_base_time (GST_ELEMENT (src));
    buf_time = time - base_time;

    if (src->rate_numerator) {
      frame_number = gst_util_uint64_scale (buf_time,
          src->rate_numerator, GST_SECOND * src->rate_denominator);
    } else {
      frame_number = -1;
    }
  } else {
    buf_time = GST_CLOCK_TIME_NONE;
    frame_number = -1;
  }

  if (frame_number != -1 && frame_number == src->frame_number) {
    GstClockID id;
    GstClockReturn ret;

    /* Need to wait for the next frame */
    frame_number += 1;

    /* Figure out what the next frame time is */
    buf_time = gst_util_uint64_scale (frame_number,
        src->rate_denominator * GST_SECOND, src->rate_numerator);

    id = gst_clock_new_single_shot_id (clock,
        buf_time + gst_element_get_base_time (GST_ELEMENT (src)));
    GST_OBJECT_LOCK (src);
    src->clock_id = id;
    GST_OBJECT_UNLOCK (src);

    GST_DEBUG_OBJECT (src, "Waiting for next frame time %" G_GUINT64_FORMAT,
        buf_time);
    ret = gst_clock_id_wait (id, NULL);
    GST_OBJECT_LOCK (src);

    gst_clock_id_unref (id);
    src->clock_id = NULL;
    if (ret == GST_CLOCK_UNSCHEDULED) {
      /* Got woken up by the unlock function */
      GST_OBJECT_UNLOCK (src);
      return GST_FLOW_FLUSHING;
    }
    GST_OBJECT_UNLOCK (src);

    /* Duration is a complete 1/fps frame duration */
    buf_dur =
        gst_util_uint64_scale_int (GST_SECOND, src->rate_denominator,
        src->rate_numerator);
  } else if (frame_number != -1) {
    GstClockTime next_buf_time;

    GST_DEBUG_OBJECT (src, "No need to wait for next frame time %"
        G_GUINT64_FORMAT " next frame = %" G_GINT64_FORMAT " prev = %"
        G_GINT64_FORMAT, buf_time, frame_number, src->frame_number);
    next_buf_time = gst_util_uint64_scale (frame_number + 1,
        src->rate_denominator * GST_SECOND, src->rate_numerator);
    /* Frame duration is from now until the next expected capture time */
    buf_dur = next_buf_time - buf_time;
  } else {
    buf_dur = GST_CLOCK_TIME_NONE;
  }
  src->frame_number = frame_number;

  GST_BUFFER_TIMESTAMP (new_buf) = buf_time;
  GST_BUFFER_DURATION (new_buf) = buf_dur;

  /* Do screen capture and put it into buffer... */
  gst_gdiscreencapsrc_screen_capture (src, new_buf);

  if (clock != NULL)
    gst_object_unref (clock);

  *buf = new_buf;
  return GST_FLOW_OK;
}
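
The capture source above converts between running time and a frame counter with gst_util_uint64_scale(). As plain arithmetic the round trip looks like the sketch below; the real helper additionally uses wider intermediates so the multiplications cannot overflow.

#include <stdint.h>

#define NSEC_PER_SEC UINT64_C(1000000000)       /* GST_SECOND */

/* frame index that running time t (in ns) falls into, at fps = num/den */
static uint64_t
time_to_frame (uint64_t t, uint64_t num, uint64_t den)
{
  return t * num / (NSEC_PER_SEC * den);
}

/* start time (ns) of frame n -- the deadline the element waits for */
static uint64_t
frame_to_time (uint64_t n, uint64_t num, uint64_t den)
{
  return n * den * NSEC_PER_SEC / num;
}
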
static GstFlowReturn
gst_wavpack_enc_chain (GstPad * pad, GstBuffer * buf)
{
  GstWavpackEnc *enc = GST_WAVPACK_ENC (gst_pad_get_parent (pad));
  uint32_t sample_count = GST_BUFFER_SIZE (buf) / 4;
  GstFlowReturn ret;

  /* reset the last flow returns to GST_FLOW_OK. They are only set to
   * something else while WavpackPackSamples(), or more specifically
   * gst_wavpack_enc_push_block(), runs, so any previous value is stale */
  enc->srcpad_last_return = enc->wvcsrcpad_last_return = GST_FLOW_OK;

  GST_DEBUG ("got %u raw samples", sample_count);

  /* check if we already have a valid WavpackContext, otherwise make one */
  if (!enc->wp_context) {
    /* create raw context */
    enc->wp_context =
        WavpackOpenFileOutput (gst_wavpack_enc_push_block, &enc->wv_id,
        (enc->correction_mode > 0) ? &enc->wvc_id : NULL);
    if (!enc->wp_context) {
      GST_ELEMENT_ERROR (enc, LIBRARY, INIT, (NULL),
          ("error creating Wavpack context"));
      gst_object_unref (enc);
      gst_buffer_unref (buf);
      return GST_FLOW_ERROR;
    }

    /* set the WavpackConfig according to our parameters */
    gst_wavpack_enc_set_wp_config (enc);

    /* set the configuration to the context now that we know everything
     * and initialize the encoder */
    if (!WavpackSetConfiguration (enc->wp_context,
            enc->wp_config, (uint32_t) (-1))
        || !WavpackPackInit (enc->wp_context)) {
      GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL),
          ("error setting up wavpack encoding context"));
      WavpackCloseFile (enc->wp_context);
      gst_object_unref (enc);
      gst_buffer_unref (buf);
      return GST_FLOW_ERROR;
    }
    GST_DEBUG ("setup of encoding context successfull");
  }

  /* Save the timestamp of the first buffer. This will be later
   * used as offset for all following buffers */
  if (enc->timestamp_offset == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
      enc->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
      enc->next_ts = GST_BUFFER_TIMESTAMP (buf);
    } else {
      enc->timestamp_offset = 0;
      enc->next_ts = 0;
    }
  }

  /* Check if the stream is continuous; if not, drop some samples or the
   * whole buffer, or insert silence samples */
  if (enc->next_ts != GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) < enc->next_ts) {
    guint64 diff = enc->next_ts - GST_BUFFER_TIMESTAMP (buf);
    guint64 diff_bytes;

    GST_WARNING_OBJECT (enc, "Buffer is older than previous "
        "timestamp + duration (%" GST_TIME_FORMAT "< %" GST_TIME_FORMAT
        "), cannot handle. Clipping buffer.",
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (enc->next_ts));

    diff_bytes =
        GST_CLOCK_TIME_TO_FRAMES (diff, enc->samplerate) * enc->channels * 2;
    if (diff_bytes >= GST_BUFFER_SIZE (buf)) {
      gst_buffer_unref (buf);
      return GST_FLOW_OK;
    }
    buf = gst_buffer_make_metadata_writable (buf);
    GST_BUFFER_DATA (buf) += diff_bytes;
    GST_BUFFER_SIZE (buf) -= diff_bytes;

    GST_BUFFER_TIMESTAMP (buf) += diff;
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      GST_BUFFER_DURATION (buf) -= diff;
  }

  /* Allow a diff of at most 5 ms */
  if (enc->next_ts != GST_CLOCK_TIME_NONE
      && GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    if (GST_BUFFER_TIMESTAMP (buf) != enc->next_ts &&
        GST_BUFFER_TIMESTAMP (buf) - enc->next_ts > 5 * GST_MSECOND) {
      GST_WARNING_OBJECT (enc,
          "Discontinuity detected: %" G_GUINT64_FORMAT " > %" G_GUINT64_FORMAT,
          GST_BUFFER_TIMESTAMP (buf) - enc->next_ts, 5 * GST_MSECOND);

      WavpackFlushSamples (enc->wp_context);
      enc->timestamp_offset += (GST_BUFFER_TIMESTAMP (buf) - enc->next_ts);
    }
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)
      && GST_BUFFER_DURATION_IS_VALID (buf))
    enc->next_ts = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
  else
    enc->next_ts = GST_CLOCK_TIME_NONE;

  if (enc->need_channel_remap) {
    buf = gst_buffer_make_writable (buf);
    gst_wavpack_enc_fix_channel_order (enc, (gint32 *) GST_BUFFER_DATA (buf),
        sample_count);
  }

  /* if we want to append the MD5 sum to the stream update it here
   * with the current raw samples */
  if (enc->md5) {
    MD5Update (enc->md5_context, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
  }

  /* encode and handle return values from encoding */
  if (WavpackPackSamples (enc->wp_context, (int32_t *) GST_BUFFER_DATA (buf),
          sample_count / enc->channels)) {
    GST_DEBUG ("encoding samples successful");
    ret = GST_FLOW_OK;
  } else {
    if ((enc->srcpad_last_return == GST_FLOW_RESEND) ||
        (enc->wvcsrcpad_last_return == GST_FLOW_RESEND)) {
      ret = GST_FLOW_RESEND;
    } else if ((enc->srcpad_last_return == GST_FLOW_OK) ||
        (enc->wvcsrcpad_last_return == GST_FLOW_OK)) {
      ret = GST_FLOW_OK;
    } else if ((enc->srcpad_last_return == GST_FLOW_NOT_LINKED) &&
        (enc->wvcsrcpad_last_return == GST_FLOW_NOT_LINKED)) {
      ret = GST_FLOW_NOT_LINKED;
    } else if ((enc->srcpad_last_return == GST_FLOW_WRONG_STATE) &&
        (enc->wvcsrcpad_last_return == GST_FLOW_WRONG_STATE)) {
      ret = GST_FLOW_WRONG_STATE;
    } else {
      GST_ELEMENT_ERROR (enc, LIBRARY, ENCODE, (NULL),
          ("encoding samples failed"));
      ret = GST_FLOW_ERROR;
    }
  }

  gst_buffer_unref (buf);
  gst_object_unref (enc);
  return ret;
}
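
The clipping branch above turns a timestamp overlap into a byte count in two steps: nanoseconds to sample frames at the stream rate, then frames to bytes for 16-bit interleaved audio. Rounding aside, this is the computation:

#include <stdint.h>

#define NSEC_PER_SEC UINT64_C(1000000000)

/* Bytes to drop from the front of a buffer that overlaps the previous one
 * by diff_ns nanoseconds; mirrors
 * GST_CLOCK_TIME_TO_FRAMES (diff, rate) * channels * 2 above. */
static uint64_t
overlap_bytes (uint64_t diff_ns, uint32_t rate, uint32_t channels)
{
  uint64_t frames = diff_ns * rate / NSEC_PER_SEC;

  return frames * channels * 2;         /* 2 bytes per 16-bit sample */
}
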
static GstFlowReturn
gst_rtp_celt_pay_flush_queued (GstRtpCELTPay * rtpceltpay)
{
  GstFlowReturn ret;
  GstBuffer *buf, *outbuf;
  guint8 *payload, *spayload;
  guint payload_len;
  GstClockTime duration;
  GstRTPBuffer rtp = { NULL, };

  payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
  duration = rtpceltpay->qduration;

  GST_DEBUG_OBJECT (rtpceltpay, "flushing out %u, duration %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (rtpceltpay->qduration));

  /* get a big enough packet for the sizes + payloads */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  GST_BUFFER_DURATION (outbuf) = duration;

  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  /* point to the payload for size headers and data */
  spayload = gst_rtp_buffer_get_payload (&rtp);
  payload = spayload + rtpceltpay->sbytes;

  while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
    guint size;

    /* copy first timestamp to output */
    if (GST_BUFFER_PTS (outbuf) == -1)
      GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buf);

    /* write the size to the header; a frame of exactly 0xff bytes needs a
     * continuation byte (0xff 0x00), so keep looping while size >= 0xff */
    size = gst_buffer_get_size (buf);
    while (size >= 0xff) {
      *spayload++ = 0xff;
      size -= 0xff;
    }
    *spayload++ = size;

    /* copy payload */
    size = gst_buffer_get_size (buf);
    gst_buffer_extract (buf, 0, payload, size);
    payload += size;

    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpceltpay), outbuf, buf,
        g_quark_from_static_string (GST_META_TAG_AUDIO_STR));

    gst_buffer_unref (buf);
  }
  gst_rtp_buffer_unmap (&rtp);

  /* we consumed it all */
  rtpceltpay->bytes = 0;
  rtpceltpay->sbytes = 0;
  rtpceltpay->qduration = 0;

  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpceltpay), outbuf);

  return ret;
}
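
The size headers above use the CELT/Speex RTP convention: every 0xff byte means "add 255 and keep reading", and the first byte below 0xff terminates the length. A matching encoder/decoder pair as a sketch:

#include <stddef.h>
#include <stdint.h>

/* Encode size; returns the number of header bytes written. A frame of
 * exactly 255 bytes becomes 0xff 0x00, keeping the encoding unambiguous. */
static size_t
put_size (uint8_t * p, size_t size)
{
  size_t n = 0;

  while (size >= 0xff) {
    p[n++] = 0xff;
    size -= 0xff;
  }
  p[n++] = (uint8_t) size;
  return n;
}

/* Decode size; returns the number of header bytes consumed. */
static size_t
get_size (const uint8_t * p, size_t * size)
{
  size_t n = 0, s = 0;

  while (p[n] == 0xff)
    s += p[n++];
  s += p[n++];
  *size = s;
  return n;
}
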
static GstFlowReturn
gst_rtp_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstRTPMux *rtp_mux;
  GstFlowReturn ret;
  GstRTPMuxPadPrivate *padpriv;
  gboolean drop;
  gboolean changed = FALSE;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

  rtp_mux = GST_RTP_MUX (parent);

  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
    GstCaps *current_caps = gst_pad_get_current_caps (pad);

    if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
      if (current_caps)
        gst_caps_unref (current_caps);
      ret = GST_FLOW_NOT_NEGOTIATED;
      gst_buffer_unref (buffer);
      goto out;
    }
    if (current_caps)
      gst_caps_unref (current_caps);
  }

  GST_OBJECT_LOCK (rtp_mux);
  padpriv = gst_pad_get_element_private (pad);

  if (!padpriv) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_LINKED;
  }

  buffer = gst_buffer_make_writable (buffer);

  if (!gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtpbuffer)) {
    GST_OBJECT_UNLOCK (rtp_mux);
    gst_buffer_unref (buffer);
    GST_ERROR_OBJECT (rtp_mux, "Invalid RTP buffer");
    return GST_FLOW_ERROR;
  }

  drop = !process_buffer_locked (rtp_mux, padpriv, &rtpbuffer);

  gst_rtp_buffer_unmap (&rtpbuffer);

  if (!drop) {
    if (pad != rtp_mux->last_pad) {
      changed = TRUE;
      g_clear_object (&rtp_mux->last_pad);
      rtp_mux->last_pad = g_object_ref (pad);
    }

    if (GST_BUFFER_DURATION_IS_VALID (buffer) &&
        GST_BUFFER_PTS_IS_VALID (buffer))
      rtp_mux->last_stop = GST_BUFFER_PTS (buffer) +
          GST_BUFFER_DURATION (buffer);
    else
      rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
  }

  GST_OBJECT_UNLOCK (rtp_mux);

  if (changed)
    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);

  if (drop) {
    gst_buffer_unref (buffer);
    ret = GST_FLOW_OK;
  } else {
    ret = gst_pad_push (rtp_mux->srcpad, buffer);
  }

out:
  return ret;
}
Example #9
bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
                                       int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedVideoEos && !mVideoSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mVideoSinkBufferCount) {
      if (!mAudioSinkBufferCount) {
        /* We have nothing decoded, so it makes no sense to return to the
         * state machine: it would call us back immediately, we'd return
         * again, and so on, wasting CPU cycles for nothing. Block here until
         * either video or audio data is available.
         */
        mon.Wait();
        if (!mVideoSinkBufferCount) {
          /* There is still no video data available, so either there is audio data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    mDecoder->NotifyDecodedFrames(0, 1);

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
#endif
    mVideoSinkBufferCount--;
  }

  if (!buffer)
    /* no more frames */
    return true;

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if ((aKeyFrameSkip && !isKeyframe)) {
    gst_buffer_unref(buffer);
    return true;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mVideoSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
               "frame has invalid timestamp");

  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
  else if (fpsNum && fpsDen)
    /* add an estimated 1-frame duration, in microseconds to match
     * timestamp above */
    duration = GST_TIME_AS_USECONDS(gst_util_uint64_scale(GST_SECOND,
                                                          fpsDen, fpsNum));

  if (timestamp < aTimeThreshold) {
    LOG(PR_LOG_DEBUG, ("skipping frame %" GST_TIME_FORMAT
                       " threshold %" GST_TIME_FORMAT,
                       GST_TIME_ARGS(timestamp * 1000),
                       GST_TIME_ARGS(aTimeThreshold * 1000)));
    gst_buffer_unref(buffer);
    return true;
  }

#if GST_VERSION_MAJOR >= 1
  if (mConfigureAlignment && buffer->pool) {
    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
    GstVideoAlignment align;
    if (gst_buffer_pool_config_get_video_alignment(config, &align))
      gst_video_info_align(&mVideoInfo, &align);
    gst_structure_free(config);
    mConfigureAlignment = false;
  }
#endif

  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    CopyIntoImageBuffer(buffer, &tmp, image);
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  int64_t offset = mDecoder->GetResource()->Tell(); // Estimate location in media.
  VideoData* video = VideoData::CreateFromImage(mInfo.mVideo,
                                                mDecoder->GetImageContainer(),
                                                offset, timestamp, duration,
                                                static_cast<Image*>(image.get()),
                                                isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);

  gst_buffer_unref(buffer);

  return true;
}
Example #10
/* we expect buffers starting on startcodes. 
 */
static GstFlowReturn
gst_rtp_mp4v_pay_handle_buffer (GstBaseRTPPayload * basepayload,
    GstBuffer * buffer)
{
  GstRtpMP4VPay *rtpmp4vpay;
  GstFlowReturn ret;
  guint size, avail;
  guint packet_len;
  guint8 *data;
  gboolean flush;
  gint strip;
  GstClockTime timestamp, duration;

  ret = GST_FLOW_OK;

  rtpmp4vpay = GST_RTP_MP4V_PAY (basepayload);

  size = GST_BUFFER_SIZE (buffer);
  data = GST_BUFFER_DATA (buffer);
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  avail = gst_adapter_available (rtpmp4vpay->adapter);

  if (duration == -1)
    duration = 0;

  /* empty buffer, take timestamp */
  if (avail == 0) {
    rtpmp4vpay->first_timestamp = timestamp;
    rtpmp4vpay->duration = 0;
  }

  /* depay incoming data and see if we need to start a new RTP
   * packet */
  flush = gst_rtp_mp4v_pay_depay_data (rtpmp4vpay, data, size, &strip);
  if (strip) {
    /* strip off config if requested */
    if (!rtpmp4vpay->send_config) {
      GstBuffer *subbuf;

      /* strip off header */
      subbuf = gst_buffer_create_sub (buffer, strip, size - strip);
      GST_BUFFER_TIMESTAMP (subbuf) = timestamp;
      gst_buffer_unref (buffer);
      buffer = subbuf;

      size = GST_BUFFER_SIZE (buffer);
      data = GST_BUFFER_DATA (buffer);
    }
  }

  /* if we need to flush, do so now */
  if (flush) {
    ret = gst_rtp_mp4v_pay_flush (rtpmp4vpay);
    rtpmp4vpay->first_timestamp = timestamp;
    rtpmp4vpay->duration = 0;
    avail = 0;
  }

  /* get packet length of data and see if we exceeded MTU. */
  packet_len = gst_rtp_buffer_calc_packet_len (avail + size, 0, 0);

  if (gst_basertppayload_is_filled (basepayload,
          packet_len, rtpmp4vpay->duration + duration)) {
    ret = gst_rtp_mp4v_pay_flush (rtpmp4vpay);
    rtpmp4vpay->first_timestamp = timestamp;
    rtpmp4vpay->duration = 0;
  }

  /* push new data */
  gst_adapter_push (rtpmp4vpay->adapter, buffer);

  rtpmp4vpay->duration += duration;

  return ret;
}
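
The MTU check above relies on gst_rtp_buffer_calc_packet_len() to add the fixed RTP overhead to the payload size. The relationship, as a sketch (12-byte fixed header, 4 bytes per CSRC, then payload and padding):

#include <stdint.h>

static unsigned
rtp_calc_packet_len (unsigned payload_len, uint8_t pad_len,
    uint8_t csrc_count)
{
  return 12 + 4u * csrc_count + payload_len + pad_len;
}
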
static GstFlowReturn
gst_audio_segment_clip_clip_buffer (GstSegmentClip * base, GstBuffer * buffer,
    GstBuffer ** outbuf)
{
  GstAudioSegmentClip *self = GST_AUDIO_SEGMENT_CLIP (base);
  GstSegment *segment = &base->segment;
  GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
  GstClockTime duration = GST_BUFFER_DURATION (buffer);
  guint64 offset = GST_BUFFER_OFFSET (buffer);
  guint64 offset_end = GST_BUFFER_OFFSET_END (buffer);
  guint size = gst_buffer_get_size (buffer);

  if (!self->rate || !self->framesize) {
    GST_ERROR_OBJECT (self, "Not negotiated yet");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (segment->format != GST_FORMAT_DEFAULT &&
      segment->format != GST_FORMAT_TIME) {
    GST_DEBUG_OBJECT (self, "Unsupported segment format %s",
        gst_format_get_name (segment->format));
    *outbuf = buffer;
    return GST_FLOW_OK;
  }

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    GST_WARNING_OBJECT (self, "Buffer without valid timestamp");
    *outbuf = buffer;
    return GST_FLOW_OK;
  }

  *outbuf =
      gst_audio_buffer_clip (buffer, segment, self->rate, self->framesize);

  if (!*outbuf) {
    GST_DEBUG_OBJECT (self, "Buffer outside the configured segment");

    /* Now return EOS if we're past the end (or, in reverse playback,
     * before the start) */
    if (segment->format == GST_FORMAT_TIME) {
      if (segment->rate >= 0) {
        if (segment->stop != -1 && timestamp >= segment->stop)
          return GST_FLOW_EOS;
      } else {
        if (!GST_CLOCK_TIME_IS_VALID (duration))
          duration =
              gst_util_uint64_scale_int (size, GST_SECOND,
              self->framesize * self->rate);

        if (segment->start != -1 && timestamp + duration <= segment->start)
          return GST_FLOW_EOS;
      }
    } else {
      if (segment->rate >= 0) {
        if (segment->stop != -1 && offset != -1 && offset >= segment->stop)
          return GST_FLOW_EOS;
      } else if (offset != -1 || offset_end != -1) {
        if (offset_end == -1)
          offset_end = offset + size / self->framesize;

        if (segment->start != -1 && offset_end <= segment->start)
          return GST_FLOW_EOS;
      }
    }
  }

  return GST_FLOW_OK;
}
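
When a buffer carries no duration, the element above derives one from its size: size / framesize sample frames, at `rate` frames per second. As arithmetic:

#include <stdint.h>

#define NSEC_PER_SEC UINT64_C(1000000000)

/* Duration (ns) of `size` bytes with `framesize` bytes per sample frame at
 * `rate` frames/second; mirrors the gst_util_uint64_scale_int call above. */
static uint64_t
duration_from_size (uint64_t size, uint32_t framesize, uint32_t rate)
{
  return size * NSEC_PER_SEC / ((uint64_t) framesize * rate);
}
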
Example #12
static void
gst_musepackdec_loop (GstPad * sinkpad)
{
  GstMusepackDec *musepackdec;
  GstFlowReturn flow;
  GstBuffer *out;

#ifdef MPC_IS_OLD_API
  guint32 update_acc, update_bits;
#else
  mpc_frame_info frame;
  mpc_status err;
#endif
  gint num_samples, samplerate, bitspersample;

  musepackdec = GST_MUSEPACK_DEC (GST_PAD_PARENT (sinkpad));

  samplerate = g_atomic_int_get (&musepackdec->rate);

  if (samplerate == 0) {
    if (!gst_musepack_stream_init (musepackdec))
      goto pause_task;

    gst_musepackdec_send_newsegment (musepackdec);
    samplerate = g_atomic_int_get (&musepackdec->rate);
  }

  bitspersample = g_atomic_int_get (&musepackdec->bps);

  flow = gst_pad_alloc_buffer_and_set_caps (musepackdec->srcpad, -1,
      MPC_DECODER_BUFFER_LENGTH * 4, GST_PAD_CAPS (musepackdec->srcpad), &out);

  if (flow != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
    goto pause_task;
  }
#ifdef MPC_IS_OLD_API
  num_samples = mpc_decoder_decode (musepackdec->d,
      (MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out), &update_acc, &update_bits);

  if (num_samples < 0) {
    GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
    GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
    goto pause_task;
  } else if (num_samples == 0) {
    goto eos_and_pause;
  }
#else
  frame.buffer = (MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out);
  err = mpc_demux_decode (musepackdec->d, &frame);

  if (err != MPC_STATUS_OK) {
    GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
    GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
    goto pause_task;
  } else if (frame.bits == -1) {
    goto eos_and_pause;
  }

  num_samples = frame.samples;
#endif

  GST_BUFFER_SIZE (out) = num_samples * bitspersample;

  GST_BUFFER_OFFSET (out) = musepackdec->segment.last_stop;
  GST_BUFFER_TIMESTAMP (out) =
      gst_util_uint64_scale_int (musepackdec->segment.last_stop,
      GST_SECOND, samplerate);
  GST_BUFFER_DURATION (out) =
      gst_util_uint64_scale_int (num_samples, GST_SECOND, samplerate);

  musepackdec->segment.last_stop += num_samples;

  GST_LOG_OBJECT (musepackdec, "Pushing buffer, timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out)));

  flow = gst_pad_push (musepackdec->srcpad, out);
  if (flow != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
    goto pause_task;
  }

  /* check if we're at the end of a configured segment */
  if (musepackdec->segment.stop != -1 &&
      musepackdec->segment.last_stop >= musepackdec->segment.stop) {
    gint64 stop_time;

    GST_DEBUG_OBJECT (musepackdec, "Reached end of configured segment");

    if ((musepackdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0)
      goto eos_and_pause;

    GST_DEBUG_OBJECT (musepackdec, "Posting SEGMENT_DONE message");

    stop_time = gst_util_uint64_scale_int (musepackdec->segment.stop,
        GST_SECOND, samplerate);

    gst_element_post_message (GST_ELEMENT (musepackdec),
        gst_message_new_segment_done (GST_OBJECT (musepackdec),
            GST_FORMAT_TIME, stop_time));

    goto pause_task;
  }

  return;

eos_and_pause:
  {
    GST_DEBUG_OBJECT (musepackdec, "sending EOS event");
    gst_pad_push_event (musepackdec->srcpad, gst_event_new_eos ());
    /* fall through to pause */
  }

pause_task:
  {
    GST_DEBUG_OBJECT (musepackdec, "Pausing task");
    gst_pad_pause_task (sinkpad);
    return;
  }
}
Example #13
static GstFlowReturn
gst_ac3_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstAc3Parse *ac3parse = GST_AC3_PARSE (parse);
  GstBuffer *buf = frame->buffer;
  GstByteReader reader;
  gint off;
  gboolean lost_sync, draining, eac, more = FALSE;
  guint frmsiz, blocks, sid;
  guint rate, chans;
  gboolean update_rate = FALSE;
  gint framesize = 0;
  gint have_blocks = 0;
  GstMapInfo map;
  gboolean ret = FALSE;
  GstFlowReturn res = GST_FLOW_OK;

  gst_buffer_map (buf, &map, GST_MAP_READ);

  if (G_UNLIKELY (map.size < 8)) {
    *skipsize = 1;
    goto cleanup;
  }

  gst_byte_reader_init (&reader, map.data, map.size);
  off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffff0000, 0x0b770000,
      0, map.size);

  GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);

  /* didn't find anything that looks like a sync word, skip */
  if (off < 0) {
    *skipsize = map.size - 3;
    goto cleanup;
  }

  /* possible frame header, but not at offset 0? skip bytes before sync */
  if (off > 0) {
    *skipsize = off;
    goto cleanup;
  }

  /* make sure the values in the frame header look sane */
  if (!gst_ac3_parse_frame_header (ac3parse, buf, 0, &frmsiz, &rate, &chans,
          &blocks, &sid, &eac)) {
    *skipsize = off + 2;
    goto cleanup;
  }

  GST_LOG_OBJECT (parse, "size: %u, blocks: %u, rate: %u, chans: %u", frmsiz,
      blocks, rate, chans);

  framesize = frmsiz;

  if (G_UNLIKELY (g_atomic_int_get (&ac3parse->align) ==
          GST_AC3_PARSE_ALIGN_NONE))
    gst_ac3_parse_set_alignment (ac3parse, eac);

  GST_LOG_OBJECT (parse, "got frame");

  lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);
  draining = GST_BASE_PARSE_DRAINING (parse);

  if (g_atomic_int_get (&ac3parse->align) == GST_AC3_PARSE_ALIGN_IEC61937) {
    /* We need 6 audio blocks from each substream, so we keep going forwards
     * till we have it */

    g_assert (blocks > 0);
    GST_LOG_OBJECT (ac3parse, "Need %d frames before pushing", 6 / blocks);

    if (sid != 0) {
      /* We need the first substream to be the one with id 0 */
      GST_LOG_OBJECT (ac3parse, "Skipping till we find sid 0");
      *skipsize = off + 2;
      goto cleanup;
    }

    framesize = 0;

    /* Loop till we have 6 blocks per substream */
    for (have_blocks = 0; !more && have_blocks < 6; have_blocks += blocks) {
      /* Loop till we get one frame from each substream */
      do {
        framesize += frmsiz;

        if (!gst_byte_reader_skip (&reader, frmsiz)
            || map.size < (framesize + 6)) {
          more = TRUE;
          break;
        }

        if (!gst_ac3_parse_frame_header (ac3parse, buf, framesize, &frmsiz,
                NULL, NULL, NULL, &sid, &eac)) {
          *skipsize = off + 2;
          goto cleanup;
        }
      } while (sid);
    }

    /* We're now at the next frame, so no need to skip if resyncing */
    frmsiz = 0;
  }

  if (lost_sync && !draining) {
    guint16 word = 0;

    GST_DEBUG_OBJECT (ac3parse, "resyncing; checking next frame syncword");

    if (more || !gst_byte_reader_skip (&reader, frmsiz) ||
        !gst_byte_reader_get_uint16_be (&reader, &word)) {
      GST_DEBUG_OBJECT (ac3parse, "... but not sufficient data");
      gst_base_parse_set_min_frame_size (parse, framesize + 8);
      *skipsize = 0;
      goto cleanup;
    } else {
      if (word != 0x0b77) {
        GST_DEBUG_OBJECT (ac3parse, "0x%x not OK", word);
        *skipsize = off + 2;
        goto cleanup;
      } else {
        /* ok, got sync now, let's assume constant frame size */
        gst_base_parse_set_min_frame_size (parse, framesize);
      }
    }
  }

  /* expect to have found a frame here */
  g_assert (framesize);
  ret = TRUE;

  /* arrange for metadata setup */
  if (G_UNLIKELY (sid)) {
    /* dependent frame, no need to (ac)count for or consider further */
    GST_LOG_OBJECT (parse, "sid: %d", sid);
    frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;
    /* TODO maybe also mark as DELTA_UNIT,
     * if that does not surprise baseparse elsewhere */
    /* occupies same time space as previous base frame */
    if (G_LIKELY (GST_BUFFER_TIMESTAMP (buf) >= GST_BUFFER_DURATION (buf)))
      GST_BUFFER_TIMESTAMP (buf) -= GST_BUFFER_DURATION (buf);
    /* only shortcut if we already arranged for caps */
    if (G_LIKELY (ac3parse->sample_rate > 0))
      goto cleanup;
  }

  if (G_UNLIKELY (ac3parse->sample_rate != rate || ac3parse->channels != chans
          || ac3parse->eac != eac)) {
    GstCaps *caps = gst_caps_new_simple (eac ? "audio/x-eac3" : "audio/x-ac3",
        "framed", G_TYPE_BOOLEAN, TRUE, "rate", G_TYPE_INT, rate,
        "channels", G_TYPE_INT, chans, NULL);
    gst_caps_set_simple (caps, "alignment", G_TYPE_STRING,
        g_atomic_int_get (&ac3parse->align) == GST_AC3_PARSE_ALIGN_IEC61937 ?
        "iec61937" : "frame", NULL);
    gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
    gst_caps_unref (caps);

    ac3parse->sample_rate = rate;
    ac3parse->channels = chans;
    ac3parse->eac = eac;

    update_rate = TRUE;
  }

  if (G_UNLIKELY (ac3parse->blocks != blocks)) {
    ac3parse->blocks = blocks;

    update_rate = TRUE;
  }

  if (G_UNLIKELY (update_rate))
    gst_base_parse_set_frame_rate (parse, rate, 256 * blocks, 2, 2);

cleanup:
  gst_buffer_unmap (buf, &map);

  if (ret && framesize <= map.size) {
    res = gst_base_parse_finish_frame (parse, frame, framesize);
  }

  return res;
}
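
The masked scan above matches any 32-bit big-endian window whose top 16 bits equal the AC-3 sync word 0x0b77. A byte-level sketch of the same search:

#include <stddef.h>
#include <stdint.h>

/* Find (w & 0xffff0000) == 0x0b770000 over big-endian 32-bit windows,
 * i.e. the byte pair 0x0b 0x77 with at least two more bytes after it.
 * Returns the byte offset of the sync word, or -1 if not found. */
static ptrdiff_t
find_ac3_sync (const uint8_t * data, size_t size)
{
  size_t i;

  for (i = 0; i + 4 <= size; i++) {
    if (data[i] == 0x0b && data[i + 1] == 0x77)
      return (ptrdiff_t) i;
  }
  return -1;
}
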
GstPadProbeReturn GstEnginePipeline::HandoffCallback(GstPad*,
                                                     GstPadProbeInfo* info,
                                                     gpointer self) {
  GstEnginePipeline* instance = reinterpret_cast<GstEnginePipeline*>(self);
  GstBuffer* buf = gst_pad_probe_info_get_buffer(info);

  QList<BufferConsumer*> consumers;
  {
    QMutexLocker l(&instance->buffer_consumers_mutex_);
    consumers = instance->buffer_consumers_;
  }

  for (BufferConsumer* consumer : consumers) {
    gst_buffer_ref(buf);
    consumer->ConsumeBuffer(buf, instance->id());
  }

  // Calculate the end time of this buffer so we can stop playback if it's
  // after the end time of this song.
  if (instance->end_offset_nanosec_ > 0) {
    quint64 start_time = GST_BUFFER_TIMESTAMP(buf) - instance->segment_start_;
    quint64 duration = GST_BUFFER_DURATION(buf);
    quint64 end_time = start_time + duration;

    if (end_time > instance->end_offset_nanosec_) {
      if (instance->has_next_valid_url()) {
        if (instance->next_url_ == instance->url_ &&
            instance->next_beginning_offset_nanosec_ ==
                instance->end_offset_nanosec_) {
          // The "next" song is actually the next segment of this file - so
          // cheat and keep on playing, but just tell the Engine we've moved on.
          instance->end_offset_nanosec_ = instance->next_end_offset_nanosec_;
          instance->next_url_ = QUrl();
          instance->next_beginning_offset_nanosec_ = 0;
          instance->next_end_offset_nanosec_ = 0;

          // GstEngine will try to seek to the start of the new section, but
          // we're already there so ignore it.
          instance->ignore_next_seek_ = true;
          emit instance->EndOfStreamReached(instance->id(), true);
        } else {
          // We have a next song but we can't cheat, so move to it normally.
          instance->TransitionToNext();
        }
      } else {
        // There's no next song
        emit instance->EndOfStreamReached(instance->id(), false);
      }
    }
  }

  if (instance->emit_track_ended_on_time_discontinuity_) {
    if (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT) ||
        GST_BUFFER_OFFSET(buf) < instance->last_buffer_offset_) {
      qLog(Debug) << "Buffer discontinuity - emitting EOS";
      instance->emit_track_ended_on_time_discontinuity_ = false;
      emit instance->EndOfStreamReached(instance->id(), true);
    }
  }

  instance->last_buffer_offset_ = GST_BUFFER_OFFSET(buf);

  return GST_PAD_PROBE_OK;
}
Example #15
static GstFlowReturn
gst_rtp_amr_pay_handle_buffer (GstBaseRTPPayload * basepayload,
    GstBuffer * buffer)
{
  GstRtpAMRPay *rtpamrpay;
  GstFlowReturn ret;
  guint size, payload_len;
  GstBuffer *outbuf;
  guint8 *payload, *data, *payload_amr;
  GstClockTime timestamp, duration;
  guint packet_len, mtu;
  gint i, num_packets, num_nonempty_packets;
  gint amr_len;
  gint *frame_size;

  rtpamrpay = GST_RTP_AMR_PAY (basepayload);
  mtu = GST_BASE_RTP_PAYLOAD_MTU (rtpamrpay);

  size = GST_BUFFER_SIZE (buffer);
  data = GST_BUFFER_DATA (buffer);
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  /* setup frame size pointer */
  if (rtpamrpay->mode == GST_RTP_AMR_P_MODE_NB)
    frame_size = nb_frame_size;
  else
    frame_size = wb_frame_size;

  GST_DEBUG_OBJECT (basepayload, "got %d bytes", size);

  /* FIXME, only 
   * octet aligned, no interleaving, single channel, no CRC,
   * no robust-sorting. To fix this you need to implement the downstream
   * negotiation function. */

  /* first count number of packets and total amr frame size */
  amr_len = num_packets = num_nonempty_packets = 0;
  for (i = 0; i < size; i++) {
    guint8 FT;
    gint fr_size;

    FT = (data[i] & 0x78) >> 3;

    fr_size = frame_size[FT];
    GST_DEBUG_OBJECT (basepayload, "frame size %d", fr_size);
    /* FIXME, we don't handle this yet.. */
    if (fr_size <= 0)
      goto wrong_size;

    amr_len += fr_size;
    num_nonempty_packets++;
    num_packets++;
    i += fr_size;
  }
  if (amr_len > size)
    goto incomplete_frame;

  /* we need one extra byte for the CMR, the ToC is in the input
   * data */
  payload_len = size + 1;

  /* get packet len to check against MTU */
  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);
  if (packet_len > mtu)
    goto too_big;

  /* now alloc output buffer */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  /* copy timestamp */
  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;

  /* FIXME: when we do more than one AMR frame per packet, fix this */
  if (duration != GST_CLOCK_TIME_NONE)
    GST_BUFFER_DURATION (outbuf) = duration;
  else {
    GST_BUFFER_DURATION (outbuf) = 20 * GST_MSECOND;
  }

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    gst_rtp_buffer_set_marker (outbuf, TRUE);
  }

  /* get payload, this is now writable */
  payload = gst_rtp_buffer_get_payload (outbuf);

  /*   0 1 2 3 4 5 6 7 
   *  +-+-+-+-+-+-+-+-+
   *  |  CMR  |R|R|R|R|
   *  +-+-+-+-+-+-+-+-+
   */
  payload[0] = 0xF0;            /* CMR, no specific mode requested */

  /* this is where we copy the AMR data, after num_packets FTs and the
   * CMR. */
  payload_amr = payload + num_packets + 1;

  /* copy data in payload, first we copy all the FTs then all
   * the AMR data. The last FT has to have the F flag cleared. */
  for (i = 1; i <= num_packets; i++) {
    guint8 FT;
    gint fr_size;

    /*   0 1 2 3 4 5 6 7
     *  +-+-+-+-+-+-+-+-+
     *  |F|  FT   |Q|P|P| more FT...
     *  +-+-+-+-+-+-+-+-+
     */
    FT = (*data & 0x78) >> 3;

    fr_size = frame_size[FT];

    if (i == num_packets)
      /* last packet, clear F flag */
      payload[i] = *data & 0x7f;
    else
      /* set F flag */
      payload[i] = *data | 0x80;

    memcpy (payload_amr, &data[1], fr_size);

    /* all sizes are > 0 since we checked for that above */
    data += fr_size + 1;
    payload_amr += fr_size;
  }

  gst_buffer_unref (buffer);

  ret = gst_basertppayload_push (basepayload, outbuf);

  return ret;

  /* ERRORS */
wrong_size:
  {
    GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
        (NULL), ("received AMR frame with size <= 0"));
    gst_buffer_unref (buffer);

    return GST_FLOW_ERROR;
  }
incomplete_frame:
  {
    GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
        (NULL), ("received incomplete AMR frames"));
    gst_buffer_unref (buffer);

    return GST_FLOW_ERROR;
  }
too_big:
  {
    GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
        (NULL), ("received too many AMR frames for MTU"));
    gst_buffer_unref (buffer);

    return GST_FLOW_ERROR;
  }
}
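
The first loop above walks the AMR payload using each table-of-contents byte: FT (bits 3..6) selects the core frame size. A sketch of that walk for AMR-NB, assuming a size table equivalent to the element's nb_frame_size (values per RFC 4867, octet-aligned mode):

#include <stddef.h>
#include <stdint.h>

static const int nb_frame_size[16] = {
  12, 13, 15, 17, 19, 20, 26, 31,       /* modes 0-7 */
  5,                                    /* SID */
  -1, -1, -1, -1, -1, -1,               /* invalid for NB */
  0                                     /* NO_DATA */
};

/* Count frames and sum the frame bytes, like the first loop above.
 * Returns 0 on malformed input (bad FT or truncated frame data). */
static int
amr_count_frames (const uint8_t * data, size_t size, int *num_frames,
    size_t * amr_len)
{
  size_t i;

  *num_frames = 0;
  *amr_len = 0;
  for (i = 0; i < size; i++) {
    int ft = (data[i] & 0x78) >> 3;     /* FT field of the ToC byte */
    int fr_size = nb_frame_size[ft];

    if (fr_size <= 0)
      return 0;
    *amr_len += (size_t) fr_size;
    (*num_frames)++;
    i += (size_t) fr_size;              /* skip the frame body */
  }
  return *amr_len <= size;
}
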
Example #16
static GstFlowReturn
gst_amc_audio_dec_handle_frame (GstAudioDecoder * decoder, GstBuffer * inbuf)
{
    GstAmcAudioDec *self;
    gint idx;
    GstAmcBuffer *buf;
    GstAmcBufferInfo buffer_info;
    guint offset = 0;
    GstClockTime timestamp, duration, timestamp_offset = 0;
    GstMapInfo minfo;

    memset (&minfo, 0, sizeof (minfo));

    self = GST_AMC_AUDIO_DEC (decoder);

    GST_DEBUG_OBJECT (self, "Handling frame");

    /* Make sure to keep a reference to the input here,
     * it can be unreffed from the other thread if
     * finish_frame() is called */
    if (inbuf)
        inbuf = gst_buffer_ref (inbuf);

    if (!self->started) {
        GST_ERROR_OBJECT (self, "Codec not started yet");
        if (inbuf)
            gst_buffer_unref (inbuf);
        return GST_FLOW_NOT_NEGOTIATED;
    }

    if (self->eos) {
        GST_WARNING_OBJECT (self, "Got frame after EOS");
        if (inbuf)
            gst_buffer_unref (inbuf);
        return GST_FLOW_EOS;
    }

    if (self->flushing)
        goto flushing;

    if (self->downstream_flow_ret != GST_FLOW_OK)
        goto downstream_error;

    if (!inbuf)
        return gst_amc_audio_dec_drain (self);

    timestamp = GST_BUFFER_PTS (inbuf);
    duration = GST_BUFFER_DURATION (inbuf);

    gst_buffer_map (inbuf, &minfo, GST_MAP_READ);

    while (offset < minfo.size) {
        /* Make sure to release the base class stream lock, otherwise
         * _loop() can't call _finish_frame() and we might block forever
         * because no input buffers are released */
        GST_AUDIO_DECODER_STREAM_UNLOCK (self);
        /* Wait at most 100ms here, some codecs don't fail dequeueing if
         * the codec is flushing, causing deadlocks during shutdown */
        idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000);
        GST_AUDIO_DECODER_STREAM_LOCK (self);

        if (idx < 0) {
            if (self->flushing)
                goto flushing;
            switch (idx) {
            case INFO_TRY_AGAIN_LATER:
                GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
                continue;             /* next try */
            case G_MININT:
                GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
                goto dequeue_error;
            default:
                g_assert_not_reached ();
                break;
            }

            continue;
        }

        if (idx >= self->n_input_buffers)
            goto invalid_buffer_index;

        if (self->flushing)
            goto flushing;

        if (self->downstream_flow_ret != GST_FLOW_OK) {
            memset (&buffer_info, 0, sizeof (buffer_info));
            gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info);
            goto downstream_error;
        }

        /* Now handle the frame */

        /* Copy the buffer content in chunks of size as requested
         * by the port */
        buf = &self->input_buffers[idx];

        memset (&buffer_info, 0, sizeof (buffer_info));
        buffer_info.offset = 0;
        buffer_info.size = MIN (minfo.size - offset, buf->size);

        orc_memcpy (buf->data, minfo.data + offset, buffer_info.size);

        /* Interpolate timestamps if we're passing the buffer
         * in multiple chunks */
        if (offset != 0 && duration != GST_CLOCK_TIME_NONE) {
            timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size);
        }

        if (timestamp != GST_CLOCK_TIME_NONE) {
            buffer_info.presentation_time_us =
                gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND);
            self->last_upstream_ts = timestamp + timestamp_offset;
        }
        if (duration != GST_CLOCK_TIME_NONE)
            self->last_upstream_ts += duration;

        if (offset == 0) {
            if (!GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DELTA_UNIT))
                buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME;
        }

        offset += buffer_info.size;
        GST_DEBUG_OBJECT (self,
                          "Queueing buffer %d: size %d time %" G_GINT64_FORMAT " flags 0x%08x",
                          idx, buffer_info.size, buffer_info.presentation_time_us,
                          buffer_info.flags);
        if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info))
            goto queue_error;
    }
    gst_buffer_unmap (inbuf, &minfo);
    gst_buffer_unref (inbuf);

    return self->downstream_flow_ret;

downstream_error:
    {
        GST_ERROR_OBJECT (self, "Downstream returned %s",
                          gst_flow_get_name (self->downstream_flow_ret));
        if (minfo.data)
            gst_buffer_unmap (inbuf, &minfo);
        if (inbuf)
            gst_buffer_unref (inbuf);
        return self->downstream_flow_ret;
    }
invalid_buffer_index:
    {
        GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
                           ("Invalid input buffer index %d of %d", idx, self->n_input_buffers));
        if (minfo.data)
            gst_buffer_unmap (inbuf, &minfo);
        if (inbuf)
            gst_buffer_unref (inbuf);
        return GST_FLOW_ERROR;
    }
dequeue_error:
    {
        GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
                           ("Failed to dequeue input buffer"));
        if (minfo.data)
            gst_buffer_unmap (inbuf, &minfo);
        if (inbuf)
            gst_buffer_unref (inbuf);
        return GST_FLOW_ERROR;
    }
queue_error:
    {
        GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
                           ("Failed to queue input buffer"));
        if (minfo.data)
            gst_buffer_unmap (inbuf, &minfo);
        if (inbuf)
            gst_buffer_unref (inbuf);
        return GST_FLOW_ERROR;
    }
flushing:
    {
        GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING");
        if (minfo.data)
            gst_buffer_unmap (inbuf, &minfo);
        if (inbuf)
            gst_buffer_unref (inbuf);
        return GST_FLOW_FLUSHING;
    }
}
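
When the decoder above feeds one input buffer to the codec in several chunks, it interpolates a per-chunk timestamp by linear proportion of the byte offset, then converts to the microseconds the codec expects. As a sketch:

#include <stdint.h>

/* Presentation time (us) handed to the codec for the chunk starting at
 * byte `offset` of a buffer with pts `pts_ns` (ns), total `size` bytes
 * and known `duration_ns`; mirrors the gst_util_uint64_scale use above. */
static int64_t
chunk_pts_us (uint64_t pts_ns, uint64_t offset, uint64_t duration_ns,
    uint64_t size)
{
  uint64_t ts_off = (offset && size) ? offset * duration_ns / size : 0;

  return (int64_t) ((pts_ns + ts_off) / 1000);  /* ns -> us (GST_USECOND) */
}
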
static int
gst_wavpack_enc_push_block (void *id, void *data, int32_t count)
{
  GstWavpackEncWriteID *wid = (GstWavpackEncWriteID *) id;
  GstWavpackEnc *enc = GST_WAVPACK_ENC (wid->wavpack_enc);
  GstFlowReturn *flow;
  GstBuffer *buffer;
  GstPad *pad;
  guchar *block = (guchar *) data;

  pad = (wid->correction) ? enc->wvcsrcpad : enc->srcpad;
  flow = (wid->correction) ?
      &enc->wvcsrcpad_last_return : &enc->srcpad_last_return;

  *flow = gst_pad_alloc_buffer_and_set_caps (pad, GST_BUFFER_OFFSET_NONE,
      count, GST_PAD_CAPS (pad), &buffer);

  if (*flow != GST_FLOW_OK) {
    GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",
        GST_DEBUG_PAD_NAME (pad), gst_flow_get_name (*flow));
    return FALSE;
  }

  g_memmove (GST_BUFFER_DATA (buffer), block, count);

  if (count > sizeof (WavpackHeader) && memcmp (block, "wvpk", 4) == 0) {
    /* if it's a Wavpack block set buffer timestamp and duration, etc */
    WavpackHeader wph;

    GST_LOG_OBJECT (enc, "got %d bytes of encoded wavpack %sdata",
        count, (wid->correction) ? "correction " : "");

    gst_wavpack_read_header (&wph, block);

    /* Only set when pushing the first buffer again, in that case
     * we don't want to delay the buffer or push newsegment events
     */
    if (!wid->passthrough) {
      /* Only push complete blocks */
      if (enc->pending_buffer == NULL) {
        enc->pending_buffer = buffer;
        enc->pending_offset = wph.block_index;
      } else if (enc->pending_offset == wph.block_index) {
        enc->pending_buffer = gst_buffer_join (enc->pending_buffer, buffer);
      } else {
        GST_ERROR ("Got incomplete block, dropping");
        gst_buffer_unref (enc->pending_buffer);
        enc->pending_buffer = buffer;
        enc->pending_offset = wph.block_index;
      }

      if (!(wph.flags & FINAL_BLOCK))
        return TRUE;

      buffer = enc->pending_buffer;
      enc->pending_buffer = NULL;
      enc->pending_offset = 0;

      /* if it's the first wavpack block, send a NEW_SEGMENT event */
      if (wph.block_index == 0) {
        gst_pad_push_event (pad,
            gst_event_new_new_segment (FALSE,
                1.0, GST_FORMAT_TIME, 0, GST_BUFFER_OFFSET_NONE, 0));

        /* save header for later reference, so we can re-send it later on
         * EOS with fixed up values for total sample count etc. */
        if (enc->first_block == NULL && !wid->correction) {
          enc->first_block =
              g_memdup (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
          enc->first_block_size = GST_BUFFER_SIZE (buffer);
        }
      }
    }

    /* set buffer timestamp, duration, offset, offset_end from
     * the wavpack header */
    GST_BUFFER_TIMESTAMP (buffer) = enc->timestamp_offset +
        gst_util_uint64_scale_int (GST_SECOND, wph.block_index,
        enc->samplerate);
    GST_BUFFER_DURATION (buffer) =
        gst_util_uint64_scale_int (GST_SECOND, wph.block_samples,
        enc->samplerate);
    GST_BUFFER_OFFSET (buffer) = wph.block_index;
    GST_BUFFER_OFFSET_END (buffer) = wph.block_index + wph.block_samples;
  } else {
    /* if it's something else set no timestamp and duration on the buffer */
    GST_DEBUG_OBJECT (enc, "got %d bytes of unknown data", count);

    GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
  }

  /* push the buffer and forward errors */
  GST_DEBUG_OBJECT (enc, "pushing buffer with %d bytes",
      GST_BUFFER_SIZE (buffer));
  *flow = gst_pad_push (pad, buffer);

  if (*flow != GST_FLOW_OK) {
    GST_WARNING_OBJECT (enc, "flow on %s:%s = %s",
        GST_DEBUG_PAD_NAME (pad), gst_flow_get_name (*flow));
    return FALSE;
  }

  return TRUE;
}
Example #18
0
static GstFlowReturn
pad_chain (GstPad *pad,
           GstBuffer *buf)
{
    GOmxCore *gomx;
    GOmxPort *in_port;
    GstOmxBaseFilter *self;
    GstFlowReturn ret = GST_FLOW_OK;

    self = GST_OMX_BASE_FILTER (GST_OBJECT_PARENT (pad));

    gomx = self->gomx;

    GST_LOG_OBJECT (self, "begin");
    GST_LOG_OBJECT (self, "gst_buffer: size=%u", GST_BUFFER_SIZE (buf));

    GST_LOG_OBJECT (self, "state: %d", gomx->omx_state);

    if (G_UNLIKELY (gomx->omx_state == OMX_StateLoaded))
    {
        g_mutex_lock (self->ready_lock);

        GST_INFO_OBJECT (self, "omx: prepare");

        /** @todo this should probably go after doing preparations. */
        if (self->omx_setup)
        {
            self->omx_setup (self);
        }

        setup_ports (self);

        g_omx_core_prepare (self->gomx);

        if (gomx->omx_state == OMX_StateIdle)
        {
            self->ready = TRUE;
            gst_pad_start_task (self->srcpad, output_loop, self->srcpad);
        }

        g_mutex_unlock (self->ready_lock);

        if (gomx->omx_state != OMX_StateIdle)
            goto out_flushing;
    }

    in_port = self->in_port;

    if (G_LIKELY (in_port->enabled))
    {
        guint buffer_offset = 0;

        if (G_UNLIKELY (gomx->omx_state == OMX_StateIdle))
        {
            GST_INFO_OBJECT (self, "omx: play");
            g_omx_core_start (gomx);

            if (gomx->omx_state != OMX_StateExecuting)
                goto out_flushing;

            /* send buffer with codec data flag */
            /** @todo move to util */
            if (self->codec_data)
            {
                OMX_BUFFERHEADERTYPE *omx_buffer;

                GST_LOG_OBJECT (self, "request buffer");
                omx_buffer = g_omx_port_request_buffer (in_port);

                if (G_LIKELY (omx_buffer))
                {
                    omx_buffer->nFlags |= 0x00000080; /* OMX_BUFFERFLAG_CODECCONFIG */

                    omx_buffer->nFilledLen = GST_BUFFER_SIZE (self->codec_data);
                    memcpy (omx_buffer->pBuffer + omx_buffer->nOffset, GST_BUFFER_DATA (self->codec_data), omx_buffer->nFilledLen);

                    GST_LOG_OBJECT (self, "release_buffer");
                    g_omx_port_release_buffer (in_port, omx_buffer);
                }
            }
        }

        if (G_UNLIKELY (gomx->omx_state != OMX_StateExecuting))
        {
            GST_ERROR_OBJECT (self, "OMX component not in Executing state");
        }

        while (G_LIKELY (buffer_offset < GST_BUFFER_SIZE (buf)))
        {
            OMX_BUFFERHEADERTYPE *omx_buffer;

            if (self->last_pad_push_return != GST_FLOW_OK ||
                !(gomx->omx_state == OMX_StateExecuting ||
                  gomx->omx_state == OMX_StatePause))
            {
                goto out_flushing;
            }

            GST_LOG_OBJECT (self, "request buffer");
            omx_buffer = g_omx_port_request_buffer (in_port);

            GST_LOG_OBJECT (self, "omx_buffer: %p", omx_buffer);

            if (G_LIKELY (omx_buffer))
            {
                GST_DEBUG_OBJECT (self, "omx_buffer: size=%lu, len=%lu, flags=%lu, offset=%lu, timestamp=%lld",
                                  omx_buffer->nAllocLen, omx_buffer->nFilledLen, omx_buffer->nFlags,
                                  omx_buffer->nOffset, omx_buffer->nTimeStamp);

                if (omx_buffer->nOffset == 0 &&
                    self->share_input_buffer)
                {
                    {
                        GstBuffer *old_buf;
                        old_buf = omx_buffer->pAppPrivate;

                        if (old_buf)
                        {
                            gst_buffer_unref (old_buf);
                        }
                        else if (omx_buffer->pBuffer)
                        {
                            g_free (omx_buffer->pBuffer);
                        }
                    }

                    omx_buffer->pBuffer = GST_BUFFER_DATA (buf);
                    omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf);
                    omx_buffer->nFilledLen = GST_BUFFER_SIZE (buf);
                    omx_buffer->pAppPrivate = buf;
                }
                else
                {
                    omx_buffer->nFilledLen = MIN (GST_BUFFER_SIZE (buf) - buffer_offset,
                                                  omx_buffer->nAllocLen - omx_buffer->nOffset);
                    memcpy (omx_buffer->pBuffer + omx_buffer->nOffset, GST_BUFFER_DATA (buf) + buffer_offset, omx_buffer->nFilledLen);
                }

                if (self->use_timestamps)
                {
                    GstClockTime timestamp_offset = 0;

                    if (buffer_offset && GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE)
                    {
                        timestamp_offset = gst_util_uint64_scale_int (buffer_offset,
                                                                      GST_BUFFER_DURATION (buf),
                                                                      GST_BUFFER_SIZE (buf));
                    }

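                    /* convert the (possibly interpolated) timestamp from
                     * GStreamer nanoseconds to OMX ticks */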
                    omx_buffer->nTimeStamp = gst_util_uint64_scale_int (GST_BUFFER_TIMESTAMP (buf) + timestamp_offset,
                                                                        OMX_TICKS_PER_SECOND,
                                                                        GST_SECOND);
                }

                buffer_offset += omx_buffer->nFilledLen;

                GST_LOG_OBJECT (self, "release_buffer");
                /** @todo untaint buffer */
                g_omx_port_release_buffer (in_port, omx_buffer);
            }
            else
            {
                GST_WARNING_OBJECT (self, "null buffer");
                ret = GST_FLOW_WRONG_STATE;
                goto out_flushing;
            }
        }
    }
    else
    {
        GST_WARNING_OBJECT (self, "done");
        ret = GST_FLOW_UNEXPECTED;
    }

    if (!self->share_input_buffer)
    {
        gst_buffer_unref (buf);
    }

leave:

    GST_LOG_OBJECT (self, "end");

    return ret;

    /* special conditions */
out_flushing:
    {
        const gchar *error_msg = NULL;

        if (gomx->omx_error)
        {
            error_msg = "Error from OpenMAX component";
        }
        else if (gomx->omx_state != OMX_StateExecuting &&
                 gomx->omx_state != OMX_StatePause)
        {
            error_msg = "OpenMAX component in wrong state";
        }

        if (error_msg)
        {
            GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL), (error_msg));
            ret = GST_FLOW_ERROR;
        }

        gst_buffer_unref (buf);

        goto leave;
    }
}
/* Pipeline Callbacks */
static gboolean
probe_cb (InsanityGstTest * ptest, GstPad * pad, GstMiniObject * object,
    gpointer userdata)
{
  InsanityTest *test = INSANITY_TEST (ptest);

  global_last_probe = g_get_monotonic_time ();

  DECODER_TEST_LOCK ();
  if (GST_IS_BUFFER (object)) {
    GstBuffer *buf;
    GstClockTime ts;

    buf = GST_BUFFER (object);
    ts = GST_BUFFER_PTS (buf);

    /* First check clipping */
    if (glob_testing_parser == FALSE && GST_CLOCK_TIME_IS_VALID (ts) &&
        glob_waiting_segment == FALSE) {
      GstClockTime ts_end, cstart, cstop;

      /* Check if buffer is completely outside the segment */
      ts_end = ts;
      if (GST_BUFFER_DURATION_IS_VALID (buf))
        ts_end += GST_BUFFER_DURATION (buf);

      if (!gst_segment_clip (&glob_last_segment,
              glob_last_segment.format, ts, ts_end, &cstart, &cstop)) {
        char *msg = g_strdup_printf ("Got timestamp %" GST_TIME_FORMAT " -- %"
            GST_TIME_FORMAT ", outside configured segment (%" GST_TIME_FORMAT
            " -- %" GST_TIME_FORMAT "), method %s",
            GST_TIME_ARGS (ts), GST_TIME_ARGS (ts_end),
            GST_TIME_ARGS (glob_last_segment.start),
            GST_TIME_ARGS (glob_last_segment.stop),
            test_get_name (glob_in_progress));
        insanity_test_validate_checklist_item (INSANITY_TEST (ptest),
            "segment-clipping", FALSE, msg);
        g_free (msg);
        glob_bad_segment_clipping = TRUE;
      }
    }

    switch (glob_in_progress) {
      case TEST_NONE:
        if (glob_waiting_first_segment == TRUE)
          insanity_test_validate_checklist_item (test, "first-segment",
              FALSE, "Got a buffer before the first segment");

        /* Got the first buffer, starting testing dance */
        next_test (test);
        break;
      case TEST_POSITION:
        test_position (test, buf);
        break;
      case TEST_FAST_FORWARD:
      case TEST_BACKWARD_PLAYBACK:
      case TEST_FAST_BACKWARD:
      {
        gint64 stime_ts;

        if (GST_CLOCK_TIME_IS_VALID (ts) == FALSE ||
            glob_waiting_segment == TRUE) {
          break;
        }

        stime_ts = gst_segment_to_stream_time (&glob_last_segment,
            glob_last_segment.format, ts);

        if (GST_CLOCK_TIME_IS_VALID (glob_seek_first_buf_ts) == FALSE) {
          GstClockTime expected_ts =
              gst_segment_to_stream_time (&glob_last_segment,
              glob_last_segment.format,
              glob_seek_rate <
              0 ? glob_seek_stop_ts : glob_seek_segment_seektime);

          GstClockTimeDiff diff = ABS (GST_CLOCK_DIFF (stime_ts, expected_ts));

          if (diff > SEEK_THRESHOLD) {
            gchar *valmsg =
                g_strdup_printf ("Received buffer timestamp %" GST_TIME_FORMAT
                " Seeek wanted %" GST_TIME_FORMAT "",
                GST_TIME_ARGS (stime_ts),
                GST_TIME_ARGS (expected_ts));

            validate_current_test (test, FALSE, valmsg);
            next_test (test);

            g_free (valmsg);
          } else
            glob_seek_first_buf_ts = stime_ts;

        } else {
          GstClockTimeDiff diff =
              GST_CLOCK_DIFF (stime_ts, glob_seek_first_buf_ts);

          if (diff < 0)
            diff = -diff;

          if (diff >= glob_playback_duration * GST_SECOND) {
            validate_current_test (test, TRUE, NULL);
            next_test (test);
          }
        }
        break;
      }
      default:
        break;
    }

  } else if (GST_IS_EVENT (object)) {
    GstEvent *event = GST_EVENT (object);
    guint seqnum = gst_event_get_seqnum (event);

    if (G_LIKELY (glob_seqnum_found == FALSE) && seqnum == glob_seqnum)
      glob_seqnum_found = TRUE;

    if (glob_seqnum_found == TRUE && seqnum != glob_seqnum) {
      gchar *message = g_strdup_printf ("Current seqnum %u != "
          "received %u", glob_seqnum, seqnum);

      insanity_test_validate_checklist_item (test, "seqnum-management",
          FALSE, message);

      glob_wrong_seqnum = TRUE;
      g_free (message);
    }

    switch (GST_EVENT_TYPE (event)) {
      case GST_EVENT_SEGMENT:
      {
        gst_event_copy_segment (event, &glob_last_segment);

        if (glob_waiting_segment == FALSE)
          /* Cache the segment as it will be our reference but don't look
           * further */
          goto done;

        glob_last_segment_start_time = glob_last_segment.start;
        if (glob_waiting_first_segment == TRUE) {
          insanity_test_validate_checklist_item (test, "first-segment", TRUE,
              NULL);

          glob_waiting_first_segment = FALSE;
        } else if (glob_in_progress >= TEST_FAST_FORWARD &&
            glob_in_progress <= TEST_FAST_BACKWARD) {
          GstClockTimeDiff diff;
          gboolean valid_stop = TRUE;
          GstClockTimeDiff wdiff, rdiff;

          rdiff =
              ABS (GST_CLOCK_DIFF (glob_last_segment.stop,
                  glob_last_segment.start)) * ABS (glob_last_segment.rate *
              glob_last_segment.applied_rate);
          wdiff =
              ABS (GST_CLOCK_DIFF (glob_seek_stop_ts,
                  glob_seek_segment_seektime));

          diff =
              GST_CLOCK_DIFF (glob_last_segment.position,
              glob_seek_segment_seektime);
          if (diff < 0)
            diff = -diff;

          /* Now compare with the expected segment */
          if ((glob_last_segment.rate * glob_last_segment.applied_rate) ==
              glob_seek_rate && diff <= SEEK_THRESHOLD && valid_stop) {
            glob_seek_got_segment = TRUE;
          } else {
            GstClockTime stopdiff = ABS (GST_CLOCK_DIFF (rdiff, wdiff));

            gchar *validate_msg =
                g_strdup_printf ("Wrong segment received, Rate %f expected "
                "%f, start time diff %" GST_TIME_FORMAT " stop diff %"
                GST_TIME_FORMAT,
                (glob_last_segment.rate * glob_last_segment.applied_rate),
                glob_seek_rate,
                GST_TIME_ARGS (diff), GST_TIME_ARGS (stopdiff));

            validate_current_test (test, FALSE, validate_msg);
            next_test (test);
            g_free (validate_msg);
          }
        }

        glob_waiting_segment = FALSE;
        break;
      }
      default:
        break;
    }
  }

done:
  DECODER_TEST_UNLOCK ();
  return TRUE;
}
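/* Aside: the clipping test above, reduced to a helper. gst_segment_clip()
 * returns FALSE when [ts, ts_end) lies entirely outside the configured
 * segment. A hedged sketch; the helper name is illustrative only. */
static gboolean
buffer_in_segment (const GstSegment * segment, GstClockTime ts,
    GstClockTime duration)
{
  guint64 cstart, cstop;
  GstClockTime ts_end = ts;

  /* extend the start timestamp by the duration, when one is known */
  if (GST_CLOCK_TIME_IS_VALID (duration))
    ts_end += duration;

  return gst_segment_clip (segment, segment->format, ts, ts_end,
      &cstart, &cstop);
}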
Example #20
0
static GstFlowReturn
gst_vaapidecode_step(GstVaapiDecode *decode)
{
    GstVaapiSurfaceProxy *proxy;
    GstVaapiDecoderStatus status;
    GstBuffer *buffer;
    GstFlowReturn ret;
    GstClockTime timestamp;
    gint64 end_time;

    for (;;) {
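        /* absolute deadline (monotonic microseconds) for waiting on a free
         * VA surface: one second past the last buffer's render time */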
        end_time = decode->render_time_base;
        if (!end_time)
            end_time = g_get_monotonic_time();
        end_time += GST_TIME_AS_USECONDS(decode->last_buffer_time);
        end_time += G_TIME_SPAN_SECOND;

        proxy = gst_vaapi_decoder_get_surface(decode->decoder, &status);
        if (!proxy) {
            if (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE) {
                gboolean was_signalled;
                g_mutex_lock(decode->decoder_mutex);
                was_signalled = g_cond_wait_until(
                    decode->decoder_ready,
                    decode->decoder_mutex,
                    end_time
                );
                g_mutex_unlock(decode->decoder_mutex);
                if (was_signalled)
                    continue;
                goto error_decode_timeout;
            }
            if (status != GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA)
                goto error_decode;
            /* More data is needed */
            break;
        }

        g_object_weak_ref(
            G_OBJECT(proxy),
            (GWeakNotify)gst_vaapidecode_release,
            decode
        );

        buffer = gst_vaapi_video_buffer_new(decode->display);
        if (!buffer)
            goto error_create_buffer;

        timestamp = GST_VAAPI_SURFACE_PROXY_TIMESTAMP(proxy);
        if (!decode->render_time_base)
            decode->render_time_base = g_get_monotonic_time();
        decode->last_buffer_time = timestamp;

        GST_BUFFER_TIMESTAMP(buffer) = timestamp;
        GST_BUFFER_DURATION(buffer) = GST_VAAPI_SURFACE_PROXY_DURATION(proxy);
        gst_buffer_set_caps(buffer, GST_PAD_CAPS(decode->srcpad));

        if (GST_VAAPI_SURFACE_PROXY_TFF(proxy))
            GST_BUFFER_FLAG_SET(buffer, GST_VIDEO_BUFFER_TFF);

        gst_vaapi_video_buffer_set_surface_proxy(
            GST_VAAPI_VIDEO_BUFFER(buffer),
            proxy
        );

        ret = gst_pad_push(decode->srcpad, buffer);
        if (ret != GST_FLOW_OK)
            goto error_commit_buffer;

        g_object_unref(proxy);
    }
    return GST_FLOW_OK;

    /* ERRORS */
error_decode_timeout:
    {
        GST_DEBUG("decode timeout. Decoder required a VA surface but none "
                  "got available within one second");
        return GST_FLOW_UNEXPECTED;
    }
error_decode:
    {
        GST_DEBUG("decode error %d", status);
        switch (status) {
        case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC:
        case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE:
        case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:
            ret = GST_FLOW_NOT_SUPPORTED;
            break;
        default:
            ret = GST_FLOW_UNEXPECTED;
            break;
        }
        return ret;
    }
error_create_buffer:
    {
        const GstVaapiID surface_id =
            gst_vaapi_surface_get_id(GST_VAAPI_SURFACE_PROXY_SURFACE(proxy));

        GST_DEBUG("video sink failed to create video buffer for proxy'ed "
                  "surface %" GST_VAAPI_ID_FORMAT " (error %d)",
                  GST_VAAPI_ID_ARGS(surface_id), ret);
        g_object_unref(proxy);
        return GST_FLOW_UNEXPECTED;
    }
error_commit_buffer:
    {
        GST_DEBUG("video sink rejected the video buffer (error %d)", ret);
        g_object_unref(proxy);
        return GST_FLOW_UNEXPECTED;
    }
}
static GstFlowReturn
gst_rtp_celt_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstFlowReturn ret;
  GstRtpCELTPay *rtpceltpay;
  gsize payload_len;
  GstMapInfo map;
  GstClockTime duration, packet_dur;
  guint i, ssize, packet_len;

  rtpceltpay = GST_RTP_CELT_PAY (basepayload);

  ret = GST_FLOW_OK;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  switch (rtpceltpay->packet) {
    case 0:
      /* ident packet. We need to parse the headers to construct the RTP
       * properties. */
      if (!gst_rtp_celt_pay_parse_ident (rtpceltpay, map.data, map.size))
        goto parse_error;

      goto cleanup;
    case 1:
      /* comment packet, we ignore it */
      goto cleanup;
    default:
      /* other packets go in the payload */
      break;
  }
  gst_buffer_unmap (buffer, &map);
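  /* map.data must not be touched past this point, but map.size is a plain
   * struct field and still holds the buffer size used below */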

  duration = GST_BUFFER_DURATION (buffer);

  GST_LOG_OBJECT (rtpceltpay,
      "got buffer of duration %" GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT,
      GST_TIME_ARGS (duration), map.size);

  /* calculate the size of the size field and the payload */
  ssize = 1;
  for (i = map.size; i > 0xff; i -= 0xff)
    ssize++;

  GST_DEBUG_OBJECT (rtpceltpay, "bytes for size %u", ssize);

  /* calculate what the new size and duration would be of the packet */
  payload_len = ssize + map.size + rtpceltpay->bytes + rtpceltpay->sbytes;
  if (rtpceltpay->qduration != -1 && duration != -1)
    packet_dur = rtpceltpay->qduration + duration;
  else
    packet_dur = 0;

  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);

  if (gst_rtp_base_payload_is_filled (basepayload, packet_len, packet_dur)) {
    /* size or duration would overflow the packet, flush the queued data */
    ret = gst_rtp_celt_pay_flush_queued (rtpceltpay);
  }

  /* queue the packet */
  gst_rtp_celt_pay_add_queued (rtpceltpay, buffer, ssize, map.size, duration);

done:
  rtpceltpay->packet++;

  return ret;

  /* ERRORS */
cleanup:
  {
    gst_buffer_unmap (buffer, &map);
    goto done;
  }
parse_error:
  {
    GST_ELEMENT_ERROR (rtpceltpay, STREAM, DECODE, (NULL),
        ("Error parsing first identification packet."));
    gst_buffer_unmap (buffer, &map);
    return GST_FLOW_ERROR;
  }
}
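/* Aside: the ssize loop above sizes the Speex/CELT-style length field, in
 * which every 0xff byte means "add 255 and keep reading". The matching
 * writer would look roughly like this; the helper name is hypothetical,
 * not taken from the payloader. */
static guint
write_size_field (guint8 * dest, gsize size)
{
  guint written = 0;

  /* emit 0xff for every full 255 bytes, then the remainder */
  while (size > 0xff) {
    dest[written++] = 0xff;
    size -= 0xff;
  }
  dest[written++] = (guint8) size;

  return written;
}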
Example #22
0
static GstFlowReturn
gst_visual_chain (GstPad * pad, GstBuffer * buffer)
{
  GstBuffer *outbuf = NULL;
  guint i;
  GstVisual *visual = GST_VISUAL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
  }

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    const guint16 *data;
    guint64 dist, timestamp;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least 512 samples */
    if (avail < 512 * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    /* get timestamp of the current adapter byte */
    timestamp = gst_adapter_prev_timestamp (visual->adapter, &dist);
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      /* convert bytes to time */
      dist /= visual->bps;
      timestamp += gst_util_uint64_scale_int (dist, GST_SECOND, visual->rate);
    }

    if (timestamp != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment, GST_FORMAT_TIME,
          timestamp);

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* Read 512 samples per channel */
    data =
        (const guint16 *) gst_adapter_peek (visual->adapter, 512 * visual->bps);

#if defined(VISUAL_API_VERSION) && VISUAL_API_VERSION >= 4000 && VISUAL_API_VERSION < 5000
    {
      VisBuffer *lbuf, *rbuf;
      guint16 ldata[512], rdata[512];
      VisAudioSampleRateType rate;

      lbuf = visual_buffer_new_with_buffer (ldata, sizeof (ldata), NULL);
      rbuf = visual_buffer_new_with_buffer (rdata, sizeof (rdata), NULL);

      if (visual->channels == 2) {
        for (i = 0; i < 512; i++) {
          ldata[i] = *data++;
          rdata[i] = *data++;
        }
      } else {
        for (i = 0; i < 512; i++) {
          ldata[i] = *data;
          rdata[i] = *data++;
        }
      }

      switch (visual->rate) {
        case 8000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_8000;
          break;
        case 11250:
          rate = VISUAL_AUDIO_SAMPLE_RATE_11250;
          break;
        case 22500:
          rate = VISUAL_AUDIO_SAMPLE_RATE_22500;
          break;
        case 32000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_32000;
          break;
        case 44100:
          rate = VISUAL_AUDIO_SAMPLE_RATE_44100;
          break;
        case 48000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_48000;
          break;
        case 96000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_96000;
          break;
        default:
          visual_object_unref (VISUAL_OBJECT (lbuf));
          visual_object_unref (VISUAL_OBJECT (rbuf));
          GST_ERROR_OBJECT (visual, "unsupported rate %d", visual->rate);
          ret = GST_FLOW_ERROR;
          goto beach;
          break;
      }

      visual_audio_samplepool_input_channel (visual->audio->samplepool,
          lbuf,
          rate, VISUAL_AUDIO_SAMPLE_FORMAT_S16,
          (char *) VISUAL_AUDIO_CHANNEL_LEFT);
      visual_audio_samplepool_input_channel (visual->audio->samplepool, rbuf,
          rate, VISUAL_AUDIO_SAMPLE_FORMAT_S16,
          (char *) VISUAL_AUDIO_CHANNEL_RIGHT);

      visual_object_unref (VISUAL_OBJECT (lbuf));
      visual_object_unref (VISUAL_OBJECT (rbuf));

    }
#else
    if (visual->channels == 2) {
      for (i = 0; i < 512; i++) {
        visual->audio->plugpcm[0][i] = *data++;
        visual->audio->plugpcm[1][i] = *data++;
      }
    } else {
      for (i = 0; i < 512; i++) {
        visual->audio->plugpcm[0][i] = *data;
        visual->audio->plugpcm[1][i] = *data++;
      }
    }
#endif

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }
    visual_video_set_buffer (visual->video, GST_BUFFER_DATA (outbuf));
    visual_audio_analyze (visual->audio);
    visual_actor_run (visual->actor, visual->audio);
    visual_video_set_buffer (visual->video, NULL);
    GST_DEBUG_OBJECT (visual, "rendered one frame");

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    ret = gst_pad_push (visual->srcpad, outbuf);
    outbuf = NULL;

  skip:
    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:

  if (outbuf != NULL)
    gst_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
Example #23
0
static void
gst_frame_store_task (GstPad *pad)
{
  GstFrameStore *fs;
  GstBuffer *buffer = NULL;
  GstEvent *event = NULL;

  fs = GST_FRAME_STORE (gst_pad_get_parent (pad));

  GST_DEBUG("task");

  g_mutex_lock (fs->lock);
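  /* wait until the requested frame is available; when stepping, only fetch
   * again once frame_number differs from the last frame pushed */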
  while (1) {
    if (fs->stepping == FALSE || (fs->frame_number != fs->pushed_frame_number)) {
      buffer = gst_frame_store_get_frame (fs, fs->frame_number);
    }
    if (buffer) break;
    g_cond_wait (fs->cond, fs->lock);
  }
  if (fs->need_newsegment) {
    GstClock *clock;
    GstClockTime now;
    GstClockTime stream_time;

    clock = GST_ELEMENT_CLOCK (fs);
    if (clock == NULL) {
      now = 0;
      stream_time = 0;
    } else {
      now = gst_clock_get_time (GST_ELEMENT_CLOCK (fs));
      stream_time = now - GST_ELEMENT(fs)->base_time;
    }
    GST_ERROR("now %lld buffer %lld stream_time %lld",
        now, GST_BUFFER_TIMESTAMP(buffer), stream_time);
    stream_time = GST_SECOND*10;
    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
        GST_BUFFER_TIMESTAMP(buffer), -1, stream_time);
    fs->need_newsegment = FALSE;
  }
  if (fs->stepping) {
    buffer = gst_buffer_make_metadata_writable (buffer);
    GST_BUFFER_TIMESTAMP(buffer) = -1;
    GST_BUFFER_DURATION(buffer) = -1;
  }
  fs->pushed_frame_number = fs->frame_number;
  if (!fs->stepping) {
    fs->frame_number++;
  }
  if (fs->frame_number + 1 >= fs->range_offset + fs->range_size) {
    gst_frame_store_advance (fs);
  }
  g_mutex_unlock (fs->lock);

  if (event) {
    gst_pad_push_event (fs->srcpad, event);
  }

  gst_pad_push (fs->srcpad, buffer);

  GST_DEBUG("task done");

  gst_object_unref (fs);
}
Example #24
0
static GstFlowReturn
gst_ladspa_source_type_fill (GstBaseSrc * base, guint64 offset,
    guint length, GstBuffer * buffer)
{
  GstLADSPASource *ladspa = GST_LADSPA_SOURCE (base);
  GstClockTime next_time;
  gint64 next_sample, next_byte;
  gint bytes, samples;
  GstElementClass *eclass;
  GstMapInfo map;
  gint samplerate, bpf;

  /* example for tagging generated data */
  if (!ladspa->tags_pushed) {
    GstTagList *taglist;

    taglist = gst_tag_list_new (GST_TAG_DESCRIPTION, "ladspa wave", NULL);

    eclass = GST_ELEMENT_CLASS (gst_ladspa_source_type_parent_class);
    if (eclass->send_event)
      eclass->send_event (GST_ELEMENT (base), gst_event_new_tag (taglist));
    else
      gst_tag_list_unref (taglist);
    ladspa->tags_pushed = TRUE;
  }

  if (ladspa->eos_reached) {
    GST_INFO_OBJECT (ladspa, "eos");
    return GST_FLOW_EOS;
  }

  samplerate = GST_AUDIO_INFO_RATE (&ladspa->info);
  bpf = GST_AUDIO_INFO_BPF (&ladspa->info);

  /* if no length was given, use our default length in samples otherwise convert
   * the length in bytes to samples. */
  if (length == -1)
    samples = ladspa->samples_per_buffer;
  else
    samples = length / bpf;

  /* if no offset was given, use our next logical byte */
  if (offset == -1)
    offset = ladspa->next_byte;

  /* now see if we are at the byteoffset we think we are */
  if (offset != ladspa->next_byte) {
    GST_DEBUG_OBJECT (ladspa, "seek to new offset %" G_GUINT64_FORMAT, offset);
    /* we have a discont in the expected sample offset, do a 'seek' */
    ladspa->next_sample = offset / bpf;
    ladspa->next_time =
        gst_util_uint64_scale_int (ladspa->next_sample, GST_SECOND, samplerate);
    ladspa->next_byte = offset;
  }

  /* check for eos */
  if (ladspa->check_seek_stop &&
      (ladspa->sample_stop > ladspa->next_sample) &&
      (ladspa->sample_stop < ladspa->next_sample + samples)
      ) {
    /* calculate only partial buffer */
    ladspa->generate_samples_per_buffer =
        ladspa->sample_stop - ladspa->next_sample;
    next_sample = ladspa->sample_stop;
    ladspa->eos_reached = TRUE;
  } else {
    /* calculate full buffer */
    ladspa->generate_samples_per_buffer = samples;
    next_sample =
        ladspa->next_sample + (ladspa->reverse ? (-samples) : samples);
  }

  bytes = ladspa->generate_samples_per_buffer * bpf;

  next_byte = ladspa->next_byte + (ladspa->reverse ? (-bytes) : bytes);
  next_time = gst_util_uint64_scale_int (next_sample, GST_SECOND, samplerate);

  GST_LOG_OBJECT (ladspa, "samplerate %d", samplerate);
  GST_LOG_OBJECT (ladspa,
      "next_sample %" G_GINT64_FORMAT ", ts %" GST_TIME_FORMAT, next_sample,
      GST_TIME_ARGS (next_time));

  gst_buffer_set_size (buffer, bytes);

  GST_BUFFER_OFFSET (buffer) = ladspa->next_sample;
  GST_BUFFER_OFFSET_END (buffer) = next_sample;
  if (!ladspa->reverse) {
    GST_BUFFER_TIMESTAMP (buffer) =
        ladspa->timestamp_offset + ladspa->next_time;
    GST_BUFFER_DURATION (buffer) = next_time - ladspa->next_time;
  } else {
    GST_BUFFER_TIMESTAMP (buffer) = ladspa->timestamp_offset + next_time;
    GST_BUFFER_DURATION (buffer) = ladspa->next_time - next_time;
  }

  gst_object_sync_values (GST_OBJECT (ladspa), GST_BUFFER_TIMESTAMP (buffer));

  ladspa->next_time = next_time;
  ladspa->next_sample = next_sample;
  ladspa->next_byte = next_byte;

  GST_LOG_OBJECT (ladspa, "generating %u samples at ts %" GST_TIME_FORMAT,
      ladspa->generate_samples_per_buffer,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));

  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  gst_ladspa_transform (&ladspa->ladspa, map.data,
      ladspa->generate_samples_per_buffer, NULL);
  gst_buffer_unmap (buffer, &map);

  return GST_FLOW_OK;
}
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  gint i;
  GstClock *clock;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  // FIXME: https://bugzilla.gnome.org/show_bug.cgi?id=742916
  // We need to drop late buffers here immediately instead of
  // potentially overflowing the internal queue of the hardware
  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock) {
    GstClockTime clock_running_time, base_time, clock_time, latency,
        max_lateness;

    base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
    clock_time = gst_clock_get_time (clock);
    if (base_time != GST_CLOCK_TIME_NONE && clock_time != GST_CLOCK_TIME_NONE) {
      clock_running_time = clock_time - base_time;
      latency = gst_base_sink_get_latency (GST_BASE_SINK_CAST (self));
      max_lateness = gst_base_sink_get_max_lateness (GST_BASE_SINK_CAST (self));

      if (clock_running_time >
          running_time + running_time_duration + latency + max_lateness) {
        GST_DEBUG_OBJECT (self,
            "Late buffer: %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
            GST_TIME_ARGS (clock_running_time),
            GST_TIME_ARGS (running_time + running_time_duration));

        if (self->last_render_time == GST_CLOCK_TIME_NONE
            || (self->last_render_time < clock_running_time
                && clock_running_time - self->last_render_time >= GST_SECOND)) {
          GST_DEBUG_OBJECT (self,
              "Rendering frame nonetheless because we had none for more than 1s");
          running_time = clock_running_time;
          running_time_duration = 0;
        } else {
          GST_WARNING_OBJECT (self, "Dropping frame");
          gst_object_unref (clock);
          return GST_FLOW_OK;
        }
      }
    }

    gst_object_unref (clock);
  }
  self->last_render_time = running_time;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], bmdFormat8BitYUV,
      bmdFrameFlagDefault, &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
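  /* copy row by row: the GStreamer plane stride and the DeckLink row size
   * may differ; 2 bytes per pixel for bmdFormat8BitYUV */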
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, GST_VIDEO_FRAME_WIDTH (&vframe) * 2);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
Example #26
0
static GstFlowReturn
celt_dec_chain_parse_data (GstCeltDec * dec, GstBuffer * buf,
    GstClockTime timestamp, GstClockTime duration)
{
  GstFlowReturn res = GST_FLOW_OK;
  gint size;
  guint8 *data;
  GstBuffer *outbuf;
  gint16 *out_data;
  gint error = CELT_OK;

  if (timestamp != -1) {
    dec->segment.last_stop = timestamp;
    dec->granulepos = -1;
  }

  if (buf) {
    data = GST_BUFFER_DATA (buf);
    size = GST_BUFFER_SIZE (buf);

    GST_DEBUG_OBJECT (dec, "received buffer of size %u", size);
    if (!GST_BUFFER_TIMESTAMP_IS_VALID (buf)
        && GST_BUFFER_OFFSET_END_IS_VALID (buf)) {
      dec->granulepos = GST_BUFFER_OFFSET_END (buf);
      GST_DEBUG_OBJECT (dec,
          "Taking granulepos from upstream: %" G_GUINT64_FORMAT,
          dec->granulepos);
    }

    /* copy timestamp */
  } else {
    /* concealment data, pass NULL as the bits parameters */
    GST_DEBUG_OBJECT (dec, "creating concealment data");
    data = NULL;
    size = 0;
  }

  res = gst_pad_alloc_buffer_and_set_caps (dec->srcpad,
      GST_BUFFER_OFFSET_NONE, dec->frame_size * dec->header.nb_channels * 2,
      GST_PAD_CAPS (dec->srcpad), &outbuf);

  if (res != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));
    return res;
  }

  out_data = (gint16 *) GST_BUFFER_DATA (outbuf);

  GST_LOG_OBJECT (dec, "decoding frame");

  error = celt_decode (dec->state, data, size, out_data);
  if (error != CELT_OK) {
    GST_WARNING_OBJECT (dec, "Decoding error: %d", error);
    gst_buffer_unref (outbuf);
    return GST_FLOW_ERROR;
  }

  if (dec->granulepos == -1) {
    if (dec->segment.format != GST_FORMAT_TIME) {
      GST_WARNING_OBJECT (dec, "segment not initialized or not TIME format");
      dec->granulepos = dec->frame_size;
    } else {
      dec->granulepos = gst_util_uint64_scale_int (dec->segment.last_stop,
          dec->header.sample_rate, GST_SECOND) + dec->frame_size;
    }
    GST_DEBUG_OBJECT (dec, "granulepos=%" G_GINT64_FORMAT, dec->granulepos);
  }

  GST_BUFFER_OFFSET (outbuf) = dec->granulepos - dec->frame_size;
  GST_BUFFER_OFFSET_END (outbuf) = dec->granulepos;
  GST_BUFFER_TIMESTAMP (outbuf) =
      gst_util_uint64_scale_int (dec->granulepos - dec->frame_size, GST_SECOND,
      dec->header.sample_rate);
  GST_BUFFER_DURATION (outbuf) = dec->frame_duration;

  dec->granulepos += dec->frame_size;
  dec->segment.last_stop += dec->frame_duration;

  GST_LOG_OBJECT (dec, "pushing buffer with ts=%" GST_TIME_FORMAT ", dur=%"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (dec->frame_duration));

  res = gst_pad_push (dec->srcpad, outbuf);

  if (res != GST_FLOW_OK)
    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));

  return res;
}
static GstFlowReturn
gst_dvbsub_overlay_chain_video (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstDVBSubOverlay *overlay = GST_DVBSUB_OVERLAY (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  gint64 start, stop;
  guint64 cstart, cstop;
  gboolean in_seg;
  GstClockTime vid_running_time, vid_running_time_end;

  if (GST_VIDEO_INFO_FORMAT (&overlay->info) == GST_VIDEO_FORMAT_UNKNOWN)
    return GST_FLOW_NOT_NEGOTIATED;

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    goto missing_timestamp;

  start = GST_BUFFER_TIMESTAMP (buffer);

  GST_LOG_OBJECT (overlay,
      "Video segment: %" GST_SEGMENT_FORMAT " --- Subtitle position: %"
      GST_TIME_FORMAT " --- BUFFER: ts=%" GST_TIME_FORMAT,
      &overlay->video_segment,
      GST_TIME_ARGS (overlay->subtitle_segment.position),
      GST_TIME_ARGS (start));

  /* ignore buffers that are outside of the current segment */
  if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
    stop = GST_CLOCK_TIME_NONE;
  } else {
    stop = start + GST_BUFFER_DURATION (buffer);
  }

  in_seg = gst_segment_clip (&overlay->video_segment, GST_FORMAT_TIME,
      start, stop, &cstart, &cstop);
  if (!in_seg) {
    GST_DEBUG_OBJECT (overlay, "Buffer outside configured segment -- dropping");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }

  buffer = gst_buffer_make_writable (buffer);
  GST_BUFFER_TIMESTAMP (buffer) = cstart;
  if (GST_BUFFER_DURATION_IS_VALID (buffer))
    GST_BUFFER_DURATION (buffer) = cstop - cstart;

  vid_running_time =
      gst_segment_to_running_time (&overlay->video_segment, GST_FORMAT_TIME,
      cstart);
  if (GST_BUFFER_DURATION_IS_VALID (buffer))
    vid_running_time_end =
        gst_segment_to_running_time (&overlay->video_segment, GST_FORMAT_TIME,
        cstop);
  else
    vid_running_time_end = vid_running_time;

  GST_DEBUG_OBJECT (overlay, "Video running time: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (vid_running_time));

  overlay->video_segment.position = GST_BUFFER_TIMESTAMP (buffer);

  g_mutex_lock (&overlay->dvbsub_mutex);
  if (!g_queue_is_empty (overlay->pending_subtitles)) {
    DVBSubtitles *tmp, *candidate = NULL;

    while (!g_queue_is_empty (overlay->pending_subtitles)) {
      tmp = g_queue_peek_head (overlay->pending_subtitles);

      if (tmp->pts > vid_running_time_end) {
        /* For a future video frame */
        break;
      } else if (tmp->num_rects == 0) {
        /* Clear screen */
        if (overlay->current_subtitle)
          dvb_subtitles_free (overlay->current_subtitle);
        overlay->current_subtitle = NULL;
        if (candidate)
          dvb_subtitles_free (candidate);
        candidate = NULL;
        g_queue_pop_head (overlay->pending_subtitles);
        dvb_subtitles_free (tmp);
        tmp = NULL;
      } else if (tmp->pts + tmp->page_time_out * GST_SECOND *
          ABS (overlay->subtitle_segment.rate) >= vid_running_time) {
        if (candidate)
          dvb_subtitles_free (candidate);
        candidate = tmp;
        g_queue_pop_head (overlay->pending_subtitles);
      } else {
        /* Too late */
        dvb_subtitles_free (tmp);
        tmp = NULL;
        g_queue_pop_head (overlay->pending_subtitles);
      }
    }

    if (candidate) {
      GST_DEBUG_OBJECT (overlay,
          "Time to show the next subtitle page (%" GST_TIME_FORMAT " >= %"
          GST_TIME_FORMAT ") - it has %u regions",
          GST_TIME_ARGS (vid_running_time), GST_TIME_ARGS (candidate->pts),
          candidate->num_rects);
      dvb_subtitles_free (overlay->current_subtitle);
      overlay->current_subtitle = candidate;
      if (overlay->current_comp)
        gst_video_overlay_composition_unref (overlay->current_comp);
      overlay->current_comp =
          gst_dvbsub_overlay_subs_to_comp (overlay, overlay->current_subtitle);
    }
  }

  /* Check that we haven't hit the fallback timeout for current subtitle page */
  if (overlay->current_subtitle
      && vid_running_time >
      (overlay->current_subtitle->pts +
          overlay->current_subtitle->page_time_out * GST_SECOND *
          ABS (overlay->subtitle_segment.rate))) {
    GST_INFO_OBJECT (overlay,
        "Subtitle page not redefined before fallback page_time_out of %u seconds (missed data?) - deleting current page",
        overlay->current_subtitle->page_time_out);
    dvb_subtitles_free (overlay->current_subtitle);
    overlay->current_subtitle = NULL;
  }

  /* Now render it */
  if (g_atomic_int_get (&overlay->enable) && overlay->current_subtitle) {
    GstVideoFrame frame;

    g_assert (overlay->current_comp);
    if (overlay->attach_compo_to_buffer) {
      GST_DEBUG_OBJECT (overlay, "Attaching overlay image to video buffer");
      gst_buffer_add_video_overlay_composition_meta (buffer,
          overlay->current_comp);
    } else {
      GST_DEBUG_OBJECT (overlay, "Blending overlay image to video buffer");
      gst_video_frame_map (&frame, &overlay->info, buffer, GST_MAP_READWRITE);
      gst_video_overlay_composition_blend (overlay->current_comp, &frame);
      gst_video_frame_unmap (&frame);
    }
  }
  g_mutex_unlock (&overlay->dvbsub_mutex);

  ret = gst_pad_push (overlay->srcpad, buffer);

  return ret;

missing_timestamp:
  {
    GST_WARNING_OBJECT (overlay, "video buffer without timestamp, discarding");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
}
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  gint i;
  GstDecklinkVideoFormat caps_format;
  BMDPixelFormat format;
  gint stride;
  GstVideoTimeCodeMeta *tc_meta;

  GST_DEBUG_OBJECT (self, "Preparing buffer %p", buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
  format = gst_decklink_pixel_format_from_type (caps_format);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  /* See gst_base_sink_adjust_time() */
  latency = gst_base_sink_get_latency (bsink);
  render_delay = gst_base_sink_get_render_delay (bsink);
  ts_offset = gst_base_sink_get_ts_offset (bsink);

  running_time += latency;

  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
      &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08x", ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  stride = MIN (GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0), frame->GetRowBytes());
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, stride);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta) {
    BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
    gchar *tc_str;

    if (((GstVideoTimeCodeFlags) (tc_meta->tc.config.flags)) &
        GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
    else
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
    if (tc_meta->tc.field_count == 2)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);

    tc_str = gst_video_time_code_to_string (&tc_meta->tc);
    ret = frame->SetTimecodeFromComponents (self->timecode_format,
        (uint8_t) tc_meta->tc.hours,
        (uint8_t) tc_meta->tc.minutes,
        (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set timecode %s to video frame: 0x%08x", tc_str, ret);
      flow_ret = GST_FLOW_ERROR;
      g_free (tc_str);
      goto out;
    }
    GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
    g_free (tc_str);
  }

  convert_to_internal_clock (self, &running_time, &running_time_duration);

  if (!self->output->started) {
    GST_LOG_OBJECT (self, "Showing video frame synchronously because PAUSED");
    ret = self->output->output->DisplayVideoFrameSync (frame);
    if (ret != S_OK) {
      GST_ELEMENT_WARNING (self, STREAM, FAILED,
          (NULL), ("Failed to show video frame synchronously: 0x%08x", ret));
      ret = S_OK;
    }
  }

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08x", ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  frame->Release ();

  return flow_ret;
}
Example #29
0
static GstFlowReturn
gst_gdiscreencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf)
{
  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (push_src);
  GstBuffer *new_buf;
  GstFlowReturn res;
  gint new_buf_size;
  GstClock *clock;
  GstClockTime time;
  GstClockTime base_time;

  if (G_UNLIKELY (!src->info.bmiHeader.biWidth ||
          !src->info.bmiHeader.biHeight)) {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before create function"));
    return GST_FLOW_NOT_NEGOTIATED;
  } else if (G_UNLIKELY (src->rate_numerator == 0 && src->frames == 1)) {
    GST_DEBUG_OBJECT (src, "eos: 0 framerate, frame %d", (gint) src->frames);
    return GST_FLOW_UNEXPECTED;
  }

  new_buf_size = GST_ROUND_UP_4 (src->info.bmiHeader.biWidth * 3) *
      (-src->info.bmiHeader.biHeight);

  GST_LOG_OBJECT (src,
      "creating buffer of %lu bytes with %dx%d image for frame %d",
      new_buf_size, src->info.bmiHeader.biWidth,
      -src->info.bmiHeader.biHeight, (gint) src->frames);

  res =
      gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (src),
      GST_BUFFER_OFFSET_NONE, new_buf_size,
      GST_PAD_CAPS (GST_BASE_SRC_PAD (push_src)), &new_buf);
  if (res != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (src, "could not allocate buffer, reason %s",
        gst_flow_get_name (res));
    return res;
  }

  clock = gst_element_get_clock (GST_ELEMENT (src));
  if (clock) {
    /* Calculate sync time. */
    GstClockTime frame_time =
        gst_util_uint64_scale_int (src->frames * GST_SECOND,
        src->rate_denominator, src->rate_numerator);

    time = gst_clock_get_time (clock);
    base_time = gst_element_get_base_time (GST_ELEMENT (src));
    GST_BUFFER_TIMESTAMP (new_buf) = MAX (time - base_time, frame_time);
  } else {
    GST_BUFFER_TIMESTAMP (new_buf) = GST_CLOCK_TIME_NONE;
  }

  /* Do screen capture and put it into buffer... */
  gst_gdiscreencapsrc_screen_capture (src, new_buf);

  if (src->rate_numerator) {
    GST_BUFFER_DURATION (new_buf) =
        gst_util_uint64_scale_int (GST_SECOND,
        src->rate_denominator, src->rate_numerator);
    if (clock) {
      GST_BUFFER_DURATION (new_buf) =
          MAX (GST_BUFFER_DURATION (new_buf),
          gst_clock_get_time (clock) - time);
    }
  } else {
    /* NONE means forever */
    GST_BUFFER_DURATION (new_buf) = GST_CLOCK_TIME_NONE;
  }

  GST_BUFFER_OFFSET (new_buf) = src->frames;
  src->frames++;
  GST_BUFFER_OFFSET_END (new_buf) = src->frames;

  if (clock)
    gst_object_unref (clock);

  *buf = new_buf;
  return GST_FLOW_OK;
}
Example #30
0
static GstFlowReturn
gst_rsvg_dec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (GST_PAD_PARENT (pad));
  gboolean completed = FALSE;
  const guint8 *data;
  guint size;
  GstFlowReturn ret = GST_FLOW_OK;

  /* first_timestamp is used slightly differently where a framerate
     is given or not.
     If there is a frame rate, it will be used as a base.
     If there is not, it will be used to keep track of the timestamp
     of the first buffer, to be used as the timestamp of the output
     buffer. When a buffer is output, first timestamp will resync to
     the next buffer's timestamp. */
  if (rsvg->first_timestamp == GST_CLOCK_TIME_NONE) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      rsvg->first_timestamp = GST_BUFFER_TIMESTAMP (buffer);
    else if (rsvg->fps_n != 0)
      rsvg->first_timestamp = 0;
  }

  gst_adapter_push (rsvg->adapter, buffer);

  size = gst_adapter_available (rsvg->adapter);

  /* "<svg></svg>" */
  while (size >= 5 + 6 && ret == GST_FLOW_OK) {
    guint i;

    data = gst_adapter_peek (rsvg->adapter, size);
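    /* scan backwards for the last "</svg>" currently in the adapter */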
    for (i = size - 6; i >= 5; i--) {
      if (memcmp (data + i, "</svg>", 6) == 0) {
        completed = TRUE;
        size = i + 6;
        break;
      }
    }

    if (completed) {
      GstBuffer *outbuf = NULL;

      GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);

      data = gst_adapter_peek (rsvg->adapter, size);

      ret = gst_rsvg_decode_image (rsvg, data, size, &outbuf);
      if (ret != GST_FLOW_OK)
        break;


      if (rsvg->first_timestamp != GST_CLOCK_TIME_NONE) {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
        if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
          GstClockTime end =
              GST_BUFFER_TIMESTAMP_IS_VALID (buffer) ?
              GST_BUFFER_TIMESTAMP (buffer) : rsvg->first_timestamp;
          end += GST_BUFFER_DURATION (buffer);
          GST_BUFFER_DURATION (outbuf) = end - GST_BUFFER_TIMESTAMP (outbuf);
        }
        if (rsvg->fps_n == 0) {
          rsvg->first_timestamp = GST_CLOCK_TIME_NONE;
        } else {
          GST_BUFFER_DURATION (outbuf) =
              gst_util_uint64_scale (rsvg->frame_count * GST_SECOND,
              rsvg->fps_d, rsvg->fps_n);
        }
      } else if (rsvg->fps_n != 0) {
        GST_BUFFER_TIMESTAMP (outbuf) =
            rsvg->first_timestamp + gst_util_uint64_scale (rsvg->frame_count *
            GST_SECOND, rsvg->fps_d, rsvg->fps_n);
        GST_BUFFER_DURATION (outbuf) =
            gst_util_uint64_scale (rsvg->frame_count * GST_SECOND,
            rsvg->fps_d, rsvg->fps_n);
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
        GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
      }
      rsvg->frame_count++;

      if (rsvg->need_newsegment) {
        gst_pad_push_event (rsvg->srcpad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));
        rsvg->need_newsegment = FALSE;
      }

      if (rsvg->pending_events) {
        GList *l;

        for (l = rsvg->pending_events; l; l = l->next)
          gst_pad_push_event (rsvg->srcpad, l->data);
        g_list_free (rsvg->pending_events);
        rsvg->pending_events = NULL;
      }

      if (rsvg->pending_tags) {
        gst_element_found_tags (GST_ELEMENT_CAST (rsvg), rsvg->pending_tags);
        rsvg->pending_tags = NULL;
      }

      GST_LOG_OBJECT (rsvg, "image rendered okay");

      ret = gst_pad_push (rsvg->srcpad, outbuf);
      if (ret != GST_FLOW_OK)
        break;

      gst_adapter_flush (rsvg->adapter, size);
      size = gst_adapter_available (rsvg->adapter);
      continue;
    } else {
      break;
    }
  }

  return ret;
}