static void
gst_msdkdec_set_latency (GstMsdkDec * thiz)
{
  GstVideoInfo *info = &thiz->input_state->info;
  gint min_delayed_frames;
  GstClockTime latency;

  min_delayed_frames = thiz->async_depth;

  if (info->fps_n) {
    latency = gst_util_uint64_scale_ceil (GST_SECOND * info->fps_d,
        min_delayed_frames, info->fps_n);
  } else {
    /* FIXME: Assume 25fps. This is better than reporting no latency at
     * all and then later failing in live pipelines
     */
    latency = gst_util_uint64_scale_ceil (GST_SECOND * 1,
        min_delayed_frames, 25);
  }

  GST_INFO_OBJECT (thiz,
      "Updating latency to %" GST_TIME_FORMAT " (%d frames)",
      GST_TIME_ARGS (latency), min_delayed_frames);

  gst_video_decoder_set_latency (GST_VIDEO_DECODER (thiz), latency, latency);
}
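
As a quick illustration of the rounded-up latency math above, here is a minimal standalone sketch; the async depth of 4 and the 30000/1001 frame rate are assumed values, not taken from the msdk element:

/* Hedged sketch: evaluate latency = ceil (GST_SECOND * fps_d * frames / fps_n)
 * for assumed values (4 delayed frames at 30000/1001 fps). */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  gint delayed_frames = 4;           /* assumed async depth */
  gint fps_n = 30000, fps_d = 1001;  /* assumed frame rate */
  GstClockTime latency;

  gst_init (&argc, &argv);

  latency = gst_util_uint64_scale_ceil (GST_SECOND * fps_d,
      delayed_frames, fps_n);

  /* Prints roughly 0:00:00.133466667 for these values */
  g_print ("latency: %" GST_TIME_FORMAT " (%d frames)\n",
      GST_TIME_ARGS (latency), delayed_frames);

  return 0;
}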
Example #2
static void
gst_x265_enc_set_latency (GstX265Enc * encoder)
{
  GstVideoInfo *info = &encoder->input_state->info;
  gint max_delayed_frames;
  GstClockTime latency;

  /* FIXME get a real value from the encoder, this is currently not exposed */
  if (encoder->tune > 0 && encoder->tune <= G_N_ELEMENTS (x265_tune_names) &&
      strcmp (x265_tune_names[encoder->tune - 1], "zerolatency") == 0)
    max_delayed_frames = 0;
  else
    max_delayed_frames = 5;

  if (info->fps_n) {
    latency = gst_util_uint64_scale_ceil (GST_SECOND * info->fps_d,
        max_delayed_frames, info->fps_n);
  } else {
    /* FIXME: Assume 25fps. This is better than reporting no latency at
     * all and then later failing in live pipelines
     */
    latency = gst_util_uint64_scale_ceil (GST_SECOND * 1,
        max_delayed_frames, 25);
  }

  GST_INFO_OBJECT (encoder,
      "Updating latency to %" GST_TIME_FORMAT " (%d frames)",
      GST_TIME_ARGS (latency), max_delayed_frames);

  gst_video_encoder_set_latency (GST_VIDEO_ENCODER (encoder), latency, latency);
}
static gboolean
gst_decklink_video_src_query (GstBaseSrc * bsrc, GstQuery * query)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  gboolean ret = TRUE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:{
      if (self->input) {
        GstClockTime min, max;
        const GstDecklinkMode *mode;

        g_mutex_lock (&self->lock);
        mode = gst_decklink_get_mode (self->caps_mode);
        g_mutex_unlock (&self->lock);

        min = gst_util_uint64_scale_ceil (GST_SECOND, mode->fps_d, mode->fps_n);
        max = self->buffer_size * min;

        gst_query_set_latency (query, TRUE, min, max);
        ret = TRUE;
      } else {
        ret = FALSE;
      }

      break;
    }
    default:
      ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
      break;
  }

  return ret;
}
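
A small hedged sketch of the same min/max latency arithmetic, using an assumed 25 fps mode and a buffer size of 5 (both values are illustrative, not read from a device):

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  gint fps_n = 25, fps_d = 1;  /* assumed mode: 25 fps progressive */
  gint buffer_size = 5;        /* assumed queue length in frames */
  GstClockTime min, max;

  gst_init (&argc, &argv);

  /* Minimum latency: one frame duration, rounded up */
  min = gst_util_uint64_scale_ceil (GST_SECOND, fps_d, fps_n);
  /* Maximum latency: the whole internal buffer */
  max = buffer_size * min;

  g_print ("min %" GST_TIME_FORMAT ", max %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (min), GST_TIME_ARGS (max));

  return 0;
}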
Example #4
static void
gst_x265_enc_set_latency (GstX265Enc * encoder)
{
  GstVideoInfo *info = &encoder->input_state->info;

  if (info->fps_n) {
    GstClockTime latency;
    gint max_delayed_frames;

    // FIXME get a real value from the encoder, this is currently not exposed
    max_delayed_frames = 5;
    latency = gst_util_uint64_scale_ceil (GST_SECOND * info->fps_d,
        max_delayed_frames, info->fps_n);

    GST_INFO_OBJECT (encoder,
        "Updating latency to %" GST_TIME_FORMAT " (%d frames)",
        GST_TIME_ARGS (latency), max_delayed_frames);

    gst_video_encoder_set_latency (GST_VIDEO_ENCODER (encoder), latency,
        latency);
  } else {
    /* We can't do live as we don't know our latency */
    gst_video_encoder_set_latency (GST_VIDEO_ENCODER (encoder),
        0, GST_CLOCK_TIME_NONE);
  }
}
Example #5
/*
 * gst_aiff_parse_calculate_duration:
 * @aiff: aiffparse object
 *
 * Calculate duration on demand and store in @aiff.
 *
 * Returns: %TRUE if duration is available.
 */
static gboolean
gst_aiff_parse_calculate_duration (GstAiffParse * aiff)
{
  if (aiff->duration > 0)
    return TRUE;

  if (aiff->datasize > 0 && aiff->bps > 0) {
    aiff->duration =
        gst_util_uint64_scale_ceil (aiff->datasize, GST_SECOND,
        (guint64) aiff->bps);
    GST_INFO_OBJECT (aiff, "Got duration %" GST_TIME_FORMAT,
        GST_TIME_ARGS (aiff->duration));
    return TRUE;
  }
  return FALSE;
}
Example #6
static gboolean
gst_omx_audio_enc_set_format (GstAudioEncoder * encoder, GstAudioInfo * info)
{
  GstOMXAudioEnc *self;
  GstOMXAudioEncClass *klass;
  gboolean needs_disable = FALSE;
  OMX_PARAM_PORTDEFINITIONTYPE port_def;
  OMX_AUDIO_PARAM_PCMMODETYPE pcm_param;
  gint i;
  OMX_ERRORTYPE err;

  self = GST_OMX_AUDIO_ENC (encoder);
  klass = GST_OMX_AUDIO_ENC_GET_CLASS (encoder);

  GST_DEBUG_OBJECT (self, "Setting new caps");

  /* Set audio encoder base class properties */
  gst_audio_encoder_set_frame_samples_min (encoder,
      gst_util_uint64_scale_ceil (OMX_MIN_PCMPAYLOAD_MSEC,
          GST_MSECOND * info->rate, GST_SECOND));
  gst_audio_encoder_set_frame_samples_max (encoder, 0);

  gst_omx_port_get_port_definition (self->enc_in_port, &port_def);

  needs_disable =
      gst_omx_component_get_state (self->enc,
      GST_CLOCK_TIME_NONE) != OMX_StateLoaded;
  /* If the component is not in Loaded state and a real format change happens
   * we have to disable the port and re-allocate all buffers. If no real
   * format change happened we can just exit here.
   */
  if (needs_disable) {
    GST_DEBUG_OBJECT (self, "Need to disable and drain encoder");
    gst_omx_audio_enc_drain (self);
    gst_omx_port_set_flushing (self->enc_out_port, 5 * GST_SECOND, TRUE);

    /* Wait until the srcpad loop is finished,
     * unlock GST_AUDIO_ENCODER_STREAM_LOCK to prevent deadlocks
     * caused by using this lock from inside the loop function */
    GST_AUDIO_ENCODER_STREAM_UNLOCK (self);
    gst_pad_stop_task (GST_AUDIO_ENCODER_SRC_PAD (encoder));
    GST_AUDIO_ENCODER_STREAM_LOCK (self);

    if (gst_omx_port_set_enabled (self->enc_in_port, FALSE) != OMX_ErrorNone)
      return FALSE;
    if (gst_omx_port_set_enabled (self->enc_out_port, FALSE) != OMX_ErrorNone)
      return FALSE;
    if (gst_omx_port_wait_buffers_released (self->enc_in_port,
            5 * GST_SECOND) != OMX_ErrorNone)
      return FALSE;
    if (gst_omx_port_wait_buffers_released (self->enc_out_port,
            1 * GST_SECOND) != OMX_ErrorNone)
      return FALSE;
    if (gst_omx_port_deallocate_buffers (self->enc_in_port) != OMX_ErrorNone)
      return FALSE;
    if (gst_omx_port_deallocate_buffers (self->enc_out_port) != OMX_ErrorNone)
      return FALSE;
    if (gst_omx_port_wait_enabled (self->enc_in_port,
            1 * GST_SECOND) != OMX_ErrorNone)
      return FALSE;
    if (gst_omx_port_wait_enabled (self->enc_out_port,
            1 * GST_SECOND) != OMX_ErrorNone)
      return FALSE;

    GST_DEBUG_OBJECT (self, "Encoder drained and disabled");
  }

  port_def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
  GST_DEBUG_OBJECT (self, "Setting inport port definition");
  if (gst_omx_port_update_port_definition (self->enc_in_port,
          &port_def) != OMX_ErrorNone)
    return FALSE;

  GST_OMX_INIT_STRUCT (&pcm_param);
  pcm_param.nPortIndex = self->enc_in_port->index;
  pcm_param.nChannels = info->channels;
  pcm_param.eNumData =
      ((info->finfo->flags & GST_AUDIO_FORMAT_FLAG_SIGNED) ?
      OMX_NumericalDataSigned : OMX_NumericalDataUnsigned);
  pcm_param.eEndian =
      ((info->finfo->endianness == G_LITTLE_ENDIAN) ?
      OMX_EndianLittle : OMX_EndianBig);
  pcm_param.bInterleaved = OMX_TRUE;
  pcm_param.nBitPerSample = info->finfo->width;
  pcm_param.nSamplingRate = info->rate;
  pcm_param.ePCMMode = OMX_AUDIO_PCMModeLinear;

  for (i = 0; i < pcm_param.nChannels; i++) {
    OMX_AUDIO_CHANNELTYPE pos;

    switch (info->position[i]) {
      case GST_AUDIO_CHANNEL_POSITION_MONO:
      case GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER:
        pos = OMX_AUDIO_ChannelCF;
        break;
      case GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT:
        pos = OMX_AUDIO_ChannelLF;
        break;
      case GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT:
        pos = OMX_AUDIO_ChannelRF;
        break;
      case GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT:
        pos = OMX_AUDIO_ChannelLS;
        break;
      case GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT:
        pos = OMX_AUDIO_ChannelRS;
        break;
      case GST_AUDIO_CHANNEL_POSITION_LFE1:
        pos = OMX_AUDIO_ChannelLFE;
        break;
      case GST_AUDIO_CHANNEL_POSITION_REAR_CENTER:
        pos = OMX_AUDIO_ChannelCS;
        break;
      case GST_AUDIO_CHANNEL_POSITION_REAR_LEFT:
        pos = OMX_AUDIO_ChannelLR;
        break;
      case GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT:
        pos = OMX_AUDIO_ChannelRR;
        break;
      default:
        pos = OMX_AUDIO_ChannelNone;
        break;
    }
    pcm_param.eChannelMapping[i] = pos;
  }

  GST_DEBUG_OBJECT (self, "Setting PCM parameters");
  err =
      gst_omx_component_set_parameter (self->enc, OMX_IndexParamAudioPcm,
      &pcm_param);
  if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self, "Failed to set PCM parameters: %s (0x%08x)",
        gst_omx_error_to_string (err), err);
    return FALSE;
  }

  if (klass->set_format) {
    if (!klass->set_format (self, self->enc_in_port, info)) {
      GST_ERROR_OBJECT (self, "Subclass failed to set the new format");
      return FALSE;
    }
  }

  GST_DEBUG_OBJECT (self, "Updating outport port definition");
  if (gst_omx_port_update_port_definition (self->enc_out_port,
          NULL) != OMX_ErrorNone)
    return FALSE;

  GST_DEBUG_OBJECT (self, "Enabling component");
  if (needs_disable) {
    if (gst_omx_port_set_enabled (self->enc_in_port, TRUE) != OMX_ErrorNone)
      return FALSE;
    if (gst_omx_port_allocate_buffers (self->enc_in_port) != OMX_ErrorNone)
      return FALSE;
    if (gst_omx_port_wait_enabled (self->enc_in_port,
            5 * GST_SECOND) != OMX_ErrorNone)
      return FALSE;
    if (gst_omx_port_mark_reconfigured (self->enc_in_port) != OMX_ErrorNone)
      return FALSE;
  } else {
    /* Disable output port */
    if (gst_omx_port_set_enabled (self->enc_out_port, FALSE) != OMX_ErrorNone)
      return FALSE;

    if (gst_omx_port_wait_enabled (self->enc_out_port,
            1 * GST_SECOND) != OMX_ErrorNone)
      return FALSE;

    if (gst_omx_component_set_state (self->enc, OMX_StateIdle) != OMX_ErrorNone)
      return FALSE;

    /* Need to allocate buffers to reach Idle state */
    if (gst_omx_port_allocate_buffers (self->enc_in_port) != OMX_ErrorNone)
      return FALSE;

    if (gst_omx_component_get_state (self->enc,
            GST_CLOCK_TIME_NONE) != OMX_StateIdle)
      return FALSE;

    if (gst_omx_component_set_state (self->enc,
            OMX_StateExecuting) != OMX_ErrorNone)
      return FALSE;

    if (gst_omx_component_get_state (self->enc,
            GST_CLOCK_TIME_NONE) != OMX_StateExecuting)
      return FALSE;
  }

  /* Unset flushing to allow ports to accept data again */
  gst_omx_port_set_flushing (self->enc_in_port, 5 * GST_SECOND, FALSE);
  gst_omx_port_set_flushing (self->enc_out_port, 5 * GST_SECOND, FALSE);

  if (gst_omx_component_get_last_error (self->enc) != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self, "Component in error state: %s (0x%08x)",
        gst_omx_component_get_last_error_string (self->enc),
        gst_omx_component_get_last_error (self->enc));
    return FALSE;
  }

  /* Start the srcpad loop again */
  GST_DEBUG_OBJECT (self, "Starting task again");
  self->downstream_flow_ret = GST_FLOW_OK;
  gst_pad_start_task (GST_AUDIO_ENCODER_SRC_PAD (self),
      (GstTaskFunction) gst_omx_audio_enc_loop, encoder, NULL);

  return TRUE;
}
/* Called with the object lock for both the element and pad held,
 * as well as the aagg lock
 */
static gboolean
gst_audio_aggregator_fill_buffer (GstAudioAggregator * aagg,
    GstAudioAggregatorPad * pad, GstBuffer * inbuf)
{
  GstClockTime start_time, end_time;
  gboolean discont = FALSE;
  guint64 start_offset, end_offset;
  gint rate, bpf;

  GstAggregator *agg = GST_AGGREGATOR (aagg);
  GstAggregatorPad *aggpad = GST_AGGREGATOR_PAD (pad);

  g_assert (pad->priv->buffer == NULL);

  rate = GST_AUDIO_INFO_RATE (&pad->info);
  bpf = GST_AUDIO_INFO_BPF (&pad->info);

  pad->priv->position = 0;
  pad->priv->size = gst_buffer_get_size (inbuf) / bpf;

  if (!GST_BUFFER_PTS_IS_VALID (inbuf)) {
    if (pad->priv->output_offset == -1)
      pad->priv->output_offset = aagg->priv->offset;
    if (pad->priv->next_offset == -1)
      pad->priv->next_offset = pad->priv->size;
    else
      pad->priv->next_offset += pad->priv->size;
    goto done;
  }

  start_time = GST_BUFFER_PTS (inbuf);
  end_time =
      start_time + gst_util_uint64_scale_ceil (pad->priv->size, GST_SECOND,
      rate);

  /* Clipping should've ensured this */
  g_assert (start_time >= aggpad->segment.start);

  start_offset =
      gst_util_uint64_scale (start_time - aggpad->segment.start, rate,
      GST_SECOND);
  end_offset = start_offset + pad->priv->size;

  if (GST_BUFFER_IS_DISCONT (inbuf)
      || GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_RESYNC)
      || pad->priv->new_segment || pad->priv->next_offset == -1) {
    discont = TRUE;
    pad->priv->new_segment = FALSE;
  } else {
    guint64 diff, max_sample_diff;

    /* Check discont, based on audiobasesink */
    if (start_offset <= pad->priv->next_offset)
      diff = pad->priv->next_offset - start_offset;
    else
      diff = start_offset - pad->priv->next_offset;

    max_sample_diff =
        gst_util_uint64_scale_int (aagg->priv->alignment_threshold, rate,
        GST_SECOND);

    /* Discont! */
    if (G_UNLIKELY (diff >= max_sample_diff)) {
      if (aagg->priv->discont_wait > 0) {
        if (pad->priv->discont_time == GST_CLOCK_TIME_NONE) {
          pad->priv->discont_time = start_time;
        } else if (start_time - pad->priv->discont_time >=
            aagg->priv->discont_wait) {
          discont = TRUE;
          pad->priv->discont_time = GST_CLOCK_TIME_NONE;
        }
      } else {
        discont = TRUE;
      }
    } else if (G_UNLIKELY (pad->priv->discont_time != GST_CLOCK_TIME_NONE)) {
      /* we have had a discont, but are now back on track! */
      pad->priv->discont_time = GST_CLOCK_TIME_NONE;
    }
  }

  if (discont) {
    /* Have discont, need resync */
    if (pad->priv->next_offset != -1)
      GST_INFO_OBJECT (pad, "Have discont. Expected %"
          G_GUINT64_FORMAT ", got %" G_GUINT64_FORMAT,
          pad->priv->next_offset, start_offset);
    pad->priv->output_offset = -1;
    pad->priv->next_offset = end_offset;
  } else {
    pad->priv->next_offset += pad->priv->size;
  }

  if (pad->priv->output_offset == -1) {
    GstClockTime start_running_time;
    GstClockTime end_running_time;
    guint64 start_output_offset;
    guint64 end_output_offset;

    start_running_time =
        gst_segment_to_running_time (&aggpad->segment,
        GST_FORMAT_TIME, start_time);
    end_running_time =
        gst_segment_to_running_time (&aggpad->segment,
        GST_FORMAT_TIME, end_time);

    /* Convert to position in the output segment */
    start_output_offset =
        gst_segment_to_position (&agg->segment, GST_FORMAT_TIME,
        start_running_time);
    if (start_output_offset != -1)
      start_output_offset =
          gst_util_uint64_scale (start_output_offset - agg->segment.start, rate,
          GST_SECOND);

    end_output_offset =
        gst_segment_to_position (&agg->segment, GST_FORMAT_TIME,
        end_running_time);
    if (end_output_offset != -1)
      end_output_offset =
          gst_util_uint64_scale (end_output_offset - agg->segment.start, rate,
          GST_SECOND);

    if (start_output_offset == -1 && end_output_offset == -1) {
      /* Outside output segment, drop */
      gst_buffer_unref (inbuf);
      pad->priv->buffer = NULL;
      pad->priv->position = 0;
      pad->priv->size = 0;
      pad->priv->output_offset = -1;
      GST_DEBUG_OBJECT (pad, "Buffer outside output segment");
      return FALSE;
    }

    /* Calculate end_output_offset if it was outside the output segment */
    if (end_output_offset == -1)
      end_output_offset = start_output_offset + pad->priv->size;

    if (end_output_offset < aagg->priv->offset) {
      /* Before output segment, drop */
      gst_buffer_unref (inbuf);
      pad->priv->buffer = NULL;
      pad->priv->position = 0;
      pad->priv->size = 0;
      pad->priv->output_offset = -1;
      GST_DEBUG_OBJECT (pad,
          "Buffer before segment or current position: %" G_GUINT64_FORMAT " < %"
          G_GINT64_FORMAT, end_output_offset, aagg->priv->offset);
      return FALSE;
    }

    if (start_output_offset == -1 || start_output_offset < aagg->priv->offset) {
      guint diff;

      if (start_output_offset == -1 && end_output_offset < pad->priv->size) {
        diff = pad->priv->size - end_output_offset + aagg->priv->offset;
      } else if (start_output_offset == -1) {
        start_output_offset = end_output_offset - pad->priv->size;

        if (start_output_offset < aagg->priv->offset)
          diff = aagg->priv->offset - start_output_offset;
        else
          diff = 0;
      } else {
        diff = aagg->priv->offset - start_output_offset;
      }

      pad->priv->position += diff;
      if (pad->priv->position >= pad->priv->size) {
        /* Empty buffer, drop */
        gst_buffer_unref (inbuf);
        pad->priv->buffer = NULL;
        pad->priv->position = 0;
        pad->priv->size = 0;
        pad->priv->output_offset = -1;
        GST_DEBUG_OBJECT (pad,
            "Buffer before segment or current position: %" G_GUINT64_FORMAT
            " < %" G_GINT64_FORMAT, end_output_offset, aagg->priv->offset);
        return FALSE;
      }
    }

    if (start_output_offset == -1 || start_output_offset < aagg->priv->offset)
      pad->priv->output_offset = aagg->priv->offset;
    else
      pad->priv->output_offset = start_output_offset;

    GST_DEBUG_OBJECT (pad,
        "Buffer resynced: Pad offset %" G_GUINT64_FORMAT
        ", current audio aggregator offset %" G_GINT64_FORMAT,
        pad->priv->output_offset, aagg->priv->offset);
  }

done:

  GST_LOG_OBJECT (pad,
      "Queued new buffer at offset %" G_GUINT64_FORMAT,
      pad->priv->output_offset);
  pad->priv->buffer = inbuf;

  return TRUE;
}
Example #8
/* This function is used to perform seeks on the element in
 * pull mode.
 *
 * It also works when event is NULL, in which case it will just
 * start from the last configured segment. This technique is
 * used when activating the element and to perform the seek in
 * READY.
 */
static gboolean
gst_aiff_parse_perform_seek (GstAiffParse * aiff, GstEvent * event)
{
  gboolean res;
  gdouble rate;
  GstFormat format, bformat;
  GstSeekFlags flags;
  GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
  gint64 cur, stop, upstream_size;
  gboolean flush;
  gboolean update;
  GstSegment seeksegment = { 0, };
  gint64 last_stop;

  if (event) {
    GST_DEBUG_OBJECT (aiff, "doing seek with event");

    gst_event_parse_seek (event, &rate, &format, &flags,
        &cur_type, &cur, &stop_type, &stop);

    /* no negative rates yet */
    if (rate < 0.0)
      goto negative_rate;

    if (format != aiff->segment.format) {
      GST_INFO_OBJECT (aiff, "converting seek-event from %s to %s",
          gst_format_get_name (format),
          gst_format_get_name (aiff->segment.format));
      res = TRUE;
      if (cur_type != GST_SEEK_TYPE_NONE)
        res =
            gst_pad_query_convert (aiff->srcpad, format, cur,
            &aiff->segment.format, &cur);
      if (res && stop_type != GST_SEEK_TYPE_NONE)
        res =
            gst_pad_query_convert (aiff->srcpad, format, stop,
            &aiff->segment.format, &stop);
      if (!res)
        goto no_format;

      format = aiff->segment.format;
    }
  } else {
    GST_DEBUG_OBJECT (aiff, "doing seek without event");
    flags = 0;
    rate = 1.0;
    cur_type = GST_SEEK_TYPE_SET;
    stop_type = GST_SEEK_TYPE_SET;
  }

  /* get flush flag */
  flush = flags & GST_SEEK_FLAG_FLUSH;

  /* now we need to make sure the streaming thread is stopped. We do this by
   * either sending a FLUSH_START event downstream which will cause the
   * streaming thread to stop with a WRONG_STATE.
   * For a non-flushing seek we simply pause the task, which will happen as soon
   * as it completes one iteration (and thus might block when the sink is
   * blocking in preroll). */
  if (flush) {
    GST_DEBUG_OBJECT (aiff, "sending flush start");
    gst_pad_push_event (aiff->srcpad, gst_event_new_flush_start ());
  } else {
    gst_pad_pause_task (aiff->sinkpad);
  }

  /* we should now be able to grab the streaming thread because we stopped it
   * with the above flush/pause code */
  GST_PAD_STREAM_LOCK (aiff->sinkpad);

  /* save current position */
  last_stop = aiff->segment.last_stop;

  GST_DEBUG_OBJECT (aiff, "stopped streaming at %" G_GINT64_FORMAT, last_stop);

  /* copy segment, we need this because we still need the old
   * segment when we close the current segment. */
  memcpy (&seeksegment, &aiff->segment, sizeof (GstSegment));

  /* configure the seek parameters in the seeksegment. We will then have the
   * right values in the segment to perform the seek */
  if (event) {
    GST_DEBUG_OBJECT (aiff, "configuring seek");
    gst_segment_set_seek (&seeksegment, rate, format, flags,
        cur_type, cur, stop_type, stop, &update);
  }

  /* figure out the last position we need to play. If it's configured (stop !=
   * -1), use that, else we play until the total duration of the file */
  if ((stop = seeksegment.stop) == -1)
    stop = seeksegment.duration;

  GST_DEBUG_OBJECT (aiff, "cur_type =%d", cur_type);
  if ((cur_type != GST_SEEK_TYPE_NONE)) {
    /* bring offset to bytes, if the bps is 0, we have the segment in BYTES and
     * we can just copy the last_stop. If not, we use the bps to convert TIME to
     * bytes. */
    if (aiff->bps > 0)
      aiff->offset =
          gst_util_uint64_scale_ceil (seeksegment.last_stop,
          (guint64) aiff->bps, GST_SECOND);
    else
      aiff->offset = seeksegment.last_stop;
    GST_LOG_OBJECT (aiff, "offset=%" G_GUINT64_FORMAT, aiff->offset);
    aiff->offset -= (aiff->offset % aiff->bytes_per_sample);
    GST_LOG_OBJECT (aiff, "offset=%" G_GUINT64_FORMAT, aiff->offset);
    aiff->offset += aiff->datastart;
    GST_LOG_OBJECT (aiff, "offset=%" G_GUINT64_FORMAT, aiff->offset);
  } else {
    GST_LOG_OBJECT (aiff, "continue from offset=%" G_GUINT64_FORMAT,
        aiff->offset);
  }

  if (stop_type != GST_SEEK_TYPE_NONE) {
    if (aiff->bps > 0)
      aiff->end_offset =
          gst_util_uint64_scale_ceil (stop, (guint64) aiff->bps, GST_SECOND);
    else
      aiff->end_offset = stop;
    GST_LOG_OBJECT (aiff, "end_offset=%" G_GUINT64_FORMAT, aiff->end_offset);
    aiff->end_offset -= (aiff->end_offset % aiff->bytes_per_sample);
    GST_LOG_OBJECT (aiff, "end_offset=%" G_GUINT64_FORMAT, aiff->end_offset);
    aiff->end_offset += aiff->datastart;
    GST_LOG_OBJECT (aiff, "end_offset=%" G_GUINT64_FORMAT, aiff->end_offset);
  } else {
    GST_LOG_OBJECT (aiff, "continue to end_offset=%" G_GUINT64_FORMAT,
        aiff->end_offset);
  }

  /* make sure filesize is not exceeded due to rounding errors or so,
   * same precaution as in _stream_headers */
  bformat = GST_FORMAT_BYTES;
  if (gst_pad_query_peer_duration (aiff->sinkpad, &bformat, &upstream_size))
    aiff->end_offset = MIN (aiff->end_offset, upstream_size);

  /* this is the range of bytes we will use for playback */
  aiff->offset = MIN (aiff->offset, aiff->end_offset);
  aiff->dataleft = aiff->end_offset - aiff->offset;

  GST_DEBUG_OBJECT (aiff,
      "seek: rate %lf, offset %" G_GUINT64_FORMAT ", end %" G_GUINT64_FORMAT
      ", segment %" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT, rate, aiff->offset,
      aiff->end_offset, GST_TIME_ARGS (seeksegment.start),
      GST_TIME_ARGS (stop));

  /* prepare for streaming again */
  if (flush) {
    /* if we sent a FLUSH_START, we now send a FLUSH_STOP */
    GST_DEBUG_OBJECT (aiff, "sending flush stop");
    gst_pad_push_event (aiff->srcpad, gst_event_new_flush_stop ());
  } else if (aiff->segment_running) {
    /* we are running the current segment and doing a non-flushing seek,
     * close the segment first based on the previous last_stop. */
    GST_DEBUG_OBJECT (aiff, "closing running segment %" G_GINT64_FORMAT
        " to %" G_GINT64_FORMAT, aiff->segment.accum, aiff->segment.last_stop);

    /* queue the segment for sending in the stream thread */
    if (aiff->close_segment)
      gst_event_unref (aiff->close_segment);
    aiff->close_segment = gst_event_new_new_segment (TRUE,
        aiff->segment.rate, aiff->segment.format,
        aiff->segment.accum, aiff->segment.last_stop, aiff->segment.accum);

    /* keep track of our last_stop */
    seeksegment.accum = aiff->segment.last_stop;
  }

  /* now we did the seek and can activate the new segment values */
  memcpy (&aiff->segment, &seeksegment, sizeof (GstSegment));

  /* if we're doing a segment seek, post a SEGMENT_START message */
  if (aiff->segment.flags & GST_SEEK_FLAG_SEGMENT) {
    gst_element_post_message (GST_ELEMENT_CAST (aiff),
        gst_message_new_segment_start (GST_OBJECT_CAST (aiff),
            aiff->segment.format, aiff->segment.last_stop));
  }

  /* now create the newsegment */
  GST_DEBUG_OBJECT (aiff, "Creating newsegment from %" G_GINT64_FORMAT
      " to %" G_GINT64_FORMAT, aiff->segment.last_stop, stop);

  /* store the newsegment event so it can be sent from the streaming thread. */
  if (aiff->start_segment)
    gst_event_unref (aiff->start_segment);
  aiff->start_segment =
      gst_event_new_new_segment (FALSE, aiff->segment.rate,
      aiff->segment.format, aiff->segment.last_stop, stop,
      aiff->segment.last_stop);

  /* mark discont if we are going to stream from another position. */
  if (last_stop != aiff->segment.last_stop) {
    GST_DEBUG_OBJECT (aiff, "mark DISCONT, we did a seek to another position");
    aiff->discont = TRUE;
  }

  /* and start the streaming task again */
  aiff->segment_running = TRUE;
  if (!aiff->streaming) {
    gst_pad_start_task (aiff->sinkpad, (GstTaskFunction) gst_aiff_parse_loop,
        aiff->sinkpad);
  }

  GST_PAD_STREAM_UNLOCK (aiff->sinkpad);

  return TRUE;

  /* ERRORS */
negative_rate:
  {
    GST_DEBUG_OBJECT (aiff, "negative playback rates are not supported yet.");
    return FALSE;
  }
no_format:
  {
    GST_DEBUG_OBJECT (aiff, "unsupported format given, seek aborted.");
    return FALSE;
  }
}
Example #9
static gboolean
gst_aiff_parse_pad_convert (GstPad * pad,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  GstAiffParse *aiffparse;
  gboolean res = TRUE;

  aiffparse = GST_AIFF_PARSE (GST_PAD_PARENT (pad));

  if (*dest_format == src_format) {
    *dest_value = src_value;
    return TRUE;
  }

  if (aiffparse->bytes_per_sample <= 0)
    return FALSE;

  GST_INFO_OBJECT (aiffparse, "converting value from %s to %s",
      gst_format_get_name (src_format), gst_format_get_name (*dest_format));

  switch (src_format) {
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_DEFAULT:
          *dest_value = src_value / aiffparse->bytes_per_sample;
          break;
        case GST_FORMAT_TIME:
          if (aiffparse->bps > 0) {
            *dest_value = gst_util_uint64_scale_ceil (src_value, GST_SECOND,
                (guint64) aiffparse->bps);
            break;
          }
          /* Else fallthrough */
        default:
          res = FALSE;
          goto done;
      }
      break;

    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          *dest_value = src_value * aiffparse->bytes_per_sample;
          break;
        case GST_FORMAT_TIME:
          *dest_value = gst_util_uint64_scale (src_value, GST_SECOND,
              (guint64) aiffparse->rate);
          break;
        default:
          res = FALSE;
          goto done;
      }
      break;

    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          if (aiffparse->bps > 0) {
            *dest_value = gst_util_uint64_scale (src_value,
                (guint64) aiffparse->bps, GST_SECOND);
            break;
          }
          /* No byte rate known, cannot convert to bytes */
          res = FALSE;
          goto done;
        case GST_FORMAT_DEFAULT:
          *dest_value = gst_util_uint64_scale (src_value,
              (guint64) aiffparse->rate, GST_SECOND);
          break;
        default:
          res = FALSE;
          goto done;
      }
      break;

    default:
      res = FALSE;
      goto done;
  }

done:
  return res;

}
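
To make the rounding choice in the conversions above concrete (rounding up for BYTES to TIME, truncating for TIME to BYTES), here is a minimal sketch with an assumed byte rate; the values are made up and not taken from aiffparse:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  guint64 bps = 176400;   /* assumed: 44.1 kHz, 16-bit, 2 channels */
  guint64 bytes = 1000;   /* assumed byte position */
  guint64 t, back;

  gst_init (&argc, &argv);

  /* BYTES -> TIME rounds up, as in the converter above */
  t = gst_util_uint64_scale_ceil (bytes, GST_SECOND, bps);
  /* TIME -> BYTES truncates, as in the converter above */
  back = gst_util_uint64_scale (t, bps, GST_SECOND);

  g_print ("%" G_GUINT64_FORMAT " bytes -> %" GST_TIME_FORMAT
      " -> %" G_GUINT64_FORMAT " bytes\n",
      bytes, GST_TIME_ARGS (t), back);

  return 0;
}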
Example #10
static GstFlowReturn
gst_aiff_parse_stream_data (GstAiffParse * aiff)
{
  GstBuffer *buf = NULL;
  GstFlowReturn res = GST_FLOW_OK;
  guint64 desired, obtained;
  GstClockTime timestamp, next_timestamp, duration;
  guint64 pos, nextpos;

iterate_adapter:
  GST_LOG_OBJECT (aiff,
      "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT " , dataleft: %"
      G_GINT64_FORMAT, aiff->offset, aiff->end_offset, aiff->dataleft);

  /* Get the next n bytes and output them */
  if (aiff->dataleft == 0 || aiff->dataleft < aiff->bytes_per_sample)
    goto found_eos;

  /* scale the amount of data by the segment rate so we get equal
   * amounts of data regardless of the playback rate */
  desired =
      MIN (gst_guint64_to_gdouble (aiff->dataleft),
      MAX_BUFFER_SIZE * aiff->segment.abs_rate);

  if (desired >= aiff->bytes_per_sample && aiff->bytes_per_sample > 0)
    desired -= (desired % aiff->bytes_per_sample);

  GST_LOG_OBJECT (aiff, "Fetching %" G_GINT64_FORMAT " bytes of data "
      "from the sinkpad", desired);

  if (aiff->streaming) {
    guint avail = gst_adapter_available (aiff->adapter);

    if (avail < desired) {
      GST_LOG_OBJECT (aiff, "Got only %d bytes of data from the sinkpad",
          avail);
      return GST_FLOW_OK;
    }

    buf = gst_adapter_take_buffer (aiff->adapter, desired);
  } else {
    if ((res = gst_pad_pull_range (aiff->sinkpad, aiff->offset,
                desired, &buf)) != GST_FLOW_OK)
      goto pull_error;
  }

  /* If we have a pending close/start segment, send it now. */
  if (G_UNLIKELY (aiff->close_segment != NULL)) {
    gst_pad_push_event (aiff->srcpad, aiff->close_segment);
    aiff->close_segment = NULL;
  }
  if (G_UNLIKELY (aiff->start_segment != NULL)) {
    gst_pad_push_event (aiff->srcpad, aiff->start_segment);
    aiff->start_segment = NULL;
  }
  if (G_UNLIKELY (aiff->tags != NULL)) {
    gst_element_found_tags_for_pad (GST_ELEMENT_CAST (aiff), aiff->srcpad,
        aiff->tags);
    aiff->tags = NULL;
  }

  obtained = GST_BUFFER_SIZE (buf);

  /* our positions in bytes */
  pos = aiff->offset - aiff->datastart;
  nextpos = pos + obtained;

  /* update offsets, does not overflow. */
  GST_BUFFER_OFFSET (buf) = pos / aiff->bytes_per_sample;
  GST_BUFFER_OFFSET_END (buf) = nextpos / aiff->bytes_per_sample;

  if (aiff->bps > 0) {
    /* and timestamps if we have a bitrate, be careful for overflows */
    timestamp =
        gst_util_uint64_scale_ceil (pos, GST_SECOND, (guint64) aiff->bps);
    next_timestamp =
        gst_util_uint64_scale_ceil (nextpos, GST_SECOND, (guint64) aiff->bps);
    duration = next_timestamp - timestamp;

    /* update current running segment position */
    gst_segment_set_last_stop (&aiff->segment, GST_FORMAT_TIME, next_timestamp);
  } else {
    /* no bitrate, all we know is that the first sample has timestamp 0, all
     * other positions and durations have unknown timestamp. */
    if (pos == 0)
      timestamp = 0;
    else
      timestamp = GST_CLOCK_TIME_NONE;
    duration = GST_CLOCK_TIME_NONE;
    /* update current running segment position with byte offset */
    gst_segment_set_last_stop (&aiff->segment, GST_FORMAT_BYTES, nextpos);
  }
  if (aiff->discont) {
    GST_DEBUG_OBJECT (aiff, "marking DISCONT");
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    aiff->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;
  gst_buffer_set_caps (buf, aiff->caps);

  GST_LOG_OBJECT (aiff,
      "Got buffer. timestamp:%" GST_TIME_FORMAT " , duration:%" GST_TIME_FORMAT
      ", size:%u", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration),
      GST_BUFFER_SIZE (buf));

  if ((res = gst_pad_push (aiff->srcpad, buf)) != GST_FLOW_OK)
    goto push_error;

  if (obtained < aiff->dataleft) {
    aiff->offset += obtained;
    aiff->dataleft -= obtained;
  } else {
    aiff->offset += aiff->dataleft;
    aiff->dataleft = 0;
  }

  /* Iterate until need more data, so adapter size won't grow */
  if (aiff->streaming) {
    GST_LOG_OBJECT (aiff,
        "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT, aiff->offset,
        aiff->end_offset);
    goto iterate_adapter;
  }
  return res;

  /* ERROR */
found_eos:
  {
    GST_DEBUG_OBJECT (aiff, "found EOS");
    return GST_FLOW_UNEXPECTED;
  }
pull_error:
  {
    /* check if we got EOS */
    if (res == GST_FLOW_UNEXPECTED)
      goto found_eos;

    GST_WARNING_OBJECT (aiff,
        "Error getting %" G_GINT64_FORMAT " bytes from the "
        "sinkpad (dataleft = %" G_GINT64_FORMAT ")", desired, aiff->dataleft);
    return res;
  }
push_error:
  {
    GST_INFO_OBJECT (aiff,
        "Error pushing on srcpad %s:%s, reason %s, is linked? = %d",
        GST_DEBUG_PAD_NAME (aiff->srcpad), gst_flow_get_name (res),
        gst_pad_is_linked (aiff->srcpad));
    return res;
  }
}