Example #1
static gint
process_buffer (GstDtlsDec * self, GstBuffer * buffer)
{
  GstMapInfo map_info;
  gint size;

  if (!gst_buffer_map (buffer, &map_info, GST_MAP_READWRITE))
    return 0;

  if (!map_info.size) {
    gst_buffer_unmap (buffer, &map_info);
    return 0;
  }

  size =
      gst_dtls_connection_process (self->connection, map_info.data,
      map_info.size);
  gst_buffer_unmap (buffer, &map_info);

  if (size <= 0)
    return size;

  gst_buffer_set_size (buffer, size);

  return size;
}
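process_buffer() above shows the recurring pattern in this collection: a buffer is mapped, a library writes or rewrites some of it, the buffer is unmapped, and gst_buffer_set_size() then trims it to the bytes that are actually valid. Below is a minimal sketch of the allocate-and-trim variant (the shape also used by gst_two_lame_flush_full() further down), assuming <gst/gst.h>; fill_payload() is a hypothetical stand-in for whatever library call produces the data, not an API from the sources here.

static GstBuffer *
produce_trimmed_buffer (gsize max_size)
{
  GstMapInfo map;
  gssize written;
  GstBuffer *buf = gst_buffer_new_and_alloc (max_size);

  if (!gst_buffer_map (buf, &map, GST_MAP_WRITE)) {
    gst_buffer_unref (buf);
    return NULL;
  }

  /* fill_payload() is hypothetical: it writes up to map.size bytes and
   * returns how many bytes it actually wrote, or a negative value on error */
  written = fill_payload (map.data, map.size);
  gst_buffer_unmap (buf, &map);

  if (written <= 0) {
    gst_buffer_unref (buf);
    return NULL;
  }

  /* report only the bytes that were actually produced */
  gst_buffer_set_size (buf, written);
  return buf;
}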
static GstFlowReturn
gst_two_lame_flush_full (GstTwoLame * lame, gboolean push)
{
  GstBuffer *buf;
  GstMapInfo map;
  gint size;
  GstFlowReturn result = GST_FLOW_OK;

  if (!lame->glopts)
    return GST_FLOW_OK;

  buf = gst_buffer_new_and_alloc (16384);
  gst_buffer_map (buf, &map, GST_MAP_WRITE);
  size = twolame_encode_flush (lame->glopts, map.data, 16384);
  gst_buffer_unmap (buf, &map);

  if (size > 0 && push) {
    gst_buffer_set_size (buf, size);
    GST_DEBUG_OBJECT (lame, "pushing final packet of %u bytes", size);
    result = gst_audio_encoder_finish_frame (GST_AUDIO_ENCODER (lame), buf, -1);
  } else {
    GST_DEBUG_OBJECT (lame, "no final packet (size=%d, push=%d)", size, push);
    gst_buffer_unref (buf);
    result = GST_FLOW_OK;
  }
  return result;
}
void GStreamerReader::ReadAndPushData(guint aLength)
{
  int64_t offset1 = mResource.Tell();
  unused << offset1;
  nsresult rv = NS_OK;

  GstBuffer* buffer = gst_buffer_new_and_alloc(aLength);
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_WRITE);
  guint8 *data = info.data;
#else
  guint8* data = GST_BUFFER_DATA(buffer);
#endif
  uint32_t size = 0, bytesRead = 0;
  while(bytesRead < aLength) {
    rv = mResource.Read(reinterpret_cast<char*>(data + bytesRead),
                        aLength - bytesRead, &size);
    if (NS_FAILED(rv) || size == 0)
      break;

    bytesRead += size;
  }

  int64_t offset2 = mResource.Tell();
  unused << offset2;

#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
  gst_buffer_set_size(buffer, bytesRead);
#else
  GST_BUFFER_SIZE(buffer) = bytesRead;
#endif

  GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer));
  if (ret != GST_FLOW_OK) {
    LOG(LogLevel::Error, "ReadAndPushData push ret %s(%d)", gst_flow_get_name(ret), ret);
  }

  if (NS_FAILED(rv)) {
    /* Terminate the stream if there is an error in reading */
    LOG(LogLevel::Error, "ReadAndPushData read error, rv=%x", rv);
    gst_app_src_end_of_stream(mSource);
  } else if (bytesRead < aLength) {
    /* If we read less than what we wanted, we reached the end */
    LOG(LogLevel::Warning, "ReadAndPushData read underflow, "
        "bytesRead=%u, aLength=%u, offset(%lld,%lld)",
        bytesRead, aLength, offset1, offset2);
    gst_app_src_end_of_stream(mSource);
  }

  gst_buffer_unref(buffer);

  /* Ensure the offset advanced by exactly the number of bytes we read in this
   * function. If another thread performs stream operations on the resource at
   * the same time, it will confuse the GStreamer state machine.
   */
  MOZ_ASSERT(offset1 + bytesRead == offset2);
}
void setGstBufferSize(GstBuffer* buffer, int newSize)
{
#ifdef GST_API_VERSION_1
    gst_buffer_set_size(buffer, static_cast<gssize>(newSize));
#else
    GST_BUFFER_SIZE(buffer) = static_cast<gsize>(newSize);
#endif
}
static gint
gst_neonhttp_src_request_dispatch (GstNeonhttpSrc * src, GstBuffer * outbuf)
{
    GstMapInfo map = GST_MAP_INFO_INIT;
    gint ret;
    gint read = 0;
    gint sizetoread;

    /* Loop sending the request:
     * Retry whilst authentication fails and we supply it. */

    ssize_t len = 0;

    if (!gst_buffer_map (outbuf, &map, GST_MAP_WRITE))
        return -1;

    sizetoread = map.size;

    while (sizetoread > 0) {
        len = ne_read_response_block (src->request, (gchar *) map.data + read,
                                      sizetoread);
        if (len > 0) {
            read += len;
            sizetoread -= len;
        } else {
            break;
        }

    }

    gst_buffer_set_size (outbuf, read);
    GST_BUFFER_OFFSET (outbuf) = src->read_position;

    if (len < 0) {
        read = -2;
        goto done;
    } else if (len == 0) {
        ret = ne_end_request (src->request);
        if (ret != NE_RETRY) {
            if (ret == NE_OK) {
                src->eos = TRUE;
            } else {
                read = -3;
            }
        }
        goto done;
    }

    if (read > 0)
        src->read_position += read;

done:

    gst_buffer_unmap (outbuf, &map);

    return read;
}
/**
 * gst_ssa_parse_push_line:
 * @parse: caller element
 * @txt: text to push
 * @start: timestamp for the buffer
 * @duration: duration for the buffer
 *
 * Parse the text in a buffer with the given properties and
 * push it to the srcpad of the @parse element
 *
 * Returns: result of the push of the created buffer
 */
static GstFlowReturn
gst_ssa_parse_push_line (GstSsaParse * parse, gchar * txt,
    GstClockTime start, GstClockTime duration)
{
  GstFlowReturn ret;
  GstBuffer *buf;
  gchar *t, *escaped;
  gint num, i, len;

  num = atoi (txt);
  GST_LOG_OBJECT (parse, "Parsing line #%d at %" GST_TIME_FORMAT,
      num, GST_TIME_ARGS (start));

  /* skip all non-text fields before the actual text */
  t = txt;
  for (i = 0; i < 8; ++i) {
    t = strchr (t, ',');
    if (t == NULL)
      return GST_FLOW_ERROR;
    ++t;
  }

  GST_LOG_OBJECT (parse, "Text : %s", t);

  if (gst_ssa_parse_remove_override_codes (parse, t)) {
    GST_LOG_OBJECT (parse, "Clean: %s", t);
  }

  /* we claim to output pango markup, so we must escape the
   * text even if we don't actually use any pango markup yet */
  escaped = g_markup_printf_escaped ("%s", t);

  len = strlen (escaped);

  /* allocate enough for a terminating NUL, but don't include it in buf size */
  buf = gst_buffer_new_and_alloc (len + 1);
  gst_buffer_fill (buf, 0, escaped, len + 1);
  gst_buffer_set_size (buf, len);
  g_free (escaped);

  GST_BUFFER_TIMESTAMP (buf) = start;
  GST_BUFFER_DURATION (buf) = duration;

  GST_LOG_OBJECT (parse, "Pushing buffer with timestamp %" GST_TIME_FORMAT
      " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (start),
      GST_TIME_ARGS (duration));

  ret = gst_pad_push (parse->srcpad, buf);

  if (ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (parse, "Push of text '%s' returned flow %s", txt,
        gst_flow_get_name (ret));
  }

  return ret;
}
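One detail of gst_ssa_parse_push_line() worth spelling out is the NUL-terminator trick: the buffer is allocated and filled with len + 1 bytes so consumers that treat the data as a C string stay safe, but gst_buffer_set_size() excludes the terminator from the reported size. A minimal sketch of just that trick, assuming <gst/gst.h> and <string.h>; text_to_buffer() is an illustrative helper, not part of the parser.

static GstBuffer *
text_to_buffer (const gchar * text)
{
  gsize len = strlen (text);
  GstBuffer *buf = gst_buffer_new_and_alloc (len + 1);

  /* copy the payload including the terminating NUL ... */
  gst_buffer_fill (buf, 0, text, len + 1);
  /* ... but report only the payload length downstream */
  gst_buffer_set_size (buf, len);

  return buf;
}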
Example #7
static gboolean gst_uade_raw_dec_decode(GstNonstreamAudioDecoder *dec, GstBuffer **buffer, guint *num_samples)
{
	GstBuffer *outbuf;
	GstMapInfo map;
	GstUadeRawDec *uade_raw_dec;
	long num_samples_per_outbuf, num_bytes_per_outbuf, actual_num_samples_read, actual_num_bytes_read;

	uade_raw_dec = GST_UADE_RAW_DEC(dec);

	num_samples_per_outbuf = 1024;
	num_bytes_per_outbuf = num_samples_per_outbuf * (2 * 16 / 8);

	outbuf = gst_nonstream_audio_decoder_allocate_output_buffer(dec, num_bytes_per_outbuf);
	if (G_UNLIKELY(outbuf == NULL))
		return FALSE;

	gst_buffer_map(outbuf, &map, GST_MAP_WRITE);
	actual_num_bytes_read = uade_read(map.data, map.size, uade_raw_dec->state);
	gst_buffer_unmap(outbuf, &map);

	actual_num_samples_read = actual_num_bytes_read / (2 * 16 / 8);

	GST_TRACE_OBJECT(dec, "read %ld byte", actual_num_bytes_read);

	if (actual_num_samples_read > 0)
	{
		if (actual_num_bytes_read != num_bytes_per_outbuf)
			gst_buffer_set_size(outbuf, actual_num_bytes_read);

		*buffer = outbuf;
		*num_samples = actual_num_samples_read;
	}
	else
	{
		if (actual_num_bytes_read == 0)
			GST_INFO_OBJECT(uade_raw_dec, "UADE reached end of song");
		else if (actual_num_bytes_read < 0)
			GST_ERROR_OBJECT(uade_raw_dec, "UADE reported error during playback - shutting down");
		else
			GST_WARNING_OBJECT(uade_raw_dec, "only %ld byte decoded", actual_num_bytes_read);

		gst_buffer_unref(outbuf);
		*buffer = NULL;
		*num_samples = 0;
	}

	return ((*buffer) != NULL);
}
static GstFlowReturn
gst_data_uri_src_create (GstBaseSrc * basesrc, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstDataURISrc *src = GST_DATA_URI_SRC (basesrc);
  GstFlowReturn ret;

  GST_OBJECT_LOCK (src);

  if (!src->buffer)
    goto no_buffer;

  /* This is only correct because GstBaseSrc already clips size for us to be no
   * larger than the max. available size if a segment at the end is requested */
  if (offset + size > gst_buffer_get_size (src->buffer)) {
    ret = GST_FLOW_EOS;
  } else if (*buf != NULL) {
    GstMapInfo src_info;
    GstMapInfo dest_info;
    gsize fill_size;

    gst_buffer_map (src->buffer, &src_info, GST_MAP_READ);
    gst_buffer_map (*buf, &dest_info, GST_MAP_WRITE);

    fill_size = gst_buffer_fill (*buf, 0, src_info.data + offset, size);

    gst_buffer_unmap (*buf, &dest_info);
    gst_buffer_unmap (src->buffer, &src_info);
    gst_buffer_set_size (*buf, fill_size);
    ret = GST_FLOW_OK;
  } else {
    *buf =
        gst_buffer_copy_region (src->buffer, GST_BUFFER_COPY_ALL, offset, size);
    ret = GST_FLOW_OK;
  }
  GST_OBJECT_UNLOCK (src);

  return ret;

/* ERRORS */
no_buffer:
  {
    GST_OBJECT_UNLOCK (src);
    GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, (NULL), (NULL));
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
static GstFlowReturn sink_chain(GstPad *pad, GstObject *parent, GstBuffer *buffer)
{
    GstErDtlsDec *self = GST_ER_DTLS_DEC(parent);
    GstFlowReturn ret = GST_FLOW_OK;
    GstMapInfo map_info = GST_MAP_INFO_INIT;
    gint size;

    if (!self->agent) {
        gst_buffer_unref(buffer);
        return GST_FLOW_OK;
    }

    GST_DEBUG_OBJECT(self, "received buffer from %s with length %zd",
        self->connection_id, gst_buffer_get_size(buffer));

    gst_buffer_map(buffer, &map_info, GST_MAP_READWRITE);

    if (!map_info.size) {
        /* nothing to decode: drop the empty buffer instead of leaking it */
        gst_buffer_unmap(buffer, &map_info);
        gst_buffer_unref(buffer);
        return GST_FLOW_OK;
    }

    size = er_dtls_connection_process(self->connection, map_info.data, map_info.size);
    gst_buffer_unmap(buffer, &map_info);

    if (size <= 0) {
        gst_buffer_unref(buffer);

        return GST_FLOW_OK;
    }

    g_mutex_lock(&self->src_mutex);

    if (self->src) {
        gst_buffer_set_size(buffer, size);
        GST_LOG_OBJECT(self, "decoded buffer with length %d, pushing", size);
        ret = gst_pad_push(self->src, buffer);
    } else {
        GST_LOG_OBJECT(self, "dropped buffer with length %d, not linked", size);
        gst_buffer_unref(buffer);
    }

    g_mutex_unlock(&self->src_mutex);

    return ret;
}
Example #10
void GStreamerReader::ReadAndPushData(guint aLength)
{
  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");
  nsresult rv = NS_OK;

  GstBuffer* buffer = gst_buffer_new_and_alloc(aLength);
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_WRITE);
  guint8 *data = info.data;
#else
  guint8* data = GST_BUFFER_DATA(buffer);
#endif
  uint32_t size = 0, bytesRead = 0;
  while(bytesRead < aLength) {
    rv = resource->Read(reinterpret_cast<char*>(data + bytesRead),
        aLength - bytesRead, &size);
    if (NS_FAILED(rv) || size == 0)
      break;

    bytesRead += size;
  }

#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
  gst_buffer_set_size(buffer, bytesRead);
#else
  GST_BUFFER_SIZE(buffer) = bytesRead;
#endif

  GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer));
  if (ret != GST_FLOW_OK) {
    LOG(PR_LOG_ERROR, ("ReadAndPushData push ret %s", gst_flow_get_name(ret)));
  }

  if (bytesRead < aLength) {
    /* If we read less than what we wanted, we reached the end */
    gst_app_src_end_of_stream(mSource);
  }

  gst_buffer_unref(buffer);
}
static void
gst_raw_parse_loop (GstElement * element)
{
  GstRawParse *rp = GST_RAW_PARSE (element);
  GstRawParseClass *rp_class = GST_RAW_PARSE_GET_CLASS (rp);
  GstFlowReturn ret;
  GstBuffer *buffer;
  gint size;

  if (!gst_raw_parse_set_src_caps (rp))
    goto no_caps;

  if (rp->start_segment) {
    GST_DEBUG_OBJECT (rp, "sending start segment");
    gst_pad_push_event (rp->srcpad, rp->start_segment);
    rp->start_segment = NULL;
  }

  if (rp_class->multiple_frames_per_buffer && rp->framesize < 4096)
    size = 4096 - (4096 % rp->framesize);
  else
    size = rp->framesize;

  if (rp->segment.rate >= 0) {
    if (rp->offset + size > rp->upstream_length) {
      GstFormat fmt = GST_FORMAT_BYTES;

      if (!gst_pad_peer_query_duration (rp->sinkpad, fmt, &rp->upstream_length)) {
        GST_WARNING_OBJECT (rp,
            "Could not get upstream duration, trying to pull frame by frame");
        size = rp->framesize;
      } else if (rp->upstream_length < rp->offset + rp->framesize) {
        ret = GST_FLOW_EOS;
        goto pause;
      } else if (rp->offset + size > rp->upstream_length) {
        size = rp->upstream_length - rp->offset;
        size -= size % rp->framesize;
      }
    }
  } else {
    if (rp->offset == 0) {
      ret = GST_FLOW_EOS;
      goto pause;
    } else if (rp->offset < size) {
      size -= rp->offset;
    }
    rp->offset -= size;
  }

  buffer = NULL;
  ret = gst_pad_pull_range (rp->sinkpad, rp->offset, size, &buffer);

  if (ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (rp, "pull_range (%" G_GINT64_FORMAT ", %u) "
        "failed, flow: %s", rp->offset, size, gst_flow_get_name (ret));
    buffer = NULL;
    goto pause;
  }

  if (gst_buffer_get_size (buffer) < size) {
    GST_DEBUG_OBJECT (rp, "Short read at offset %" G_GINT64_FORMAT
        ", got only %" G_GSIZE_FORMAT " of %u bytes", rp->offset,
        gst_buffer_get_size (buffer), size);

    if (size > rp->framesize) {
      gst_buffer_set_size (buffer, gst_buffer_get_size (buffer) -
          gst_buffer_get_size (buffer) % rp->framesize);
    } else {
      gst_buffer_unref (buffer);
      buffer = NULL;
      ret = GST_FLOW_EOS;
      goto pause;
    }
  }

  ret = gst_raw_parse_push_buffer (rp, buffer);
  if (ret != GST_FLOW_OK)
    goto pause;

  return;

  /* ERRORS */
no_caps:
  {
    GST_ERROR_OBJECT (rp, "could not negotiate caps");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto pause;
  }
pause:
  {
    const gchar *reason = gst_flow_get_name (ret);

    GST_LOG_OBJECT (rp, "pausing task, reason %s", reason);
    gst_pad_pause_task (rp->sinkpad);

    if (ret == GST_FLOW_EOS) {
      if (rp->segment.flags & GST_SEEK_FLAG_SEGMENT) {
        GstClockTime stop;

        GST_LOG_OBJECT (rp, "Sending segment done");

        if ((stop = rp->segment.stop) == -1)
          stop = rp->segment.duration;

        gst_element_post_message (GST_ELEMENT_CAST (rp),
            gst_message_new_segment_done (GST_OBJECT_CAST (rp),
                rp->segment.format, stop));
        gst_pad_push_event (rp->srcpad,
            gst_event_new_segment_done (rp->segment.format, stop));
      } else {
        GST_LOG_OBJECT (rp, "Sending EOS, at end of stream");
        gst_pad_push_event (rp->srcpad, gst_event_new_eos ());
      }
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (rp, STREAM, FAILED,
          ("Internal data stream error."),
          ("stream stopped, reason %s", reason));
      gst_pad_push_event (rp->srcpad, gst_event_new_eos ());
    }
    return;
  }
}
Example #12
static GstFlowReturn
gst_opus_enc_encode (GstOpusEnc * enc, GstBuffer * buf)
{
  guint8 *bdata = NULL, *data, *mdata = NULL;
  gsize bsize, size;
  gsize bytes = enc->frame_samples * enc->n_channels * 2;
  gint ret = GST_FLOW_OK;
  GstMapInfo map;
  GstMapInfo omap;
  gint outsize;
  GstBuffer *outbuf;

  g_mutex_lock (enc->property_lock);

  if (G_LIKELY (buf)) {
    gst_buffer_map (buf, &map, GST_MAP_READ);
    bdata = map.data;
    bsize = map.size;

    if (G_UNLIKELY (bsize % bytes)) {
      GST_DEBUG_OBJECT (enc, "draining; adding silence samples");

      size = ((bsize / bytes) + 1) * bytes;
      mdata = g_malloc0 (size);
      memcpy (mdata, bdata, bsize);
      data = mdata;
    } else {
      data = bdata;
      size = bsize;
    }
  } else {
    GST_DEBUG_OBJECT (enc, "nothing to drain");
    goto done;
  }

  g_assert (size == bytes);

  outbuf = gst_buffer_new_and_alloc (enc->max_payload_size * enc->n_channels);
  if (!outbuf)
    goto done;

  GST_DEBUG_OBJECT (enc, "encoding %d samples (%d bytes)",
      enc->frame_samples, (int) bytes);

  gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);

  GST_DEBUG_OBJECT (enc, "encoding %d samples (%d bytes)",
      enc->frame_samples, (int) bytes);

  outsize =
      opus_multistream_encode (enc->state, (const gint16 *) data,
      enc->frame_samples, omap.data, enc->max_payload_size * enc->n_channels);

  gst_buffer_unmap (outbuf, &omap);

  if (outsize < 0) {
    GST_ERROR_OBJECT (enc, "Encoding failed: %d", outsize);
    ret = GST_FLOW_ERROR;
    goto done;
  } else if (outsize > enc->max_payload_size) {
    GST_WARNING_OBJECT (enc,
        "Encoded size %d is higher than max payload size (%d bytes)",
        outsize, enc->max_payload_size);
    ret = GST_FLOW_ERROR;
    goto done;
  }

  GST_DEBUG_OBJECT (enc, "Output packet is %u bytes", outsize);
  gst_buffer_set_size (outbuf, outsize);

  ret =
      gst_audio_encoder_finish_frame (GST_AUDIO_ENCODER (enc), outbuf,
      enc->frame_samples);

done:

  if (bdata)
    gst_buffer_unmap (buf, &map);
  g_mutex_unlock (enc->property_lock);

  if (mdata)
    g_free (mdata);

  return ret;
}
Example #13
static GstFlowReturn
gst_avdtp_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
    GstBuffer ** outbuf)
{
  GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (bsrc);
  GstBuffer *buf = NULL;
  GstMapInfo info;
  int ret;

  if (g_atomic_int_get (&avdtpsrc->unlocked))
    return GST_FLOW_FLUSHING;

  /* We don't operate in GST_FORMAT_BYTES, so offset is ignored */

  while ((ret = gst_poll_wait (avdtpsrc->poll, GST_CLOCK_TIME_NONE))) {
    if (g_atomic_int_get (&avdtpsrc->unlocked))
      /* We're unlocked, time to gtfo */
      return GST_FLOW_FLUSHING;

    if (ret < 0)
      /* Something went wrong */
      goto read_error;

    if (ret > 0)
      /* Got some data */
      break;
  }

  ret = GST_BASE_SRC_CLASS (parent_class)->alloc (bsrc, offset, length, outbuf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto alloc_failed;

  buf = *outbuf;

  gst_buffer_map (buf, &info, GST_MAP_WRITE);

  ret = read (avdtpsrc->pfd.fd, info.data, length);

  if (ret < 0)
    goto read_error;
  else if (ret == 0) {
    GST_INFO_OBJECT (avdtpsrc, "Got EOF on the transport fd");
    goto eof;
  }

  if (ret < length)
    gst_buffer_set_size (buf, ret);

  GST_LOG_OBJECT (avdtpsrc, "Read %d bytes", ret);

  gst_buffer_unmap (buf, &info);
  *outbuf = buf;

  return GST_FLOW_OK;

alloc_failed:
  {
    GST_DEBUG_OBJECT (bsrc, "alloc failed: %s", gst_flow_get_name (ret));
    return ret;
  }

read_error:
  GST_ERROR_OBJECT (avdtpsrc, "Error while reading audio data: %s",
      strerror (errno));
  /* buf is NULL if we failed before allocating and mapping the output buffer */
  if (buf) {
    gst_buffer_unmap (buf, &info);
    gst_buffer_unref (buf);
  }
  return GST_FLOW_ERROR;

eof:
  gst_buffer_unmap (buf, &info);
  gst_buffer_unref (buf);
  return GST_FLOW_EOS;
}
static void
gst_musepackdec_loop (GstPad * sinkpad)
{
  GstMusepackDec *musepackdec;
  GstFlowReturn flow;
  GstBuffer *out;
  GstMapInfo info;
  mpc_frame_info frame;
  mpc_status err;
  gint num_samples, samplerate, bitspersample;

  musepackdec = GST_MUSEPACK_DEC (GST_PAD_PARENT (sinkpad));

  samplerate = g_atomic_int_get (&musepackdec->rate);

  if (samplerate == 0) {
    if (!gst_musepack_stream_init (musepackdec))
      goto pause_task;

    gst_musepackdec_send_newsegment (musepackdec);
    samplerate = g_atomic_int_get (&musepackdec->rate);
  }

  bitspersample = g_atomic_int_get (&musepackdec->bps);

  out = gst_buffer_new_allocate (NULL, MPC_DECODER_BUFFER_LENGTH * 4, NULL);

  gst_buffer_map (out, &info, GST_MAP_READWRITE);
  frame.buffer = (MPC_SAMPLE_FORMAT *) info.data;
  err = mpc_demux_decode (musepackdec->d, &frame);
  gst_buffer_unmap (out, &info);

  if (err != MPC_STATUS_OK) {
    GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
    GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
    goto pause_task;
  } else if (frame.bits == -1) {
    goto eos_and_pause;
  }

  num_samples = frame.samples;

  gst_buffer_set_size (out, num_samples * bitspersample);

  GST_BUFFER_OFFSET (out) = musepackdec->segment.position;
  GST_BUFFER_PTS (out) =
      gst_util_uint64_scale_int (musepackdec->segment.position,
      GST_SECOND, samplerate);
  GST_BUFFER_DURATION (out) =
      gst_util_uint64_scale_int (num_samples, GST_SECOND, samplerate);

  musepackdec->segment.position += num_samples;

  GST_LOG_OBJECT (musepackdec, "Pushing buffer, timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out)));

  flow = gst_pad_push (musepackdec->srcpad, out);
  if (flow != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
    goto pause_task;
  }

  /* check if we're at the end of a configured segment */
  if (musepackdec->segment.stop != -1 &&
      musepackdec->segment.position >= musepackdec->segment.stop) {
    gint64 stop_time;

    GST_DEBUG_OBJECT (musepackdec, "Reached end of configured segment");

    if ((musepackdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0)
      goto eos_and_pause;

    GST_DEBUG_OBJECT (musepackdec, "Posting SEGMENT_DONE message");

    stop_time = gst_util_uint64_scale_int (musepackdec->segment.stop,
        GST_SECOND, samplerate);

    gst_element_post_message (GST_ELEMENT (musepackdec),
        gst_message_new_segment_done (GST_OBJECT (musepackdec),
            GST_FORMAT_TIME, stop_time));
    gst_pad_push_event (musepackdec->srcpad,
        gst_event_new_segment_done (GST_FORMAT_TIME, stop_time));

    goto pause_task;
  }

  return;

eos_and_pause:
  {
    GST_DEBUG_OBJECT (musepackdec, "sending EOS event");
    gst_pad_push_event (musepackdec->srcpad, gst_event_new_eos ());
    /* fall through to pause */
  }

pause_task:
  {
    GST_DEBUG_OBJECT (musepackdec, "Pausing task");
    gst_pad_pause_task (sinkpad);
    return;
  }
}
Example #15
/**
 * gst_tag_image_data_to_image_sample:
 * @image_data: the (encoded) image
 * @image_data_len: the length of the encoded image data at @image_data
 * @image_type: type of the image, or #GST_TAG_IMAGE_TYPE_UNDEFINED. Pass
 *     #GST_TAG_IMAGE_TYPE_NONE if no image type should be set at all (e.g.
 *     for preview images)
 *
 * Helper function for tag-reading plugins to create a #GstSample suitable to
 * add to a #GstTagList as an image tag (such as #GST_TAG_IMAGE or
 * #GST_TAG_PREVIEW_IMAGE) from the encoded image data and an (optional) image
 * type.
 *
 * Background: cover art and other images in tags are usually stored as a
 * blob of binary image data, often accompanied by a MIME type or some other
 * content type string (e.g. 'png', 'jpeg', 'jpg'). Sometimes there is also an
 * 'image type' to indicate what kind of image this is (e.g. front cover,
 * back cover, artist, etc.). The image data may also be a URI to the image
 * rather than the image itself.
 *
 * In GStreamer, image tags are #GstSample<!-- -->s containing the raw image
 * data, with the sample caps describing the content type of the image
 * (e.g. image/jpeg, image/png, text/uri-list). The sample info may contain
 * an additional 'image-type' field of #GST_TYPE_TAG_IMAGE_TYPE to describe
 * the type of image (front cover, back cover etc.). #GST_TAG_PREVIEW_IMAGE
 * tags should not carry an image type, their type is already indicated via
 * the special tag name.
 *
 * This function will do various checks and typefind the encoded image
 * data (we can't trust the declared mime type).
 *
 * Returns: a newly-allocated image sample for use in tag lists, or NULL
 */
GstSample *
gst_tag_image_data_to_image_sample (const guint8 * image_data,
    guint image_data_len, GstTagImageType image_type)
{
  const gchar *name;
  GstBuffer *image;
  GstSample *sample;
  GstCaps *caps;
  GstMapInfo info;
  GstStructure *image_info = NULL;

  g_return_val_if_fail (image_data != NULL, NULL);
  g_return_val_if_fail (image_data_len > 0, NULL);
  g_return_val_if_fail (gst_tag_image_type_is_valid (image_type), NULL);

  GST_DEBUG ("image data len: %u bytes", image_data_len);

  /* allocate space for a NUL terminator for an uri too */
  image = gst_buffer_new_and_alloc (image_data_len + 1);
  if (image == NULL)
    goto alloc_failed;

  gst_buffer_map (image, &info, GST_MAP_WRITE);
  memcpy (info.data, image_data, image_data_len);
  info.data[image_data_len] = '\0';
  gst_buffer_unmap (image, &info);

  /* Find GStreamer media type, can't trust declared type */
  caps = gst_type_find_helper_for_buffer (NULL, image, NULL);

  if (caps == NULL)
    goto no_type;

  GST_DEBUG ("Found GStreamer media type: %" GST_PTR_FORMAT, caps);

  /* sanity check: make sure typefound/declared caps are either URI or image */
  name = gst_structure_get_name (gst_caps_get_structure (caps, 0));

  if (!g_str_has_prefix (name, "image/") &&
      !g_str_has_prefix (name, "video/") &&
      !g_str_equal (name, "text/uri-list")) {
    GST_DEBUG ("Unexpected image type '%s', ignoring image frame", name);
    goto error;
  }

  /* Decrease size by 1 if we don't have an URI list
   * to keep the original size of the image
   */
  if (!g_str_equal (name, "text/uri-list"))
    gst_buffer_set_size (image, image_data_len);

  if (image_type != GST_TAG_IMAGE_TYPE_NONE) {
    GST_LOG ("Setting image type: %d", image_type);
    image_info = gst_structure_new ("GstTagImageInfo",
        "image-type", GST_TYPE_TAG_IMAGE_TYPE, image_type, NULL);
  }
  sample = gst_sample_new (image, caps, NULL, image_info);
  gst_buffer_unref (image);
  gst_caps_unref (caps);

  return sample;

/* ERRORS */
no_type:
  {
    GST_DEBUG ("Could not determine GStreamer media type, ignoring image");
    /* fall through */
  }
error:
  {
    if (image)
      gst_buffer_unref (image);
    if (caps)
      gst_caps_unref (caps);
    return NULL;
  }
alloc_failed:
  {
    GST_WARNING ("failed to allocate buffer of %u bytes for image",
        image_data_len);
    /* image is NULL here, nothing to unref */
    return NULL;
  }

}
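As the documentation above describes, the returned sample is meant to be added to a GstTagList by a tag-reading plugin. A hedged usage sketch follows, assuming <gst/tag/tag.h>; add_cover_art(), image_bytes and image_len are illustrative placeholders, not API from the sources above.

static void
add_cover_art (GstTagList * tags, const guint8 * image_bytes, guint image_len)
{
  GstSample *sample;

  /* typefinds the data and wraps it in a sample with matching caps */
  sample = gst_tag_image_data_to_image_sample (image_bytes, image_len,
      GST_TAG_IMAGE_TYPE_FRONT_COVER);
  if (sample == NULL)
    return;                     /* unsupported or bogus image data */

  gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_IMAGE, sample, NULL);
  gst_sample_unref (sample);
}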
Example #16
static GstFlowReturn
gst_ladspa_source_type_fill (GstBaseSrc * base, guint64 offset,
    guint length, GstBuffer * buffer)
{
  GstLADSPASource *ladspa = GST_LADSPA_SOURCE (base);
  GstClockTime next_time;
  gint64 next_sample, next_byte;
  gint bytes, samples;
  GstElementClass *eclass;
  GstMapInfo map;
  gint samplerate, bpf;

  /* example for tagging generated data */
  if (!ladspa->tags_pushed) {
    GstTagList *taglist;

    taglist = gst_tag_list_new (GST_TAG_DESCRIPTION, "ladspa wave", NULL);

    eclass = GST_ELEMENT_CLASS (gst_ladspa_source_type_parent_class);
    if (eclass->send_event)
      eclass->send_event (GST_ELEMENT (base), gst_event_new_tag (taglist));
    else
      gst_tag_list_unref (taglist);
    ladspa->tags_pushed = TRUE;
  }

  if (ladspa->eos_reached) {
    GST_INFO_OBJECT (ladspa, "eos");
    return GST_FLOW_EOS;
  }

  samplerate = GST_AUDIO_INFO_RATE (&ladspa->info);
  bpf = GST_AUDIO_INFO_BPF (&ladspa->info);

  /* if no length was given, use our default length in samples otherwise convert
   * the length in bytes to samples. */
  if (length == -1)
    samples = ladspa->samples_per_buffer;
  else
    samples = length / bpf;

  /* if no offset was given, use our next logical byte */
  if (offset == -1)
    offset = ladspa->next_byte;

  /* now see if we are at the byteoffset we think we are */
  if (offset != ladspa->next_byte) {
    GST_DEBUG_OBJECT (ladspa, "seek to new offset %" G_GUINT64_FORMAT, offset);
    /* we have a discont in the expected sample offset, do a 'seek' */
    ladspa->next_sample = offset / bpf;
    ladspa->next_time =
        gst_util_uint64_scale_int (ladspa->next_sample, GST_SECOND, samplerate);
    ladspa->next_byte = offset;
  }

  /* check for eos */
  if (ladspa->check_seek_stop &&
      (ladspa->sample_stop > ladspa->next_sample) &&
      (ladspa->sample_stop < ladspa->next_sample + samples)
      ) {
    /* calculate only partial buffer */
    ladspa->generate_samples_per_buffer =
        ladspa->sample_stop - ladspa->next_sample;
    next_sample = ladspa->sample_stop;
    ladspa->eos_reached = TRUE;
  } else {
    /* calculate full buffer */
    ladspa->generate_samples_per_buffer = samples;
    next_sample =
        ladspa->next_sample + (ladspa->reverse ? (-samples) : samples);
  }

  bytes = ladspa->generate_samples_per_buffer * bpf;

  next_byte = ladspa->next_byte + (ladspa->reverse ? (-bytes) : bytes);
  next_time = gst_util_uint64_scale_int (next_sample, GST_SECOND, samplerate);

  GST_LOG_OBJECT (ladspa, "samplerate %d", samplerate);
  GST_LOG_OBJECT (ladspa,
      "next_sample %" G_GINT64_FORMAT ", ts %" GST_TIME_FORMAT, next_sample,
      GST_TIME_ARGS (next_time));

  gst_buffer_set_size (buffer, bytes);

  GST_BUFFER_OFFSET (buffer) = ladspa->next_sample;
  GST_BUFFER_OFFSET_END (buffer) = next_sample;
  if (!ladspa->reverse) {
    GST_BUFFER_TIMESTAMP (buffer) =
        ladspa->timestamp_offset + ladspa->next_time;
    GST_BUFFER_DURATION (buffer) = next_time - ladspa->next_time;
  } else {
    GST_BUFFER_TIMESTAMP (buffer) = ladspa->timestamp_offset + next_time;
    GST_BUFFER_DURATION (buffer) = ladspa->next_time - next_time;
  }

  gst_object_sync_values (GST_OBJECT (ladspa), GST_BUFFER_TIMESTAMP (buffer));

  ladspa->next_time = next_time;
  ladspa->next_sample = next_sample;
  ladspa->next_byte = next_byte;

  GST_LOG_OBJECT (ladspa, "generating %u samples at ts %" GST_TIME_FORMAT,
      ladspa->generate_samples_per_buffer,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));

  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  gst_ladspa_transform (&ladspa->ladspa, map.data,
      ladspa->generate_samples_per_buffer, NULL);
  gst_buffer_unmap (buffer, &map);

  return GST_FLOW_OK;
}
Example #17
static GstFlowReturn
gst_fdkaacenc_handle_frame (GstAudioEncoder * enc, GstBuffer * inbuf)
{
  GstFdkAacEnc *self = GST_FDKAACENC (enc);
  GstFlowReturn ret = GST_FLOW_OK;
  GstAudioInfo *info;
  GstMapInfo imap, omap;
  GstBuffer *outbuf;
  AACENC_BufDesc in_desc = { 0 };
  AACENC_BufDesc out_desc = { 0 };
  AACENC_InArgs in_args = { 0 };
  AACENC_OutArgs out_args = { 0 };
  gint in_id = IN_AUDIO_DATA, out_id = OUT_BITSTREAM_DATA;
  gint in_sizes, out_sizes;
  gint in_el_sizes, out_el_sizes;
  AACENC_ERROR err;

  info = gst_audio_encoder_get_audio_info (enc);

  if (inbuf) {
    if (self->need_reorder) {
      inbuf = gst_buffer_copy (inbuf);
      gst_buffer_map (inbuf, &imap, GST_MAP_READWRITE);
      gst_audio_reorder_channels (imap.data, imap.size,
          GST_AUDIO_INFO_FORMAT (info), GST_AUDIO_INFO_CHANNELS (info),
          &GST_AUDIO_INFO_POSITION (info, 0), self->aac_positions);
    } else {
      gst_buffer_map (inbuf, &imap, GST_MAP_READ);
    }

    in_args.numInSamples = imap.size / GST_AUDIO_INFO_BPS (info);

    in_sizes = imap.size;
    in_el_sizes = GST_AUDIO_INFO_BPS (info);
    in_desc.numBufs = 1;
  } else {
    in_args.numInSamples = -1;

    in_sizes = 0;
    in_el_sizes = 0;
    in_desc.numBufs = 0;
  }

  in_desc.bufferIdentifiers = &in_id;
  in_desc.bufs = (void *) &imap.data;
  in_desc.bufSizes = &in_sizes;
  in_desc.bufElSizes = &in_el_sizes;

  outbuf = gst_audio_encoder_allocate_output_buffer (enc, self->outbuf_size);
  if (!outbuf) {
    ret = GST_FLOW_ERROR;
    goto out;
  }

  gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);
  out_sizes = omap.size;
  out_el_sizes = 1;
  out_desc.bufferIdentifiers = &out_id;
  out_desc.numBufs = 1;
  out_desc.bufs = (void *) &omap.data;
  out_desc.bufSizes = &out_sizes;
  out_desc.bufElSizes = &out_el_sizes;

  err = aacEncEncode (self->enc, &in_desc, &out_desc, &in_args, &out_args);
  if (err == AACENC_ENCODE_EOF && !inbuf)
    goto out;
  else if (err != AACENC_OK) {
    GST_ERROR_OBJECT (self, "Failed to encode data: %d", err);
    ret = GST_FLOW_ERROR;
    goto out;
  }

  if (inbuf) {
    gst_buffer_unmap (inbuf, &imap);
    if (self->need_reorder)
      gst_buffer_unref (inbuf);
    inbuf = NULL;
  }

  if (!out_args.numOutBytes)
    goto out;

  gst_buffer_unmap (outbuf, &omap);
  gst_buffer_set_size (outbuf, out_args.numOutBytes);

  ret = gst_audio_encoder_finish_frame (enc, outbuf, self->samples_per_frame);
  outbuf = NULL;

out:
  if (outbuf) {
    gst_buffer_unmap (outbuf, &omap);
    gst_buffer_unref (outbuf);
  }
  if (inbuf) {
    gst_buffer_unmap (inbuf, &imap);
    if (self->need_reorder)
      gst_buffer_unref (inbuf);
  }

  return ret;
}
Example #18
static GstFlowReturn
gst_sbc_enc_handle_frame (GstAudioEncoder * audio_enc, GstBuffer * buffer)
{
  GstSbcEnc *enc = GST_SBC_ENC (audio_enc);
  GstMapInfo in_map, out_map;
  GstBuffer *outbuf = NULL;
  guint samples_per_frame, frames, i = 0;

  /* no fancy draining */
  if (buffer == NULL)
    return GST_FLOW_OK;

  if (G_UNLIKELY (enc->channels == 0 || enc->blocks == 0 || enc->subbands == 0))
    return GST_FLOW_NOT_NEGOTIATED;

  samples_per_frame = enc->channels * enc->blocks * enc->subbands;

  if (!gst_buffer_map (buffer, &in_map, GST_MAP_READ))
    goto map_failed;

  frames = in_map.size / (samples_per_frame * sizeof (gint16));

  GST_LOG_OBJECT (enc,
      "encoding %" G_GSIZE_FORMAT " samples into %u SBC frames",
      in_map.size / (enc->channels * sizeof (gint16)), frames);

  if (frames > 0) {
    gsize frame_len;

    frame_len = sbc_get_frame_length (&enc->sbc);
    outbuf = gst_audio_encoder_allocate_output_buffer (audio_enc,
        frames * frame_len);

    if (outbuf == NULL)
      goto no_buffer;

    gst_buffer_map (outbuf, &out_map, GST_MAP_WRITE);

    for (i = 0; i < frames; ++i) {
      gssize ret, written = 0;

      ret = sbc_encode (&enc->sbc, in_map.data + (i * samples_per_frame * 2),
          samples_per_frame * 2, out_map.data + (i * frame_len), frame_len,
          &written);

      if (ret < 0 || written != frame_len) {
        GST_WARNING_OBJECT (enc, "encoding error, ret = %" G_GSSIZE_FORMAT ", "
            "written = %" G_GSSIZE_FORMAT, ret, written);
        break;
      }
    }

    gst_buffer_unmap (outbuf, &out_map);

    if (i > 0)
      gst_buffer_set_size (outbuf, i * frame_len);
    else
      gst_buffer_replace (&outbuf, NULL);
  }

done:

  gst_buffer_unmap (buffer, &in_map);

  return gst_audio_encoder_finish_frame (audio_enc, outbuf,
      i * (samples_per_frame / enc->channels));

/* ERRORS */
no_buffer:
  {
    GST_ERROR_OBJECT (enc, "could not allocate output buffer");
    goto done;
  }
map_failed:
  {
    GST_ERROR_OBJECT (enc, "could not map input buffer");
    /* the input buffer was never mapped, so don't fall through to done: */
    return GST_FLOW_ERROR;
  }
}
Example #19
static GstFlowReturn
gst_sbc_dec_handle_frame (GstAudioDecoder * audio_dec, GstBuffer * buf)
{
  GstSbcDec *dec = GST_SBC_DEC (audio_dec);
  GstBuffer *outbuf = NULL;
  GstMapInfo out_map;
  GstMapInfo in_map;
  gsize output_size;
  guint num_frames, i;

  /* no fancy draining */
  if (G_UNLIKELY (buf == NULL))
    return GST_FLOW_OK;

  if (G_UNLIKELY (dec->frame_len == 0))
    return GST_FLOW_NOT_NEGOTIATED;

  gst_buffer_map (buf, &in_map, GST_MAP_READ);

  if (G_UNLIKELY (in_map.size == 0))
    goto done;

  /* we assume all frames are of the same size, this is implied by the
   * input caps applying to the whole input buffer, and the parser should
   * also have made sure of that */
  if (G_UNLIKELY (in_map.size % dec->frame_len != 0))
    goto mixed_frames;

  num_frames = in_map.size / dec->frame_len;
  output_size = num_frames * dec->samples_per_frame * sizeof (gint16);

  outbuf = gst_audio_decoder_allocate_output_buffer (audio_dec, output_size);

  if (outbuf == NULL)
    goto no_buffer;

  gst_buffer_map (outbuf, &out_map, GST_MAP_WRITE);

  for (i = 0; i < num_frames; ++i) {
    gssize ret;
    gsize written;

    ret = sbc_decode (&dec->sbc, in_map.data + (i * dec->frame_len),
        dec->frame_len, out_map.data + (i * dec->samples_per_frame * 2),
        dec->samples_per_frame * 2, &written);

    if (ret <= 0 || written != (dec->samples_per_frame * 2)) {
      GST_WARNING_OBJECT (dec, "decoding error, ret = %" G_GSSIZE_FORMAT ", "
          "written = %" G_GSSIZE_FORMAT, ret, written);
      break;
    }
  }

  gst_buffer_unmap (outbuf, &out_map);

  if (i > 0)
    gst_buffer_set_size (outbuf, i * dec->samples_per_frame * 2);
  else
    gst_buffer_replace (&outbuf, NULL);

done:

  gst_buffer_unmap (buf, &in_map);

  return gst_audio_decoder_finish_frame (audio_dec, outbuf, 1);

/* ERRORS */
mixed_frames:
  {
    GST_WARNING_OBJECT (dec, "inconsistent input data/frames, skipping");
    goto done;
  }
no_buffer:
  {
    GST_ERROR_OBJECT (dec, "could not allocate output buffer");
    goto done;
  }
}
Example #20
/*
 * This function should be called while holding the filter lock
 */
static gboolean
gst_srtp_dec_decode_buffer (GstSrtpDec * filter, GstPad * pad, GstBuffer * buf,
    gboolean is_rtcp, guint32 ssrc)
{
  GstMapInfo map;
  err_status_t err;
  gint size;

  GST_LOG_OBJECT (pad, "Received %s buffer of size %" G_GSIZE_FORMAT
      " with SSRC = %u", is_rtcp ? "RTCP" : "RTP", gst_buffer_get_size (buf),
      ssrc);

  /* Change buffer to remove protection */
  buf = gst_buffer_make_writable (buf);

  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
  size = map.size;

unprotect:

  gst_srtp_init_event_reporter ();

  if (is_rtcp)
    err = srtp_unprotect_rtcp (filter->session, map.data, &size);
  else {
    /* If ROC has changed, we know we need to set the initial RTP
     * sequence number too. */
    if (filter->roc_changed) {
      srtp_stream_t stream;

      stream = srtp_get_stream (filter->session, htonl (ssrc));

      if (stream) {
        guint16 seqnum = 0;
        GstRTPBuffer rtpbuf = GST_RTP_BUFFER_INIT;

        gst_rtp_buffer_map (buf,
            GST_MAP_READ | GST_RTP_BUFFER_MAP_FLAG_SKIP_PADDING, &rtpbuf);
        seqnum = gst_rtp_buffer_get_seq (&rtpbuf);
        gst_rtp_buffer_unmap (&rtpbuf);

        /* We finally add the RTP sequence number to the current
         * rollover counter. */
        stream->rtp_rdbx.index &= ~0xFFFF;
        stream->rtp_rdbx.index |= seqnum;
      }

      filter->roc_changed = FALSE;
    }
    err = srtp_unprotect (filter->session, map.data, &size);
  }

  GST_OBJECT_UNLOCK (filter);

  if (err != err_status_ok) {
    GST_WARNING_OBJECT (pad,
        "Unable to unprotect buffer (unprotect failed code %d)", err);

    /* Signal user depending on type of error */
    switch (err) {
      case err_status_key_expired:
        GST_OBJECT_LOCK (filter);

        /* Update stream */
        if (find_stream_by_ssrc (filter, ssrc)) {
          GST_OBJECT_UNLOCK (filter);
          if (request_key_with_signal (filter, ssrc, SIGNAL_HARD_LIMIT)) {
            GST_OBJECT_LOCK (filter);
            goto unprotect;
          } else {
            GST_WARNING_OBJECT (filter, "Hard limit reached, no new key, "
                "dropping");
          }
        } else {
          GST_WARNING_OBJECT (filter, "Could not find matching stream, "
              "dropping");
        }
        break;
      case err_status_auth_fail:
        GST_WARNING_OBJECT (filter, "Error authentication packet, dropping");
        break;
      case err_status_cipher_fail:
        GST_WARNING_OBJECT (filter, "Error while decrypting packet, dropping");
        break;
      default:
        GST_WARNING_OBJECT (filter, "Other error, dropping");
        break;
    }

    gst_buffer_unmap (buf, &map);

    GST_OBJECT_LOCK (filter);
    return FALSE;
  }

  gst_buffer_unmap (buf, &map);

  gst_buffer_set_size (buf, size);

  GST_OBJECT_LOCK (filter);
  return TRUE;
}
Example #21
static GstFlowReturn
gst_lv2_source_fill (GstBaseSrc * base, guint64 offset,
    guint length, GstBuffer * buffer)
{
  GstLV2Source *lv2 = (GstLV2Source *) base;
  GstLV2SourceClass *klass = (GstLV2SourceClass *) GST_BASE_SRC_GET_CLASS (lv2);
  GstLV2Class *lv2_class = &klass->lv2;
  GstLV2Group *lv2_group;
  GstLV2Port *lv2_port;
  GstClockTime next_time;
  gint64 next_sample, next_byte;
  guint bytes, samples;
  GstElementClass *eclass;
  GstMapInfo map;
  gint samplerate, bpf;
  guint j, k, l;
  gfloat *out = NULL, *cv = NULL, *mem;
  gfloat val;

  /* example for tagging generated data */
  if (!lv2->tags_pushed) {
    GstTagList *taglist;

    taglist = gst_tag_list_new (GST_TAG_DESCRIPTION, "lv2 wave", NULL);

    eclass = GST_ELEMENT_CLASS (parent_class);
    if (eclass->send_event)
      eclass->send_event (GST_ELEMENT (base), gst_event_new_tag (taglist));
    else
      gst_tag_list_unref (taglist);
    lv2->tags_pushed = TRUE;
  }

  if (lv2->eos_reached) {
    GST_INFO_OBJECT (lv2, "eos");
    return GST_FLOW_EOS;
  }

  samplerate = GST_AUDIO_INFO_RATE (&lv2->info);
  bpf = GST_AUDIO_INFO_BPF (&lv2->info);

  /* if no length was given, use our default length in samples otherwise convert
   * the length in bytes to samples. */
  if (length == -1)
    samples = lv2->samples_per_buffer;
  else
    samples = length / bpf;

  /* if no offset was given, use our next logical byte */
  if (offset == -1)
    offset = lv2->next_byte;

  /* now see if we are at the byteoffset we think we are */
  if (offset != lv2->next_byte) {
    GST_DEBUG_OBJECT (lv2, "seek to new offset %" G_GUINT64_FORMAT, offset);
    /* we have a discont in the expected sample offset, do a 'seek' */
    lv2->next_sample = offset / bpf;
    lv2->next_time =
        gst_util_uint64_scale_int (lv2->next_sample, GST_SECOND, samplerate);
    lv2->next_byte = offset;
  }

  /* check for eos */
  if (lv2->check_seek_stop &&
      (lv2->sample_stop > lv2->next_sample) &&
      (lv2->sample_stop < lv2->next_sample + samples)
      ) {
    /* calculate only partial buffer */
    lv2->generate_samples_per_buffer = lv2->sample_stop - lv2->next_sample;
    next_sample = lv2->sample_stop;
    lv2->eos_reached = TRUE;

    GST_INFO_OBJECT (lv2, "eos reached");
  } else {
    /* calculate full buffer */
    lv2->generate_samples_per_buffer = samples;
    next_sample = lv2->next_sample + (lv2->reverse ? (-samples) : samples);
  }

  bytes = lv2->generate_samples_per_buffer * bpf;

  next_byte = lv2->next_byte + (lv2->reverse ? (-bytes) : bytes);
  next_time = gst_util_uint64_scale_int (next_sample, GST_SECOND, samplerate);

  GST_LOG_OBJECT (lv2, "samplerate %d", samplerate);
  GST_LOG_OBJECT (lv2,
      "next_sample %" G_GINT64_FORMAT ", ts %" GST_TIME_FORMAT, next_sample,
      GST_TIME_ARGS (next_time));

  gst_buffer_set_size (buffer, bytes);

  GST_BUFFER_OFFSET (buffer) = lv2->next_sample;
  GST_BUFFER_OFFSET_END (buffer) = next_sample;
  if (!lv2->reverse) {
    GST_BUFFER_TIMESTAMP (buffer) = lv2->timestamp_offset + lv2->next_time;
    GST_BUFFER_DURATION (buffer) = next_time - lv2->next_time;
  } else {
    GST_BUFFER_TIMESTAMP (buffer) = lv2->timestamp_offset + next_time;
    GST_BUFFER_DURATION (buffer) = lv2->next_time - next_time;
  }

  gst_object_sync_values (GST_OBJECT (lv2), GST_BUFFER_TIMESTAMP (buffer));

  lv2->next_time = next_time;
  lv2->next_sample = next_sample;
  lv2->next_byte = next_byte;

  GST_LOG_OBJECT (lv2, "generating %u samples at ts %" GST_TIME_FORMAT,
      samples, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));

  gst_buffer_map (buffer, &map, GST_MAP_WRITE);

  /* multi channel outputs */
  lv2_group = &lv2_class->out_group;
  if (lv2_group->ports->len > 1) {
    out = g_new0 (gfloat, samples * lv2_group->ports->len);
    for (j = 0; j < lv2_group->ports->len; ++j) {
      lv2_port = &g_array_index (lv2_group->ports, GstLV2Port, j);
      lilv_instance_connect_port (lv2->lv2.instance, lv2_port->index,
          out + (j * samples));
      GST_LOG_OBJECT (lv2, "connected port %d/%d", j, lv2_group->ports->len);
    }
  } else {
    lv2_port = &g_array_index (lv2_group->ports, GstLV2Port, 0);
    lilv_instance_connect_port (lv2->lv2.instance, lv2_port->index,
        (gfloat *) map.data);
    GST_LOG_OBJECT (lv2, "connected port 0");
  }

  /* cv ports */
  cv = g_new (gfloat, samples * lv2_class->num_cv_in);
  for (j = k = 0; j < lv2_class->control_in_ports->len; j++) {
    lv2_port = &g_array_index (lv2_class->control_in_ports, GstLV2Port, j);
    if (lv2_port->type != GST_LV2_PORT_CV)
      continue;

    mem = cv + (k * samples);
    val = lv2->lv2.ports.control.in[j];
    /* FIXME: use gst_control_binding_get_value_array */
    for (l = 0; l < samples; l++)
      mem[l] = val;
    lilv_instance_connect_port (lv2->lv2.instance, lv2_port->index, mem);
    k++;
  }

  lilv_instance_run (lv2->lv2.instance, samples);

  if (lv2_group->ports->len > 1) {
    gst_lv2_source_interleave_data (lv2_group->ports->len,
        (gfloat *) map.data, samples, out);
    g_free (out);
  }

  g_free (cv);

  gst_buffer_unmap (buffer, &map);

  return GST_FLOW_OK;
}
Example #22
static GstFlowReturn gst_imx_vpu_base_enc_handle_frame(GstVideoEncoder *encoder, GstVideoCodecFrame *frame)
{
	VpuEncRetCode enc_ret;
	VpuEncEncParam enc_enc_param;
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxVpuBaseEncClass *klass;
	GstImxVpuBaseEnc *vpu_base_enc;
	VpuFrameBuffer input_framebuf;
	GstBuffer *input_buffer;
	gint src_stride;

	vpu_base_enc = GST_IMX_VPU_BASE_ENC(encoder);
	klass = GST_IMX_VPU_BASE_ENC_CLASS(G_OBJECT_GET_CLASS(vpu_base_enc));

	g_assert(klass->set_frame_enc_params != NULL);

	memset(&enc_enc_param, 0, sizeof(enc_enc_param));
	memset(&input_framebuf, 0, sizeof(input_framebuf));

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(frame->input_buffer);

	/* If the incoming frame's buffer is not using physically contiguous memory,
	 * it needs to be copied to the internal input buffer, otherwise the VPU
	 * encoder cannot read the frame */
	if (phys_mem_meta == NULL)
	{
		/* No physical memory metadata found -> buffer is not physically contiguous */

		GstVideoFrame temp_input_video_frame, temp_incoming_video_frame;

		GST_LOG_OBJECT(vpu_base_enc, "input buffer not physically contiguous - frame copy is necessary");

		if (vpu_base_enc->internal_input_buffer == NULL)
		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			GstFlowReturn flow_ret;

			if (vpu_base_enc->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstStructure *config;
				GstCaps *caps;
				GstAllocator *allocator;

				GST_DEBUG_OBJECT(vpu_base_enc, "creating internal bufferpool");

				caps = gst_video_info_to_caps(&(vpu_base_enc->video_info));
				vpu_base_enc->internal_bufferpool = gst_imx_phys_mem_buffer_pool_new(FALSE);
				allocator = gst_imx_vpu_enc_allocator_obtain();

				config = gst_buffer_pool_get_config(vpu_base_enc->internal_bufferpool);
				gst_buffer_pool_config_set_params(config, caps, vpu_base_enc->video_info.size, 2, 0);
				gst_buffer_pool_config_set_allocator(config, allocator, NULL);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_IMX_PHYS_MEM);
				gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
				gst_buffer_pool_set_config(vpu_base_enc->internal_bufferpool, config);

				gst_caps_unref(caps);

				if (vpu_base_enc->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(vpu_base_enc, "failed to create internal bufferpool");
					return GST_FLOW_ERROR;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(vpu_base_enc->internal_bufferpool))
				gst_buffer_pool_set_active(vpu_base_enc->internal_bufferpool, TRUE);

			/* Create the internal input buffer */
			flow_ret = gst_buffer_pool_acquire_buffer(vpu_base_enc->internal_bufferpool, &(vpu_base_enc->internal_input_buffer), NULL);
			if (flow_ret != GST_FLOW_OK)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "error acquiring input frame buffer: %s", gst_pad_mode_get_name(flow_ret));
				return flow_ret;
			}
		}

		/* The internal input buffer exists at this point. Since the incoming frame
		 * is not stored in physical memory, copy its pixels to the internal
		 * input buffer, so the encoder can read them. */

		gst_video_frame_map(&temp_incoming_video_frame, &(vpu_base_enc->video_info), frame->input_buffer, GST_MAP_READ);
		gst_video_frame_map(&temp_input_video_frame, &(vpu_base_enc->video_info), vpu_base_enc->internal_input_buffer, GST_MAP_WRITE);

		gst_video_frame_copy(&temp_input_video_frame, &temp_incoming_video_frame);

		gst_video_frame_unmap(&temp_incoming_video_frame);
		gst_video_frame_unmap(&temp_input_video_frame);

		/* Set the internal input buffer as the encoder's input */
		input_buffer = vpu_base_enc->internal_input_buffer;
		/* And use the internal input buffer's physical memory metadata */
		phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(vpu_base_enc->internal_input_buffer);
	}
	else
	{
		/* Physical memory metadata found -> buffer is physically contiguous
		 * It can be used directly as input for the VPU encoder */
		input_buffer = frame->input_buffer;
	}

	/* Set up physical addresses for the input framebuffer */
	{
		gsize *plane_offsets;
		gint *plane_strides;
		GstVideoMeta *video_meta;
		unsigned char *phys_ptr;

		/* Try to use plane offset and stride information from the video
		 * metadata if present, since these can be more accurate than
		 * the information from the video info */
		video_meta = gst_buffer_get_video_meta(input_buffer);
		if (video_meta != NULL)
		{
			plane_offsets = video_meta->offset;
			plane_strides = video_meta->stride;
		}
		else
		{
			plane_offsets = vpu_base_enc->video_info.offset;
			plane_strides = vpu_base_enc->video_info.stride;
		}

		phys_ptr = (unsigned char*)(phys_mem_meta->phys_addr);

		input_framebuf.pbufY = phys_ptr;
		input_framebuf.pbufCb = phys_ptr + plane_offsets[1];
		input_framebuf.pbufCr = phys_ptr + plane_offsets[2];
		input_framebuf.pbufMvCol = NULL; /* not used by the VPU encoder */
		input_framebuf.nStrideY = plane_strides[0];
		input_framebuf.nStrideC = plane_strides[1];

		/* this is needed for framebuffers registration below */
		src_stride = plane_strides[0];

		GST_TRACE_OBJECT(vpu_base_enc, "width: %d   height: %d   stride 0: %d   stride 1: %d   offset 0: %d   offset 1: %d   offset 2: %d", GST_VIDEO_INFO_WIDTH(&(vpu_base_enc->video_info)), GST_VIDEO_INFO_HEIGHT(&(vpu_base_enc->video_info)), plane_strides[0], plane_strides[1], plane_offsets[0], plane_offsets[1], plane_offsets[2]);
	}

	/* Create framebuffers structure (if not already present) */
	if (vpu_base_enc->framebuffers == NULL)
	{
		GstImxVpuFramebufferParams fbparams;
		gst_imx_vpu_framebuffers_enc_init_info_to_params(&(vpu_base_enc->init_info), &fbparams);
		fbparams.pic_width = vpu_base_enc->open_param.nPicWidth;
		fbparams.pic_height = vpu_base_enc->open_param.nPicHeight;

		vpu_base_enc->framebuffers = gst_imx_vpu_framebuffers_new(&fbparams, gst_imx_vpu_enc_allocator_obtain());
		if (vpu_base_enc->framebuffers == NULL)
		{
			GST_ELEMENT_ERROR(vpu_base_enc, RESOURCE, NO_SPACE_LEFT, ("could not create framebuffers structure"), (NULL));
			return GST_FLOW_ERROR;
		}

		gst_imx_vpu_framebuffers_register_with_encoder(vpu_base_enc->framebuffers, vpu_base_enc->handle, src_stride);
	}

	/* Allocate physical buffer for output data (if not already present) */
	if (vpu_base_enc->output_phys_buffer == NULL)
	{
		vpu_base_enc->output_phys_buffer = (GstImxPhysMemory *)gst_allocator_alloc(gst_imx_vpu_enc_allocator_obtain(), vpu_base_enc->framebuffers->total_size, NULL);

		if (vpu_base_enc->output_phys_buffer == NULL)
		{
			GST_ERROR_OBJECT(vpu_base_enc, "could not allocate physical buffer for output data");
			return GST_FLOW_ERROR;
		}
	}

	/* Set up encoding parameters */
	enc_enc_param.nInVirtOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->mapped_virt_addr); /* TODO */
	enc_enc_param.nInPhyOutput = (unsigned int)(vpu_base_enc->output_phys_buffer->phys_addr);
	enc_enc_param.nInOutputBufLen = vpu_base_enc->output_phys_buffer->mem.size;
	enc_enc_param.nPicWidth = vpu_base_enc->framebuffers->pic_width;
	enc_enc_param.nPicHeight = vpu_base_enc->framebuffers->pic_height;
	enc_enc_param.nFrameRate = vpu_base_enc->open_param.nFrameRate;
	enc_enc_param.pInFrame = &input_framebuf;
	enc_enc_param.nForceIPicture = 0;

	/* Force I-frame if either IS_FORCE_KEYFRAME or IS_FORCE_KEYFRAME_HEADERS is set for the current frame. */
	if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME(frame) || GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS(frame))
	{
		enc_enc_param.nForceIPicture = 1;
		GST_LOG_OBJECT(vpu_base_enc, "got request to make this a keyframe - forcing I frame");
		GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT(frame);
	}

	/* Give the derived class a chance to set up encoding parameters too */
	if (!klass->set_frame_enc_params(vpu_base_enc, &enc_enc_param, &(vpu_base_enc->open_param)))
	{
		GST_ERROR_OBJECT(vpu_base_enc, "derived class could not frame enc params");
		return GST_FLOW_ERROR;
	}

	/* Main encoding block */
	{
		GstBuffer *output_buffer = NULL;
		gsize output_buffer_offset = 0;
		gboolean frame_finished = FALSE;

		frame->output_buffer = NULL;

		/* Run in a loop until the VPU reports the input as used */
		do
		{
			/* Feed input data */
			enc_ret = VPU_EncEncodeFrame(vpu_base_enc->handle, &enc_enc_param);
			if (enc_ret != VPU_ENC_RET_SUCCESS)
			{
				GST_ERROR_OBJECT(vpu_base_enc, "failed to encode frame: %s", gst_imx_vpu_strerror(enc_ret));
				VPU_EncReset(vpu_base_enc->handle);
				return GST_FLOW_ERROR;
			}

			if (frame_finished)
			{
				GST_WARNING_OBJECT(vpu_base_enc, "frame was already finished for the current input, but input not yet marked as used");
				continue;
			}

			if (enc_enc_param.eOutRetCode & (VPU_ENC_OUTPUT_DIS | VPU_ENC_OUTPUT_SEQHEADER))
			{
				/* Create an output buffer on demand */
				if (output_buffer == NULL)
				{
					output_buffer = gst_video_encoder_allocate_output_buffer(
						encoder,
						vpu_base_enc->output_phys_buffer->mem.size
					);
					frame->output_buffer = output_buffer;
				}

				GST_LOG_OBJECT(vpu_base_enc, "processing output data: %u bytes, output buffer offset %u", enc_enc_param.nOutOutputSize, output_buffer_offset);

				if (klass->fill_output_buffer != NULL)
				{
					/* Derived class fills data on its own */

					gsize cur_offset = output_buffer_offset;
					output_buffer_offset += klass->fill_output_buffer(
						vpu_base_enc,
						frame,
						cur_offset,
						vpu_base_enc->output_phys_buffer->mapped_virt_addr,
						enc_enc_param.nOutOutputSize,
						enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_SEQHEADER
					);
				}
				else
				{
					/* Use default data filling (= copy input to output) */

					gst_buffer_fill(
						output_buffer,
						output_buffer_offset,
						vpu_base_enc->output_phys_buffer->mapped_virt_addr,
						enc_enc_param.nOutOutputSize
					);
					output_buffer_offset += enc_enc_param.nOutOutputSize;
				}

				if (enc_enc_param.eOutRetCode & VPU_ENC_OUTPUT_DIS)
				{
					g_assert(output_buffer != NULL);

					/* Set the output buffer's size to the actual number of bytes
					 * filled by the derived class */
					gst_buffer_set_size(output_buffer, output_buffer_offset);

					/* Set the frame DTS */
					frame->dts = frame->pts;

					/* And finish the frame, handing the output data over to the base class */
					gst_video_encoder_finish_frame(encoder, frame);

					output_buffer = NULL;
					frame_finished = TRUE;

					if (!(enc_enc_param.eOutRetCode & VPU_ENC_INPUT_USED))
						GST_WARNING_OBJECT(vpu_base_enc, "frame finished, but VPU did not report the input as used");

					break;
				}
			}
		}
		while (!(enc_enc_param.eOutRetCode & VPU_ENC_INPUT_USED)); /* VPU_ENC_INPUT_NOT_USED has value 0x0 - cannot use it for flag checks */

		/* If output_buffer is NULL at this point, it means VPU_ENC_OUTPUT_DIS was never communicated
		 * by the VPU, and the buffer is unfinished. -> Drop it. */
		if (output_buffer != NULL)
		{
			GST_WARNING_OBJECT(vpu_base_enc, "frame unfinished ; dropping");
			gst_buffer_unref(output_buffer);
			frame->output_buffer = NULL; /* necessary to make finish_frame() drop the frame */
			gst_video_encoder_finish_frame(encoder, frame);
		}
	}

	return GST_FLOW_OK;
}
static GstFlowReturn
opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
{
  GstFlowReturn res = GST_FLOW_OK;
  gsize size;
  guint8 *data;
  GstBuffer *outbuf, *bufd;
  gint16 *out_data;
  int n, err;
  int samples;
  unsigned int packet_size;
  GstBuffer *buf;
  GstMapInfo map, omap;
  GstAudioClippingMeta *cmeta = NULL;

  if (dec->state == NULL) {
    /* If we did not get any headers, default to 2 channels */
    if (dec->n_channels == 0) {
      GST_INFO_OBJECT (dec, "No header, assuming single stream");
      dec->n_channels = 2;
      dec->sample_rate = 48000;
      /* default stereo mapping */
      dec->channel_mapping_family = 0;
      dec->channel_mapping[0] = 0;
      dec->channel_mapping[1] = 1;
      dec->n_streams = 1;
      dec->n_stereo_streams = 1;

      if (!gst_opus_dec_negotiate (dec, NULL))
        return GST_FLOW_NOT_NEGOTIATED;
    }

    if (dec->n_channels == 2 && dec->n_streams == 1
        && dec->n_stereo_streams == 0) {
      /* if we are automatically decoding 2 channels, but only have
         a single encoded one, direct both channels to it */
      dec->channel_mapping[1] = 0;
    }

    GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",
        dec->n_channels, dec->sample_rate);
#ifndef GST_DISABLE_GST_DEBUG
    gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,
        "Mapping table", dec->n_channels, dec->channel_mapping);
#endif

    GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,
        dec->n_stereo_streams);
    dec->state =
        opus_multistream_decoder_create (dec->sample_rate, dec->n_channels,
        dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err);
    if (!dec->state || err != OPUS_OK)
      goto creation_failed;
  }

  if (buffer) {
    GST_DEBUG_OBJECT (dec, "Received buffer of size %" G_GSIZE_FORMAT,
        gst_buffer_get_size (buffer));
  } else {
    GST_DEBUG_OBJECT (dec, "Received missing buffer");
  }

  /* if using in-band FEC, we introduce one extra frame of delay, as we may
     need to wait for the next buffer to decode a missing buffer */
  if (dec->use_inband_fec && !dec->primed) {
    GST_DEBUG_OBJECT (dec, "First buffer received in FEC mode, early out");
    gst_buffer_replace (&dec->last_buffer, buffer);
    dec->primed = TRUE;
    goto done;
  }

  /* That's the buffer we'll be sending to the opus decoder. */
  buf = (dec->use_inband_fec
      && gst_buffer_get_size (dec->last_buffer) >
      0) ? dec->last_buffer : buffer;

  /* That's the buffer we get duration from */
  bufd = dec->use_inband_fec ? dec->last_buffer : buffer;

  if (buf && gst_buffer_get_size (buf) > 0) {
    gst_buffer_map (buf, &map, GST_MAP_READ);
    data = map.data;
    size = map.size;
    GST_DEBUG_OBJECT (dec, "Using buffer of size %" G_GSIZE_FORMAT, size);
  } else {
    /* concealment data, pass NULL as the bits parameter */
    GST_DEBUG_OBJECT (dec, "Using NULL buffer");
    data = NULL;
    size = 0;
  }

  if (gst_buffer_get_size (bufd) == 0) {
    GstClockTime const opus_plc_alignment = 2500 * GST_USECOND;
    GstClockTime aligned_missing_duration;
    GstClockTime missing_duration = GST_BUFFER_DURATION (bufd);

    if (!GST_CLOCK_TIME_IS_VALID (missing_duration) || missing_duration == 0) {
      if (GST_CLOCK_TIME_IS_VALID (dec->last_known_buffer_duration)) {
        missing_duration = dec->last_known_buffer_duration;
        GST_WARNING_OBJECT (dec,
            "Missing duration, using last duration %" GST_TIME_FORMAT,
            GST_TIME_ARGS (missing_duration));
      } else {
        GST_WARNING_OBJECT (dec,
            "Missing buffer, but unknown duration, and no previously known duration, assuming 20 ms");
        missing_duration = 20 * GST_MSECOND;
      }
    }

    GST_DEBUG_OBJECT (dec,
        "missing buffer, doing PLC duration %" GST_TIME_FORMAT
        " plus leftover %" GST_TIME_FORMAT, GST_TIME_ARGS (missing_duration),
        GST_TIME_ARGS (dec->leftover_plc_duration));

    /* add the leftover PLC duration to that of the buffer */
    missing_duration += dec->leftover_plc_duration;

    /* align the combined buffer and leftover PLC duration to multiples
     * of 2.5ms, rounding to nearest, and store excess duration for later */
    aligned_missing_duration =
        ((missing_duration +
            opus_plc_alignment / 2) / opus_plc_alignment) * opus_plc_alignment;
    dec->leftover_plc_duration = missing_duration - aligned_missing_duration;

    /* Opus' PLC cannot operate with less than 2.5ms; skip PLC
     * and accumulate the missing duration in the leftover_plc_duration
     * for the next PLC attempt */
    if (aligned_missing_duration < opus_plc_alignment) {
      GST_DEBUG_OBJECT (dec,
          "current duration %" GST_TIME_FORMAT
          " of missing data not enough for PLC (minimum needed: %"
          GST_TIME_FORMAT ") - skipping", GST_TIME_ARGS (missing_duration),
          GST_TIME_ARGS (opus_plc_alignment));
      goto done;
    }

    /* convert the duration (in nanoseconds) to sample count */
    samples =
        gst_util_uint64_scale_int (aligned_missing_duration, dec->sample_rate,
        GST_SECOND);

    GST_DEBUG_OBJECT (dec,
        "calculated PLC frame length: %" GST_TIME_FORMAT
        " num frame samples: %d new leftover: %" GST_TIME_FORMAT,
        GST_TIME_ARGS (aligned_missing_duration), samples,
        GST_TIME_ARGS (dec->leftover_plc_duration));
  } else {
    /* use maximum size (120 ms) as the number of returned samples is
       not constant over the stream. */
    samples = 120 * dec->sample_rate / 1000;
  }
  packet_size = samples * dec->n_channels * 2;

  outbuf =
      gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (dec),
      packet_size);
  if (!outbuf) {
    goto buffer_failed;
  }

  if (size > 0)
    dec->last_known_buffer_duration = packet_duration_opus (data, size);

  gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);
  out_data = (gint16 *) omap.data;

  do {
    if (dec->use_inband_fec) {
      if (gst_buffer_get_size (dec->last_buffer) > 0) {
        /* normal delayed decode */
        GST_LOG_OBJECT (dec, "FEC enabled, decoding last delayed buffer");
        n = opus_multistream_decode (dec->state, data, size, out_data, samples,
            0);
      } else {
        /* FEC reconstruction decode */
        GST_LOG_OBJECT (dec, "FEC enabled, reconstructing last buffer");
        n = opus_multistream_decode (dec->state, data, size, out_data, samples,
            1);
      }
    } else {
      /* normal decode */
      GST_LOG_OBJECT (dec, "FEC disabled, decoding buffer");
      n = opus_multistream_decode (dec->state, data, size, out_data, samples,
          0);
    }
    if (n == OPUS_BUFFER_TOO_SMALL) {
      /* if too small, add 2.5 milliseconds and try again, up to the
       * Opus max size of 120 milliseconds */
      if (samples >= 120 * dec->sample_rate / 1000)
        break;
      samples += 25 * dec->sample_rate / 10000;
      packet_size = samples * dec->n_channels * 2;
      gst_buffer_unmap (outbuf, &omap);
      gst_buffer_unref (outbuf);
      outbuf =
          gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (dec),
          packet_size);
      if (!outbuf) {
        goto buffer_failed;
      }
      gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);
      out_data = (gint16 *) omap.data;
    }
  } while (n == OPUS_BUFFER_TOO_SMALL);
  gst_buffer_unmap (outbuf, &omap);
  if (data != NULL)
    gst_buffer_unmap (buf, &map);

  if (n < 0) {
    GstFlowReturn ret = GST_FLOW_ERROR;

    gst_buffer_unref (outbuf);
    GST_AUDIO_DECODER_ERROR (dec, 1, STREAM, DECODE, (NULL),
        ("Decoding error (%d): %s", n, opus_strerror (n)), ret);
    return ret;
  }
  GST_DEBUG_OBJECT (dec, "decoded %d samples", n);
  gst_buffer_set_size (outbuf, n * 2 * dec->n_channels);
  GST_BUFFER_DURATION (outbuf) = samples * GST_SECOND / dec->sample_rate;
  samples = n;

  cmeta = gst_buffer_get_audio_clipping_meta (buf);

  g_assert (!cmeta || cmeta->format == GST_FORMAT_DEFAULT);

  /* Skip any samples that need skipping */
  if (cmeta && cmeta->start) {
    guint pre_skip = cmeta->start;
    guint scaled_pre_skip = pre_skip * dec->sample_rate / 48000;
    guint skip = scaled_pre_skip > n ? n : scaled_pre_skip;
    guint scaled_skip = skip * 48000 / dec->sample_rate;

    gst_buffer_resize (outbuf, skip * 2 * dec->n_channels, -1);

    GST_INFO_OBJECT (dec,
        "Skipping %u samples at the beginning (%u at 48000 Hz)",
        skip, scaled_skip);
  }

  if (cmeta && cmeta->end) {
    guint post_skip = cmeta->end;
    guint scaled_post_skip = post_skip * dec->sample_rate / 48000;
    guint skip = scaled_post_skip > n ? n : scaled_post_skip;
    guint scaled_skip = skip * 48000 / dec->sample_rate;
    guint outsize = gst_buffer_get_size (outbuf);
    guint skip_bytes = skip * 2 * dec->n_channels;

    if (outsize > skip_bytes)
      outsize -= skip_bytes;
    else
      outsize = 0;

    gst_buffer_resize (outbuf, 0, outsize);

    GST_INFO_OBJECT (dec,
        "Skipping %u samples at the end (%u at 48000 Hz)", skip, scaled_skip);
  }

  if (gst_buffer_get_size (outbuf) == 0) {
    gst_buffer_unref (outbuf);
    outbuf = NULL;
  } else if (dec->opus_pos[0] != GST_AUDIO_CHANNEL_POSITION_INVALID) {
    gst_audio_buffer_reorder_channels (outbuf, GST_AUDIO_FORMAT_S16,
        dec->n_channels, dec->opus_pos, dec->info.position);
  }

  /* Apply gain */
  /* This would be better left to a volume element, as it is a naive
     conversion that does too many int/float conversions. However, we don't
     have control over the pipeline, so make it optional for applications
     that prefer to insert their own volume element, but apply it by default
     so the correct volume goes out by default. */
  if (dec->apply_gain && outbuf && dec->r128_gain) {
    gsize rsize;
    unsigned int i, nsamples;
    double volume = dec->r128_gain_volume;
    gint16 *samples;

    gst_buffer_map (outbuf, &omap, GST_MAP_READWRITE);
    samples = (gint16 *) omap.data;
    rsize = omap.size;
    GST_DEBUG_OBJECT (dec, "Applying gain: volume %f", volume);
    nsamples = rsize / 2;
    for (i = 0; i < nsamples; ++i) {
      int sample = (int) (samples[i] * volume + 0.5);
      samples[i] = sample < -32768 ? -32768 : sample > 32767 ? 32767 : sample;
    }
    gst_buffer_unmap (outbuf, &omap);
  }

  if (dec->use_inband_fec) {
    gst_buffer_replace (&dec->last_buffer, buffer);
  }

  res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1);

  if (res != GST_FLOW_OK)
    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));

done:
  return res;

creation_failed:
  GST_ELEMENT_ERROR (dec, LIBRARY, INIT, ("Failed to create Opus decoder"),
      ("Failed to create Opus decoder (%d): %s", err, opus_strerror (err)));
  return GST_FLOW_ERROR;

buffer_failed:
  GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL),
      ("Failed to create %u byte buffer", packet_size));
  return GST_FLOW_ERROR;
}
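
The PLC path above rounds the accumulated missing duration to the nearest multiple of 2.5 ms (the smallest Opus frame size) and carries the rounding difference over to the next concealment attempt. The sketch below isolates just that arithmetic, with durations as plain 64-bit nanosecond counts; the helper name is hypothetical, and a signed leftover is used here so the round-up case stays well defined (the element itself stores it in a GstClockTime).

#include <glib.h>

#define PLC_ALIGNMENT G_GUINT64_CONSTANT (2500000)      /* 2.5 ms in ns */

/* Hypothetical helper: round a missing duration (ns) to the nearest
 * multiple of 2.5 ms and report the difference so the caller can carry
 * it into the next PLC attempt. */
static guint64
align_plc_duration (guint64 missing_ns, gint64 * leftover_ns)
{
  guint64 aligned =
      ((missing_ns + PLC_ALIGNMENT / 2) / PLC_ALIGNMENT) * PLC_ALIGNMENT;

  *leftover_ns = (gint64) missing_ns - (gint64) aligned;
  return aligned;
}

int
main (void)
{
  gint64 leftover;
  /* 21.1 ms of accumulated missing data rounds down to 20 ms,
   * leaving 1.1 ms to carry over */
  guint64 aligned = align_plc_duration (21100000, &leftover);

  g_print ("aligned: %" G_GUINT64_FORMAT " ns, leftover: %" G_GINT64_FORMAT
      " ns\n", aligned, leftover);
  return 0;
}
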
static GstFlowReturn
gst_two_lame_handle_frame (GstAudioEncoder * enc, GstBuffer * buf)
{
  GstTwoLame *twolame;
  gint mp3_buffer_size, mp3_size;
  GstBuffer *mp3_buf;
  GstFlowReturn result;
  gint num_samples;
  GstMapInfo map, mp3_map;

  twolame = GST_TWO_LAME (enc);

  /* squeeze remaining and push */
  if (G_UNLIKELY (buf == NULL))
    return gst_two_lame_flush_full (twolame, TRUE);

  gst_buffer_map (buf, &map, GST_MAP_READ);

  if (twolame->float_input)
    num_samples = map.size / 4;
  else
    num_samples = map.size / 2;

  /* allocate space for output */
  mp3_buffer_size = 1.25 * num_samples + 16384;
  mp3_buf = gst_buffer_new_and_alloc (mp3_buffer_size);
  gst_buffer_map (mp3_buf, &mp3_map, GST_MAP_WRITE);

  if (twolame->num_channels == 1) {
    if (twolame->float_input)
      mp3_size = twolame_encode_buffer_float32 (twolame->glopts,
          (float *) map.data,
          (float *) map.data, num_samples, mp3_map.data, mp3_buffer_size);
    else
      mp3_size = twolame_encode_buffer (twolame->glopts,
          (short int *) map.data,
          (short int *) map.data, num_samples, mp3_map.data, mp3_buffer_size);
  } else {
    if (twolame->float_input)
      mp3_size = twolame_encode_buffer_float32_interleaved (twolame->glopts,
          (float *) map.data,
          num_samples / twolame->num_channels, mp3_map.data, mp3_buffer_size);
    else
      mp3_size = twolame_encode_buffer_interleaved (twolame->glopts,
          (short int *) map.data,
          num_samples / twolame->num_channels, mp3_map.data, mp3_buffer_size);
  }

  GST_LOG_OBJECT (twolame, "encoded %" G_GSIZE_FORMAT " bytes of audio "
      "to %d bytes of mp3", map.size, mp3_size);

  gst_buffer_unmap (buf, &map);
  gst_buffer_unmap (mp3_buf, &mp3_map);

  if (mp3_size > 0) {
    gst_buffer_set_size (mp3_buf, mp3_size);
    result = gst_audio_encoder_finish_frame (enc, mp3_buf, -1);
  } else {
    if (mp3_size < 0) {
      /* eat error ? */
      g_warning ("error %d", mp3_size);
    }
    gst_buffer_unref (mp3_buf);
    result = GST_FLOW_OK;
  }

  return result;
}
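
The handler above derives the sample count from the mapped input size (2 bytes per sample for S16 input, 4 for float) and sizes the MP2 output with a worst-case bound of 1.25 bytes per input sample plus 16384 bytes of slack. The helpers below restate that sizing in isolation; their names are hypothetical and the constants come from the allocation above, not from a twolame API.

#include <glib.h>

/* Hypothetical helper: samples across all channels from the mapped input
 * size, matching the S16/float distinction made by the handler above. */
static gsize
num_samples_from_map_size (gsize map_size, gboolean float_input)
{
  return map_size / (float_input ? 4 : 2);
}

/* Hypothetical helper: worst-case MP2 output size used by the handler
 * above (1.25 bytes per input sample plus 16384 bytes of slack), in
 * integer arithmetic. */
static gsize
worst_case_mp2_size (gsize num_samples)
{
  return (num_samples * 5) / 4 + 16384;
}
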
Example #25
0
static GstFlowReturn
gst_rtp_asf_pay_handle_packet (GstRtpAsfPay * rtpasfpay, GstBuffer * buffer)
{
  GstRTPBasePayload *rtppay;
  GstAsfPacketInfo *packetinfo;
  guint8 flags;
  guint8 *data;
  guint32 packet_util_size;
  guint32 packet_offset;
  guint32 size_left;
  GstFlowReturn ret = GST_FLOW_OK;

  rtppay = GST_RTP_BASE_PAYLOAD (rtpasfpay);
  packetinfo = &rtpasfpay->packetinfo;

  if (!gst_asf_parse_packet (buffer, packetinfo, TRUE,
          rtpasfpay->asfinfo.packet_size)) {
    GST_ERROR_OBJECT (rtpasfpay, "Error while parsing asf packet");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  if (packetinfo->packet_size == 0)
    packetinfo->packet_size = rtpasfpay->asfinfo.packet_size;

  GST_LOG_OBJECT (rtpasfpay, "Packet size: %" G_GUINT32_FORMAT
      ", padding: %" G_GUINT32_FORMAT, packetinfo->packet_size,
      packetinfo->padding);

  /* update padding field to 0 */
  if (packetinfo->padding > 0) {
    GstAsfPacketInfo info;
    /* find padding field offset */
    guint offset = packetinfo->err_cor_len + 2 +
        gst_asf_get_var_size_field_len (packetinfo->packet_field_type) +
        gst_asf_get_var_size_field_len (packetinfo->seq_field_type);
    buffer = gst_buffer_make_writable (buffer);
    switch (packetinfo->padd_field_type) {
      case ASF_FIELD_TYPE_DWORD:
        gst_buffer_memset (buffer, offset, 0, 4);
        break;
      case ASF_FIELD_TYPE_WORD:
        gst_buffer_memset (buffer, offset, 0, 2);
        break;
      case ASF_FIELD_TYPE_BYTE:
        gst_buffer_memset (buffer, offset, 0, 1);
        break;
      case ASF_FIELD_TYPE_NONE:
      default:
        break;
    }
    gst_asf_parse_packet (buffer, &info, FALSE, 0);
  }

  if (packetinfo->padding != 0)
    packet_util_size = rtpasfpay->asfinfo.packet_size - packetinfo->padding;
  else
    packet_util_size = packetinfo->packet_size;
  packet_offset = 0;
  while (packet_util_size > 0) {
    /* Even if we don't completely fill an output buffer, we push it when
     * we add a fragment, because it does not seem possible to determine
     * where an ASF packet fragment ends inside an RTP packet payload.
     * This flag tells us to push the packet.
     */
    gboolean force_push = FALSE;
    GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

    /* we have no output buffer pending, create one */
    if (rtpasfpay->current == NULL) {
      GST_LOG_OBJECT (rtpasfpay, "Creating new output buffer");
      rtpasfpay->current =
          gst_rtp_buffer_new_allocate_len (GST_RTP_BASE_PAYLOAD_MTU (rtpasfpay),
          0, 0);
      rtpasfpay->cur_off = 0;
      rtpasfpay->has_ts = FALSE;
      rtpasfpay->marker = FALSE;
    }
    gst_rtp_buffer_map (rtpasfpay->current, GST_MAP_READWRITE, &rtp);
    data = gst_rtp_buffer_get_payload (&rtp);
    data += rtpasfpay->cur_off;
    size_left = gst_rtp_buffer_get_payload_len (&rtp) - rtpasfpay->cur_off;

    GST_DEBUG_OBJECT (rtpasfpay, "Input buffer bytes consumed: %"
        G_GUINT32_FORMAT "/%" G_GSIZE_FORMAT, packet_offset,
        gst_buffer_get_size (buffer));

    GST_DEBUG_OBJECT (rtpasfpay, "Output rtpbuffer status");
    GST_DEBUG_OBJECT (rtpasfpay, "Current offset: %" G_GUINT32_FORMAT,
        rtpasfpay->cur_off);
    GST_DEBUG_OBJECT (rtpasfpay, "Size left: %" G_GUINT32_FORMAT, size_left);
    GST_DEBUG_OBJECT (rtpasfpay, "Has ts: %s",
        rtpasfpay->has_ts ? "yes" : "no");
    if (rtpasfpay->has_ts) {
      GST_DEBUG_OBJECT (rtpasfpay, "Ts: %" G_GUINT32_FORMAT, rtpasfpay->ts);
    }

    flags = 0;
    if (packetinfo->has_keyframe) {
      flags = flags | 0x80;
    }
    flags = flags | 0x20;       /* Relative timestamp is present */

    if (!rtpasfpay->has_ts) {
      /* this is the first ASF packet; its send time is the
       * RTP packet timestamp */
      rtpasfpay->has_ts = TRUE;
      rtpasfpay->ts = packetinfo->send_time;
    }

    if (size_left >= packet_util_size + 8) {
      /* enough space for the rest of the packet */
      if (packet_offset == 0) {
        flags = flags | 0x40;
        GST_WRITE_UINT24_BE (data + 1, packet_util_size);
      } else {
        GST_WRITE_UINT24_BE (data + 1, packet_offset);
        force_push = TRUE;
      }
      data[0] = flags;
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, packet_util_size);

      /* updating status variables */
      rtpasfpay->cur_off += 8 + packet_util_size;
      size_left -= packet_util_size + 8;
      packet_offset += packet_util_size;
      packet_util_size = 0;
      rtpasfpay->marker = TRUE;
    } else {
      /* fragment packet */
      data[0] = flags;
      GST_WRITE_UINT24_BE (data + 1, packet_offset);
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, size_left - 8);

      /* updating status variables */
      rtpasfpay->cur_off += size_left;
      packet_offset += size_left - 8;
      packet_util_size -= size_left - 8;
      size_left = 0;
      force_push = TRUE;
    }

    /* not enough room left in the payload for another fragment */
    if (force_push || size_left <= 8) {

      gst_rtp_buffer_set_ssrc (&rtp, rtppay->current_ssrc);
      gst_rtp_buffer_set_marker (&rtp, rtpasfpay->marker);
      gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (rtppay));
      gst_rtp_buffer_set_seq (&rtp, rtppay->seqnum + 1);
      gst_rtp_buffer_set_timestamp (&rtp, packetinfo->send_time);
      gst_rtp_buffer_unmap (&rtp);

      /* trim remaining bytes not used */
      if (size_left != 0) {
        gst_buffer_set_size (rtpasfpay->current,
            gst_buffer_get_size (rtpasfpay->current) - size_left);
      }

      GST_BUFFER_TIMESTAMP (rtpasfpay->current) = GST_BUFFER_TIMESTAMP (buffer);

      rtppay->seqnum++;
      rtppay->timestamp = packetinfo->send_time;

      GST_DEBUG_OBJECT (rtpasfpay, "Pushing rtp buffer");
      ret = gst_rtp_base_payload_push (rtppay, rtpasfpay->current);
      rtpasfpay->current = NULL;
      if (ret != GST_FLOW_OK) {
        gst_buffer_unref (buffer);
        return ret;
      }
    }
  }
  gst_buffer_unref (buffer);
  return ret;
}
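
Each payload unit the payloader writes starts with an 8-byte header: one flag byte (0x80 for a key frame, 0x20 because a relative timestamp is present, 0x40 when the unit carries a complete ASF packet), a 24-bit big-endian length or fragment offset, and a 32-bit big-endian send time relative to the first packet in the RTP buffer. Below is a standalone sketch of just that header write, using the same GST_WRITE_* macros; the helper is illustrative and not part of the payloader.

#include <gst/gst.h>

/* Illustrative helper: write the 8-byte ASF payload header described above.
 * 'complete' selects whether the 24-bit field carries the payload length
 * (complete packet, flag 0x40 set) or the byte offset of a fragment. */
static void
write_asf_payload_header (guint8 * payload, gboolean key_frame,
    gboolean complete, guint32 length_or_offset, gint32 relative_send_time)
{
  guint8 flags = 0x20;          /* relative timestamp present */

  if (key_frame)
    flags |= 0x80;
  if (complete)
    flags |= 0x40;

  payload[0] = flags;
  GST_WRITE_UINT24_BE (payload + 1, length_or_offset);
  GST_WRITE_UINT32_BE (payload + 4, (guint32) relative_send_time);
}
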
static gboolean
gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
                               GstClockTime duration)
{
    GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (src_object);
    GstBuffer *buf = NULL;
    GstMapInfo map;
    IPin *pPin = NULL;
    HRESULT hres = S_FALSE;
    AM_MEDIA_TYPE *pMediaType = NULL;

    if (!buffer || size == 0 || !src) {
        return FALSE;
    }

    /* create a new buffer and assign the clock time to it as the timestamp */
    buf = gst_buffer_new_and_alloc (size);

    gst_buffer_set_size(buf, size);

    GstClock *clock = gst_element_get_clock (GST_ELEMENT (src));
    GST_BUFFER_PTS (buf) =
        GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (src)), gst_clock_get_time (clock));
    //GST_BUFFER_DTS(buf) = GST_BUFFER_PTS (buf);
    GST_BUFFER_DTS(buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET(buf) = src->offset++;
    GST_BUFFER_OFFSET_END(buf) = src->offset;
    GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_LIVE);

    gst_object_unref (clock);

    GST_BUFFER_DURATION (buf) = duration;

    gst_buffer_map(buf, &map, GST_MAP_WRITE);

    if (src->is_rgb) {
        /* For RGB, the DirectShow decoder returns a bottom-up bitmap.
        * There is probably a way to get top-down video frames from
        * the decoder...
        */
        gint line = 0;
        gint stride = size / src->height;

        for (; line < src->height; line++) {
            memcpy (map.data + (line * stride),
                    buffer + (size - ((line + 1) * (stride))), stride);
        }
    } else {
        memcpy (map.data, buffer, size);
    }

    gst_buffer_unmap(buf, &map);

    src->time += duration;
    gst_object_sync_values (GST_OBJECT (src), src->time);

    GST_DEBUG ("push_buffer => pts %" GST_TIME_FORMAT "duration %"
               GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
               GST_TIME_ARGS (duration));

    g_mutex_lock (&src->buffer_mutex);
    if (src->buffer != NULL)
        gst_buffer_unref (src->buffer);
    src->buffer = buf;
    g_cond_signal (&src->buffer_cond);
    g_mutex_unlock (&src->buffer_mutex);

    return TRUE;
}
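
Since the DirectShow RGB path delivers bottom-up bitmaps, the capture callback above copies the frame into the GstBuffer line by line in reverse order. The same flip copy in isolation (the helper name is hypothetical; like the callback, it assumes a tightly packed frame, i.e. stride = size / height):

#include <glib.h>
#include <string.h>

/* Hypothetical helper: copy a bottom-up RGB frame into a top-down
 * destination one line at a time, as the capture callback above does. */
static void
flip_copy_bottom_up (guint8 * dst, const guint8 * src, gsize size, gint height)
{
  gsize stride = size / height;
  gint line;

  for (line = 0; line < height; line++)
    memcpy (dst + line * stride, src + size - (line + 1) * stride, stride);
}
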
Example #27
0
static GstFlowReturn
opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
{
  GstFlowReturn res = GST_FLOW_OK;
  gsize size;
  guint8 *data;
  GstBuffer *outbuf;
  gint16 *out_data;
  int n, err;
  int samples;
  unsigned int packet_size;
  GstBuffer *buf;
  GstMapInfo map, omap;

  if (dec->state == NULL) {
    /* If we did not get any headers, default to 2 channels */
    if (dec->n_channels == 0) {
      GST_INFO_OBJECT (dec, "No header, assuming single stream");
      dec->n_channels = 2;
      dec->sample_rate = 48000;
      /* default stereo mapping */
      dec->channel_mapping_family = 0;
      dec->channel_mapping[0] = 0;
      dec->channel_mapping[1] = 1;
      dec->n_streams = 1;
      dec->n_stereo_streams = 1;

      gst_opus_dec_negotiate (dec, NULL);
    }

    GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",
        dec->n_channels, dec->sample_rate);
#ifndef GST_DISABLE_GST_DEBUG
    gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,
        "Mapping table", dec->n_channels, dec->channel_mapping);
#endif

    GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,
        dec->n_stereo_streams);
    dec->state =
        opus_multistream_decoder_create (dec->sample_rate, dec->n_channels,
        dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err);
    if (!dec->state || err != OPUS_OK)
      goto creation_failed;
  }

  if (buffer) {
    GST_DEBUG_OBJECT (dec, "Received buffer of size %" G_GSIZE_FORMAT,
        gst_buffer_get_size (buffer));
  } else {
    GST_DEBUG_OBJECT (dec, "Received missing buffer");
  }

  /* if using in-band FEC, we introduce one extra frame of delay, as we may
     need to wait for the next buffer to decode a missing buffer */
  if (dec->use_inband_fec && !dec->primed) {
    GST_DEBUG_OBJECT (dec, "First buffer received in FEC mode, early out");
    gst_buffer_replace (&dec->last_buffer, buffer);
    dec->primed = TRUE;
    goto done;
  }

  /* That's the buffer we'll be sending to the opus decoder. */
  buf = (dec->use_inband_fec
      && gst_buffer_get_size (dec->last_buffer) >
      0) ? dec->last_buffer : buffer;

  if (buf && gst_buffer_get_size (buf) > 0) {
    gst_buffer_map (buf, &map, GST_MAP_READ);
    data = map.data;
    size = map.size;
    GST_DEBUG_OBJECT (dec, "Using buffer of size %" G_GSIZE_FORMAT, size);
  } else {
    /* concealment data, pass NULL as the bits parameter */
    GST_DEBUG_OBJECT (dec, "Using NULL buffer");
    data = NULL;
    size = 0;
  }

  /* use maximum size (120 ms) as the number of returned samples is
     not constant over the stream. */
  samples = 120 * dec->sample_rate / 1000;
  packet_size = samples * dec->n_channels * 2;

  outbuf =
      gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (dec),
      packet_size);
  if (!outbuf) {
    goto buffer_failed;
  }

  gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);
  out_data = (gint16 *) omap.data;

  if (dec->use_inband_fec) {
    if (dec->last_buffer) {
      /* normal delayed decode */
      GST_LOG_OBJECT (dec, "FEC enabled, decoding last delayed buffer");
      n = opus_multistream_decode (dec->state, data, size, out_data, samples,
          0);
    } else {
      /* FEC reconstruction decode */
      GST_LOG_OBJECT (dec, "FEC enabled, reconstructing last buffer");
      n = opus_multistream_decode (dec->state, data, size, out_data, samples,
          1);
    }
  } else {
    /* normal decode */
    GST_LOG_OBJECT (dec, "FEC disabled, decoding buffer");
    n = opus_multistream_decode (dec->state, data, size, out_data, samples, 0);
  }
  gst_buffer_unmap (outbuf, &omap);
  if (data != NULL)
    gst_buffer_unmap (buf, &map);

  if (n < 0) {
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, ("Decoding error: %d", n), (NULL));
    gst_buffer_unref (outbuf);
    return GST_FLOW_ERROR;
  }
  GST_DEBUG_OBJECT (dec, "decoded %d samples", n);
  gst_buffer_set_size (outbuf, n * 2 * dec->n_channels);

  /* Skip any samples that need skipping */
  if (dec->pre_skip > 0) {
    guint scaled_pre_skip = dec->pre_skip * dec->sample_rate / 48000;
    guint skip = scaled_pre_skip > n ? n : scaled_pre_skip;
    guint scaled_skip = skip * 48000 / dec->sample_rate;

    gst_buffer_resize (outbuf, skip * 2 * dec->n_channels, -1);
    dec->pre_skip -= scaled_skip;
    GST_INFO_OBJECT (dec,
        "Skipping %u samples (%u at 48000 Hz, %u left to skip)", skip,
        scaled_skip, dec->pre_skip);
  }

  if (gst_buffer_get_size (outbuf) == 0) {
    gst_buffer_unref (outbuf);
    outbuf = NULL;
  } else if (dec->opus_pos[0] != GST_AUDIO_CHANNEL_POSITION_INVALID) {
    gst_audio_buffer_reorder_channels (outbuf, GST_AUDIO_FORMAT_S16,
        dec->n_channels, dec->opus_pos, dec->info.position);
  }

  /* Apply gain */
  /* This would be better left to a volume element, as it is a naive
     conversion that does too many int/float conversions. However, we don't
     have control over the pipeline, so make it optional for applications
     that prefer to insert their own volume element, but apply it by default
     so the correct volume goes out by default. */
  if (dec->apply_gain && outbuf && dec->r128_gain) {
    gsize rsize;
    unsigned int i, nsamples;
    double volume = dec->r128_gain_volume;
    gint16 *samples;

    gst_buffer_map (outbuf, &omap, GST_MAP_READWRITE);
    samples = (gint16 *) omap.data;
    rsize = omap.size;
    GST_DEBUG_OBJECT (dec, "Applying gain: volume %f", volume);
    nsamples = rsize / 2;
    for (i = 0; i < nsamples; ++i) {
      int sample = (int) (samples[i] * volume + 0.5);
      samples[i] = sample < -32768 ? -32768 : sample > 32767 ? 32767 : sample;
    }
    gst_buffer_unmap (outbuf, &omap);
  }

  if (dec->use_inband_fec) {
    gst_buffer_replace (&dec->last_buffer, buffer);
  }

  res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1);

  if (res != GST_FLOW_OK)
    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));

done:
  return res;

creation_failed:
  GST_ERROR_OBJECT (dec, "Failed to create Opus decoder: %d", err);
  return GST_FLOW_ERROR;

buffer_failed:
  GST_ERROR_OBJECT (dec, "Failed to create %u byte buffer", packet_size);
  return GST_FLOW_ERROR;
}
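
The pre-skip handling in this variant keeps the remaining skip in 48 kHz units (as signalled in the OpusHead), scales it to the decoder's output rate, clips it to the samples actually decoded, and converts the consumed amount back to 48 kHz units before decrementing the counter. That conversion in isolation (the helper name is hypothetical):

#include <glib.h>

/* Hypothetical helper: given a remaining pre-skip in 48 kHz samples and
 * 'decoded' samples available at 'sample_rate', return how many output
 * samples to drop and decrement the 48 kHz counter by the amount consumed. */
static guint
consume_pre_skip (guint * pre_skip_48k, guint decoded, guint sample_rate)
{
  guint scaled_pre_skip = *pre_skip_48k * sample_rate / 48000;
  guint skip = scaled_pre_skip > decoded ? decoded : scaled_pre_skip;
  guint scaled_skip = skip * 48000 / sample_rate;

  *pre_skip_48k -= scaled_skip;
  return skip;
}
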
Example #28
0
static GstFlowReturn
gst_srtp_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf,
    gboolean is_rtcp)
{
  GstSrtpDec *filter = GST_SRTP_DEC (parent);
  GstPad *otherpad;
  err_status_t err = err_status_ok;
  GstSrtpDecSsrcStream *stream = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  gint size;
  guint32 ssrc = 0;
  GstMapInfo map;

  GST_OBJECT_LOCK (filter);

  /* Check if this stream exists, if not create a new stream */

  if (!(stream = validate_buffer (filter, buf, &ssrc, &is_rtcp))) {
    GST_OBJECT_UNLOCK (filter);
    GST_WARNING_OBJECT (filter, "Invalid buffer, dropping");
    goto drop_buffer;
  }

  if (!STREAM_HAS_CRYPTO (stream)) {
    GST_OBJECT_UNLOCK (filter);
    goto push_out;
  }

  GST_LOG_OBJECT (pad, "Received %s buffer of size %" G_GSIZE_FORMAT
      " with SSRC = %u", is_rtcp ? "RTCP" : "RTP", gst_buffer_get_size (buf),
      ssrc);

  /* Change buffer to remove protection */
  buf = gst_buffer_make_writable (buf);

unprotect:

  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
  size = map.size;

  gst_srtp_init_event_reporter ();

  if (is_rtcp)
    err = srtp_unprotect_rtcp (filter->session, map.data, &size);
  else {
    /* If ROC has changed, we know we need to set the initial RTP
     * sequence number too. */
    if (filter->roc_changed) {
      srtp_stream_t stream;

      stream = srtp_get_stream (filter->session, htonl (ssrc));

      if (stream) {
        guint16 seqnum = 0;
        GstRTPBuffer rtpbuf = GST_RTP_BUFFER_INIT;

        gst_rtp_buffer_map (buf, GST_MAP_READ, &rtpbuf);
        seqnum = gst_rtp_buffer_get_seq (&rtpbuf);
        gst_rtp_buffer_unmap (&rtpbuf);

        /* We finally add the RTP sequence number to the current
         * rollover counter. */
        stream->rtp_rdbx.index &= ~0xFFFF;
        stream->rtp_rdbx.index |= seqnum;
      }

      filter->roc_changed = FALSE;
    }
    err = srtp_unprotect (filter->session, map.data, &size);
  }

  gst_buffer_unmap (buf, &map);

  GST_OBJECT_UNLOCK (filter);

  if (err != err_status_ok) {
    GST_WARNING_OBJECT (pad,
        "Unable to unprotect buffer (unprotect failed code %d)", err);

    /* Signal user depending on type of error */
    switch (err) {
      case err_status_key_expired:
        GST_OBJECT_LOCK (filter);

        /* Update stream */
        if (find_stream_by_ssrc (filter, ssrc)) {
          GST_OBJECT_UNLOCK (filter);
          if (request_key_with_signal (filter, ssrc, SIGNAL_HARD_LIMIT)) {
            GST_OBJECT_LOCK (filter);
            goto unprotect;
          } else {
            GST_WARNING_OBJECT (filter, "Hard limit reached, no new key, "
                "dropping");
          }
        } else {
          GST_WARNING_OBJECT (filter, "Could not find matching stream, "
              "dropping");
        }
        break;
      case err_status_auth_fail:
        GST_WARNING_OBJECT (filter, "Error authentication packet, dropping");
        break;
      case err_status_cipher_fail:
        GST_WARNING_OBJECT (filter, "Error while decrypting packet, dropping");
        break;
      default:
        GST_WARNING_OBJECT (filter, "Other error, dropping");
        break;
    }

    goto drop_buffer;
  }

  gst_buffer_set_size (buf, size);

  /* If all is well, we may have reached soft limit */
  if (gst_srtp_get_soft_limit_reached ())
    request_key_with_signal (filter, ssrc, SIGNAL_SOFT_LIMIT);

push_out:
  /* Push buffer to source pad */
  if (is_rtcp) {
    otherpad = filter->rtcp_srcpad;
    if (!filter->rtcp_has_segment)
      gst_srtp_dec_push_early_events (filter, filter->rtcp_srcpad,
          filter->rtp_srcpad, TRUE);
  } else {
    otherpad = filter->rtp_srcpad;
    if (!filter->rtp_has_segment)
      gst_srtp_dec_push_early_events (filter, filter->rtp_srcpad,
          filter->rtcp_srcpad, FALSE);
  }
  ret = gst_pad_push (otherpad, buf);

  return ret;

drop_buffer:
  /* Drop the buffer; ret is still GST_FLOW_OK here, since gst_pad_push was never reached */

  gst_buffer_unref (buf);

  return ret;
}
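
srtp_unprotect() and srtp_unprotect_rtcp() work in place: they are handed the mapped buffer data together with an in/out length, and on success the element trims the GstBuffer to the unprotected size with gst_buffer_set_size(). The same map / transform-in-place / trim pattern in isolation, with the SRTP call abstracted into a caller-supplied function (the callback type and helper are illustrative, not part of the element or libsrtp); the buffer is assumed to already be writable, e.g. via gst_buffer_make_writable() as above.

#include <gst/gst.h>

/* Caller-supplied in-place transform: returns 0 on success and may reduce
 * *size. In the element above, srtp_unprotect() plays this role. */
typedef int (*InPlaceTransform) (gpointer user_data, guint8 * data,
    gint * size);

/* Illustrative helper: map a writable buffer, run an in-place transform
 * that may shrink the payload, and trim the buffer to the new size. */
static gboolean
transform_buffer_in_place (GstBuffer * buf, InPlaceTransform func,
    gpointer user_data)
{
  GstMapInfo map;
  gint size;

  if (!gst_buffer_map (buf, &map, GST_MAP_READWRITE))
    return FALSE;

  size = map.size;
  if (func (user_data, map.data, &size) != 0) {
    gst_buffer_unmap (buf, &map);
    return FALSE;
  }

  gst_buffer_unmap (buf, &map);
  gst_buffer_set_size (buf, size);
  return TRUE;
}
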