Example #1
0
static GstFlowReturn
gst_smfdec_chain (GstPad * pad, GstBuffer * data)
{
	GstSmfdec *dec = GST_SMFDEC (gst_pad_get_parent (pad));
	gint i, ticks;
	guint len, midi_len;

	/* we must be negotiated */
	g_assert (dec != NULL);
	g_assert (dec->buf_denom > 0);

	gst_adapter_push (dec->adapter, GST_BUFFER (data));
	if (dec->chunk.fourcc) {
		chunk_ensure (dec->adapter, &dec->chunk, 0);
	}

	while (dec->chunk.fourcc || get_next_chunk (dec->adapter, &dec->chunk)) {
		switch (dec->chunk.fourcc) {
			case GST_MAKE_FOURCC ('M', 'T', 'h', 'd'):
				if (dec->format) {
					GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), 
							("got a header chunk while already initialized"));
					goto error;
				}
				if (dec->chunk.length != 6) {
					GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), 
							("header chunk %d bytes long, not 6", (int) dec->chunk.length));
					goto error;
				}
				if (!chunk_ensure (dec->adapter, &dec->chunk, 6))
					goto out;
				/* we add one, so we can use 0 for uninitialized */
				dec->format = GST_READ_UINT16_BE (dec->chunk.data) + 1;
				if (dec->format > 2)
					g_warning ("I have no clue if midi format %u works", dec->format - 1);
				dec->tracks_missing = GST_READ_UINT16_BE (dec->chunk.data + 2);
				//g_assert (dec->tracks_missing == 1);
				/* ignore the number of track atoms */
				i = (gint16) GST_READ_UINT16_BE (dec->chunk.data + 4);
				if (i < 0) {
					GST_ELEMENT_ERROR (dec, STREAM, NOT_IMPLEMENTED, (NULL), 
							("can't handle smpte timing"));
					goto error;
				} else {
					dec->start = 0;
					dec->division = i;
				}
				chunk_flush (dec->adapter, &dec->chunk);
				break;
			case GST_MAKE_FOURCC ('M', 'T', 'r', 'k'):
				if (dec->format == 0) {
					GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), 
							("got a track chunk while not yet initialized"));
					goto error;
				}
				/* figure out if we have enough data */
				ticks = gst_midi_data_parse_varlen (dec->chunk.data, dec->chunk.available, &len);
				if (ticks == -1) goto out;
				if (ticks == -2) {
					GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), 
							("invalid delta time value"));
					goto error;
				}
				midi_len = gst_midi_data_get_length (dec->chunk.data + len, 
						dec->chunk.available - len, dec->status);
				if (midi_len == 0) goto out;
				/* we have enough data, process */
				dec->ticks += ticks;
				//g_print ("got %u ticks, now %u\n", ticks, dec->ticks);
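				/* MIDI running status: a byte with the high bit set starts a new
				 * status; otherwise the event re-uses the previous status byte */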
				if (dec->chunk.data[len] & 0x80) {
					dec->status = dec->chunk.data[len];
					len++;
					midi_len--;
				}
				if (dec->status == 0xFF) {
					if (!gst_smfdec_meta_event (dec, dec->chunk.data + len, midi_len))
						goto error;
				} else {
					gst_smfdec_buffer_append (dec, dec->status, dec->chunk.data + len, midi_len);
				}
				chunk_skip (dec->adapter, &dec->chunk, len + midi_len);
				if (dec->chunk.length == 0)
					chunk_clear (&dec->chunk);
				break;
			default:
				goto out;
		}
	}
out:
	gst_object_unref (dec);
	return GST_FLOW_OK;

error:
	gst_smfdec_reset (dec);
	return GST_FLOW_ERROR;
}
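The delta times consumed above use the standard MIDI variable-length quantity encoding: seven data bits per byte, high bit set on every byte except the last, at most four bytes. A minimal sketch of such a decoder, using the same -1 (need more data) / -2 (invalid) convention the caller checks for; the name and signature are hypothetical, not the actual gst_midi_data_parse_varlen:

static gint
smf_parse_varlen_sketch (const guint8 * data, guint available, guint * consumed)
{
  guint32 value = 0;
  guint i;

  for (i = 0; i < available && i < 4; i++) {
    value = (value << 7) | (data[i] & 0x7F);
    if ((data[i] & 0x80) == 0) {
      *consumed = i + 1;
      return value;             /* at most 0x0FFFFFFF, fits in gint */
    }
  }
  /* four continuation bytes is invalid, otherwise we just need more data */
  return (i == 4) ? -2 : -1;
}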
Example #2
0
static void
gst_midi_parse_loop (GstPad * sinkpad)
{
  GstMidiParse *midiparse = GST_MIDI_PARSE (GST_PAD_PARENT (sinkpad));
  GstFlowReturn ret;

  switch (midiparse->state) {
    case GST_MIDI_PARSE_STATE_LOAD:
    {
      GstBuffer *buffer = NULL;

      GST_DEBUG_OBJECT (midiparse, "loading song");

      ret =
          gst_pad_pull_range (midiparse->sinkpad, midiparse->offset, -1,
          &buffer);

      if (ret == GST_FLOW_EOS) {
        GST_DEBUG_OBJECT (midiparse, "Song loaded");
        midiparse->state = GST_MIDI_PARSE_STATE_PARSE;
      } else if (ret != GST_FLOW_OK) {
        GST_ELEMENT_ERROR (midiparse, STREAM, DECODE, (NULL),
            ("Unable to read song"));
        goto pause;
      } else {
        GST_DEBUG_OBJECT (midiparse, "pushing buffer");
        gst_adapter_push (midiparse->adapter, buffer);
        midiparse->offset += gst_buffer_get_size (buffer);
      }
      break;
    }
    case GST_MIDI_PARSE_STATE_PARSE:
      ret = gst_midi_parse_parse_song (midiparse);
      if (ret != GST_FLOW_OK)
        goto pause;
      midiparse->state = GST_MIDI_PARSE_STATE_PLAY;
      break;
    case GST_MIDI_PARSE_STATE_PLAY:
      ret = gst_midi_parse_do_play (midiparse);
      if (ret != GST_FLOW_OK)
        goto pause;
      break;
    default:
      break;
  }
  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (ret);
    GstEvent *event;

    GST_DEBUG_OBJECT (midiparse, "pausing task, reason %s", reason);
    gst_pad_pause_task (sinkpad);
    if (ret == GST_FLOW_EOS) {
      /* perform EOS logic */
      event = gst_event_new_eos ();
      gst_pad_push_event (midiparse->srcpad, event);
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      event = gst_event_new_eos ();
      /* for fatal errors we post an error message, post the error
       * first so the app knows about the error first. */
      GST_ELEMENT_ERROR (midiparse, STREAM, FAILED,
          ("Internal data flow error."),
          ("streaming task paused, reason %s (%d)", reason, ret));
      gst_pad_push_event (midiparse->srcpad, event);
    }
  }
}
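A loop like this is normally driven by a sinkpad streaming task started when the pad activates in pull mode. A minimal sketch of that wiring, assuming the usual activate-mode vfunc (this is not the element's actual code):

static gboolean
gst_midi_parse_sink_activate_mode_sketch (GstPad * sinkpad, GstObject * parent,
    GstPadMode mode, gboolean active)
{
  if (mode != GST_PAD_MODE_PULL)
    return FALSE;

  if (active)
    return gst_pad_start_task (sinkpad,
        (GstTaskFunction) gst_midi_parse_loop, sinkpad, NULL);

  return gst_pad_stop_task (sinkpad);
}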
Example #3
0
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *klass;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (!base_video_decoder->have_segment) {
    GstEvent *event;
    gboolean ret;

    GST_WARNING
        ("Received buffer without a new-segment. Assuming timestamps start from 0.");

    gst_segment_set_newsegment_full (&base_video_decoder->segment,
        FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
    base_video_decoder->have_segment = TRUE;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);

    ret =
        gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
        event);
    if (!ret) {
      GST_ERROR ("new segment event ret=%d", ret);
      return GST_FLOW_ERROR;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    gst_base_video_decoder_reset (base_video_decoder);
  }

  if (!base_video_decoder->started) {
    klass->start (base_video_decoder);
    base_video_decoder->started = TRUE;
  }

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
  }
  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);

#if 0
  if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("got new offset %" GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
    base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
  }
#endif

  if (base_video_decoder->packetized) {
    base_video_decoder->current_frame->sink_buffer = buf;

    ret = gst_base_video_decoder_have_frame_2 (base_video_decoder);
  } else {

    gst_adapter_push (base_video_decoder->input_adapter, buf);

    if (!base_video_decoder->have_sync) {
      int n, m;

      GST_DEBUG ("no sync, scanning");

      n = gst_adapter_available (base_video_decoder->input_adapter);
      m = klass->scan_for_sync (base_video_decoder, FALSE, 0, n);
      if (m == -1) {
        gst_object_unref (base_video_decoder);
        return GST_FLOW_OK;
      }

      if (m < 0) {
        g_warning ("subclass returned negative scan %d", m);
        gst_object_unref (base_video_decoder);
        return GST_FLOW_ERROR;
      }

      if (m >= n) {
        GST_ERROR ("subclass scanned past end %d >= %d", m, n);
        /* clamp so we never flush more than is available */
        m = n;
      }

      gst_adapter_flush (base_video_decoder->input_adapter, m);

      if (m < n) {
        GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

        /* this is only "maybe" sync */
        base_video_decoder->have_sync = TRUE;
      }

      if (!base_video_decoder->have_sync) {
        gst_object_unref (base_video_decoder);
        return GST_FLOW_OK;
      }
    }

    do {
      ret = klass->parse_data (base_video_decoder, FALSE);
    } while (ret == GST_FLOW_OK);

    if (ret == GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA) {
      gst_object_unref (base_video_decoder);
      return GST_FLOW_OK;
    }
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
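GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA (like GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA in a later example) is an element-private success code: the subclass signals "stop looping, wait for more input" without raising an error. A plausible definition, assuming the usual custom-flow-return convention:

/* assumption: element-private success codes start at GST_FLOW_CUSTOM_SUCCESS */
#define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS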
Example #4
0
static void
gst_droidadec_data_available (void *data, DroidMediaCodecData * encoded)
{
  GstFlowReturn flow_ret;
  GstDroidADec *dec = (GstDroidADec *) data;
  GstAudioDecoder *decoder = GST_AUDIO_DECODER (dec);
  GstBuffer *out;
  GstMapInfo info;

  GST_DEBUG_OBJECT (dec, "data available of size %d", encoded->data.size);

  GST_AUDIO_DECODER_STREAM_LOCK (decoder);

  if (G_UNLIKELY (dec->downstream_flow_ret != GST_FLOW_OK)) {
    GST_DEBUG_OBJECT (dec, "not handling data in error state: %s",
        gst_flow_get_name (dec->downstream_flow_ret));
    flow_ret = dec->downstream_flow_ret;
    gst_audio_decoder_finish_frame (decoder, NULL, 1);
    goto out;
  }

  if (G_UNLIKELY (gst_audio_decoder_get_audio_info (GST_AUDIO_DECODER
              (dec))->finfo->format == GST_AUDIO_FORMAT_UNKNOWN)) {
    DroidMediaCodecMetaData md;
    DroidMediaRect crop;        /* TODO: get rid of that */
    GstAudioInfo audio_info;

    memset (&md, 0x0, sizeof (md));
    droid_media_codec_get_output_info (dec->codec, &md, &crop);
    GST_INFO_OBJECT (dec, "output rate=%d, output channels=%d", md.sample_rate,
        md.channels);

    gst_audio_info_init (&audio_info);
    gst_audio_info_set_format (&audio_info, GST_AUDIO_FORMAT_S16, md.sample_rate,
        md.channels, NULL);

    if (!gst_audio_decoder_set_output_format (decoder, &audio_info)) {
      flow_ret = GST_FLOW_ERROR;
      goto out;
    }

    dec->info = gst_audio_decoder_get_audio_info (GST_AUDIO_DECODER (dec));
  }

  out = gst_audio_decoder_allocate_output_buffer (decoder, encoded->data.size);

  gst_buffer_map (out, &info, GST_MAP_READWRITE);
  orc_memcpy (info.data, encoded->data.data, encoded->data.size);
  gst_buffer_unmap (out, &info);

  //  GST_WARNING_OBJECT (dec, "bpf %d, bps %d", dec->info->bpf, GST_AUDIO_INFO_BPS(dec->info));
  if (dec->spf == -1 || (encoded->data.size == dec->spf * dec->info->bpf
          && gst_adapter_available (dec->adapter) == 0)) {
    /* fast path. no need for anything */
    goto push;
  }

  gst_adapter_push (dec->adapter, out);

  if (gst_adapter_available (dec->adapter) >= dec->spf * dec->info->bpf) {
    out = gst_adapter_take_buffer (dec->adapter, dec->spf * dec->info->bpf);
  } else {
    flow_ret = GST_FLOW_OK;
    goto out;
  }

push:
  GST_DEBUG_OBJECT (dec, "pushing %d bytes out", gst_buffer_get_size (out));

  flow_ret = gst_audio_decoder_finish_frame (decoder, out, 1);

  if (flow_ret == GST_FLOW_OK || flow_ret == GST_FLOW_FLUSHING) {
    goto out;
  } else if (flow_ret == GST_FLOW_EOS) {
    GST_INFO_OBJECT (dec, "eos");
  } else if (flow_ret < GST_FLOW_OK) {
    GST_ELEMENT_ERROR (dec, STREAM, FAILED,
        ("Internal data stream error."), ("stream stopped, reason %s",
            gst_flow_get_name (flow_ret)));
  }

out:
  dec->downstream_flow_ret = flow_ret;
  GST_AUDIO_DECODER_STREAM_UNLOCK (decoder);
}
Example #5
0
static GstFlowReturn
gst_rtp_g723_pay_handle_buffer (GstBaseRTPPayload * payload, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  guint8 *data;
  guint size;
  guint8 HDR;
  GstRTPG723Pay *pay;
  GstClockTime packet_dur, timestamp;
  guint payload_len, packet_len;

  pay = GST_RTP_G723_PAY (payload);

  size = GST_BUFFER_SIZE (buf);
  data = GST_BUFFER_DATA (buf);
  timestamp = GST_BUFFER_TIMESTAMP (buf);

  if (GST_BUFFER_IS_DISCONT (buf)) {
    /* flush everything on discont */
    gst_adapter_clear (pay->adapter);
    pay->timestamp = GST_CLOCK_TIME_NONE;
    pay->duration = 0;
    pay->discont = TRUE;
  }

  /* should be one of these sizes */
  if (size != 4 && size != 20 && size != 24)
    goto invalid_size;

  /* check size by looking at the header bits */
  HDR = data[0] & 0x3;
  if (size_tab[HDR] != size)
    goto wrong_size;

  /* calculate packet size and duration */
  payload_len = gst_adapter_available (pay->adapter) + size;
  packet_dur = pay->duration + G723_FRAME_DURATION;
  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);

  if (gst_basertppayload_is_filled (payload, packet_len, packet_dur)) {
    /* size or duration would overflow the packet, flush the queued data */
    ret = gst_rtp_g723_pay_flush (pay);
  }

  /* update timestamp, we keep the timestamp for the first packet in the adapter
   * but are able to calculate it from next packets. */
  if (timestamp != GST_CLOCK_TIME_NONE && pay->timestamp == GST_CLOCK_TIME_NONE) {
    if (timestamp > pay->duration)
      pay->timestamp = timestamp - pay->duration;
    else
      pay->timestamp = 0;
  }

  /* add packet to the queue */
  gst_adapter_push (pay->adapter, buf);
  pay->duration = packet_dur;

  /* check if we can flush now */
  if (pay->duration >= payload->min_ptime) {
    ret = gst_rtp_g723_pay_flush (pay);
  }

  return ret;

  /* WARNINGS */
invalid_size:
  {
    GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE,
        ("Invalid input buffer size"),
        ("Input size should be 4, 20 or 24, got %u", size));
    gst_buffer_unref (buf);
    return GST_FLOW_OK;
  }
wrong_size:
  {
    GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE,
        ("Wrong input buffer size"),
        ("Expected input buffer size %u but got %u", size_tab[HDR], size));
    gst_buffer_unref (buf);
    return GST_FLOW_OK;
  }
}
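size_tab is not shown above. Per RFC 3551, the two low bits of the first octet of a G.723.1 frame select the frame type: 0 is the 6.3 kbit/s frame (24 bytes), 1 the 5.3 kbit/s frame (20 bytes), 2 a SID frame (4 bytes), and 3 is reserved. A definition consistent with the size checks above would be:

/* assumed table: G.723.1 frame sizes indexed by the two low header bits */
static const guint size_tab[4] = { 24, 20, 4, 0 };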
Example #6
0
static GstFlowReturn
gst_base_video_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoParse *base_video_parse;
  GstBaseVideoParseClass *klass;
  GstBuffer *buffer;
  GstFlowReturn ret;

  GST_DEBUG ("chain with %d bytes", GST_BUFFER_SIZE (buf));

  base_video_parse = GST_BASE_VIDEO_PARSE (GST_PAD_PARENT (pad));
  klass = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);

  if (!base_video_parse->started) {
    klass->start (base_video_parse);
    base_video_parse->started = TRUE;
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_parse, "received DISCONT buffer");
    gst_base_video_parse_reset (base_video_parse);
    base_video_parse->discont = TRUE;
    base_video_parse->have_sync = FALSE;
  }

  if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    base_video_parse->last_timestamp = GST_BUFFER_TIMESTAMP (buf);
  }
  gst_adapter_push (base_video_parse->input_adapter, buf);

  if (!base_video_parse->have_sync) {
    int n, m;

    GST_DEBUG ("no sync, scanning");

    n = gst_adapter_available (base_video_parse->input_adapter);
    m = klass->scan_for_sync (base_video_parse->input_adapter, FALSE, 0, n);

    gst_adapter_flush (base_video_parse->input_adapter, m);

    if (m < n) {
      GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

      /* this is only "maybe" sync */
      base_video_parse->have_sync = TRUE;
    }

    if (!base_video_parse->have_sync) {
      return GST_FLOW_OK;
    }
  }

  /* FIXME: use gst_adapter_prev_timestamp() here instead? */
  buffer = gst_adapter_get_buffer (base_video_parse->input_adapter);

  gst_buffer_unref (buffer);

  /* FIXME check klass->parse_data */

  do {
    ret = klass->parse_data (base_video_parse, FALSE);
  } while (ret == GST_FLOW_OK);

  if (ret == GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA) {
    return GST_FLOW_OK;
  }
  return ret;
}
Example #7
0
static GstFlowReturn
gst_visual_chain (GstPad * pad, GstBuffer * buffer)
{
  GstBuffer *outbuf = NULL;
  guint i;
  GstVisual *visual = GST_VISUAL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
    visual->next_ts = -1;
  }

  /* Match timestamps from the incoming audio */
  if (GST_BUFFER_TIMESTAMP (buffer) != GST_CLOCK_TIME_NONE)
    visual->next_ts = GST_BUFFER_TIMESTAMP (buffer);

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    const guint16 *data;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least 512 samples */
    if (avail < 512 * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    if (visual->next_ts != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment, GST_FORMAT_TIME,
          visual->next_ts);

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* Read 512 samples per channel */
    data =
        (const guint16 *) gst_adapter_peek (visual->adapter, 512 * visual->bps);

#if defined(VISUAL_API_VERSION) && VISUAL_API_VERSION >= 4000 && VISUAL_API_VERSION < 5000
    {
      VisBuffer *lbuf, *rbuf;
      guint16 ldata[512], rdata[512];
      VisAudioSampleRateType rate;

      lbuf = visual_buffer_new_with_buffer (ldata, sizeof (ldata), NULL);
      rbuf = visual_buffer_new_with_buffer (rdata, sizeof (rdata), NULL);

      if (visual->channels == 2) {
        for (i = 0; i < 512; i++) {
          ldata[i] = *data++;
          rdata[i] = *data++;
        }
      } else {
        for (i = 0; i < 512; i++) {
          ldata[i] = *data;
          rdata[i] = *data++;
        }
      }

      switch (visual->rate) {
        case 8000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_8000;
          break;
        case 11250:
          rate = VISUAL_AUDIO_SAMPLE_RATE_11250;
          break;
        case 22500:
          rate = VISUAL_AUDIO_SAMPLE_RATE_22500;
          break;
        case 32000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_32000;
          break;
        case 44100:
          rate = VISUAL_AUDIO_SAMPLE_RATE_44100;
          break;
        case 48000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_48000;
          break;
        case 96000:
          rate = VISUAL_AUDIO_SAMPLE_RATE_96000;
          break;
        default:
          visual_object_unref (VISUAL_OBJECT (lbuf));
          visual_object_unref (VISUAL_OBJECT (rbuf));
          GST_ERROR_OBJECT (visual, "unsupported rate %d", visual->rate);
          ret = GST_FLOW_ERROR;
          goto beach;
          break;
      }

      visual_audio_samplepool_input_channel (visual->audio->samplepool,
          lbuf,
          rate, VISUAL_AUDIO_SAMPLE_FORMAT_S16, VISUAL_AUDIO_CHANNEL_LEFT);
      visual_audio_samplepool_input_channel (visual->audio->samplepool,
          rbuf,
          rate, VISUAL_AUDIO_SAMPLE_FORMAT_S16, VISUAL_AUDIO_CHANNEL_RIGHT);

      visual_object_unref (VISUAL_OBJECT (lbuf));
      visual_object_unref (VISUAL_OBJECT (rbuf));

    }
#else
    if (visual->channels == 2) {
      for (i = 0; i < 512; i++) {
        visual->audio->plugpcm[0][i] = *data++;
        visual->audio->plugpcm[1][i] = *data++;
      }
    } else {
      for (i = 0; i < 512; i++) {
        visual->audio->plugpcm[0][i] = *data;
        visual->audio->plugpcm[1][i] = *data++;
      }
    }
#endif

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }
    visual_video_set_buffer (visual->video, GST_BUFFER_DATA (outbuf));
    visual_audio_analyze (visual->audio);
    visual_actor_run (visual->actor, visual->audio);
    visual_video_set_buffer (visual->video, NULL);
    GST_DEBUG_OBJECT (visual, "rendered one frame");

    GST_BUFFER_TIMESTAMP (outbuf) = visual->next_ts;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    ret = gst_pad_push (visual->srcpad, outbuf);
    outbuf = NULL;

  skip:
    /* interpolate next timestamp */
    if (visual->next_ts != -1)
      visual->next_ts += visual->duration;

    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:

  if (outbuf != NULL)
    gst_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
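The loop renders one frame per visual->spf samples and advances next_ts by visual->duration, so both values follow from the audio rate and the negotiated output framerate. A hedged sketch of that derivation, assuming fps_n/fps_d were parsed from the source caps:

/* sketch: per-frame sample count and duration from rate and framerate */
visual->spf = gst_util_uint64_scale_int (visual->rate, fps_d, fps_n);
visual->duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);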
Example #8
0
static GstFlowReturn
gst_y4m_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstY4mDec *y4mdec;
  int n_avail;
  GstFlowReturn flow_ret = GST_FLOW_OK;
#define MAX_HEADER_LENGTH 80
  char header[MAX_HEADER_LENGTH];
  int i;
  int len;

  y4mdec = GST_Y4M_DEC (parent);

  GST_DEBUG_OBJECT (y4mdec, "chain");

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG ("got discont");
    gst_adapter_clear (y4mdec->adapter);
  }

  gst_adapter_push (y4mdec->adapter, buffer);
  n_avail = gst_adapter_available (y4mdec->adapter);

  if (!y4mdec->have_header) {
    gboolean ret;
    GstCaps *caps;
    GstQuery *query;

    if (n_avail < MAX_HEADER_LENGTH)
      return GST_FLOW_OK;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH);

    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }

    ret = gst_y4m_dec_parse_header (y4mdec, header);
    if (!ret) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG header"), (NULL));
      return GST_FLOW_ERROR;
    }

    y4mdec->header_size = strlen (header) + 1;
    gst_adapter_flush (y4mdec->adapter, y4mdec->header_size);

    caps = gst_video_info_to_caps (&y4mdec->info);
    ret = gst_pad_set_caps (y4mdec->srcpad, caps);

    query = gst_query_new_allocation (caps, FALSE);
    y4mdec->video_meta = FALSE;

    if (y4mdec->pool) {
      gst_buffer_pool_set_active (y4mdec->pool, FALSE);
      gst_object_unref (y4mdec->pool);
    }
    y4mdec->pool = NULL;

    if (gst_pad_peer_query (y4mdec->srcpad, query)) {
      y4mdec->video_meta =
          gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

      /* We only need a pool if we need to do stride conversion for downstream */
      if (!y4mdec->video_meta && memcmp (&y4mdec->info, &y4mdec->out_info,
              sizeof (y4mdec->info)) != 0) {
        GstBufferPool *pool = NULL;
        GstAllocator *allocator = NULL;
        GstAllocationParams params;
        GstStructure *config;
        guint size, min, max;

        if (gst_query_get_n_allocation_params (query) > 0) {
          gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
        } else {
          allocator = NULL;
          gst_allocation_params_init (&params);
        }

        if (gst_query_get_n_allocation_pools (query) > 0) {
          gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min,
              &max);
          size = MAX (size, y4mdec->out_info.size);
        } else {
          pool = NULL;
          size = y4mdec->out_info.size;
          min = max = 0;
        }

        if (pool == NULL) {
          pool = gst_video_buffer_pool_new ();
        }

        config = gst_buffer_pool_get_config (pool);
        gst_buffer_pool_config_set_params (config, caps, size, min, max);
        gst_buffer_pool_config_set_allocator (config, allocator, &params);
        gst_buffer_pool_set_config (pool, config);

        if (allocator)
          gst_object_unref (allocator);

        y4mdec->pool = pool;
      }
    } else if (memcmp (&y4mdec->info, &y4mdec->out_info,
            sizeof (y4mdec->info)) != 0) {
      GstBufferPool *pool;
      GstStructure *config;

      /* No pool, create our own if we need to do stride conversion */
      pool = gst_video_buffer_pool_new ();
      config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, caps, y4mdec->out_info.size, 0,
          0);
      gst_buffer_pool_set_config (pool, config);
      y4mdec->pool = pool;
    }
    if (y4mdec->pool) {
      gst_buffer_pool_set_active (y4mdec->pool, TRUE);
    }
    gst_query_unref (query);
    gst_caps_unref (caps);
    if (!ret) {
      GST_DEBUG_OBJECT (y4mdec, "Couldn't set caps on src pad");
      return GST_FLOW_ERROR;
    }

    y4mdec->have_header = TRUE;
  }

  if (y4mdec->have_new_segment) {
    GstEvent *event;
    GstClockTime start = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.start);
    GstClockTime stop = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.stop);
    GstClockTime time = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.time);
    GstSegment seg;

    gst_segment_init (&seg, GST_FORMAT_TIME);
    seg.start = start;
    seg.stop = stop;
    seg.time = time;
    event = gst_event_new_segment (&seg);

    gst_pad_push_event (y4mdec->srcpad, event);
    //gst_event_unref (event);

    y4mdec->have_new_segment = FALSE;
    y4mdec->frame_index = gst_y4m_dec_bytes_to_frames (y4mdec,
        y4mdec->segment.time);
    GST_DEBUG ("new frame_index %d", y4mdec->frame_index);

  }

  while (1) {
    n_avail = gst_adapter_available (y4mdec->adapter);
    if (n_avail < MAX_HEADER_LENGTH)
      break;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH);
    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }
    if (memcmp (header, "FRAME", 5) != 0) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG frame"), (NULL));
      flow_ret = GST_FLOW_ERROR;
      break;
    }

    len = strlen (header);
    if (n_avail < y4mdec->info.size + len + 1) {
      /* not enough data */
      GST_DEBUG ("not enough data for frame %d < %" G_GSIZE_FORMAT,
          n_avail, y4mdec->info.size + len + 1);
      break;
    }

    gst_adapter_flush (y4mdec->adapter, len + 1);

    buffer = gst_adapter_take_buffer (y4mdec->adapter, y4mdec->info.size);

    GST_BUFFER_TIMESTAMP (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index);
    GST_BUFFER_DURATION (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index + 1) -
        GST_BUFFER_TIMESTAMP (buffer);

    y4mdec->frame_index++;

    if (y4mdec->video_meta) {
      gst_buffer_add_video_meta_full (buffer, 0, y4mdec->info.finfo->format,
          y4mdec->info.width, y4mdec->info.height, y4mdec->info.finfo->n_planes,
          y4mdec->info.offset, y4mdec->info.stride);
    } else if (memcmp (&y4mdec->info, &y4mdec->out_info,
            sizeof (y4mdec->info)) != 0) {
      GstBuffer *outbuf;
      GstVideoFrame iframe, oframe;
      gint i, j;
      gint w, h, istride, ostride;
      guint8 *src, *dest;

      /* Allocate a new buffer and do stride conversion */
      g_assert (y4mdec->pool != NULL);

      flow_ret = gst_buffer_pool_acquire_buffer (y4mdec->pool, &outbuf, NULL);
      if (flow_ret != GST_FLOW_OK) {
        gst_buffer_unref (buffer);
        break;
      }

      gst_video_frame_map (&iframe, &y4mdec->info, buffer, GST_MAP_READ);
      gst_video_frame_map (&oframe, &y4mdec->out_info, outbuf, GST_MAP_WRITE);

      for (i = 0; i < 3; i++) {
        w = GST_VIDEO_FRAME_COMP_WIDTH (&iframe, i);
        h = GST_VIDEO_FRAME_COMP_HEIGHT (&iframe, i);
        istride = GST_VIDEO_FRAME_COMP_STRIDE (&iframe, i);
        ostride = GST_VIDEO_FRAME_COMP_STRIDE (&oframe, i);
        src = GST_VIDEO_FRAME_COMP_DATA (&iframe, i);
        dest = GST_VIDEO_FRAME_COMP_DATA (&oframe, i);

        for (j = 0; j < h; j++) {
          memcpy (dest, src, w);

          dest += ostride;
          src += istride;
        }
      }

      gst_video_frame_unmap (&iframe);
      gst_video_frame_unmap (&oframe);
      gst_buffer_copy_into (outbuf, buffer, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
      gst_buffer_unref (buffer);
      buffer = outbuf;
    }

    flow_ret = gst_pad_push (y4mdec->srcpad, buffer);
    if (flow_ret != GST_FLOW_OK)
      break;
  }

  GST_DEBUG ("returning %d", flow_ret);

  return flow_ret;
}
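For reference, a YUV4MPEG2 stream starts with a single header line of space-separated tags, e.g. "YUV4MPEG2 W320 H240 F25:1 Ip A1:1 C420", and each frame is a "FRAME" line followed by the raw planes; that is what the header_size and len + 1 flushes above account for. An illustrative sketch only (gst_y4m_dec_parse_header is the real parser and walks the tags individually):

static gboolean
y4m_parse_header_sketch (const char *header, int *width, int *height,
    int *fps_n, int *fps_d)
{
  /* assumes the common W H F tag order; real files may reorder tags */
  return sscanf (header, "YUV4MPEG2 W%d H%d F%d:%d",
      width, height, fps_n, fps_d) == 4;
}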
Example #9
0
static GstFlowReturn
gst_siren_enc_chain (GstPad * pad, GstBuffer * buf)
{
    GstSirenEnc *enc;
    GstFlowReturn ret = GST_FLOW_OK;
    GstBuffer *out_buf;
    guint8 *in_data, *out_data;
    guint8 *to_free = NULL;
    guint i, size, num_frames;
    gint out_size, in_size;
    gint encode_ret;
    gboolean discont;
    GstClockTime timestamp;
    guint64 distance;
    GstCaps *outcaps;

    enc = GST_SIREN_ENC (GST_PAD_PARENT (pad));

    discont = GST_BUFFER_IS_DISCONT (buf);
    if (discont) {
        GST_DEBUG_OBJECT (enc, "received DISCONT, flush adapter");
        gst_adapter_clear (enc->adapter);
        enc->discont = TRUE;
    }

    gst_adapter_push (enc->adapter, buf);

    size = gst_adapter_available (enc->adapter);

    GST_LOG_OBJECT (enc, "Received buffer of size %d with adapter of size : %d",
                    GST_BUFFER_SIZE (buf), size);

    /* we need to process 640 input bytes to produce 40 output bytes */
    /* calculate the amount of frames we will handle */
    num_frames = size / 640;

    /* no frames, wait some more */
    if (num_frames == 0)
        goto done;

    /* this is the input/output size */
    in_size = num_frames * 640;
    out_size = num_frames * 40;

    GST_LOG_OBJECT (enc, "we have %u frames, %u in, %u out", num_frames, in_size,
                    out_size);

    /* set output caps when needed */
    if ((outcaps = GST_PAD_CAPS (enc->srcpad)) == NULL) {
        outcaps = gst_static_pad_template_get_caps (&srctemplate);
        gst_pad_set_caps (enc->srcpad, outcaps);
        gst_caps_unref (outcaps);
    }

    /* get a buffer */
    ret = gst_pad_alloc_buffer_and_set_caps (enc->srcpad, -1,
            out_size, outcaps, &out_buf);
    if (ret != GST_FLOW_OK)
        goto alloc_failed;

    /* get the timestamp for the output buffer */
    timestamp = gst_adapter_prev_timestamp (enc->adapter, &distance);

    /* add the amount of time taken by the distance */
    if (timestamp != -1)
        timestamp += gst_util_uint64_scale_int (distance / 2, GST_SECOND, 16000);

    GST_LOG_OBJECT (enc,
                    "timestamp %" GST_TIME_FORMAT ", distance %" G_GUINT64_FORMAT,
                    GST_TIME_ARGS (timestamp), distance);

    /* get the input data for all the frames */
    to_free = in_data = gst_adapter_take (enc->adapter, in_size);
    out_data = GST_BUFFER_DATA (out_buf);

    for (i = 0; i < num_frames; i++) {
        GST_LOG_OBJECT (enc, "Encoding frame %u/%u", i, num_frames);

        /* encode 640 input bytes to 40 output bytes */
        encode_ret = Siren7_EncodeFrame (enc->encoder, in_data, out_data);
        if (encode_ret != 0)
            goto encode_error;

        /* move to next frame */
        out_data += 40;
        in_data += 640;
    }

    GST_LOG_OBJECT (enc, "Finished encoding");

    /* mark discont */
    if (enc->discont) {
        GST_BUFFER_FLAG_SET (out_buf, GST_BUFFER_FLAG_DISCONT);
        enc->discont = FALSE;
    }
    GST_BUFFER_TIMESTAMP (out_buf) = timestamp;
    GST_BUFFER_DURATION (out_buf) = num_frames * FRAME_DURATION;

    ret = gst_pad_push (enc->srcpad, out_buf);

done:
    if (to_free)
        g_free (to_free);

    return ret;

    /* ERRORS */
alloc_failed:
    {
        GST_DEBUG_OBJECT (enc, "failed to pad_alloc buffer: %d (%s)", ret,
                          gst_flow_get_name (ret));
        goto done;
    }
encode_error:
    {
        GST_ELEMENT_ERROR (enc, STREAM, ENCODE, (NULL),
                           ("Error encoding frame: %d", encode_ret));
        ret = GST_FLOW_ERROR;
        gst_buffer_unref (out_buf);
        goto done;
    }
}
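FRAME_DURATION is not shown, but the arithmetic above fixes it: 640 input bytes are 320 S16 mono samples, and the timestamp code scales distance / 2 against 16000 Hz, so each Siren frame covers 20 ms. The assumed definition:

/* assumed: 640 bytes = 320 S16 samples at 16 kHz = 20 ms per frame */
#define FRAME_DURATION (20 * GST_MSECOND)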
Example #10
0
static GstBuffer *
gst_rtp_vp8_depay_process (GstBaseRTPDepayload * depay, GstBuffer * buf)
{
  GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (depay);
  GstBuffer *payload;
  guint8 *data;
  guint offset;
  guint size = gst_rtp_buffer_get_payload_len (buf);

  if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (buf))) {
    GST_LOG_OBJECT (self, "Discontinuity, flushing adapter");
    gst_adapter_clear (self->adapter);
    self->started = FALSE;
  }

  /* At least one header and one vp8 byte */
  if (G_UNLIKELY (size < 2))
    goto too_small;

  data = gst_rtp_buffer_get_payload (buf);

  if (G_UNLIKELY (!self->started)) {
    /* Check if this is the start of a VP8 frame, otherwise bail */
    if ((data[0] & 0x1) == 0)
      return NULL;

    self->started = TRUE;
  }

  offset = 1;
  if ((data[0] & 0x10) != 0) {
    /* Skip Picture identifier bytes */
    for (; (data[offset] & 0x80) != 0; offset++) {
      /* should be at least one more pictureID byte and at least one byte in
       * the vp8 payload */
      if (G_UNLIKELY (offset + 2 >= size))
        goto too_small;
    }
    offset++;
  }

  if (G_UNLIKELY (offset >= size))
    goto too_small;

  payload = gst_rtp_buffer_get_payload_subbuffer (buf, offset, -1);
  gst_adapter_push (self->adapter, payload);

  /* Marker indicates that it was the last rtp packet for this frame */
  if (gst_rtp_buffer_get_marker (buf)) {
    GstBuffer *out;

    out = gst_adapter_take_buffer (self->adapter,
        gst_adapter_available (self->adapter));

    self->started = FALSE;
    return out;
  }

  return NULL;

too_small:
  GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring");
  gst_adapter_clear (self->adapter);
  self->started = FALSE;
  return NULL;
}
Example #11
0
static GstVaapiDecoderStatus
decode_step (GstVaapiDecoder * decoder)
{
  GstVaapiParserState *const ps = &decoder->parser_state;
  GstVaapiDecoderStatus status;
  GstBuffer *buffer;
  gboolean got_frame;
  guint got_unit_size, input_size;

  status = gst_vaapi_decoder_check_status (decoder);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    return status;

  /* Fill adapter with all buffers we have in the queue */
  for (;;) {
    buffer = pop_buffer (decoder);
    if (!buffer)
      break;

    ps->at_eos = GST_BUFFER_IS_EOS (buffer);
    if (!ps->at_eos)
      gst_adapter_push (ps->input_adapter, buffer);
  }

  /* Parse and decode all decode units */
  input_size = gst_adapter_available (ps->input_adapter);
  if (input_size == 0) {
    if (ps->at_eos)
      return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
  }

  do {
    if (!ps->current_frame) {
      ps->current_frame = g_slice_new0 (GstVideoCodecFrame);
      if (!ps->current_frame)
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
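      /* manually allocated frame: seed the refcount by hand so the
       * gst_video_codec_frame_unref () below can release it */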
      ps->current_frame->ref_count = 1;
      ps->current_frame->system_frame_number = ps->current_frame_number++;
    }

    status = do_parse (decoder, ps->current_frame, ps->input_adapter,
        ps->at_eos, &got_unit_size, &got_frame);
    GST_DEBUG ("parse frame (status = %d)", status);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
      if (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA && ps->at_eos)
        status = GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
      break;
    }

    if (got_unit_size > 0) {
      buffer = gst_adapter_take_buffer (ps->input_adapter, got_unit_size);
      input_size -= got_unit_size;

      if (gst_adapter_available (ps->output_adapter) == 0) {
        ps->current_frame->pts =
            gst_adapter_prev_timestamp (ps->input_adapter, NULL);
      }
      gst_adapter_push (ps->output_adapter, buffer);
    }

    if (got_frame) {
      ps->current_frame->input_buffer =
          gst_adapter_take_buffer (ps->output_adapter,
          gst_adapter_available (ps->output_adapter));

      status = do_decode (decoder, ps->current_frame);
      GST_DEBUG ("decode frame (status = %d)", status);

      gst_video_codec_frame_unref (ps->current_frame);
      ps->current_frame = NULL;
      break;
    }
  } while (input_size > 0);
  return status;
}
Example #12
0
static GstFlowReturn
gst_asf_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstAsfParse *asfparse;
  GstFlowReturn ret = GST_FLOW_OK;

  asfparse = GST_ASF_PARSE (parent);
  gst_adapter_push (asfparse->adapter, buffer);

  switch (asfparse->parse_state) {
    case ASF_PARSING_HEADERS:
      if (asfparse->headers_size == 0 &&
          gst_adapter_available (asfparse->adapter) >= ASF_GUID_OBJSIZE_SIZE) {

        /* we can peek at the object size */
        asfparse->headers_size =
            gst_asf_match_and_peek_obj_size (gst_adapter_map
            (asfparse->adapter, ASF_GUID_OBJSIZE_SIZE),
            &(guids[ASF_HEADER_OBJECT_INDEX]));
        gst_adapter_unmap (asfparse->adapter);

        if (asfparse->headers_size == 0) {
          /* something is wrong, this probably ain't an ASF stream */
          GST_ERROR_OBJECT (asfparse, "ASF starting identifier missing");
          ret = GST_FLOW_ERROR;
          goto end;
        }
      }
      if (gst_adapter_available (asfparse->adapter) >= asfparse->headers_size) {
        GstBuffer *headers = gst_adapter_take_buffer (asfparse->adapter,
            asfparse->headers_size);
        if (gst_asf_parse_headers (headers, asfparse->asfinfo)) {
          ret = gst_asf_parse_push (asfparse, headers);
          asfparse->parse_state = ASF_PARSING_DATA;
        } else {
          ret = GST_FLOW_ERROR;
          GST_ERROR_OBJECT (asfparse, "Failed to parse headers");
        }
      }
      break;
    case ASF_PARSING_DATA:
      if (asfparse->data_size == 0 &&
          gst_adapter_available (asfparse->adapter) >= ASF_GUID_OBJSIZE_SIZE) {

        /* we can peek at the object size */
        asfparse->data_size =
            gst_asf_match_and_peek_obj_size (gst_adapter_map
            (asfparse->adapter, ASF_GUID_OBJSIZE_SIZE),
            &(guids[ASF_DATA_OBJECT_INDEX]));
        gst_adapter_unmap (asfparse->adapter);

        if (asfparse->data_size == 0) {
          /* something is wrong */
          GST_ERROR_OBJECT (asfparse, "Unexpected object after headers, was "
              "expecting a data object");
          ret = GST_FLOW_ERROR;
          goto end;
        }
      }
      /* if we have received the full data object headers */
      if (gst_adapter_available (asfparse->adapter) >= ASF_DATA_OBJECT_SIZE) {
        ret = gst_asf_parse_parse_data_object (asfparse,
            gst_adapter_take_buffer (asfparse->adapter, ASF_DATA_OBJECT_SIZE));
        if (ret != GST_FLOW_OK) {
          goto end;
        }
        asfparse->parse_state = ASF_PARSING_PACKETS;
      }
      break;
    case ASF_PARSING_PACKETS:
      g_assert (asfparse->asfinfo->packet_size);
      while ((asfparse->asfinfo->broadcast ||
              asfparse->parsed_packets < asfparse->asfinfo->packets_count) &&
          gst_adapter_available (asfparse->adapter) >=
          asfparse->asfinfo->packet_size) {
        GstBuffer *packet = gst_adapter_take_buffer (asfparse->adapter,
            asfparse->asfinfo->packet_size);
        asfparse->parsed_packets++;
        ret = gst_asf_parse_parse_packet (asfparse, packet);
        if (ret != GST_FLOW_OK)
          goto end;
      }
      if (!asfparse->asfinfo->broadcast &&
          asfparse->parsed_packets >= asfparse->asfinfo->packets_count) {
        GST_INFO_OBJECT (asfparse, "Finished parsing packets");
        asfparse->parse_state = ASF_PARSING_INDEXES;
      }
      break;
    case ASF_PARSING_INDEXES:
      /* we currently don't care about any of those objects */
      if (gst_adapter_available (asfparse->adapter) >= ASF_GUID_OBJSIZE_SIZE) {
        guint64 obj_size;
        /* we can peek at the object size */
        obj_size = gst_asf_match_and_peek_obj_size (gst_adapter_map
            (asfparse->adapter, ASF_GUID_OBJSIZE_SIZE), NULL);
        gst_adapter_unmap (asfparse->adapter);
        if (gst_adapter_available (asfparse->adapter) >= obj_size) {
          GST_DEBUG_OBJECT (asfparse, "Skiping object");
          ret = gst_asf_parse_push (asfparse,
              gst_adapter_take_buffer (asfparse->adapter, obj_size));
          if (ret != GST_FLOW_OK) {
            goto end;
          }
        }
      }
      break;
    default:
      break;
  }

end:
  return ret;
}
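The 24-byte peeks above follow from the ASF container layout: every top-level object begins with a 16-byte GUID followed by a 64-bit little-endian object size, so ASF_GUID_OBJSIZE_SIZE is presumably 24. A minimal sketch of what gst_asf_match_and_peek_obj_size is assumed to do:

/* sketch under ASF layout assumptions; returns 0 when the GUID doesn't match */
static guint64
asf_peek_obj_size_sketch (const guint8 * data, const guint8 expected_guid[16])
{
  if (expected_guid && memcmp (data, expected_guid, 16) != 0)
    return 0;
  return GST_READ_UINT64_LE (data + 16);
}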
Example #13
0
static GstFlowReturn
gst_msdkdec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
{
  GstMsdkDec *thiz = GST_MSDKDEC (decoder);
  GstMsdkDecClass *klass = GST_MSDKDEC_GET_CLASS (thiz);
  GstFlowReturn flow;
  GstBuffer *buffer, *input_buffer = NULL;
  GstVideoInfo alloc_info;
  MsdkDecTask *task = NULL;
  mfxBitstream bitstream;
  MsdkSurface *surface = NULL;
  mfxSession session;
  mfxStatus status;
  GstMapInfo map_info;
  guint i;
  gsize data_size;
  gboolean hard_reset = FALSE;

  /* configure the subclass in order to fill the CodecID field of
   * mfxVideoParam and also to load the PluginID for some of the
   * codecs which is mandatory to invoke the
   * MFXVideoDECODE_DecodeHeader API.
   *
   * For non packetized formats (currently only vc1), there
   * could be headers received as codec_data which are not available
   * instream and in that case subclass implementation will
   * push it to the internal adapter. We invoke the subclass configure
   * well early to make sure the codec_data received has been correctly
   * pushed to the adapter by the subclasses before doing
   * the DecodeHeader() later on
   */
  if (!thiz->initialized || thiz->do_renego) {
    /* Clear the internal adapter in renegotiation for non-packetized
     * formats */
    if (!thiz->is_packetized)
      gst_adapter_clear (thiz->adapter);

    if (!klass->configure || !klass->configure (thiz)) {
      flow = GST_FLOW_OK;
      goto error;
    }
  }

  /* Current frame-codec could be pushed and released before this
   * function ends -- because msdkdec pushes the oldest frame,
   * according its PTS, and it could be this very same frame-codec
   * among others pending frame-codecs.
   *
   * Instead of copying the input data into the mfxBitstream, let's
   * keep an extra reference to frame-codec's input buffer */
  input_buffer = gst_buffer_ref (frame->input_buffer);
  if (!gst_buffer_map (input_buffer, &map_info, GST_MAP_READ)) {
    gst_buffer_unref (input_buffer);
    return GST_FLOW_ERROR;
  }

  memset (&bitstream, 0, sizeof (bitstream));

  if (thiz->is_packetized) {
    /* Packetized stream: We prefer to have a parser as connected upstream
     * element to the decoder */
    bitstream.Data = map_info.data;
    bitstream.DataLength = map_info.size;
    bitstream.MaxLength = map_info.size;
    bitstream.DataFlag = MFX_BITSTREAM_COMPLETE_FRAME;
  } else {
    /* Non packetized streams: eg: vc1 advanced profile with per buffer bdu */
    gst_adapter_push (thiz->adapter, gst_buffer_ref (input_buffer));
    data_size = gst_adapter_available (thiz->adapter);

    bitstream.Data = (mfxU8 *) gst_adapter_map (thiz->adapter, data_size);
    bitstream.DataLength = (mfxU32) data_size;
    bitstream.MaxLength = bitstream.DataLength;
  }
  GST_INFO_OBJECT (thiz,
      "mfxBitStream=> DataLength:%d DataOffset:%d MaxLength:%d",
      bitstream.DataLength, bitstream.DataOffset, bitstream.MaxLength);

  session = gst_msdk_context_get_session (thiz->context);

  if (!thiz->initialized || thiz->do_renego) {

    /* gstreamer caps will not bring all the necessary parameters
     * required for optimal decode configuration. For eg: the required numbers
     * of surfaces to be allocated can be calculated based on H264 SEI header
     * and this information can't be retrieved from the negotiated caps.
     * So instead of introducing the codecparser dependency to parse the headers
     * inside msdk plugin, we simply use the mfx apis to extract header information */
    status = MFXVideoDECODE_DecodeHeader (session, &bitstream, &thiz->param);
    if (status == MFX_ERR_MORE_DATA) {
      flow = GST_FLOW_OK;
      goto done;
    }

    if (!thiz->initialized)
      hard_reset = TRUE;
    else if (thiz->allocation_caps) {
      gst_video_info_from_caps (&alloc_info, thiz->allocation_caps);

      /* Check whether we need complete reset for dynamic resolution change */
      if (thiz->param.mfx.FrameInfo.Width > GST_VIDEO_INFO_WIDTH (&alloc_info)
          || thiz->param.mfx.FrameInfo.Height >
          GST_VIDEO_INFO_HEIGHT (&alloc_info))
        hard_reset = TRUE;
    }

    /* if subclass requested for the force reset */
    if (thiz->force_reset_on_res_change)
      hard_reset = TRUE;

    /* Config changed dynamically and we are going to do a full reset,
     * this will unref the input frame which has the new configuration.
     * Keep a ref to the input_frame to keep it alive */
    if (thiz->initialized && thiz->do_renego)
      gst_video_codec_frame_ref (frame);

    if (!gst_msdkdec_negotiate (thiz, hard_reset)) {
      GST_ELEMENT_ERROR (thiz, CORE, NEGOTIATION,
          ("Could not negotiate the stream"), (NULL));
      flow = GST_FLOW_ERROR;
      goto error;
    }
  }

  for (;;) {
    task = &g_array_index (thiz->tasks, MsdkDecTask, thiz->next_task);
    flow = gst_msdkdec_finish_task (thiz, task);
    if (flow != GST_FLOW_OK)
      goto error;
    if (!surface) {
      flow = allocate_output_buffer (thiz, &buffer);
      if (flow != GST_FLOW_OK)
        goto error;
      surface = get_surface (thiz, buffer);
      if (!surface) {
        /* Can't get a surface for some reason, finish tasks to see if
           a surface becomes available. */
        for (i = 0; i < thiz->tasks->len - 1; i++) {
          thiz->next_task = (thiz->next_task + 1) % thiz->tasks->len;
          task = &g_array_index (thiz->tasks, MsdkDecTask, thiz->next_task);
          flow = gst_msdkdec_finish_task (thiz, task);
          if (flow != GST_FLOW_OK)
            goto error;
          surface = get_surface (thiz, buffer);
          if (surface)
            break;
        }
        if (!surface) {
          GST_ERROR_OBJECT (thiz, "Couldn't get a surface");
          flow = GST_FLOW_ERROR;
          goto error;
        }
      }
    }

    status =
        MFXVideoDECODE_DecodeFrameAsync (session, &bitstream, surface->surface,
        &task->surface, &task->sync_point);

    /* media-sdk requires a complete reset since the surface is inadequate
     * for further decoding */
    if (status == MFX_ERR_INCOMPATIBLE_VIDEO_PARAM) {
      /* Requires memory re-allocation, do a hard reset */
      if (!gst_msdkdec_negotiate (thiz, TRUE))
        goto error;
      status =
          MFXVideoDECODE_DecodeFrameAsync (session, &bitstream,
          surface->surface, &task->surface, &task->sync_point);
    }

    if (G_LIKELY (status == MFX_ERR_NONE)
        || (status == MFX_WRN_VIDEO_PARAM_CHANGED)) {
      thiz->next_task = (thiz->next_task + 1) % thiz->tasks->len;

      if (surface->surface->Data.Locked > 0 || !thiz->use_video_memory)
        surface = NULL;

      if (bitstream.DataLength == 0) {
        flow = GST_FLOW_OK;
        break;
      }
    } else if (status == MFX_ERR_MORE_DATA) {
      if (task->surface) {
        task->decode_only = TRUE;
        thiz->next_task = (thiz->next_task + 1) % thiz->tasks->len;
      }

      if (surface->surface->Data.Locked > 0)
        surface = NULL;
      flow = GST_VIDEO_DECODER_FLOW_NEED_DATA;
      break;
    } else if (status == MFX_ERR_MORE_SURFACE) {
      surface = NULL;
      continue;
    } else if (status == MFX_WRN_DEVICE_BUSY) {
      /* If device is busy, wait 1ms and retry, as per MSDK's recommendation */
      g_usleep (1000);

      /* If the current surface is still busy, we should do a sync operation
       * and then try to decode again
       */
      thiz->next_task = (thiz->next_task + 1) % thiz->tasks->len;
    } else if (status < MFX_ERR_NONE) {
      GST_ERROR_OBJECT (thiz, "DecodeFrameAsync failed (%s)",
          msdk_status_to_string (status));
      flow = GST_FLOW_ERROR;
      break;
    }
  }

  if (!thiz->is_packetized) {
    /* flush out the data which is already consumed by msdk */
    gst_adapter_flush (thiz->adapter, bitstream.DataOffset);
    flow = GST_FLOW_OK;
  }

done:
  if (surface)
    free_surface (thiz, surface);

  gst_buffer_unmap (input_buffer, &map_info);
  gst_buffer_unref (input_buffer);
  return flow;

error:
  if (input_buffer) {
    gst_buffer_unmap (input_buffer, &map_info);
    gst_buffer_unref (input_buffer);
  }
  gst_video_decoder_drop_frame (decoder, frame);

  return flow;
}
Example #14
0
static void
flush_data (GstRtpQDM2Depay * depay)
{
  guint i;
  guint avail;

  if ((avail = gst_adapter_available (depay->adapter)))
    gst_adapter_flush (depay->adapter, avail);

  GST_DEBUG ("Flushing %d packets", depay->nbpackets);

  for (i = 0; depay->packets[i]; i++) {
    QDM2Packet *pack = depay->packets[i];
    guint32 crc = 0;
    int j = 0;
    GstBuffer *buf;
    guint8 *data;

    /* CRC is the sum of everything (including first bytes) */

    data = pack->data;

    if (G_UNLIKELY (data == NULL))
      continue;

    /* If the packet size is bigger than 0xff, we need 2 bytes to store the size */
    if (depay->packetsize > 0xff) {
      /* Expanded size 0x02 | 0x80 */
      data[0] = 0x82;
      GST_WRITE_UINT16_BE (data + 1, depay->packetsize - 3);
    } else {
      data[0] = 0x2;
      data[1] = depay->packetsize - 2;
    }

    /* Calculate CRC */
    for (; j < depay->packetsize; j++)
      crc += data[j];

    GST_DEBUG ("CRC is 0x%x", crc);

    /* Write CRC */
    if (depay->packetsize > 0xff)
      GST_WRITE_UINT16_BE (data + 3, crc);
    else
      GST_WRITE_UINT16_BE (data + 2, crc);

    GST_MEMDUMP ("Extracted packet", data, depay->packetsize);

    buf = gst_buffer_new ();
    gst_buffer_append_memory (buf,
        gst_memory_new_wrapped (0, data, depay->packetsize, 0,
            depay->packetsize, data, g_free));

    gst_adapter_push (depay->adapter, buf);

    if (pack->data) {
      pack->data = NULL;
    }
  }
}
Example #15
0
static GstFlowReturn
gst_multi_file_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstMultiFileSink *sink = GST_MULTI_FILE_SINK (bsink);
  GstFlowReturn flow = GST_FLOW_OK;
  gboolean key_unit, header;

  header = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER);
  key_unit = !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  if (sink->aggregate_gops) {
    GstBuffer *gop_buffer = NULL;
    guint avail;

    avail = gst_adapter_available (sink->gop_adapter);

    GST_LOG_OBJECT (sink, "aggregate GOP: received %s%s unit buffer: "
        "%" GST_PTR_FORMAT,
        (key_unit) ? "key" : "delta", (header) ? " header" : "", buffer);

    /* If it's a header buffer, it might potentially be for the next GOP */
    if (header) {
      GST_LOG_OBJECT (sink, "Accumulating buffer to potential next GOP");
      sink->potential_next_gop =
          g_list_append (sink->potential_next_gop, gst_buffer_ref (buffer));
    } else {
      if (key_unit && avail > 0) {
        GstClockTime pts, dts;
        GST_LOG_OBJECT (sink, "Grabbing pending completed GOP");
        pts = gst_adapter_prev_pts_at_offset (sink->gop_adapter, 0, NULL);
        dts = gst_adapter_prev_dts_at_offset (sink->gop_adapter, 0, NULL);
        gop_buffer = gst_adapter_take_buffer (sink->gop_adapter, avail);
        GST_BUFFER_PTS (gop_buffer) = pts;
        GST_BUFFER_DTS (gop_buffer) = dts;
      }

      /* just accumulate the buffer */
      if (sink->potential_next_gop) {
        GList *tmp;
        GST_LOG_OBJECT (sink,
            "Carrying over pending next GOP data into adapter");
        /* If we have pending data, put that first in the adapter */
        for (tmp = sink->potential_next_gop; tmp; tmp = tmp->next) {
          GstBuffer *tmpb = (GstBuffer *) tmp->data;
          gst_adapter_push (sink->gop_adapter, tmpb);
        }
        g_list_free (sink->potential_next_gop);
        sink->potential_next_gop = NULL;
      }
      GST_LOG_OBJECT (sink, "storing buffer in adapter");
      gst_adapter_push (sink->gop_adapter, gst_buffer_ref (buffer));

      if (gop_buffer != NULL) {
        GST_DEBUG_OBJECT (sink, "writing out pending GOP, %u bytes", avail);
        GST_DEBUG_OBJECT (sink,
            "gop buffer pts:%" GST_TIME_FORMAT " dts:%" GST_TIME_FORMAT
            " duration:%" GST_TIME_FORMAT,
            GST_TIME_ARGS (GST_BUFFER_PTS (gop_buffer)),
            GST_TIME_ARGS (GST_BUFFER_DTS (gop_buffer)),
            GST_TIME_ARGS (GST_BUFFER_DURATION (gop_buffer)));
        flow = gst_multi_file_sink_write_buffer (sink, gop_buffer);
        gst_buffer_unref (gop_buffer);
      }
    }
  } else {
    flow = gst_multi_file_sink_write_buffer (sink, buffer);
  }
  return flow;
}
Example #16
0
static GstFlowReturn
gst_gdp_depay_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGDPDepay *this;
  GstFlowReturn ret = GST_FLOW_OK;
  GstCaps *caps;
  GstBuffer *buf;
  GstEvent *event;
  guint available;

  this = GST_GDP_DEPAY (gst_pad_get_parent (pad));

  /* On DISCONT, get rid of accumulated data. We assume a buffer after the
   * DISCONT contains (part of) a new valid header, if not we error because we
   * lost sync */
  if (GST_BUFFER_IS_DISCONT (buffer)) {
    gst_adapter_clear (this->adapter);
    this->state = GST_GDP_DEPAY_STATE_HEADER;
  }
  gst_adapter_push (this->adapter, buffer);

  while (TRUE) {
    switch (this->state) {
      case GST_GDP_DEPAY_STATE_HEADER:
      {
        guint8 *header;

        /* collect a complete header, validate and store the header. Figure out
         * the payload length and switch to the PAYLOAD state */
        available = gst_adapter_available (this->adapter);
        if (available < GST_DP_HEADER_LENGTH)
          goto done;

        GST_LOG_OBJECT (this, "reading GDP header from adapter");
        header = gst_adapter_take (this->adapter, GST_DP_HEADER_LENGTH);
        if (!gst_dp_validate_header (GST_DP_HEADER_LENGTH, header)) {
          g_free (header);
          goto header_validate_error;
        }

        /* store types and payload length. Also store the header, which we need
         * to make the payload. */
        this->payload_length = gst_dp_header_payload_length (header);
        this->payload_type = gst_dp_header_payload_type (header);
        /* free previous header and store new one. */
        g_free (this->header);
        this->header = header;

        GST_LOG_OBJECT (this,
            "read GDP header, payload size %d, payload type %d, switching to state PAYLOAD",
            this->payload_length, this->payload_type);
        this->state = GST_GDP_DEPAY_STATE_PAYLOAD;
        break;
      }
      case GST_GDP_DEPAY_STATE_PAYLOAD:
      {
        /* in this state we wait for all the payload data to be available in the
         * adapter. Then we switch to the state where we actually process the
         * payload. */
        available = gst_adapter_available (this->adapter);
        if (available < this->payload_length)
          goto done;

        /* change state based on type */
        if (this->payload_type == GST_DP_PAYLOAD_BUFFER) {
          GST_LOG_OBJECT (this, "switching to state BUFFER");
          this->state = GST_GDP_DEPAY_STATE_BUFFER;
        } else if (this->payload_type == GST_DP_PAYLOAD_CAPS) {
          GST_LOG_OBJECT (this, "switching to state CAPS");
          this->state = GST_GDP_DEPAY_STATE_CAPS;
        } else if (this->payload_type >= GST_DP_PAYLOAD_EVENT_NONE) {
          GST_LOG_OBJECT (this, "switching to state EVENT");
          this->state = GST_GDP_DEPAY_STATE_EVENT;
        } else {
          goto wrong_type;
        }

        if (this->payload_length
            && (!gst_dp_validate_payload (GST_DP_HEADER_LENGTH, this->header,
                    gst_adapter_peek (this->adapter, this->payload_length)))) {
          goto payload_validate_error;
        }

        break;
      }
      case GST_GDP_DEPAY_STATE_BUFFER:
      {

        /* if we receive a buffer without caps first, we error out */
        if (!this->caps)
          goto no_caps;

        GST_LOG_OBJECT (this, "reading GDP buffer from adapter");
        buf = gst_dp_buffer_from_header (GST_DP_HEADER_LENGTH, this->header);
        if (!buf)
          goto buffer_failed;

        /* now take the payload if there is any */
        if (this->payload_length > 0) {
          guint8 *payload;

          payload = gst_adapter_take (this->adapter, this->payload_length);
          memcpy (GST_BUFFER_DATA (buf), payload, this->payload_length);
          g_free (payload);
        }

        /* set caps and push */
        gst_buffer_set_caps (buf, this->caps);
        GST_LOG_OBJECT (this, "deserialized buffer %p, pushing, timestamp %"
            GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT
            ", offset %" G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT
            ", size %d, flags 0x%x",
            buf,
            GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
            GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
            GST_BUFFER_SIZE (buf), GST_BUFFER_FLAGS (buf));
        ret = gst_pad_push (this->srcpad, buf);
        if (ret != GST_FLOW_OK)
          goto push_error;

        GST_LOG_OBJECT (this, "switching to state HEADER");
        this->state = GST_GDP_DEPAY_STATE_HEADER;
        break;
      }
      case GST_GDP_DEPAY_STATE_CAPS:
      {
        guint8 *payload;

        /* take the payload of the caps */
        GST_LOG_OBJECT (this, "reading GDP caps from adapter");
        payload = gst_adapter_take (this->adapter, this->payload_length);
        caps = gst_dp_caps_from_packet (GST_DP_HEADER_LENGTH, this->header,
            payload);
        g_free (payload);
        if (!caps)
          goto caps_failed;

        GST_DEBUG_OBJECT (this, "deserialized caps %" GST_PTR_FORMAT, caps);
        gst_caps_replace (&(this->caps), caps);
        gst_pad_set_caps (this->srcpad, caps);
        /* drop the creation ref we still have */
        gst_caps_unref (caps);

        GST_LOG_OBJECT (this, "switching to state HEADER");
        this->state = GST_GDP_DEPAY_STATE_HEADER;
        break;
      }
      case GST_GDP_DEPAY_STATE_EVENT:
      {
        guint8 *payload;

        GST_LOG_OBJECT (this, "reading GDP event from adapter");

        /* adapter doesn't like 0 length payload */
        if (this->payload_length > 0)
          payload = gst_adapter_take (this->adapter, this->payload_length);
        else
          payload = NULL;
        event = gst_dp_event_from_packet (GST_DP_HEADER_LENGTH, this->header,
            payload);
        g_free (payload);
        if (!event)
          goto event_failed;

        GST_DEBUG_OBJECT (this, "deserialized event %p of type %s, pushing",
            event, gst_event_type_get_name (event->type));
        gst_pad_push_event (this->srcpad, event);

        GST_LOG_OBJECT (this, "switching to state HEADER");
        this->state = GST_GDP_DEPAY_STATE_HEADER;
        break;
      }
    }
  }

done:
  gst_object_unref (this);
  return ret;

  /* ERRORS */
header_validate_error:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("GDP packet header does not validate"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
payload_validate_error:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("GDP packet payload does not validate"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
wrong_type:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("GDP packet header is of wrong type"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
no_caps:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("Received a buffer without first receiving caps"));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
buffer_failed:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("could not create buffer from GDP packet"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
push_error:
  {
    GST_WARNING_OBJECT (this, "pushing depayloaded buffer returned %d", ret);
    goto done;
  }
caps_failed:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("could not create caps from GDP packet"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
event_failed:
  {
    GST_ELEMENT_ERROR (this, STREAM, DECODE, (NULL),
        ("could not create event from GDP packet"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
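The depayloader above relies on two GstAdapter ownership rules (0.10 API, as
used throughout this example): gst_adapter_peek() returns a pointer to data the
adapter still owns, valid only until the adapter is next modified, while
gst_adapter_take() returns a freshly allocated copy the caller must g_free().
A sketch of the peek-then-take idiom (take_validated and its validate callback
are hypothetical names):

#include <gst/base/gstadapter.h>

/* Peek first so nothing is consumed if validation fails; only take (which
 * consumes and copies) once the data is known to be good. */
static guint8 *
take_validated (GstAdapter * adapter, guint len,
    gboolean (*validate) (const guint8 * data, guint len))
{
  const guint8 *peeked;

  if (gst_adapter_available (adapter) < len)
    return NULL;
  peeked = gst_adapter_peek (adapter, len);     /* adapter keeps ownership */
  if (!validate (peeked, len))
    return NULL;                                /* nothing consumed */
  return gst_adapter_take (adapter, len);       /* caller must g_free() */
}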
Example #17
0
static GstFlowReturn
gst_rtp_ac3_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRtpAC3Pay *rtpac3pay;
  GstFlowReturn ret;
  gsize avail, left, NF;
  GstMapInfo map;
  guint8 *p;
  guint packet_len;
  GstClockTime duration, timestamp;

  rtpac3pay = GST_RTP_AC3_PAY (basepayload);

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  duration = GST_BUFFER_DURATION (buffer);
  timestamp = GST_BUFFER_PTS (buffer);

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG_OBJECT (rtpac3pay, "DISCONT");
    gst_rtp_ac3_pay_reset (rtpac3pay);
  }

  /* count the number of incoming AC-3 frames */
  NF = 0;
  left = map.size;
  p = map.data;
  while (TRUE) {
    guint bsid, fscod, frmsizecod, frame_size;

    if (left < 6)
      break;

    if (p[0] != 0x0b || p[1] != 0x77)
      break;

    bsid = p[5] >> 3;
    if (bsid > 8)
      break;

    frmsizecod = p[4] & 0x3f;
    fscod = p[4] >> 6;

    GST_DEBUG_OBJECT (rtpac3pay, "fscod %u, %u", fscod, frmsizecod);

    if (fscod >= 3 || frmsizecod >= 38)
      break;

    frame_size = frmsizecod_tbl[frmsizecod].frm_size[fscod] * 2;
    if (frame_size > left)
      break;

    NF++;
    GST_DEBUG_OBJECT (rtpac3pay, "found frame %" G_GSIZE_FORMAT " of size %u",
        NF, frame_size);

    p += frame_size;
    left -= frame_size;
  }
  gst_buffer_unmap (buffer, &map);
  if (NF == 0)
    goto no_frames;

  avail = gst_adapter_available (rtpac3pay->adapter);

  /* get packet length of previous data and this new data,
   * payload length includes a 2 byte payload header */
  packet_len = gst_rtp_buffer_calc_packet_len (2 + avail + map.size, 0, 0);

  /* if this buffer is going to overflow the packet, flush what we
   * have. */
  if (gst_rtp_base_payload_is_filled (basepayload,
          packet_len, rtpac3pay->duration + duration)) {
    ret = gst_rtp_ac3_pay_flush (rtpac3pay);
    avail = 0;
  } else {
    ret = GST_FLOW_OK;
  }

  if (avail == 0) {
    GST_DEBUG_OBJECT (rtpac3pay,
        "first packet, save timestamp %" GST_TIME_FORMAT,
        GST_TIME_ARGS (timestamp));
    rtpac3pay->first_ts = timestamp;
    rtpac3pay->duration = 0;
    rtpac3pay->NF = 0;
  }

  gst_adapter_push (rtpac3pay->adapter, buffer);
  rtpac3pay->duration += duration;
  rtpac3pay->NF += NF;

  return ret;

  /* ERRORS */
no_frames:
  {
    GST_WARNING_OBJECT (rtpac3pay, "no valid AC3 frames found");
    return GST_FLOW_OK;
  }
}
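The frame-counting loop above is, in effect, a sync check run over each
candidate frame header. Pulled out as a predicate it reads as follows (a
sketch grounded in the checks above; the frmsizecod_tbl frame-length lookup is
omitted because its definition is not part of this excerpt):

#include <glib.h>

/* TRUE if p points at a plausible AC-3 frame header (needs >= 6 bytes) */
static gboolean
ac3_sync_valid (const guint8 * p, gsize left)
{
  if (left < 6)
    return FALSE;
  if (p[0] != 0x0b || p[1] != 0x77)     /* 16-bit syncword */
    return FALSE;
  if ((p[5] >> 3) > 8)                  /* bsid: standard AC-3 is <= 8 */
    return FALSE;
  if ((p[4] >> 6) >= 3)                 /* fscod: 3 is reserved */
    return FALSE;
  if ((p[4] & 0x3f) >= 38)              /* frmsizecod: 38 valid entries */
    return FALSE;
  return TRUE;
}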
Example #18
0
static GstFlowReturn
gst_gsmdec_chain (GstPad * pad, GstBuffer * buf)
{
  GstGSMDec *gsmdec;
  gsm_byte *data;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime timestamp;
  gint needed;

  gsmdec = GST_GSMDEC (gst_pad_get_parent (pad));

  timestamp = GST_BUFFER_TIMESTAMP (buf);

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (gsmdec->adapter);
    gsmdec->next_ts = GST_CLOCK_TIME_NONE;
    /* FIXME, do some good offset */
    gsmdec->next_of = 0;
  }
  gst_adapter_push (gsmdec->adapter, buf);

  needed = 33;
  /* do we have enough bytes to read a frame */
  while (gst_adapter_available (gsmdec->adapter) >= needed) {
    GstBuffer *outbuf;

    /* always the same amount of output samples */
    outbuf = gst_buffer_new_and_alloc (ENCODED_SAMPLES * sizeof (gsm_signal));

    /* If we are not given any timestamp, interpolate from last seen
     * timestamp (if any). */
    if (timestamp == GST_CLOCK_TIME_NONE)
      timestamp = gsmdec->next_ts;

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;

    /* interpolate in the next run */
    if (timestamp != GST_CLOCK_TIME_NONE)
      gsmdec->next_ts = timestamp + gsmdec->duration;
    timestamp = GST_CLOCK_TIME_NONE;

    GST_BUFFER_DURATION (outbuf) = gsmdec->duration;
    GST_BUFFER_OFFSET (outbuf) = gsmdec->next_of;
    if (gsmdec->next_of != -1)
      gsmdec->next_of += ENCODED_SAMPLES;
    GST_BUFFER_OFFSET_END (outbuf) = gsmdec->next_of;

    gst_buffer_set_caps (outbuf, GST_PAD_CAPS (gsmdec->srcpad));

    /* now encode frame into the output buffer */
    data = (gsm_byte *) gst_adapter_peek (gsmdec->adapter, needed);
    if (gsm_decode (gsmdec->state, data,
            (gsm_signal *) GST_BUFFER_DATA (outbuf)) < 0) {
      /* invalid frame */
      GST_WARNING_OBJECT (gsmdec, "tried to decode an invalid frame, skipping");
    }
    gst_adapter_flush (gsmdec->adapter, needed);

    /* WAV49 requires alternating 33 and 32 bytes of input */
    if (gsmdec->use_wav49)
      needed = (needed == 33 ? 32 : 33);

    GST_DEBUG_OBJECT (gsmdec, "Pushing buffer of size %d ts %" GST_TIME_FORMAT,
        GST_BUFFER_SIZE (outbuf),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));

    /* push */
    ret = gst_pad_push (gsmdec->srcpad, outbuf);
  }

  gst_object_unref (gsmdec);

  return ret;
}
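The timestamp handling in the decode loop above is a small reusable idiom:
trust the upstream timestamp when one arrives, otherwise extrapolate from the
previous output by a fixed per-frame duration. Isolated (interpolate_ts is an
illustrative name, not API from the example):

#include <gst/gst.h>

/* Returns the timestamp for the next output frame and updates *next_ts so
 * the following frame can be extrapolated from it. */
static GstClockTime
interpolate_ts (GstClockTime upstream_ts, GstClockTime * next_ts,
    GstClockTime duration)
{
  GstClockTime ts;

  /* prefer a valid upstream timestamp, else continue from the last one */
  ts = (upstream_ts != GST_CLOCK_TIME_NONE) ? upstream_ts : *next_ts;
  if (ts != GST_CLOCK_TIME_NONE)
    *next_ts = ts + duration;
  return ts;
}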
Example #19
0
static GstFlowReturn
gst_flxdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstByteReader reader;
  GstBuffer *input;
  GstMapInfo map_info;
  GstCaps *caps;
  guint available;
  GstFlowReturn res = GST_FLOW_OK;

  GstFlxDec *flxdec;
  FlxHeader *flxh;

  g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
  flxdec = (GstFlxDec *) parent;
  g_return_val_if_fail (flxdec != NULL, GST_FLOW_ERROR);

  gst_adapter_push (flxdec->adapter, buf);
  available = gst_adapter_available (flxdec->adapter);
  input = gst_adapter_get_buffer (flxdec->adapter, available);
  if (!gst_buffer_map (input, &map_info, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
        ("%s", "Failed to map buffer"), (NULL));
    goto error;
  }
  gst_byte_reader_init (&reader, map_info.data, map_info.size);

  if (flxdec->state == GST_FLXDEC_READ_HEADER) {
    if (available >= FlxHeaderSize) {
      GstByteReader header;
      GstCaps *templ;

      if (!gst_byte_reader_get_sub_reader (&reader, &header, FlxHeaderSize)) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Could not read header"), (NULL));
        goto unmap_input_error;
      }
      gst_adapter_flush (flxdec->adapter, FlxHeaderSize);
      available -= FlxHeaderSize;

      if (!_read_flx_header (flxdec, &header, &flxdec->hdr)) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Failed to parse header"), (NULL));
        goto unmap_input_error;
      }

      flxh = &flxdec->hdr;

      /* check header */
      if (flxh->type != FLX_MAGICHDR_FLI &&
          flxh->type != FLX_MAGICHDR_FLC && flxh->type != FLX_MAGICHDR_FLX) {
        GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE, (NULL),
            ("not a flx file (type %x)", flxh->type));
        goto unmap_input_error;
      }

      GST_INFO_OBJECT (flxdec, "size      :  %d", flxh->size);
      GST_INFO_OBJECT (flxdec, "frames    :  %d", flxh->frames);
      GST_INFO_OBJECT (flxdec, "width     :  %d", flxh->width);
      GST_INFO_OBJECT (flxdec, "height    :  %d", flxh->height);
      GST_INFO_OBJECT (flxdec, "depth     :  %d", flxh->depth);
      GST_INFO_OBJECT (flxdec, "speed     :  %d", flxh->speed);

      flxdec->next_time = 0;

      if (flxh->type == FLX_MAGICHDR_FLI) {
        flxdec->frame_time = JIFFIE * flxh->speed;
      } else if (flxh->speed == 0) {
        flxdec->frame_time = GST_SECOND / 70;
      } else {
        flxdec->frame_time = flxh->speed * GST_MSECOND;
      }

      flxdec->duration = flxh->frames * flxdec->frame_time;
      GST_LOG ("duration   :  %" GST_TIME_FORMAT,
          GST_TIME_ARGS (flxdec->duration));

      templ = gst_pad_get_pad_template_caps (flxdec->srcpad);
      caps = gst_caps_copy (templ);
      gst_caps_unref (templ);
      gst_caps_set_simple (caps,
          "width", G_TYPE_INT, flxh->width,
          "height", G_TYPE_INT, flxh->height,
          "framerate", GST_TYPE_FRACTION, (gint) GST_MSECOND,
          (gint) (flxdec->frame_time / 1000), NULL);

      gst_pad_set_caps (flxdec->srcpad, caps);
      gst_caps_unref (caps);

      if (flxdec->need_segment) {
        gst_pad_push_event (flxdec->srcpad,
            gst_event_new_segment (&flxdec->segment));
        flxdec->need_segment = FALSE;
      }

      /* zero means 8 */
      if (flxh->depth == 0)
        flxh->depth = 8;

      if (flxh->depth != 8) {
        GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE,
            ("%s", "Don't know how to decode non 8 bit depth streams"), (NULL));
        goto unmap_input_error;
      }

      flxdec->converter =
          flx_colorspace_converter_new (flxh->width, flxh->height);

      if (flxh->type == FLX_MAGICHDR_FLC || flxh->type == FLX_MAGICHDR_FLX) {
        GST_INFO_OBJECT (flxdec, "(FLC) aspect_dx :  %d", flxh->aspect_dx);
        GST_INFO_OBJECT (flxdec, "(FLC) aspect_dy :  %d", flxh->aspect_dy);
        GST_INFO_OBJECT (flxdec, "(FLC) oframe1   :  0x%08x", flxh->oframe1);
        GST_INFO_OBJECT (flxdec, "(FLC) oframe2   :  0x%08x", flxh->oframe2);
      }

      flxdec->size = ((guint) flxh->width * (guint) flxh->height);
      if (flxdec->size >= G_MAXSIZE / 4) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Cannot allocate required memory"), (NULL));
        goto unmap_input_error;
      }

      /* create delta and output frame */
      flxdec->frame_data = g_malloc0 (flxdec->size);
      flxdec->delta_data = g_malloc0 (flxdec->size);

      flxdec->state = GST_FLXDEC_PLAYING;
    }
  } else if (flxdec->state == GST_FLXDEC_PLAYING) {
    GstBuffer *out;

    /* while we have enough data in the adapter */
    while (available >= FlxFrameChunkSize && res == GST_FLOW_OK) {
      guint32 size;
      guint16 type;

      if (!gst_byte_reader_get_uint32_le (&reader, &size))
        goto parse_error;
      if (available < size)
        goto need_more_data;

      available -= size;
      gst_adapter_flush (flxdec->adapter, size);

      if (!gst_byte_reader_get_uint16_le (&reader, &type))
        goto parse_error;

      switch (type) {
        case FLX_FRAME_TYPE:{
          GstByteReader chunks;
          GstByteWriter writer;
          guint16 n_chunks;
          GstMapInfo map;

          GST_LOG_OBJECT (flxdec, "Have frame type 0x%02x of size %d", type,
              size);

          if (!gst_byte_reader_get_sub_reader (&reader, &chunks,
                  size - FlxFrameChunkSize))
            goto parse_error;

          if (!gst_byte_reader_get_uint16_le (&chunks, &n_chunks))
            goto parse_error;
          GST_LOG_OBJECT (flxdec, "Have %d chunks", n_chunks);

          if (n_chunks == 0)
            break;
          if (!gst_byte_reader_skip (&chunks, 8))       /* reserved */
            goto parse_error;

          gst_byte_writer_init_with_data (&writer, flxdec->frame_data,
              flxdec->size, TRUE);

          /* decode chunks */
          if (!flx_decode_chunks (flxdec, n_chunks, &chunks, &writer)) {
            GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
                ("%s", "Could not decode chunk"), NULL);
            goto unmap_input_error;
          }
          gst_byte_writer_reset (&writer);

          /* save copy of the current frame for possible delta. */
          memcpy (flxdec->delta_data, flxdec->frame_data, flxdec->size);

          out = gst_buffer_new_and_alloc (flxdec->size * 4);
          if (!gst_buffer_map (out, &map, GST_MAP_WRITE)) {
            GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
                ("%s", "Could not map output buffer"), NULL);
            gst_buffer_unref (out);
            goto unmap_input_error;
          }

          /* convert current frame. */
          flx_colorspace_convert (flxdec->converter, flxdec->frame_data,
              map.data);
          gst_buffer_unmap (out, &map);

          GST_BUFFER_TIMESTAMP (out) = flxdec->next_time;
          flxdec->next_time += flxdec->frame_time;

          res = gst_pad_push (flxdec->srcpad, out);
          break;
        }
        default:
          GST_DEBUG_OBJECT (flxdec, "Unknown frame type 0x%02x, skipping %d",
              type, size);
          if (!gst_byte_reader_skip (&reader, size - FlxFrameChunkSize))
            goto parse_error;
          break;
      }
    }
  }

need_more_data:
  gst_buffer_unmap (input, &map_info);
  gst_buffer_unref (input);
  return res;

  /* ERRORS */
parse_error:
  GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
      ("%s", "Failed to parse stream"), (NULL));
unmap_input_error:
  gst_buffer_unmap (input, &map_info);
error:
  gst_buffer_unref (input);
  return GST_FLOW_ERROR;
}
Example #20
0
static GstFlowReturn
gst_flac_dec_handle_frame (GstAudioDecoder * audio_dec, GstBuffer * buf)
{
  GstFlacDec *dec;

  dec = GST_FLAC_DEC (audio_dec);

  /* drain remaining data? */
  if (G_UNLIKELY (buf == NULL)) {
    gst_flac_dec_flush (audio_dec, FALSE);
    return GST_FLOW_OK;
  }

  GST_LOG_OBJECT (dec, "frame: ts %" GST_TIME_FORMAT ", flags 0x%04x, "
      "%" G_GSIZE_FORMAT " bytes", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_BUFFER_FLAGS (buf), gst_buffer_get_size (buf));

  /* drop any in-stream headers, we've processed those in set_format already */
  if (G_UNLIKELY (!dec->got_headers)) {
    gboolean got_audio_frame;
    GstMapInfo map;

    /* check if this is a flac audio frame (rather than a header or junk) */
    gst_buffer_map (buf, &map, GST_MAP_READ);
    got_audio_frame = gst_flac_dec_scan_got_frame (dec, map.data, map.size);
    gst_buffer_unmap (buf, &map);

    if (!got_audio_frame) {
      GST_INFO_OBJECT (dec, "dropping in-stream header, %" G_GSIZE_FORMAT " "
          "bytes", map.size);
      gst_audio_decoder_finish_frame (audio_dec, NULL, 1);
      return GST_FLOW_OK;
    }

    GST_INFO_OBJECT (dec, "first audio frame, got all in-stream headers now");
    dec->got_headers = TRUE;
  }

  gst_adapter_push (dec->adapter, gst_buffer_ref (buf));
  buf = NULL;

  dec->last_flow = GST_FLOW_OK;

  /* framed - there should always be enough data to decode something */
  GST_LOG_OBJECT (dec, "%" G_GSIZE_FORMAT " bytes available",
      gst_adapter_available (dec->adapter));

  if (!FLAC__stream_decoder_process_single (dec->decoder)) {
    GST_INFO_OBJECT (dec, "process_single failed");
    if (FLAC__stream_decoder_get_state (dec->decoder) ==
        FLAC__STREAM_DECODER_ABORTED) {
      GST_WARNING_OBJECT (dec, "Read callback caused internal abort");
      /* allow recovery */
      gst_adapter_clear (dec->adapter);
      FLAC__stream_decoder_flush (dec->decoder);
      gst_flac_dec_handle_decoder_error (dec, TRUE);
    }
  }

  return dec->last_flow;
}
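Note the convention at the top of gst_flac_dec_handle_frame: GstAudioDecoder
hands the subclass a NULL buffer to request that buffered data be drained. A
minimal handle_frame honouring that contract could look like this
(my_dec_drain and my_dec_decode are hypothetical helpers, not real API):

#include <gst/audio/gstaudiodecoder.h>

static GstFlowReturn my_dec_drain (GstAudioDecoder * dec);
static GstFlowReturn my_dec_decode (GstAudioDecoder * dec, GstBuffer * buf);

static GstFlowReturn
my_dec_handle_frame (GstAudioDecoder * dec, GstBuffer * buf)
{
  /* NULL buffer == drain request from the base class */
  if (G_UNLIKELY (buf == NULL))
    return my_dec_drain (dec);

  return my_dec_decode (dec, buf);
}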
Example #21
0
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstBuffer * buf)
{
  GstMultipartDemux *multipart;
  GstAdapter *adapter;
  GstClockTime timestamp;
  gint size = 1;
  GstFlowReturn res;

  multipart = GST_MULTIPART_DEMUX (gst_pad_get_parent (pad));
  adapter = multipart->adapter;

  res = GST_FLOW_OK;

  timestamp = GST_BUFFER_TIMESTAMP (buf);

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (adapter);
  }
  gst_adapter_push (adapter, buf);

  while (gst_adapter_available (adapter) > 0) {
    GstMultipartPad *srcpad;
    GstBuffer *outbuf;
    gboolean created;
    gint datalen;

    if (G_UNLIKELY (!multipart->header_completed)) {
      if ((size = multipart_parse_header (multipart)) < 0) {
        goto nodata;
      } else {
        gst_adapter_flush (adapter, size);
        multipart->header_completed = TRUE;
      }
    }
    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
      goto nodata;
    }

    /* Invalidate header info */
    multipart->header_completed = FALSE;
    multipart->content_length = -1;

    if (G_UNLIKELY (datalen <= 0)) {
      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
      gst_adapter_flush (adapter, size - datalen);
    } else {
      srcpad =
          gst_multipart_find_pad_by_mime (multipart,
          multipart->mime_type, &created);
      outbuf = gst_adapter_take_buffer (adapter, datalen);
      gst_adapter_flush (adapter, size - datalen);

      gst_buffer_set_caps (outbuf, GST_PAD_CAPS (srcpad->pad));
      if (created) {
        GstTagList *tags;

        /* Push new segment, first buffer has 0 timestamp */
        gst_pad_push_event (srcpad->pad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));

        tags =
            gst_tag_list_new_full (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));

        GST_BUFFER_TIMESTAMP (outbuf) = 0;
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
      }
      GST_DEBUG_OBJECT (multipart,
          "pushing buffer with timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
      GST_DEBUG_OBJECT (multipart, "buffer has caps %" GST_PTR_FORMAT,
          GST_BUFFER_CAPS (outbuf));
      res = gst_pad_push (srcpad->pad, outbuf);
      res = gst_multipart_combine_flows (multipart, srcpad, res);
      if (res != GST_FLOW_OK)
        break;
    }
  }

nodata:
  gst_object_unref (multipart);

  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
    return GST_FLOW_ERROR;
  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
    return GST_FLOW_UNEXPECTED;

  return res;
}
Example #22
0
void
AudioStream::submitBuffer (GstBuffer *buf)
{
  gst_adapter_push (adapter, buf);
}
Example #23
0
static GstFlowReturn
gst_visual_gl_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGLBuffer *outbuf = NULL;
  GstVisualGL *visual = GST_VISUAL_GL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
  }

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    guint64 dist, timestamp;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least VISUAL_SAMPLES samples */
    if (avail < VISUAL_SAMPLES * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    /* get timestamp of the current adapter byte */
    timestamp = gst_adapter_prev_timestamp (visual->adapter, &dist);
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      /* convert bytes to time */
      dist /= visual->bps;
      timestamp += gst_util_uint64_scale_int (dist, GST_SECOND, visual->rate);
    }

    if (timestamp != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment, GST_FORMAT_TIME,
          timestamp);
      qostime += visual->duration;

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }

    /* render libvisual plugin to our target */
    gst_gl_display_use_fbo_v2 (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        visual->midtexture, (GLCB_V2) render_frame, (gpointer *) visual);

    /* gst video is top-down whereas opengl plan is bottom up */
    gst_gl_display_use_fbo (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        outbuf->texture, (GLCB) bottom_up_to_top_down,
        visual->width, visual->height, visual->midtexture,
        0, visual->width, 0, visual->height, GST_GL_DISPLAY_PROJECTION_ORTHO2D,
        (gpointer *) visual);

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    ret = gst_pad_push (visual->srcpad, GST_BUFFER (outbuf));
    outbuf = NULL;

  skip:
    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:

  if (outbuf != NULL)
    gst_gl_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
Example #24
0
static GstFlowReturn
gst_ffmpegenc_chain_audio (GstPad * pad, GstBuffer * inbuf)
{
  GstFFMpegEnc *ffmpegenc;
  GstFFMpegEncClass *oclass;
  AVCodecContext *ctx;
  GstClockTime timestamp, duration;
  guint size, frame_size;
  gint osize;
  GstFlowReturn ret;
  gint out_size;
  gboolean discont;
  guint8 *in_data;

  ffmpegenc = (GstFFMpegEnc *) (GST_OBJECT_PARENT (pad));
  oclass = (GstFFMpegEncClass *) G_OBJECT_GET_CLASS (ffmpegenc);

  ctx = ffmpegenc->context;

  size = GST_BUFFER_SIZE (inbuf);
  timestamp = GST_BUFFER_TIMESTAMP (inbuf);
  duration = GST_BUFFER_DURATION (inbuf);
  discont = GST_BUFFER_IS_DISCONT (inbuf);

  GST_DEBUG_OBJECT (ffmpegenc,
      "Received time %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT
      ", size %d", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration), size);

  frame_size = ctx->frame_size;
  osize = av_get_bits_per_sample_format (ctx->sample_fmt) / 8;

  if (frame_size > 1) {
    /* we have a frame_size, feed the encoder multiples of this frame size */
    guint avail, frame_bytes;

    if (discont) {
      GST_LOG_OBJECT (ffmpegenc, "DISCONT, clear adapter");
      gst_adapter_clear (ffmpegenc->adapter);
      ffmpegenc->discont = TRUE;
    }

    if (gst_adapter_available (ffmpegenc->adapter) == 0) {
      /* lock on to new timestamp */
      GST_LOG_OBJECT (ffmpegenc, "taking buffer timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (timestamp));
      ffmpegenc->adapter_ts = timestamp;
      ffmpegenc->adapter_consumed = 0;
    } else {
      GstClockTime upstream_time;
      guint64 bytes;

      /* use timestamp at head of the adapter */
      GST_LOG_OBJECT (ffmpegenc, "taking adapter timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (ffmpegenc->adapter_ts));
      timestamp = ffmpegenc->adapter_ts;
      timestamp +=
          gst_util_uint64_scale (ffmpegenc->adapter_consumed, GST_SECOND,
          ctx->sample_rate);
      /* check with upstream timestamps, if too much deviation,
       * forego some timestamp perfection in favour of upstream syncing
       * (particularly in case these do not happen to come in multiples
       * of the frame size) */
      upstream_time = gst_adapter_prev_timestamp (ffmpegenc->adapter, &bytes);
      if (GST_CLOCK_TIME_IS_VALID (upstream_time)) {
        GstClockTimeDiff diff;

        upstream_time +=
            gst_util_uint64_scale (bytes, GST_SECOND, ctx->sample_rate);
        diff = upstream_time - timestamp;
        /* relaxed difference, rather than half a sample or so ... */
        if (diff > GST_SECOND / 10 || diff < -GST_SECOND / 10) {
          GST_DEBUG_OBJECT (ffmpegenc, "adapter timestamp drifting, "
              "taking upstream timestamp %" GST_TIME_FORMAT,
              GST_TIME_ARGS (upstream_time));
          timestamp = upstream_time;
          ffmpegenc->adapter_ts = upstream_time -
              gst_util_uint64_scale (bytes, GST_SECOND, ctx->sample_rate);
          ffmpegenc->adapter_consumed = bytes;
          ffmpegenc->discont = TRUE;
        }
      }
    }

    GST_LOG_OBJECT (ffmpegenc, "pushing buffer in adapter");
    gst_adapter_push (ffmpegenc->adapter, inbuf);

    /* first see how many bytes we need to feed to the encoder. */
    frame_bytes = frame_size * osize * ctx->channels;
    avail = gst_adapter_available (ffmpegenc->adapter);

    GST_LOG_OBJECT (ffmpegenc, "frame_bytes %u, avail %u", frame_bytes, avail);

    /* while there is more than a frame size in the adapter, consume it */
    while (avail >= frame_bytes) {
      GST_LOG_OBJECT (ffmpegenc, "taking %u bytes from the adapter",
          frame_bytes);

      /* take an audio buffer out of the adapter */
      in_data = (guint8 *) gst_adapter_peek (ffmpegenc->adapter, frame_bytes);
      ffmpegenc->adapter_consumed += frame_size;

      /* calculate timestamp and duration relative to start of adapter and to
       * the amount of samples we consumed */
      duration =
          gst_util_uint64_scale (ffmpegenc->adapter_consumed, GST_SECOND,
          ctx->sample_rate);
      duration -= (timestamp - ffmpegenc->adapter_ts);

      /* 4 times the input size plus the minimal buffer size
       * should be big enough... */
      out_size = frame_bytes * 4 + FF_MIN_BUFFER_SIZE;

      ret = gst_ffmpegenc_encode_audio (ffmpegenc, in_data, out_size,
          timestamp, duration, ffmpegenc->discont);

      gst_adapter_flush (ffmpegenc->adapter, frame_bytes);

      if (ret != GST_FLOW_OK)
        goto push_failed;

      /* advance the adapter timestamp with the duration */
      timestamp += duration;

      ffmpegenc->discont = FALSE;
      avail = gst_adapter_available (ffmpegenc->adapter);
    }
    GST_LOG_OBJECT (ffmpegenc, "%u bytes left in the adapter", avail);
  } else {
    /* we have no frame_size, feed the encoder all the data and expect a fixed
     * output size */
    int coded_bps = av_get_bits_per_sample (oclass->in_plugin->id) / 8;

    GST_LOG_OBJECT (ffmpegenc, "coded bps %d, osize %d", coded_bps, osize);

    out_size = size / osize;
    if (coded_bps)
      out_size *= coded_bps;

    /* We need to provide at least ffmpeg's minimal buffer size */
    out_size += FF_MIN_BUFFER_SIZE;

    in_data = (guint8 *) GST_BUFFER_DATA (inbuf);
    ret = gst_ffmpegenc_encode_audio (ffmpegenc, in_data, out_size,
        timestamp, duration, discont);
    gst_buffer_unref (inbuf);

    if (ret != GST_FLOW_OK)
      goto push_failed;
  }

  return GST_FLOW_OK;

  /* ERRORS */
push_failed:
  {
    GST_DEBUG_OBJECT (ffmpegenc, "Failed to push buffer %d (%s)", ret,
        gst_flow_get_name (ret));
    return ret;
  }
}
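The resync logic above tolerates up to 100 ms of drift between the timestamp
extrapolated from consumed samples and the upstream timestamp at the adapter
head before snapping back to the upstream value. The test in isolation (a
sketch; the tolerance matches the constant used above):

#include <gst/gst.h>

#define DRIFT_TOLERANCE ((GstClockTimeDiff) (GST_SECOND / 10))  /* 100 ms */

static gboolean
ts_drifted (GstClockTime extrapolated, GstClockTime upstream)
{
  /* GST_CLOCK_DIFF (s, e) is (e - s) as a signed GstClockTimeDiff */
  GstClockTimeDiff diff = GST_CLOCK_DIFF (extrapolated, upstream);

  return diff > DRIFT_TOLERANCE || diff < -DRIFT_TOLERANCE;
}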
Example #25
0
void
gst_aiur_stream_cache_add_buffer (GstAiurStreamCache * cache,
    GstBuffer * buffer)
{
  guint64 size;
  gint trycnt = 0;
  if ((cache == NULL) || (buffer == NULL))
    goto bail;

  g_mutex_lock (cache->mutex);

  size = GST_BUFFER_SIZE (buffer);

  if ((cache->seeking) || (size == 0)) {
    g_mutex_unlock (cache->mutex);
    goto bail;
  }

  if (cache->ignore_size) {
    /* drop part of the buffer, or the whole buffer */
    if (cache->ignore_size >= size) {
      cache->ignore_size -= size;
      g_mutex_unlock (cache->mutex);
      goto bail;
    } else {
      GST_BUFFER_DATA (buffer) += (cache->ignore_size);
      GST_BUFFER_SIZE (buffer) -= (cache->ignore_size);
      size = GST_BUFFER_SIZE (buffer);
      cache->ignore_size = 0;
    }
    //g_print("cache offset %lld\n", cache->offset);
  }

  gst_adapter_push (cache->adapter, buffer);
  g_cond_signal (cache->produce_cond);

  buffer = NULL;

  if (cache->threshold_max) {
#if 0
    if (cache->threshold_max < size + cache->threshold_pre) {
      cache->threshold_max = size + cache->threshold_pre;
    }
#endif

    while ((gst_adapter_available (cache->adapter) > cache->threshold_max)
        && (cache->closed == FALSE)) {
      if (((++trycnt) & 0x1f) == 0x0) {
        GST_WARNING ("wait push try %d SIZE %d %lld", trycnt,
            gst_adapter_available (cache->adapter), cache->threshold_max);
      }
      WAIT_COND_TIMEOUT (cache->consume_cond, cache->mutex, 1000000);
    }

    if (cache->seeking) {
      g_mutex_unlock (cache->mutex);
      goto bail;
    }
  }


  g_mutex_unlock (cache->mutex);

  return;

bail:
  if (buffer) {
    gst_buffer_unref (buffer);
  }
}
Example #26
0
static GstBuffer *
gst_rtp_jpeg_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
  GstRtpJPEGDepay *rtpjpegdepay;
  GstBuffer *outbuf;
  gint payload_len, header_len;
  guint8 *payload;
  guint frag_offset;
  gint Q;
  guint type, width, height;
  guint16 dri, precision, length;
  guint8 *qtable;

  rtpjpegdepay = GST_RTP_JPEG_DEPAY (depayload);

  if (GST_BUFFER_IS_DISCONT (buf)) {
    gst_adapter_clear (rtpjpegdepay->adapter);
  }

  payload_len = gst_rtp_buffer_get_payload_len (buf);

  if (payload_len < 8)
    goto empty_packet;

  payload = gst_rtp_buffer_get_payload (buf);
  header_len = 0;

  /*  0                   1                   2                   3
   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * | Type-specific |              Fragment Offset                  |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * |      Type     |       Q       |     Width     |     Height    |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   */
  frag_offset = (payload[1] << 16) | (payload[2] << 8) | payload[3];
  type = payload[4];
  Q = payload[5];
  width = payload[6] * 8;
  height = payload[7] * 8;

  if (width == 0 || height == 0)
    goto invalid_dimension;

  GST_DEBUG_OBJECT (rtpjpegdepay, "frag %u, type %u, Q %d, width %u, height %u",
      frag_offset, type, Q, width, height);

  header_len += 8;
  payload += 8;
  payload_len -= 8;

  dri = 0;
  if (type > 63) {
    if (payload_len < 4)
      goto empty_packet;

    /*  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |       Restart Interval        |F|L|       Restart Count       |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    dri = (payload[0] << 8) | payload[1];

    GST_DEBUG_OBJECT (rtpjpegdepay, "DRI %" G_GUINT16_FORMAT, dri);

    payload += 4;
    header_len += 4;
    payload_len -= 4;
  }

  if (Q >= 128 && frag_offset == 0) {
    if (payload_len < 4)
      goto empty_packet;

    /*  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |      MBZ      |   Precision   |             Length            |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |                    Quantization Table Data                    |
     * |                              ...                              |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    precision = payload[1];
    length = (payload[2] << 8) | payload[3];

    GST_DEBUG_OBJECT (rtpjpegdepay, "precision %04x, length %" G_GUINT16_FORMAT,
        precision, length);

    if (Q == 255 && length == 0)
      goto empty_packet;

    payload += 4;
    header_len += 4;
    payload_len -= 4;

    if (length > payload_len)
      goto empty_packet;

    if (length > 0)
      qtable = payload;
    else
      qtable = rtpjpegdepay->qtables[Q];

    payload += length;
    header_len += length;
    payload_len -= length;
  } else {
    length = 0;
    qtable = NULL;
    precision = 0;
  }

  if (frag_offset == 0) {
    guint size;

    if (rtpjpegdepay->width != width || rtpjpegdepay->height != height) {
      GstCaps *outcaps;

      outcaps =
          gst_caps_new_simple ("image/jpeg", "framerate", GST_TYPE_FRACTION, 0,
          1, "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
      gst_pad_set_caps (depayload->srcpad, outcaps);
      gst_caps_unref (outcaps);

      rtpjpegdepay->width = width;
      rtpjpegdepay->height = height;
    }

    GST_LOG_OBJECT (rtpjpegdepay, "first packet, length %" G_GUINT16_FORMAT,
        length);

    /* first packet */
    if (length == 0) {
      if (Q < 128) {
        /* no quant table, see if we have one cached */
        qtable = rtpjpegdepay->qtables[Q];
        if (!qtable) {
          GST_DEBUG_OBJECT (rtpjpegdepay, "making Q %d table", Q);
          /* make and cache the table */
          qtable = g_new (guint8, 128);
          MakeTables (rtpjpegdepay, Q, qtable);
          rtpjpegdepay->qtables[Q] = qtable;
        } else {
          GST_DEBUG_OBJECT (rtpjpegdepay, "using cached table for Q %d", Q);
        }
        /* all 8 bit quantizers */
        precision = 0;
      } else {
        if (!qtable)
          goto no_qtable;
      }
    }
    /* max header length, should be big enough */
    outbuf = gst_buffer_new_and_alloc (1000);
    size = MakeHeaders (GST_BUFFER_DATA (outbuf), type,
        width, height, qtable, precision, dri);

    GST_DEBUG_OBJECT (rtpjpegdepay, "pushing %u bytes of header", size);

    GST_BUFFER_SIZE (outbuf) = size;

    gst_adapter_push (rtpjpegdepay->adapter, outbuf);
  }

  /* take JPEG data, push in the adapter */
  GST_DEBUG_OBJECT (rtpjpegdepay, "pushing data at offset %d", header_len);
  outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, header_len, -1);
  gst_adapter_push (rtpjpegdepay->adapter, outbuf);
  outbuf = NULL;

  if (gst_rtp_buffer_get_marker (buf)) {
    guint avail;
    guint8 end[2];
    guint8 *data;

    /* last buffer take all data out of the adapter */
    avail = gst_adapter_available (rtpjpegdepay->adapter);
    GST_DEBUG_OBJECT (rtpjpegdepay, "marker set, last buffer");

    /* take the last bytes of the jpeg data to see if there is an EOI
     * marker */
    gst_adapter_copy (rtpjpegdepay->adapter, end, avail - 2, 2);

    if (end[0] != 0xff || end[1] != 0xd9) {
      GST_DEBUG_OBJECT (rtpjpegdepay, "no EOI marker, adding one");

      /* no EOI marker, add one */
      outbuf = gst_buffer_new_and_alloc (2);
      data = GST_BUFFER_DATA (outbuf);
      data[0] = 0xff;
      data[1] = 0xd9;

      gst_adapter_push (rtpjpegdepay->adapter, outbuf);
      avail += 2;
    }
    outbuf = gst_adapter_take_buffer (rtpjpegdepay->adapter, avail);

    GST_DEBUG_OBJECT (rtpjpegdepay, "returning %u bytes", avail);
  }

  return outbuf;

  /* ERRORS */
empty_packet:
  {
    GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, DECODE,
        ("Empty Payload."), (NULL));
    return NULL;
  }
invalid_dimension:
  {
    GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, FORMAT,
        ("Invalid Dimension %dx%d.", width, height), (NULL));
    return NULL;
  }
no_qtable:
  {
    GST_WARNING_OBJECT (rtpjpegdepay, "no qtable");
    return NULL;
  }
}
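One subtlety in the marker handling above: a complete JPEG image must end with
the two-byte End Of Image marker ff d9, so the depayloader appends one when
the accumulated data does not already end that way. Either byte differing
means the marker is absent, hence the || in the corrected check; as a
predicate (jpeg_has_eoi is an illustrative name):

#include <glib.h>

/* TRUE if the two bytes form a JPEG EOI marker (0xff 0xd9) */
static gboolean
jpeg_has_eoi (const guint8 end[2])
{
  return end[0] == 0xff && end[1] == 0xd9;
}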
Example #27
0
static GstBuffer *
gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp)
{
  GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (depay);
  GstBuffer *payload;
  guint8 *data;
  guint hdrsize;
  guint size;

  if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (rtp->buffer))) {
    GST_LOG_OBJECT (self, "Discontinuity, flushing adapter");
    gst_adapter_clear (self->adapter);
    self->started = FALSE;
  }

  size = gst_rtp_buffer_get_payload_len (rtp);

  /* At least one header and one vp8 byte */
  if (G_UNLIKELY (size < 2))
    goto too_small;

  data = gst_rtp_buffer_get_payload (rtp);

  if (G_UNLIKELY (!self->started)) {
    /* Check if this is the start of a VP8 frame, otherwise bail */
    /* S=1 and PartID= 0 */
    if ((data[0] & 0x17) != 0x10)
      goto done;

    self->started = TRUE;
  }

  hdrsize = 1;
  /* Check X optional header */
  if ((data[0] & 0x80) != 0) {
    hdrsize++;
    /* Check I optional header */
    if ((data[1] & 0x80) != 0) {
      if (G_UNLIKELY (size < 3))
        goto too_small;
      hdrsize++;
      /* Check for 16 bits PictureID */
      if ((data[2] & 0x80) != 0)
        hdrsize++;
    }
    /* Check L optional header */
    if ((data[1] & 0x40) != 0)
      hdrsize++;
    /* Check T or K optional headers */
    if ((data[1] & 0x20) != 0 || (data[1] & 0x10) != 0)
      hdrsize++;
  }
  GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u", hdrsize, size);

  if (G_UNLIKELY (hdrsize >= size))
    goto too_small;

  payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1);
  gst_adapter_push (self->adapter, payload);

  /* Marker indicates that it was the last rtp packet for this frame */
  if (gst_rtp_buffer_get_marker (rtp)) {
    GstBuffer *out;
    guint8 header[10];

    if (gst_adapter_available (self->adapter) < 10)
      goto too_small;
    gst_adapter_copy (self->adapter, header, 0, 10);

    out = gst_adapter_take_buffer (self->adapter,
        gst_adapter_available (self->adapter));

    self->started = FALSE;

    /* mark keyframes */
    out = gst_buffer_make_writable (out);
    /* Filter away all metas that are not sensible to copy */
    gst_rtp_drop_meta (GST_ELEMENT_CAST (self), out,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
    if ((header[0] & 0x01)) {
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      if (!self->caps_sent) {
        gst_buffer_unref (out);
        out = NULL;
        GST_INFO_OBJECT (self, "Dropping inter-frame before intra-frame");
        gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay),
            gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
                TRUE, 0));
      }
    } else {
      guint profile, width, height;

      GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      profile = (header[0] & 0x0e) >> 1;
      width = GST_READ_UINT16_LE (header + 6) & 0x3fff;
      height = GST_READ_UINT16_LE (header + 8) & 0x3fff;

      if (G_UNLIKELY (self->last_width != width ||
              self->last_height != height || self->last_profile != profile)) {
        gchar profile_str[3];
        GstCaps *srccaps;

        snprintf (profile_str, 3, "%u", profile);
        srccaps = gst_caps_new_simple ("video/x-vp8",
            "framerate", GST_TYPE_FRACTION, 0, 1,
            "height", G_TYPE_INT, height,
            "width", G_TYPE_INT, width,
            "profile", G_TYPE_STRING, profile_str, NULL);

        gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), srccaps);
        gst_caps_unref (srccaps);

        self->caps_sent = TRUE;
        self->last_width = width;
        self->last_height = height;
        self->last_profile = profile;
      }
    }

    return out;
  }

done:
  return NULL;

too_small:
  GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring");
  gst_adapter_clear (self->adapter);
  self->started = FALSE;

  goto done;
}
Example #28
0
static GstFlowReturn
gst_monoscope_chain (GstPad * pad, GstBuffer * inbuf)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstMonoscope *monoscope;

  monoscope = GST_MONOSCOPE (GST_PAD_PARENT (pad));

  /* don't try to combine samples from discont buffer */
  if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (monoscope->adapter);
    monoscope->next_ts = GST_CLOCK_TIME_NONE;
  }

  /* Match timestamps from the incoming audio */
  if (GST_BUFFER_TIMESTAMP (inbuf) != GST_CLOCK_TIME_NONE)
    monoscope->next_ts = GST_BUFFER_TIMESTAMP (inbuf);

  GST_LOG_OBJECT (monoscope, "in buffer has %d samples, ts=%" GST_TIME_FORMAT,
      GST_BUFFER_SIZE (inbuf) / monoscope->bps,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf)));

  gst_adapter_push (monoscope->adapter, inbuf);
  inbuf = NULL;

  /* Collect samples until we have enough for an output frame */
  while (flow_ret == GST_FLOW_OK) {
    gint16 *samples;
    GstBuffer *outbuf = NULL;
    guint32 *pixels, avail, bytesperframe;

    avail = gst_adapter_available (monoscope->adapter);
    GST_LOG_OBJECT (monoscope, "bytes avail now %u", avail);

    /* do negotiation if not done yet, so ->spf etc. is set */
    if (GST_PAD_CAPS (monoscope->srcpad) == NULL) {
      flow_ret = get_buffer (monoscope, &outbuf);
      if (flow_ret != GST_FLOW_OK)
        goto out;
      gst_buffer_unref (outbuf);
      outbuf = NULL;
    }

    bytesperframe = monoscope->spf * monoscope->bps;
    if (avail < bytesperframe)
      break;

    /* FIXME: something is wrong with QoS, we are skipping way too much
     * stuff even with very low CPU loads */
#if 0
    if (monoscope->next_ts != -1) {
      gboolean need_skip;
      gint64 qostime;

      qostime = gst_segment_to_running_time (&monoscope->segment,
          GST_FORMAT_TIME, monoscope->next_ts);

      GST_OBJECT_LOCK (monoscope);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip =
          GST_CLOCK_TIME_IS_VALID (monoscope->earliest_time) &&
          qostime <= monoscope->earliest_time;
      GST_OBJECT_UNLOCK (monoscope);

      if (need_skip) {
        GST_WARNING_OBJECT (monoscope,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (monoscope->earliest_time));
        goto skip;
      }
    }
#endif

    samples = (gint16 *) gst_adapter_peek (monoscope->adapter, bytesperframe);

    if (monoscope->spf < 512) {
      gint16 in_data[512], i;

      for (i = 0; i < 512; ++i) {
        gdouble off;

        off = ((gdouble) i * (gdouble) monoscope->spf) / 512.0;
        in_data[i] = samples[MIN ((guint) off, monoscope->spf)];
      }
      pixels = monoscope_update (monoscope->visstate, in_data);
    } else {
      /* not really correct, but looks much prettier */
      pixels = monoscope_update (monoscope->visstate, samples);
    }

    flow_ret = get_buffer (monoscope, &outbuf);
    if (flow_ret != GST_FLOW_OK)
      goto out;

    memcpy (GST_BUFFER_DATA (outbuf), pixels, monoscope->outsize);

    GST_BUFFER_TIMESTAMP (outbuf) = monoscope->next_ts;
    GST_BUFFER_DURATION (outbuf) = monoscope->frame_duration;

    flow_ret = gst_pad_push (monoscope->srcpad, outbuf);

#if 0
  skip:
#endif

    if (GST_CLOCK_TIME_IS_VALID (monoscope->next_ts))
      monoscope->next_ts += monoscope->frame_duration;

    gst_adapter_flush (monoscope->adapter, bytesperframe);
  }

out:

  return flow_ret;
}
Example #29
0
static GstFlowReturn
gst_base_rtp_audio_payload_handle_buffer (GstBaseRTPPayload *
    basepayload, GstBuffer * buffer)
{
  GstBaseRTPAudioPayload *payload;
  GstBaseRTPAudioPayloadPrivate *priv;
  guint payload_len;
  GstFlowReturn ret;
  guint available;
  guint min_payload_len;
  guint max_payload_len;
  guint align;
  guint size;
  gboolean discont;

  ret = GST_FLOW_OK;

  payload = GST_BASE_RTP_AUDIO_PAYLOAD_CAST (basepayload);
  priv = payload->priv;

  discont = GST_BUFFER_IS_DISCONT (buffer);
  if (discont) {
    GstClockTime timestamp;

    GST_DEBUG_OBJECT (payload, "Got DISCONT");
    /* flush everything out of the adapter, mark DISCONT */
    ret = gst_base_rtp_audio_payload_flush (payload, -1, -1);
    priv->discont = TRUE;

    timestamp = GST_BUFFER_TIMESTAMP (buffer);

    /* get the distance between the timestamp gap and produce the same gap in
     * the RTP timestamps */
    if (priv->last_timestamp != -1 && timestamp != -1) {
      /* we had a last timestamp, compare it to the new timestamp and update the
       * offset counter for RTP timestamps. The effect is that we will produce
       * output buffers containing the same RTP timestamp gap as the gap
       * between the GST timestamps. */
      if (timestamp > priv->last_timestamp) {
        GstClockTime diff;
        guint64 bytes;
        /* we're only going to apply a positive gap, otherwise we let the marker
         * bit do its thing. simply convert to bytes and add it to the current
         * offset */
        diff = timestamp - priv->last_timestamp;
        bytes = priv->time_to_bytes (payload, diff);
        priv->offset += bytes;

        GST_DEBUG_OBJECT (payload,
            "elapsed time %" GST_TIME_FORMAT ", bytes %" G_GUINT64_FORMAT
            ", new offset %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff), bytes,
            priv->offset);
      }
    }
  }

  if (!gst_base_rtp_audio_payload_get_lengths (basepayload, &min_payload_len,
          &max_payload_len, &align))
    goto config_error;

  GST_DEBUG_OBJECT (payload,
      "Calculated min_payload_len %u and max_payload_len %u",
      min_payload_len, max_payload_len);

  size = GST_BUFFER_SIZE (buffer);

  /* shortcut, we don't need to use the adapter when the packet can be pushed
   * through directly. */
  available = gst_adapter_available (priv->adapter);

  GST_DEBUG_OBJECT (payload, "got buffer size %u, available %u",
      size, available);

  if (available == 0 && (size >= min_payload_len && size <= max_payload_len) &&
      (size % align == 0)) {
    /* If buffer fits on an RTP packet, let's just push it through
     * this will check against max_ptime and max_mtu */
    GST_DEBUG_OBJECT (payload, "Fast packet push");
    ret = gst_base_rtp_audio_payload_push_buffer (payload, buffer);
  } else {
    /* push the buffer in the adapter */
    gst_adapter_push (priv->adapter, buffer);
    available += size;

    GST_DEBUG_OBJECT (payload, "available now %u", available);

    /* as long as we have full frames */
    while (available >= min_payload_len) {
      /* get multiple of alignment */
      payload_len = MIN (max_payload_len, available);
      payload_len = ALIGN_DOWN (payload_len, align);

      /* and flush out the bytes from the adapter, automatically set the
       * timestamp. */
      ret = gst_base_rtp_audio_payload_flush (payload, payload_len, -1);

      available -= payload_len;
      GST_DEBUG_OBJECT (payload, "available after push %u", available);
    }
  }
  return ret;

  /* ERRORS */
config_error:
  {
    GST_ELEMENT_ERROR (payload, STREAM, NOT_IMPLEMENTED, (NULL),
        ("subclass did not configure us properly"));
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
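ALIGN_DOWN is used in the flush loop above but not defined in this excerpt. A
definition consistent with that use (an assumption; the element's actual macro
may differ) rounds a length down to the nearest multiple of the alignment:

/* round val down to the nearest multiple of align (align must be > 0) */
#define ALIGN_DOWN(val, align) ((val) - ((val) % (align)))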
Example #30
0
static GstFlowReturn
gst_goom_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGoom *goom;
  GstFlowReturn ret;
  GstBuffer *outbuf = NULL;

  goom = GST_GOOM (gst_pad_get_parent (pad));
    
  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (goom->srcpad) == NULL) {
    ret = get_buffer (goom, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }
  
  /* don't try to combine samples from discont buffer */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (goom->adapter);
    goom->next_ts = -1;
  }

  /* Match timestamps from the incoming audio */
  if (GST_BUFFER_TIMESTAMP (buffer) != GST_CLOCK_TIME_NONE)
    goom->next_ts = GST_BUFFER_TIMESTAMP (buffer);

  GST_DEBUG_OBJECT (goom,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / goom->bps, GST_BUFFER_TIMESTAMP (buffer));

  /* Collect samples until we have enough for an output frame */
  gst_adapter_push (goom->adapter, buffer);

  ret = GST_FLOW_OK;

  while (TRUE) {
    const guint16 *data;
    gboolean need_skip;
    guchar *out_frame;
    gint i;
    guint avail, to_flush;

    avail = gst_adapter_available (goom->adapter);
    GST_DEBUG_OBJECT (goom, "avail now %u", avail);

    /* we need GOOM_SAMPLES to get a meaningful result from goom. */
    if (avail < (GOOM_SAMPLES * goom->bps))
      break;

    /* we also need enough samples to produce one frame at least */
    if (avail < goom->bpf)
      break;

    GST_DEBUG_OBJECT (goom, "processing buffer");

    if (goom->next_ts != -1) {
      gint64 qostime;

      qostime = gst_segment_to_running_time (&goom->segment, GST_FORMAT_TIME,
          goom->next_ts);

      GST_OBJECT_LOCK (goom);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = goom->earliest_time != -1 && qostime <= goom->earliest_time;
      GST_OBJECT_UNLOCK (goom);

      if (need_skip) {
        GST_WARNING_OBJECT (goom,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (goom->earliest_time));
        goto skip;
      }
    }

    /* get next GOOM_SAMPLES, we have at least this amount of samples */
    data =
        (const guint16 *) gst_adapter_peek (goom->adapter,
        GOOM_SAMPLES * goom->bps);

    if (goom->channels == 2) {
      for (i = 0; i < GOOM_SAMPLES; i++) {
        goom->datain[0][i] = *data++;
        goom->datain[1][i] = *data++;
      }
    } else {
      for (i = 0; i < GOOM_SAMPLES; i++) {
        goom->datain[0][i] = *data;
        goom->datain[1][i] = *data++;
      }
    }

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (goom, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }

    GST_BUFFER_TIMESTAMP (outbuf) = goom->next_ts;
    GST_BUFFER_DURATION (outbuf) = goom->duration;
    GST_BUFFER_SIZE (outbuf) = goom->outsize;

    out_frame = (guchar *) goom_update (goom->plugin, goom->datain, 0, 0);
    memcpy (GST_BUFFER_DATA (outbuf), out_frame, goom->outsize);

    GST_DEBUG ("Pushing frame with time=%" GST_TIME_FORMAT ", duration=%"
        GST_TIME_FORMAT, GST_TIME_ARGS (goom->next_ts),
        GST_TIME_ARGS (goom->duration));

    ret = gst_pad_push (goom->srcpad, outbuf);
    outbuf = NULL;

  skip:
    /* interpolate next timestamp */
    if (goom->next_ts != -1)
      goom->next_ts += goom->duration;

    /* Now flush the samples we needed for this frame, which might be more than
     * the samples we used (GOOM_SAMPLES). */
    to_flush = goom->bpf;

    GST_DEBUG_OBJECT (goom, "finished frame, flushing %u bytes from input",
        to_flush);
    gst_adapter_flush (goom->adapter, to_flush);

    if (ret != GST_FLOW_OK)
      break;
  }

  if (outbuf != NULL)
    gst_buffer_unref (outbuf);

beach:
  gst_object_unref (goom);

  return ret;
}