Example #1
static inline void
extract_and_queue_tags (GstJpegParse * parse, guint size, guint8 * data,
    GstTagList * (*tag_func) (GstBuffer * buff))
{
  GstTagList *tags;
  GstBuffer *buf;

  buf = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, data, size, 0,
      size, NULL, NULL);

  tags = tag_func (buf);
  gst_buffer_unref (buf);

  if (tags) {
    GstTagList *taglist = parse->priv->tags;
    if (taglist) {
      gst_tag_list_insert (taglist, tags, GST_TAG_MERGE_REPLACE);
      gst_tag_list_unref (tags);
    } else {
      parse->priv->tags = tags;
    }
    GST_DEBUG_OBJECT (parse, "collected tags: %" GST_PTR_FORMAT,
        parse->priv->tags);
  }
}
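
Every example on this page wraps existing memory with the same constructor, so it is worth keeping its parameter layout in mind. A minimal sketch (the payload array is illustrative):

static const guint8 payload[] = { 0x00, 0x01, 0x02, 0x03 };

/* gst_buffer_new_wrapped_full (flags, data, maxsize, offset, size,
 *                              user_data, notify) wraps `payload` without
 * copying it. With user_data and notify both NULL the buffer does not own
 * the memory, so the caller must keep it alive for as long as the buffer
 * (and anything downstream) can touch it. */
GstBuffer *buf = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
    (gpointer) payload, sizeof (payload), 0, sizeof (payload), NULL, NULL);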
Example #2
void ofxGstRTPServer::sendAudioOut(PooledAudioFrame * pooledFrame){
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));
	// gst_pipeline_get_clock() already returns a new reference, so a single
	// unref balances it (the original ref'd it again and leaked one ref)
	GstClockTime now = gst_clock_get_time (clock) - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref (clock);
	if(firstAudioFrame && !audioAutoTimestamp){
		prevTimestampAudio = now;
		firstAudioFrame = false;
		return;
	}

	int size = pooledFrame->audioFrame._payloadDataLengthInSamples*2*pooledFrame->audioFrame._audioChannel;

	GstBuffer * echoCancelledBuffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,(void*)pooledFrame->audioFrame._payloadData,size,0,size,pooledFrame,(GDestroyNotify)&ofxWebRTCAudioPool::relaseFrame);

	if(!audioAutoTimestamp){
		GstClockTime duration = (pooledFrame->audioFrame._payloadDataLengthInSamples * GST_SECOND / pooledFrame->audioFrame._frequencyInHz);
		GstClockTime now = prevTimestampAudio + duration;

		GST_BUFFER_OFFSET(echoCancelledBuffer) = numFrameAudio++;
		GST_BUFFER_OFFSET_END(echoCancelledBuffer) = numFrameAudio;
		GST_BUFFER_DTS (echoCancelledBuffer) = now;
		GST_BUFFER_PTS (echoCancelledBuffer) = now;
		GST_BUFFER_DURATION(echoCancelledBuffer) = duration;
		prevTimestampAudio = now;
	}


	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcAudio, echoCancelledBuffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError(LOG_NAME) << "error pushing audio buffer: flow_return was " << flow_return;
	}
}
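
The duration formula above is plain samples-to-time scaling. A quick sanity check with illustrative numbers:

/* 480 samples at 48000 Hz: 480 * GST_SECOND / 48000 = 10 ms */
GstClockTime duration = gst_util_uint64_scale (480, GST_SECOND, 48000);
g_assert (duration == 10 * GST_MSECOND);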
Example #3
gboolean
decoder_put_buffers (GstVaapiDecoder * decoder)
{
  const CodecDefs *codec;
  VideoDecodeInfo info;
  GstBuffer *buffer;
  gboolean success;

  g_return_val_if_fail (decoder != NULL, FALSE);

  codec = get_codec_defs (decoder);
  g_return_val_if_fail (codec != NULL, FALSE);

  codec->get_video_info (&info);
  buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      (guchar *) info.data, info.data_size, 0, info.data_size, NULL, NULL);
  if (!buffer) {
    GST_ERROR ("failed to create encoded data buffer");
    return FALSE;
  }

  success = gst_vaapi_decoder_put_buffer (decoder, buffer);
  gst_buffer_unref (buffer);
  if (!success) {
    GST_ERROR ("failed to send video data to the decoder");
    return FALSE;
  }

  if (!gst_vaapi_decoder_put_buffer (decoder, NULL)) {
    GST_ERROR ("failed to submit <end-of-stream> to the decoder");
    return FALSE;
  }
  return TRUE;
}
Example #4
/*
 * Runs the RTP pipeline.
 * @param p Pointer to the RTP pipeline.
 */
static void
rtp_pipeline_run (rtp_pipeline * p)
{
  GstFlowReturn flow_ret;
  GMainLoop *mainloop = NULL;
  GstBus *bus;
  gint i, j;

  /* Check parameters. */
  if (p == NULL) {
    return;
  }

  /* Create mainloop. */
  mainloop = g_main_loop_new (NULL, FALSE);
  if (!mainloop) {
    return;
  }

  /* Add bus callback. */
  bus = gst_pipeline_get_bus (GST_PIPELINE (p->pipeline));

  gst_bus_add_watch (bus, rtp_bus_callback, (gpointer) mainloop);
  gst_object_unref (bus);

  /* Set pipeline to PLAYING. */
  gst_element_set_state (p->pipeline, GST_STATE_PLAYING);

  /* Push data into the pipeline */
  for (i = 0; i < LOOP_COUNT; i++) {
    const guint8 *data = p->frame_data;

    for (j = 0; j < p->frame_count; j++) {
      GstBuffer *buf;

      buf =
          gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
          (guint8 *) data, p->frame_data_size, 0, p->frame_data_size, NULL,
          NULL);

      g_signal_emit_by_name (p->appsrc, "push-buffer", buf, &flow_ret);
      fail_unless_equals_int (flow_ret, GST_FLOW_OK);
      data += p->frame_data_size;

      gst_buffer_unref (buf);
    }
  }

  g_signal_emit_by_name (p->appsrc, "end-of-stream", &flow_ret);

  /* Run mainloop. */
  g_main_loop_run (mainloop);

  /* Set pipeline to NULL. */
  gst_element_set_state (p->pipeline, GST_STATE_NULL);

  /* Release mainloop. */
  g_main_loop_unref (mainloop);
}
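
rtp_bus_callback is not part of this listing. A minimal sketch of a compatible bus watch (an assumption, not the original test code):

static gboolean
rtp_bus_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  GMainLoop *mainloop = (GMainLoop *) data;

  /* Quit the loop on EOS or error so rtp_pipeline_run() can tear down. */
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:
    case GST_MESSAGE_EOS:
      g_main_loop_quit (mainloop);
      break;
    default:
      break;
  }
  return TRUE;
}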
Example #5
static GstFlowReturn
gst_shm_src_create (GstPushSrc * psrc, GstBuffer ** outbuf)
{
  GstShmSrc *self = GST_SHM_SRC (psrc);
  gchar *buf = NULL;
  int rv = 0;
  struct GstShmBuffer *gsb;

  do {
    if (gst_poll_wait (self->poll, GST_CLOCK_TIME_NONE) < 0) {
      if (errno == EBUSY)
        return GST_FLOW_FLUSHING;
      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),
          ("Poll failed on fd: %s", strerror (errno)));
      return GST_FLOW_ERROR;
    }

    if (self->unlocked)
      return GST_FLOW_FLUSHING;

    if (gst_poll_fd_has_closed (self->poll, &self->pollfd)) {
      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),
          ("Control socket has closed"));
      return GST_FLOW_ERROR;
    }

    if (gst_poll_fd_has_error (self->poll, &self->pollfd)) {
      GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),
          ("Control socket has error"));
      return GST_FLOW_ERROR;
    }

    if (gst_poll_fd_can_read (self->poll, &self->pollfd)) {
      buf = NULL;
      GST_LOG_OBJECT (self, "Reading from pipe");
      GST_OBJECT_LOCK (self);
      rv = sp_client_recv (self->pipe->pipe, &buf);
      GST_OBJECT_UNLOCK (self);
      if (rv < 0) {
        GST_ELEMENT_ERROR (self, RESOURCE, READ, ("Failed to read from shmsrc"),
            ("Error reading control data: %d", rv));
        return GST_FLOW_ERROR;
      }
    }
  } while (buf == NULL);

  GST_LOG_OBJECT (self, "Got buffer %p of size %d", buf, rv);

  gsb = g_slice_new0 (struct GstShmBuffer);
  gsb->buf = buf;
  gsb->pipe = self->pipe;
  gst_shm_pipe_inc (self->pipe);

  *outbuf = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      buf, rv, 0, rv, gsb, free_buffer);

  return GST_FLOW_OK;
}
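
free_buffer() is referenced as the destroy-notify above but not shown. A plausible sketch based on the GstShmBuffer fields (an assumption, not the upstream source; sp_client_recv_finish and gst_shm_pipe_dec are inferred counterparts of the calls used above):

static void
free_buffer (gpointer data)
{
  struct GstShmBuffer *gsb = data;

  /* Tell the writer the shared-memory block may be reused, then drop the
   * pipe reference taken with gst_shm_pipe_inc() in _create(). */
  sp_client_recv_finish (gsb->pipe->pipe, gsb->buf);
  gst_shm_pipe_dec (gsb->pipe);
  g_slice_free (struct GstShmBuffer, gsb);
}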
Example #6
/* spice_gst_decoder_queue_frame() queues the SpiceFrame for decoding and
 * displaying. The steps it goes through are as follows:
 *
 * 1) A SpiceGstFrame is created to keep track of SpiceFrame and some additional
 *    metadata. The SpiceGstFrame is then pushed to the decoding_queue.
 * 2) frame->data, which contains the compressed frame data, is reffed and
 *    wrapped in a GstBuffer which is pushed to the GStreamer pipeline for
 *    decoding.
 * 3) As soon as the GStreamer pipeline no longer needs the compressed frame it
 *    will call frame->unref_data() to free it.
 * 4) Once the decompressed frame is available the GStreamer pipeline calls
 *    new_sample() in the GStreamer thread.
 * 5) new_sample() then matches the decompressed frame to a SpiceGstFrame from
 *    the decoding queue using the GStreamer timestamp information to deal with
 *    dropped frames. The SpiceGstFrame is popped from the decoding_queue.
 * 6) new_sample() then attaches the decompressed frame to the SpiceGstFrame,
 *    pushes it to the display_queue and calls schedule_frame().
 * 7) schedule_frame() then uses gstframe->frame->mm_time to arrange for
 *    display_frame() to be called, in the main thread, at the right time for
 *    the next frame.
 * 8) display_frame() pops the first SpiceGstFrame from the display_queue and
 *    calls stream_display_frame().
 * 9) display_frame() then frees the SpiceGstFrame, which frees the SpiceFrame
 *    and decompressed frame with it.
 */
static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
                                              SpiceFrame *frame, int latency)
{
    SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;

    if (frame->size == 0) {
        SPICE_DEBUG("got an empty frame buffer!");
        frame->free(frame);
        return TRUE;
    }

    if (frame->mm_time < decoder->last_mm_time) {
        SPICE_DEBUG("new-frame-time < last-frame-time (%u < %u):"
                    " resetting stream",
                    frame->mm_time, decoder->last_mm_time);
        /* Let GStreamer deal with the frame anyway */
    }
    decoder->last_mm_time = frame->mm_time;

    if (latency < 0 &&
        decoder->base.codec_type == SPICE_VIDEO_CODEC_TYPE_MJPEG) {
        /* Dropping MJPEG frames has no impact on those that follow and
         * saves CPU so do it.
         */
        SPICE_DEBUG("dropping a late MJPEG frame");
        frame->free(frame);
        return TRUE;
    }

    if (decoder->pipeline == NULL) {
        /* An error occurred, causing the GStreamer pipeline to be freed */
        spice_warning("An error occurred, stopping the video stream");
        return FALSE;
    }

    /* ref() the frame data for the buffer */
    frame->ref_data(frame->data_opaque);
    GstBuffer *buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
                                                    frame->data, frame->size, 0, frame->size,
                                                    frame->data_opaque, frame->unref_data);

    GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_PTS(buffer) = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, latency)) * 1000 * 1000;

    g_mutex_lock(&decoder->queues_mutex);
    g_queue_push_tail(decoder->decoding_queue, create_gst_frame(buffer, frame));
    g_mutex_unlock(&decoder->queues_mutex);

    if (gst_app_src_push_buffer(decoder->appsrc, buffer) != GST_FLOW_OK) {
        SPICE_DEBUG("GStreamer error: unable to push frame of size %u", frame->size);
        stream_dropped_frame_on_playback(decoder->base.stream);
    }
    return TRUE;
}
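
One note on the PTS line: the explicit * 1000 * 1000 converts the millisecond latency to nanoseconds, a constant GStreamer already names. An equivalent spelling:

GST_BUFFER_PTS(buffer) = gst_clock_get_time(decoder->clock)
                         - gst_element_get_base_time(decoder->pipeline)
                         + (uint64_t)MAX(0, latency) * GST_MSECOND;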
Example #7
void ofxGstRTPServer::newFrameDepth(ofPixels & pixels, GstClockTime timestamp){
	// Here we push new depth frames into the pipeline. It's important
	// to timestamp them properly so GStreamer can sync them with the
	// audio.

	if(!bufferPoolDepth || !appSrcDepth) return;

	GstClockTime now = timestamp;
	if(!depthAutoTimestamp){
		if(now==GST_CLOCK_TIME_NONE){
			now = getTimeStamp();
		}

		if(firstDepthFrame){
			prevTimestampDepth = now;
			firstDepthFrame = false;
			return;
		}
	}

	// get a pixels buffer from the pool and copy the passed frame into it
	PooledPixels<unsigned char> * pooledPixels = bufferPoolDepth->newBuffer();
	//pooledPixels->swap(pixels);
	*(ofPixels*)pooledPixels=pixels;

	// wrap the pooled pixels into a gstreamer buffer and pass the release
	// callback so when it's not needed anymore by gst we can return it to the pool
	GstBuffer * buffer;
	buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,pooledPixels->getPixels(), pooledPixels->size(), 0, pooledPixels->size(), pooledPixels, (GDestroyNotify)&ofxGstBufferPool<unsigned char>::relaseBuffer);

	// timestamp the buffer, right now we are using:
	// timestamp = current pipeline time - base time
	// duration = timestamp - previousTimeStamp
	// the duration is actually the duration of the previous frame
	// but should be accurate enough

	if(!depthAutoTimestamp){
		GST_BUFFER_OFFSET(buffer) = numFrameDepth++;
		GST_BUFFER_OFFSET_END(buffer) = numFrameDepth;
		GST_BUFFER_DTS (buffer) = now;
		GST_BUFFER_PTS (buffer) = now;
		GST_BUFFER_DURATION(buffer) = now-prevTimestampDepth;
		prevTimestampDepth = now;
	}

	if(sendDepthKeyFrame){
		emitDepthKeyFrame();
	}

	// finally push the buffer into the pipeline through the appsrc element
	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcDepth, buffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError() << "error pushing depth buffer: flow_return was " << flow_return;
	}
}
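
getTimeStamp() is not included in this snippet. Going by the comment's formula ("current pipeline time - base time"), a plausible sketch (an assumption, reusing the gst.getPipeline() accessor seen in Example #2):

GstClockTime ofxGstRTPServer::getTimeStamp(){
	// running time = absolute clock time minus the pipeline's base time
	GstClock * clock = gst_pipeline_get_clock(GST_PIPELINE(gst.getPipeline()));
	if(!clock) return GST_CLOCK_TIME_NONE;
	GstClockTime now = gst_clock_get_time(clock) - gst_element_get_base_time(gst.getPipeline());
	gst_object_unref(clock);
	return now;
}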
Example #8
File: gsttffilter.c Project: AmesianX/wine
static HRESULT WINAPI Gstreamer_transform_ProcessData(TransformFilter *iface, IMediaSample *sample)
{
    GstTfImpl *This = (GstTfImpl*)iface;
    REFERENCE_TIME tStart, tStop;
    BYTE *data;
    GstBuffer *buf;
    HRESULT hr;
    DWORD bufsize;
    int ret;

    TRACE("%p, %p\n", This, sample);

    mark_wine_thread();

    EnterCriticalSection(&This->tf.csReceive);
    IMediaSample_GetPointer(sample, &data);

    IMediaSample_AddRef(sample);
    bufsize = IMediaSample_GetActualDataLength(sample);
    buf = gst_buffer_new_wrapped_full(0, data, bufsize, 0, bufsize, sample, release_sample_wrapper);
    if (!buf) {
        IMediaSample_Release(sample);
        LeaveCriticalSection(&This->tf.csReceive);
        return S_OK;
    }

    IMediaSample_AddRef(sample);
    gst_mini_object_set_qdata(GST_MINI_OBJECT(buf), g_quark_from_static_string(media_quark_string), sample, release_sample_wrapper);

    buf->duration = buf->pts = -1;
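    /* DirectShow REFERENCE_TIME values are in 100 ns units; GStreamer
     * timestamps are in nanoseconds, hence the multiplications by 100 below. */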
    hr = IMediaSample_GetTime(sample, &tStart, &tStop);
    if (SUCCEEDED(hr)) {
        buf->pts = tStart * 100;
        if (hr == S_OK)
            buf->duration = (tStop - tStart)*100;
    }
    if (IMediaSample_GetMediaTime(sample, &tStart, &tStop) == S_OK) {
        buf->offset = tStart * 100;
        buf->offset_end = tStop * 100;
    }
    if (IMediaSample_IsDiscontinuity(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DISCONT);
    if (IMediaSample_IsPreroll(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_LIVE);
    if (IMediaSample_IsSyncPoint(sample) != S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT);
    LeaveCriticalSection(&This->tf.csReceive);
    ret = gst_pad_push(This->my_src, buf);
    if (ret)
        WARN("Sending returned: %i\n", ret);
    if (ret == GST_FLOW_FLUSHING)
        return VFW_E_WRONG_STATE;
    return S_OK;
}
Example #9
static GstFlowReturn
_src_getrange (GstPad * pad, GstObject * parent, guint64 offset, guint length,
    GstBuffer ** buffer)
{
  if (offset + length > sizeof (mxf_file))
    return GST_FLOW_EOS;

  *buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      (guint8 *) (mxf_file + offset), length, 0, length, NULL, NULL);

  return GST_FLOW_OK;
}
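
An equivalent sketch: instead of advancing the data pointer, the offset argument can select the same window while the full array stays the backing memory (maxsize), as Example #22 below does with its file data:

  /* maxsize covers the whole array; offset/length select the window */
  *buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      (guint8 *) mxf_file, sizeof (mxf_file), offset, length, NULL, NULL);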
Example #10
static GstBuffer *
create_buffer (guint8 * data, gsize size)
{
  GstBuffer * buf = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
    data, size, 0, size, NULL, NULL);
  GST_BUFFER_PTS (buf) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_DTS (buf) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_DURATION (buf) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
  return buf;
}
Example #11
static GList *
_make_buffers_out (GList * buffer_out, guint8 * test_data, gsize test_data_size)
{
  GstBuffer *buffer;

  buffer =
      gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, test_data,
      test_data_size, 0, test_data_size, NULL, NULL);
  buffer_out = g_list_append (buffer_out, buffer);

  return buffer_out;
}
Example #12
static GstStructure *
get_stereo_wave_buffer (gpointer user_data, guint wave_ix, guint wave_level_ix)
{
  static gint16 data[] = { G_MININT16, -1, 1, G_MAXINT16 };
  GstBuffer *buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      data, sizeof (data), 0, sizeof (data), NULL, NULL);

  return gst_structure_new ("audio/x-raw",
      "channels", G_TYPE_INT, 2,
      "root-note", GSTBT_TYPE_NOTE, (guint) GSTBT_NOTE_C_3,
      "buffer", GST_TYPE_BUFFER, buffer, NULL);
}
Example #13
static GstBuffer *
create_buffer (guint8 * data, gsize size,
    GstClockTime timestamp, GstClockTime duration)
{
  GstBuffer * buf = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
    data, size, 0, size, NULL, NULL);
  GST_BUFFER_PTS (buf) = timestamp;
  GST_BUFFER_DTS (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;
  GST_BUFFER_OFFSET (buf) = 0;
  GST_BUFFER_OFFSET_END (buf) = 0;
  return buf;
}
Example #14
static GList *
_make_buffers_in (GList * buffer_in, guint8 * test_data, gsize test_data_size)
{
  GstBuffer *buffer;
  gsize i;

  for (i = 0; i < test_data_size; i++) {
    buffer =
        gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, test_data + i, 1,
        0, 1, NULL, NULL);
    buffer_in = g_list_append (buffer_in, buffer);
  }
  return buffer_in;
}
Example #15
static gboolean
gst_validate_ssim_get_frame_from_png (GstValidateSsim * self, const char *file,
    GstVideoFrame * frame)
{
  guint8 *data;
  GstBuffer *buf;
  GstVideoInfo info;
  cairo_surface_t *surface = NULL;

  surface = cairo_image_surface_create_from_png (file);
  if (surface == NULL
      || (cairo_surface_status (surface) != CAIRO_STATUS_SUCCESS)) {
    GST_VALIDATE_REPORT (self, GENERAL_INPUT_ERROR, "Could not open %s: %s",
        file, cairo_status_to_string (cairo_surface_status (surface)));

    return FALSE;
  }

  gst_video_info_init (&info);
  gst_video_info_set_format (&info,
      _get_format_from_surface (surface),
      cairo_image_surface_get_width (surface),
      cairo_image_surface_get_height (surface));

  data = cairo_image_surface_get_data (surface);
  buf = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      data, info.size, 0, info.size, surface,
      (GDestroyNotify) cairo_surface_destroy);
  if (!gst_video_frame_map (frame, &info, buf, GST_MAP_READ)) {
    gst_buffer_unref (buf);
    GST_VALIDATE_REPORT (self, GENERAL_INPUT_ERROR,
        "Could not map input frame");

    return FALSE;
  }

  gst_buffer_unref (buf);

  return TRUE;
}
Example #16
static GstStructure *
get_wave_buffer (BtWavetable * self, guint wave_ix, guint wave_level_ix)
{
  BtWave *wave;
  BtWavelevel *wavelevel;
  GstStructure *s = NULL;

  if ((wave = bt_wavetable_get_wave_by_index (self, wave_ix))) {
    if ((wavelevel = bt_wave_get_level_by_index (wave, wave_level_ix))) {
      GstBuffer *buffer = NULL;
      gpointer *data;
      gulong length;
      guint channels;
      GstBtNote root_note;
      gsize size;

      g_object_get (wave, "channels", &channels, NULL);
      g_object_get (wavelevel, "data", &data, "length", &length, "root-note",
          &root_note, NULL);

      size = channels * length * sizeof (gint16);
      buffer =
          gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, data, size, 0,
          size, NULL, NULL);

      s = gst_structure_new ("audio/x-raw",     // unused
          "format", G_TYPE_STRING, GST_AUDIO_NE (S16),  // unused
          "layout", G_TYPE_STRING, "interleaved",       // unused
          "rate", G_TYPE_INT, 44100,    // unused
          "channels", G_TYPE_INT, channels,
          "root-note", GSTBT_TYPE_NOTE, (guint) root_note,
          "buffer", GST_TYPE_BUFFER, buffer, NULL);

      g_object_unref (wavelevel);
    }
    g_object_unref (wave);
  }
  return s;
}
Example #17
void ofxGstRTPServer::newOscMsg(ofxOscMessage & msg, GstClockTime timestamp){
	if(!appSrcOsc) return;

	GstClockTime now = timestamp;
	if(!oscAutoTimestamp){
		if(now==GST_CLOCK_TIME_NONE){
			now = getTimeStamp();
		}

		if(firstOscFrame){
			prevTimestampOsc = now;
			firstOscFrame = false;
			return;
		}
	}

	PooledOscPacket * pooledOscPkg = oscPacketPool.newBuffer();
	appendMessage(msg,pooledOscPkg->packet);

	GstBuffer * buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,(void*)pooledOscPkg->compressedData(),pooledOscPkg->compressedSize(),0,pooledOscPkg->compressedSize(),pooledOscPkg,(GDestroyNotify)&ofxOscPacketPool::relaseBuffer);


	if(!oscAutoTimestamp){
		GST_BUFFER_OFFSET(buffer) = numFrameOsc++;
		GST_BUFFER_OFFSET_END(buffer) = numFrameOsc;
		GST_BUFFER_DTS (buffer) = now;
		GST_BUFFER_PTS (buffer) = now;
		GST_BUFFER_DURATION(buffer) = now-prevTimestampOsc;
		prevTimestampOsc = now;
	}

	GstFlowReturn flow_return = gst_app_src_push_buffer((GstAppSrc*)appSrcOsc, buffer);
	if (flow_return != GST_FLOW_OK) {
		ofLogError() << "error pushing osc buffer: flow_return was " << flow_return;
	}
}
Example #18
static GstFlowReturn
gst_ffmpegaudenc_encode_audio (GstFFMpegAudEnc * ffmpegaudenc,
                               GstBuffer * buffer, gint * have_data)
{
    GstAudioEncoder *enc;
    AVCodecContext *ctx;
    gint res;
    GstFlowReturn ret;
    GstAudioInfo *info;
    AVPacket *pkt;
    AVFrame *frame = ffmpegaudenc->frame;
    gboolean planar;
    gint nsamples = -1;

    enc = GST_AUDIO_ENCODER (ffmpegaudenc);

    ctx = ffmpegaudenc->context;

    pkt = g_slice_new0 (AVPacket);

    if (buffer != NULL) {
        BufferInfo *buffer_info = g_slice_new0 (BufferInfo);
        guint8 *audio_in;
        guint in_size;

        buffer_info->buffer = buffer;
        gst_buffer_map (buffer, &buffer_info->map, GST_MAP_READ);
        audio_in = buffer_info->map.data;
        in_size = buffer_info->map.size;

        GST_LOG_OBJECT (ffmpegaudenc, "encoding buffer %p size:%u", audio_in,
                        in_size);

        info = gst_audio_encoder_get_audio_info (enc);
        planar = av_sample_fmt_is_planar (ffmpegaudenc->context->sample_fmt);
        frame->format = ffmpegaudenc->context->sample_fmt;
        frame->sample_rate = ffmpegaudenc->context->sample_rate;
        frame->channels = ffmpegaudenc->context->channels;
        frame->channel_layout = ffmpegaudenc->context->channel_layout;

        if (planar && info->channels > 1) {
            gint channels;
            gint i, j;

            nsamples = frame->nb_samples = in_size / info->bpf;
            channels = info->channels;

            frame->buf[0] =
                av_buffer_create (NULL, 0, buffer_info_free, buffer_info, 0);

            if (info->channels > AV_NUM_DATA_POINTERS) {
                buffer_info->ext_data_array = frame->extended_data =
                                                  av_malloc_array (info->channels, sizeof (uint8_t *));
            } else {
                frame->extended_data = frame->data;
            }

            buffer_info->ext_data = frame->extended_data[0] = av_malloc (in_size);
            frame->linesize[0] = in_size / channels;
            for (i = 1; i < channels; i++)
                frame->extended_data[i] =
                    frame->extended_data[i - 1] + frame->linesize[0];

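            /* Interleaved input stores sample i of channel j at
             * idata[i * channels + j]; the loops below copy it into plane j
             * (frame->extended_data[j][i]), one case per sample width. */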
            switch (info->finfo->width) {
            case 8: {
                const guint8 *idata = (const guint8 *) audio_in;

                for (i = 0; i < nsamples; i++) {
                    for (j = 0; j < channels; j++) {
                        ((guint8 *) frame->extended_data[j])[i] = idata[j];
                    }
                    idata += channels;
                }
                break;
            }
            case 16: {
                const guint16 *idata = (const guint16 *) audio_in;

                for (i = 0; i < nsamples; i++) {
                    for (j = 0; j < channels; j++) {
                        ((guint16 *) frame->extended_data[j])[i] = idata[j];
                    }
                    idata += channels;
                }
                break;
            }
            case 32: {
                const guint32 *idata = (const guint32 *) audio_in;

                for (i = 0; i < nsamples; i++) {
                    for (j = 0; j < channels; j++) {
                        ((guint32 *) frame->extended_data[j])[i] = idata[j];
                    }
                    idata += channels;
                }

                break;
            }
            case 64: {
                const guint64 *idata = (const guint64 *) audio_in;

                for (i = 0; i < nsamples; i++) {
                    for (j = 0; j < channels; j++) {
                        ((guint64 *) frame->extended_data[j])[i] = idata[j];
                    }
                    idata += channels;
                }

                break;
            }
            default:
                g_assert_not_reached ();
                break;
            }

            gst_buffer_unmap (buffer, &buffer_info->map);
            gst_buffer_unref (buffer);
            buffer_info->buffer = NULL;
        } else {
            frame->data[0] = audio_in;
            frame->extended_data = frame->data;
            frame->linesize[0] = in_size;
            frame->nb_samples = nsamples = in_size / info->bpf;
            frame->buf[0] =
                av_buffer_create (NULL, 0, buffer_info_free, buffer_info, 0);
        }

        /* we have a frame to feed the encoder */
        res = avcodec_encode_audio2 (ctx, pkt, frame, have_data);

        av_frame_unref (frame);
    } else {
        GST_LOG_OBJECT (ffmpegaudenc, "draining");
        /* flushing the encoder */
        res = avcodec_encode_audio2 (ctx, pkt, NULL, have_data);
    }

    if (res < 0) {
        char error_str[128] = { 0, };

        g_slice_free (AVPacket, pkt);
        av_strerror (res, error_str, sizeof (error_str));
        GST_ERROR_OBJECT (enc, "Failed to encode buffer: %d - %s", res, error_str);
        return GST_FLOW_OK;
    }
    GST_LOG_OBJECT (ffmpegaudenc, "got output size %d", res);

    if (*have_data) {
        GstBuffer *outbuf;
        const AVCodec *codec;

        GST_LOG_OBJECT (ffmpegaudenc, "pushing size %d", pkt->size);

        outbuf =
            gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, pkt->data,
                                         pkt->size, 0, pkt->size, pkt, gst_ffmpegaudenc_free_avpacket);

        codec = ffmpegaudenc->context->codec;
        if ((codec->capabilities & CODEC_CAP_VARIABLE_FRAME_SIZE) || !buffer) {
            /* FIXME: Not really correct, as -1 means "all the samples we got
               given so far", which may not be true depending on the codec,
               but we have no way to know AFAICT */
            ret = gst_audio_encoder_finish_frame (enc, outbuf, -1);
        } else {
            ret = gst_audio_encoder_finish_frame (enc, outbuf, nsamples);
        }
    } else {
        GST_LOG_OBJECT (ffmpegaudenc, "no output produced");
        g_slice_free (AVPacket, pkt);
        ret = GST_FLOW_OK;
    }

    return ret;
}
Example #19
static GstFlowReturn
gst_decklink_audio_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkAudioSrc *self = GST_DECKLINK_AUDIO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  glong sample_count;
  gsize data_size;
  CapturePacket *p;
  AudioPacket *ap;
  GstClockTime timestamp, duration;
  GstClockTime start_time, end_time;
  guint64 start_offset, end_offset;
  gboolean discont = FALSE;

  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_packets) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  p = (CapturePacket *) g_queue_pop_head (&self->current_packets);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (p)
      capture_packet_free (p);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }

  p->packet->GetBytes ((gpointer *) & data);
  sample_count = p->packet->GetSampleFrameCount ();
  data_size = self->info.bpf * sample_count;

  ap = (AudioPacket *) g_malloc0 (sizeof (AudioPacket));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, ap,
      (GDestroyNotify) audio_packet_free);

  ap->packet = p->packet;
  p->packet->AddRef ();
  ap->input = self->input->input;
  ap->input->AddRef ();

  timestamp = p->capture_time;

  // Jitter and discontinuity handling, based on audiobasesrc
  start_time = timestamp;

  // Convert to the sample numbers
  start_offset =
      gst_util_uint64_scale (start_time, self->info.rate, GST_SECOND);

  end_offset = start_offset + sample_count;
  end_time = gst_util_uint64_scale_int (end_offset, GST_SECOND,
      self->info.rate);

  duration = end_time - start_time;

  if (self->next_offset == (guint64) - 1) {
    discont = TRUE;
  } else {
    guint64 diff, max_sample_diff;

    // Check discont
    if (start_offset <= self->next_offset)
      diff = self->next_offset - start_offset;
    else
      diff = start_offset - self->next_offset;

    max_sample_diff =
        gst_util_uint64_scale_int (self->alignment_threshold, self->info.rate,
        GST_SECOND);

    // Discont!
    if (G_UNLIKELY (diff >= max_sample_diff)) {
      if (self->discont_wait > 0) {
        if (self->discont_time == GST_CLOCK_TIME_NONE) {
          self->discont_time = start_time;
        } else if (start_time - self->discont_time >= self->discont_wait) {
          discont = TRUE;
          self->discont_time = GST_CLOCK_TIME_NONE;
        }
      } else {
        discont = TRUE;
      }
    } else if (G_UNLIKELY (self->discont_time != GST_CLOCK_TIME_NONE)) {
      // we have had a discont, but are now back on track!
      self->discont_time = GST_CLOCK_TIME_NONE;
    }
  }

  if (discont) {
    // Have discont, need resync and use the capture timestamps
    if (self->next_offset != (guint64) - 1)
      GST_INFO_OBJECT (self, "Have discont. Expected %"
          G_GUINT64_FORMAT ", got %" G_GUINT64_FORMAT,
          self->next_offset, start_offset);
    GST_BUFFER_FLAG_SET (*buffer, GST_BUFFER_FLAG_DISCONT);
    self->next_offset = end_offset;
  } else {
    // No discont, just keep counting
    self->discont_time = GST_CLOCK_TIME_NONE;
    timestamp =
        gst_util_uint64_scale (self->next_offset, GST_SECOND, self->info.rate);
    self->next_offset += sample_count;
    duration =
        gst_util_uint64_scale (self->next_offset, GST_SECOND,
        self->info.rate) - timestamp;
  }

  GST_BUFFER_TIMESTAMP (*buffer) = timestamp;
  GST_BUFFER_DURATION (*buffer) = duration;

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_packet_free (p);

  return flow_ret;
}
Example #20
static GstFlowReturn process_audio_packet(MpegTSDemuxer *demuxer, AVPacket *packet)
{
    GstFlowReturn result = GST_FLOW_OK;
    Stream *stream = &demuxer->audio;

    if (!same_stream(demuxer, stream, packet))
        return result;

    GstBuffer *buffer = NULL;
    GstEvent *newsegment_event = NULL;
    void *buffer_data = av_mallocz(packet->size);

    if (buffer_data != NULL)
    {
        memcpy(buffer_data, packet->data, packet->size);
        buffer = gst_buffer_new_wrapped_full(0, buffer_data, packet->size, 0, packet->size, buffer_data, &av_free);

        if (packet->pts != AV_NOPTS_VALUE)
        {
            if (demuxer->base_pts == GST_CLOCK_TIME_NONE)
            {
                demuxer->base_pts = PTS_TO_GSTTIME(packet->pts) + stream->offset_time;
            }

            gint64 time = PTS_TO_GSTTIME(packet->pts) + stream->offset_time - demuxer->base_pts;
            if (time < 0)
                time = 0;

            if (stream->last_time > 0 && time < (gint64) (stream->last_time - PTS_TO_GSTTIME(G_MAXUINT32)))
            {
                stream->offset_time += PTS_TO_GSTTIME(MAX_PTS + 1); // Wraparound occurred
                time = PTS_TO_GSTTIME(packet->pts) + stream->offset_time;
#ifdef VERBOSE_DEBUG_AUDIO
                g_print("[Audio wraparound] updating offset_time to %lld\n", stream->offset_time);
#endif
            }

#ifdef VERBOSE_DEBUG_AUDIO
            g_print("[Audio]: pts=%lld(%.4f) time=%lld (%.4f) offset_time=%lld last_time=%lld\n",
                    PTS_TO_GSTTIME(packet->pts), (double) PTS_TO_GSTTIME(packet->pts) / GST_SECOND,
                    time, (double) time / GST_SECOND, stream->offset_time, stream->last_time);
#endif

            stream->last_time = time;
            GST_BUFFER_TIMESTAMP(buffer) = time;
        }

        if (packet->duration != 0)
            GST_BUFFER_DURATION(buffer) = PTS_TO_GSTTIME(packet->duration);

        g_mutex_lock(&demuxer->lock);
        stream->segment.position = GST_BUFFER_TIMESTAMP(buffer);

        if (stream->discont)
        {
            GstSegment newsegment;
            gst_segment_init(&newsegment, GST_FORMAT_TIME);
            newsegment.flags = stream->segment.flags;
            newsegment.rate = stream->segment.rate;
            newsegment.start = stream->segment.time;
            newsegment.stop = stream->segment.stop;
            newsegment.time = stream->segment.time;
            newsegment.position = stream->segment.position;
            newsegment_event = gst_event_new_segment(&newsegment);

            GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DISCONT);
            stream->discont = FALSE;

#ifdef DEBUG_OUTPUT
            g_print("MpegTS: [Audio] NEWSEGMENT: last_stop = %.4f\n", (double) stream->segment.last_stop / GST_SECOND);
#endif
        }
        g_mutex_unlock(&demuxer->lock);
    } else
        result = GST_FLOW_ERROR;

    if (newsegment_event)
        result = gst_pad_push_event(stream->sourcepad, newsegment_event) ? GST_FLOW_OK : GST_FLOW_FLUSHING;

    if (result == GST_FLOW_OK)
        result = gst_pad_push(stream->sourcepad, buffer);
    else
        gst_buffer_unref(buffer);

#ifdef VERBOSE_DEBUG_AUDIO
    if (result != GST_FLOW_OK)
        g_print("MpegTS: Audio push failed: %s\n", gst_flow_get_name(result));
#endif
    return result;
}
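
PTS_TO_GSTTIME and MAX_PTS are project macros the snippet does not define. MPEG-TS carries PTS as a 33-bit counter in 90 kHz units, so plausible definitions (assumptions) are:

#define MAX_PTS G_GUINT64_CONSTANT (0x1FFFFFFFF)  /* 2^33 - 1, in 90 kHz ticks */
#define PTS_TO_GSTTIME(pts) gst_util_uint64_scale ((pts), GST_SECOND, 90000)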
Example #21
/*
 * Runs the RTP pipeline.
 * @param p Pointer to the RTP pipeline.
 */
static void
rtp_pipeline_run (rtp_pipeline * p)
{
  GstFlowReturn flow_ret;
  GMainLoop *mainloop = NULL;
  GstBus *bus;
  gint i, j;

  /* Check parameters. */
  if (p == NULL) {
    return;
  }

  /* Create mainloop. */
  mainloop = g_main_loop_new (NULL, FALSE);
  if (!mainloop) {
    return;
  }

  /* Add bus callback. */
  bus = gst_pipeline_get_bus (GST_PIPELINE (p->pipeline));

  gst_bus_add_watch (bus, rtp_bus_callback, (gpointer) mainloop);
  gst_object_unref (bus);

  /* Set pipeline to PLAYING. */
  gst_element_set_state (p->pipeline, GST_STATE_PLAYING);

  /* Push custom event into the pipeline */
  if (p->custom_event) {
    GstPad *srcpad;

    /* Install a probe to drop the event after it being serialized */
    srcpad = gst_element_get_static_pad (p->rtppay, "src");
    gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
        pay_event_probe_cb, p, NULL);
    gst_object_unref (srcpad);

    /* Install a probe to trace the deserialized event after depayloading */
    srcpad = gst_element_get_static_pad (p->rtpdepay, "src");
    gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
        depay_event_probe_cb, p, NULL);
    gst_object_unref (srcpad);
    /* Send the event */
    gst_element_send_event (p->appsrc, gst_event_ref (p->custom_event));
  }

  /* Push data into the pipeline */
  for (i = 0; i < LOOP_COUNT; i++) {
    const guint8 *data = p->frame_data;

    for (j = 0; j < p->frame_count; j++) {
      GstBuffer *buf;

      buf =
          gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
          (guint8 *) data, p->frame_data_size, 0, p->frame_data_size, NULL,
          NULL);

      g_signal_emit_by_name (p->appsrc, "push-buffer", buf, &flow_ret);
      fail_unless_equals_int (flow_ret, GST_FLOW_OK);
      data += p->frame_data_size;

      gst_buffer_unref (buf);
    }
  }

  g_signal_emit_by_name (p->appsrc, "end-of-stream", &flow_ret);

  /* Run mainloop. */
  g_main_loop_run (mainloop);

  /* Set pipeline to NULL. */
  gst_element_set_state (p->pipeline, GST_STATE_NULL);

  /* Release mainloop. */
  g_main_loop_unref (mainloop);

  fail_if (p->custom_event);
}
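
The two probe callbacks are not included here. A minimal sketch of an event probe matching the gst_pad_add_probe() calls above (an assumption, not the original test):

static GstPadProbeReturn
pay_event_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);

  /* Drop the serialized custom event once it has passed the payloader so
   * only the depayloader's reconstructed copy reaches downstream. */
  if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_DOWNSTREAM)
    return GST_PAD_PROBE_DROP;
  return GST_PAD_PROBE_OK;
}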
Example #22
static gpointer
decoder_thread (gpointer data)
{
  App *const app = data;
  GError *error = NULL;
  GstVaapiDecoderStatus status;
  GstVaapiSurfaceProxy *proxy;
  RenderFrame *rfp;
  GstBuffer *buffer;
  GstClockTime pts;
  gboolean got_surface, got_eos = FALSE;
  gint64 end_time;
  guint ofs;

  g_print ("Decoder thread started\n");

#define SEND_ERROR(...)                                                 \
    do {                                                                \
        error = g_error_new(APP_ERROR, APP_ERROR_DECODER, __VA_ARGS__); \
        goto send_error;                                                \
    } while (0)

  pts = g_get_monotonic_time ();
  ofs = 0;
  while (!app->decoder_thread_cancel) {
    if (G_UNLIKELY (ofs == app->file_size))
      buffer = NULL;
    else {
      const gsize size = MIN (4096, app->file_size - ofs);
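      /* The whole file is the backing memory (maxsize = app->file_size);
       * the offset/size arguments select a copy-free 4 KiB window into it. */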
      buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
          app->file_data, app->file_size, ofs, size, NULL, NULL);
      if (!buffer)
        SEND_ERROR ("failed to allocate new buffer");
      ofs += size;
    }
    if (!gst_vaapi_decoder_put_buffer (app->decoder, buffer))
      SEND_ERROR ("failed to push buffer to decoder");
    gst_buffer_replace (&buffer, NULL);

  get_surface:
    status = gst_vaapi_decoder_get_surface (app->decoder, &proxy);
    switch (status) {
      case GST_VAAPI_DECODER_STATUS_SUCCESS:
        gst_vaapi_surface_proxy_set_destroy_notify (proxy,
            (GDestroyNotify) decoder_release, app);
        rfp = render_frame_new ();
        if (!rfp)
          SEND_ERROR ("failed to allocate render frame");
        rfp->proxy = proxy;
        rfp->pts = pts;
        rfp->duration = app->frame_duration;
        pts += app->frame_duration;
        g_async_queue_push (app->decoder_queue, rfp);
        break;
      case GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA:
        /* nothing to do, just continue to the next iteration */
        break;
      case GST_VAAPI_DECODER_STATUS_END_OF_STREAM:
        gst_vaapi_decoder_flush (app->decoder);
        if (got_eos)
          goto send_eos;
        got_eos = TRUE;
        break;
      case GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE:
        end_time = g_get_monotonic_time () + G_TIME_SPAN_SECOND;
        g_mutex_lock (&app->mutex);
        got_surface = g_cond_wait_until (&app->decoder_ready, &app->mutex,
            end_time);
        g_mutex_unlock (&app->mutex);
        if (got_surface)
          goto get_surface;
        SEND_ERROR ("failed to acquire a surface within one second");
        break;
      default:
        SEND_ERROR ("%s", get_decoder_status_string (status));
        break;
    }
  }
  return NULL;

#undef SEND_ERROR

send_eos:
  app_send_eos (app);
  return NULL;

send_error:
  app_send_error (app, error);
  return NULL;
}
Example #23
static void
byzanz_encoder_gstreamer_need_data (GstAppSrc *src, guint length, gpointer data)
{
  ByzanzEncoder *encoder = data;
  ByzanzEncoderGStreamer *gst = data;
  GstBuffer *buffer;
  cairo_t *cr;
  cairo_surface_t *surface;
  cairo_region_t *region;
  GError *error = NULL;
  guint64 msecs;
  int i, num_rects;

  if (!byzanz_deserialize (encoder->input_stream, &msecs, &surface, &region, encoder->cancellable, &error)) {
    gst_element_message_full (GST_ELEMENT (src), GST_MESSAGE_ERROR,
        error->domain, error->code, g_strdup (error->message), NULL, __FILE__, GST_FUNCTION, __LINE__);
    g_error_free (error);
    return;
  }

  if (surface == NULL) {
    gst_app_src_end_of_stream (gst->src);
    if (gst->audiosrc)
      gst_element_send_event (gst->audiosrc, gst_event_new_eos ());
    return;
  }

  if (cairo_surface_get_reference_count (gst->surface) > 1) {
    cairo_surface_t *copy = cairo_image_surface_create (CAIRO_FORMAT_RGB24,
        cairo_image_surface_get_width (gst->surface), cairo_image_surface_get_height (gst->surface));
    
    cr = cairo_create (copy);
    cairo_set_source_surface (cr, gst->surface, 0, 0);
    cairo_paint (cr);
    cairo_destroy (cr);

    cairo_surface_destroy (gst->surface);
    gst->surface = copy;
  }
  cr = cairo_create (gst->surface);
  cairo_set_source_surface (cr, surface, 0, 0);

  num_rects = cairo_region_num_rectangles (region);
  for (i = 0; i < num_rects; i++) {
    cairo_rectangle_int_t rect;
    cairo_region_get_rectangle (region, i, &rect);
    cairo_rectangle (cr, rect.x, rect.y,
                     rect.width, rect.height);
  }

  cairo_fill (cr);
  cairo_destroy (cr);

  /* create a buffer and send it */
  /* FIXME: stride just works? */
  cairo_surface_reference (gst->surface);
  buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
                                        cairo_image_surface_get_data (gst->surface),
                                        cairo_image_surface_get_stride (gst->surface) * cairo_image_surface_get_height (gst->surface),
                                        0,
                                        cairo_image_surface_get_stride (gst->surface) * cairo_image_surface_get_height (gst->surface),
                                        gst->surface,
                                        (GDestroyNotify) cairo_surface_destroy);
  GST_BUFFER_TIMESTAMP (buffer) = msecs * GST_MSECOND;
  gst_app_src_push_buffer (gst->src, buffer);
}
Example #24
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;
  gboolean caps_changed = FALSE;

  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }
  // If we're not flushing, we should have a valid frame from the queue
  g_assert (f != NULL);

  g_mutex_lock (&self->lock);
  if (self->caps_mode != f->mode) {
    if (self->mode == GST_DECKLINK_MODE_AUTO) {
      GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
          f->mode);
      caps_changed = TRUE;
      self->caps_mode = f->mode;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid mode in captured frame"),
          ("Mode set to %d but captured %d", self->caps_mode, f->mode));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }
  if (self->caps_format != f->format) {
    if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO) {
      GST_DEBUG_OBJECT (self, "Format changed from %d to %d", self->caps_format,
          f->format);
      caps_changed = TRUE;
      self->caps_format = f->format;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid pixel format in captured frame"),
          ("Format set to %d but captured %d", self->caps_format, f->format));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  g_mutex_unlock (&self->lock);
  if (caps_changed) {
    caps = gst_decklink_mode_get_caps (f->mode, f->format);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);

  }

  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;
  gst_buffer_add_video_time_code_meta (*buffer, f->tc);

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}
Example #25
static void
gst_decklink_src_task (void *priv)
{
  GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv);
  GstBuffer *buffer;
  GstBuffer *audio_buffer;
  IDeckLinkVideoInputFrame *video_frame;
  IDeckLinkAudioInputPacket *audio_frame;
  void *data;
  gsize data_size;
  int n_samples;
  GstFlowReturn ret;
  const GstDecklinkMode *mode;
  gboolean discont = FALSE;

  GST_DEBUG_OBJECT (decklinksrc, "task");

  g_mutex_lock (&decklinksrc->mutex);
  while (decklinksrc->video_frame == NULL && !decklinksrc->stop) {
    g_cond_wait (&decklinksrc->cond, &decklinksrc->mutex);
  }
  video_frame = decklinksrc->video_frame;
  audio_frame = decklinksrc->audio_frame;
  decklinksrc->video_frame = NULL;
  decklinksrc->audio_frame = NULL;
  g_mutex_unlock (&decklinksrc->mutex);

  if (decklinksrc->stop) {
    if (video_frame)
      video_frame->Release ();
    if (audio_frame)
      audio_frame->Release ();
    GST_DEBUG ("stopping task");
    return;
  }

  /* warning on dropped frames */
  /* FIXME: post QoS message */
  if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) {
    GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ,
        ("Dropped %d frame(s), for a total of %d frame(s)",
            decklinksrc->dropped_frames - decklinksrc->dropped_frames_old,
            decklinksrc->dropped_frames), (NULL));
    decklinksrc->dropped_frames_old = decklinksrc->dropped_frames;
    /* FIXME: discont = TRUE; ? */
  }

  if (!decklinksrc->started) {
    gst_decklink_src_send_initial_events (decklinksrc);
    decklinksrc->started = TRUE;
  }

  mode = gst_decklink_get_mode (decklinksrc->mode);

  video_frame->GetBytes (&data);

  data_size = mode->width * mode->height * 2;

  if (decklinksrc->copy_data) {
    buffer = gst_buffer_new_and_alloc (data_size);

    gst_buffer_fill (buffer, 0, data, data_size);

    video_frame->Release ();
  } else {
    VideoFrame *vf;

    vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

    buffer = gst_buffer_new_wrapped_full ((GstMemoryFlags) 0, data, data_size,
        0, data_size, vf, (GDestroyNotify) video_frame_free);

    vf->frame = video_frame;
    vf->input = decklinksrc->input;
    vf->input->AddRef ();
  }

  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND,
      mode->fps_d, mode->fps_n);
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND,
      mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num;
  GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num; /* FIXME: +1? */

  /* FIXME: set video meta */

  if (decklinksrc->frame_num == 0)
    discont = TRUE;

  if (discont)
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  else
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);

  /* FIXME: proper flow aggregation with audio flow */
  ret = gst_pad_push (decklinksrc->videosrcpad, buffer);
  if (!(ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED ||
          ret == GST_FLOW_FLUSHING)) {
    GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED,
        ("Internal data stream error."),
        ("stream stopped, reason %s", gst_flow_get_name (ret)));
      goto pause;
  }

  if (gst_pad_is_linked (decklinksrc->audiosrcpad)) {
    n_samples = audio_frame->GetSampleFrameCount ();
    audio_frame->GetBytes (&data);
    audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2);
    gst_buffer_fill (audio_buffer, 0, data, n_samples * 2 * 2);

    GST_BUFFER_TIMESTAMP (audio_buffer) =
        gst_util_uint64_scale_int (decklinksrc->num_audio_samples * GST_SECOND,
        1, 48000);
    /* FIXME: should be next_timestamp - timestamp for perfect stream */
    GST_BUFFER_DURATION (audio_buffer) =
        gst_util_uint64_scale_int (n_samples * GST_SECOND, 1, 48000);
    GST_BUFFER_OFFSET (audio_buffer) = decklinksrc->num_audio_samples;
    GST_BUFFER_OFFSET_END (audio_buffer) =
        GST_BUFFER_OFFSET (audio_buffer) + n_samples;

    decklinksrc->num_audio_samples += n_samples;

  /* FIXME: proper flow aggregation with video flow */
    ret = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer);
    if (!(ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED ||
            ret == GST_FLOW_FLUSHING)) {
      GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED,
          ("Internal data stream error."),
          ("stream stopped, reason %s", gst_flow_get_name (ret)));
      goto pause;
    }
  }

done:

  if (audio_frame)
    audio_frame->Release ();

  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (ret);
    GstEvent *event = NULL;

    GST_DEBUG_OBJECT (decklinksrc, "pausing task, reason %s", reason);
    gst_task_pause (decklinksrc->task);
    if (ret == GST_FLOW_EOS) {
      /* perform EOS logic (very crude, we don't even keep a GstSegment) */
      event = gst_event_new_eos ();
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      event = gst_event_new_eos ();
      /* for fatal errors we post an error message, post the error
       * first so the app knows about the error first.
       * Also don't do this for FLUSHING because it happens
       * due to flushing and posting an error message because of
       * that is the wrong thing to do, e.g. when we're doing
       * a flushing seek. */
      GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED,
          ("Internal data flow error."),
          ("streaming task paused, reason %s (%d)", reason, ret));
    }
    if (event != NULL) {
      GST_INFO_OBJECT (decklinksrc->videosrcpad, "pushing EOS event");
      gst_pad_push_event (decklinksrc->videosrcpad, gst_event_ref (event));
      GST_INFO_OBJECT (decklinksrc->audiosrcpad, "pushing EOS event");
      gst_pad_push_event (decklinksrc->audiosrcpad, event);
    }
    goto done;
  }
}
Example #26
/* Ideas from gstjpegparse.c */
GstTagList *
gst_droidcamsrc_exif_tags_from_jpeg_data (void *data, size_t size)
{
  GstByteReader reader;
  guint16 len = 0;
  const gchar *id;
  const guint8 *exif = NULL;
  GstBuffer *buff;
  GstTagList *tags = NULL;

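  /* `marker` is a file-scope constant not shown in this snippet; given the
   * APP1 parsing below it is presumably the marker bytes { 0xff, 0xe1 }. */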
  void *app1 = memmem (data, size, marker, 2);
  if (!app1) {
    GST_ERROR ("No tags found");
    goto out;
  }

  size -= (app1 - data);

  gst_byte_reader_init (&reader, app1, size);

  if (!gst_byte_reader_skip (&reader, 2)) {
    GST_ERROR ("Not enough jpeg data for tags");
    goto out;
  }

  if (!gst_byte_reader_get_uint16_be (&reader, &len)) {
    GST_ERROR ("Failed to get APP1 size");
    goto out;
  }

  len -= 2;                     /* the length field includes its own two bytes */

  if (!gst_byte_reader_peek_string_utf8 (&reader, &id)) {
    goto out;
  }

  if (!strncmp (id, "Exif", 4)) {
    /* id + NUL + padding */
    if (!gst_byte_reader_skip (&reader, 6)) {
      goto out;
    }

    len -= 6;

    if (!gst_byte_reader_get_data (&reader, len, &exif)) {
      goto out;
    }

    buff =
        gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, (gpointer) exif,
        len, 0, len, NULL, NULL);

    tags = gst_tag_list_from_exif_buffer_with_tiff_header (buff);
    gst_buffer_unref (buff);

    return tags;
  }

out:
  return NULL;
}
Example #27
GstTagList *
gst_droidcamsrc_exif_tags_from_jpeg_data (void *data, size_t size)
{
  GstTagList *tags = NULL;
  ExifMem *mem = exif_mem_new (g_malloc0, g_realloc, g_free);
  ExifData *exif = exif_data_new_mem (mem);
  unsigned char *exif_data = NULL;
  void *_exif_data = NULL;
  unsigned int exif_data_size = 0;
  GstBuffer *buffer;
  ExifEntry *iso;
  int x, i;

  exif_data_load_data (exif, data, size);
  exif_data_set_data_type (exif, EXIF_DATA_TYPE_COMPRESSED);

  exif_data_save_data (exif, &exif_data, &exif_data_size);
  if (!exif_data_size) {
    goto out;
  }

  if (exif_data_size <= 6) {
    goto out;
  }

  /* dump the data. based on libexif code */
  for (x = 0; x < EXIF_IFD_COUNT; x++) {
    if (exif->ifd[x] && exif->ifd[x]->count) {
      for (i = 0; i < exif->ifd[x]->count; i++) {
        char val[1024];
        ExifEntry *e = exif->ifd[x]->entries[i];
        GST_LOG ("Exif IFD: %s. Tag 0x%x (%s) = %s", exif_ifd_get_name (x),
            e->tag, exif_tag_get_name_in_ifd (e->tag, exif_entry_get_ifd (e)),
            exif_entry_get_value (e, val, sizeof (val)));
      }
    }
  }

  _exif_data = exif_data;

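  /* Skip the 6-byte "Exif\0\0" identifier so the buffer starts at the TIFF
   * header, which gst_tag_list_from_exif_buffer_with_tiff_header() expects. */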
  exif_data += 6;
  exif_data_size -= 6;

  buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      exif_data, exif_data_size, 0, exif_data_size, NULL, NULL);
  tags = gst_tag_list_from_exif_buffer_with_tiff_header (buffer);
  gst_buffer_unref (buffer);

  /* We don't want these tags */
  gst_tag_list_remove_tag (tags, GST_TAG_DEVICE_MANUFACTURER);
  gst_tag_list_remove_tag (tags, GST_TAG_DEVICE_MODEL);
  gst_tag_list_remove_tag (tags, GST_TAG_APPLICATION_NAME);
  gst_tag_list_remove_tag (tags, GST_TAG_DATE_TIME);

  /* we have a mess with ISO so we will just behave as N9 */
  iso = exif_content_get_entry (exif->ifd[EXIF_IFD_EXIF],
      EXIF_TAG_ISO_SPEED_RATINGS);

  if (iso) {
#ifdef __arm__
    guint16 val = exif_get_short (iso->data, EXIF_BYTE_ORDER_MOTOROLA);
#else
    guint16 val = exif_get_short (iso->data, EXIF_BYTE_ORDER_INTEL);
#endif
    gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_CAPTURING_ISO_SPEED, val, NULL);
  }

  /* TODO: the following are being dropped
   *
   * 0x213  EXIF_TAG_YCBCR_POSITIONING
   * 0x9004 EXIF_TAG_DATE_TIME_DIGITIZED
   * 0x9101 EXIF_TAG_COMPONENTS_CONFIGURATION
   * 0xa001 EXIF_TAG_COLOR_SPACE
   * 0xa002 EXIF_TAG_PIXEL_X_DIMENSION
   * 0xa003 EXIF_TAG_PIXEL_Y_DIMENSION
   * 0xa005 EXIF_TAG_INTEROPERABILITY_IFD_POINTER
   * thumbnail.
   * 0x100 EXIF_TAG_IMAGE_WIDTH
   * 0x101 EXIF_TAG_IMAGE_LENGTH
   * 0x9203 EXIF_TAG_BRIGHTNESS_VALUE
   * 0x9205 EXIF_TAG_MAX_APERTURE_VALUE
   * 0x9206 EXIF_TAG_SUBJECT_DISTANCE
   * 0x9208 EXIF_TAG_LIGHT_SOURCE
   * 0x9286 EXIF_TAG_USER_COMMENT
   */
out:
  if (_exif_data) {
    exif_mem_free (mem, _exif_data);
  }

  if (exif) {
    exif_data_free (exif);
  }

  exif_mem_unref (mem);

  return tags;
}
Example #28
static GstFlowReturn
gst_aravis_create (GstPushSrc * push_src, GstBuffer ** buffer)
{
	GstAravis *gst_aravis;
	ArvBuffer *arv_buffer;
	int arv_row_stride;

	gst_aravis = GST_ARAVIS (push_src);

	do {
		arv_buffer = arv_stream_timeout_pop_buffer (gst_aravis->stream, gst_aravis->buffer_timeout_us);
		if (arv_buffer != NULL && arv_buffer->status != ARV_BUFFER_STATUS_SUCCESS)
			arv_stream_push_buffer (gst_aravis->stream, arv_buffer);
	} while (arv_buffer != NULL && arv_buffer->status != ARV_BUFFER_STATUS_SUCCESS);

	if (arv_buffer == NULL)
		return GST_FLOW_ERROR;

	arv_row_stride = arv_buffer->width * ARV_PIXEL_FORMAT_BIT_PER_PIXEL (arv_buffer->pixel_format) / 8;

	/* Gstreamer requires row stride to be a multiple of 4 */
	if ((arv_row_stride & 0x3) != 0) {
		int gst_row_stride;
		size_t size;
		void *data;
		int i;

		gst_row_stride = (arv_row_stride & ~(0x3)) + 4;

		size = arv_buffer->height * gst_row_stride;
		data = g_malloc (size);

		for (i = 0; i < arv_buffer->height; i++)
			memcpy (((char *) data) + i * gst_row_stride, ((char *) arv_buffer->data) + i * arv_row_stride, arv_row_stride);

		*buffer = gst_buffer_new_wrapped (data, size);
	} else {
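		/* Note: with a NULL destroy-notify the GstBuffer does not own
		 * arv_buffer->data; the ArvBuffer pushed back to the stream below
		 * must therefore not be overwritten before downstream is done. */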
		*buffer = gst_buffer_new_wrapped_full (0,
			arv_buffer->data,
			arv_buffer->size,
			0,
			arv_buffer->size,
			NULL,
			NULL);
	}

	if (!gst_base_src_get_do_timestamp(GST_BASE_SRC(push_src))) {
		if (gst_aravis->timestamp_offset == 0) {
			gst_aravis->timestamp_offset = arv_buffer->timestamp_ns;
			gst_aravis->last_timestamp = arv_buffer->timestamp_ns;
		}

		GST_BUFFER_PTS (*buffer) = arv_buffer->timestamp_ns - gst_aravis->timestamp_offset;
		GST_BUFFER_DURATION (*buffer) = arv_buffer->timestamp_ns - gst_aravis->last_timestamp;

		gst_aravis->last_timestamp = arv_buffer->timestamp_ns;
	}

	arv_stream_push_buffer (gst_aravis->stream, arv_buffer);

	return GST_FLOW_OK;
}
Example #29
static GstFlowReturn
gst_shmdata_src_create (GstPushSrc *psrc, GstBuffer **outbuf)
{
  GstShmdataSrc *self = GST_SHMDATA_SRC (psrc);

  if (self->unlocked) {
    return GST_FLOW_FLUSHING;
  }

  g_mutex_lock (&self->on_data_mutex);
  while (!self->on_data && !self->unlocked)
    g_cond_wait_until (&self->on_data_cond,
                       &self->on_data_mutex,
                       g_get_monotonic_time () + 10 * G_TIME_SPAN_MILLISECOND);
  if (self->unlocked) {
    self->on_data = FALSE;
    g_mutex_unlock (&self->on_data_mutex);
    gst_shmdata_src_make_data_rendered(self);
    return GST_FLOW_FLUSHING;
  }
  if (FALSE == self->on_data) {
    g_mutex_unlock (&self->on_data_mutex);
    return GST_FLOW_FLUSHING;
  }
  self->on_data = FALSE;

  if (self->has_new_caps &&
      (GST_STATE_PAUSED == GST_STATE(self) || GST_STATE_PLAYING == GST_STATE(self))) {
    self->has_new_caps = FALSE;
    g_object_notify(G_OBJECT(self), "caps");
    GstPad *pad = gst_element_get_static_pad (GST_ELEMENT(self),"src");
    if(!gst_pad_set_caps (pad, self->caps)) {
      GST_ELEMENT_ERROR (GST_ELEMENT(self), CORE, NEGOTIATION, (NULL),
                         ("caps fix caps from shmdata type description"));
      return GST_FLOW_ERROR;
    }
    gst_object_unref(pad);
  }
  if(!self->copy_buffers){
    *outbuf = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
                                           self->current_data,
                                           self->current_size,
                                           0,
                                           self->current_size,
                                           self,
                                           gst_shmdata_src_on_data_rendered);
  } else {
    void *data = g_malloc (self->current_size);  /* paired with the g_free notify below */
    memcpy (data, self->current_data, self->current_size);
    *outbuf = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
                                           data,
                                           self->current_size,
                                           0,
                                           self->current_size,
                                           data,  // user_data
                                           g_free);
    //*outbuf = gst_buffer_copy_deep (tmp);  // not available with earlier gst 1.0
    gst_shmdata_src_on_data_rendered(self);
    //gst_buffer_unref(tmp);
  }

  if (self->is_first_read) {
    gst_shmdata_src_make_data_rendered(self);
    self->is_first_read = FALSE;
  }
  g_mutex_unlock (&self->on_data_mutex);

  return GST_FLOW_OK;
}
Example #30
/* Takes ownership of pixbuf; call with OBJECT_LOCK */
static void
gst_gdk_pixbuf_overlay_set_pixbuf (GstGdkPixbufOverlay * overlay,
    GdkPixbuf * pixbuf)
{
  GstVideoMeta *video_meta;
  guint8 *pixels, *p;
  gint width, height, stride, w, h, plane;

  if (!gdk_pixbuf_get_has_alpha (pixbuf)) {
    GdkPixbuf *alpha_pixbuf;

    /* FIXME: we could do this much more efficiently ourselves below, but
     * we're lazy for now */
    /* FIXME: perhaps expose substitute_color via properties */
    alpha_pixbuf = gdk_pixbuf_add_alpha (pixbuf, FALSE, 0, 0, 0);
    g_object_unref (pixbuf);
    pixbuf = alpha_pixbuf;
  }

  width = gdk_pixbuf_get_width (pixbuf);
  height = gdk_pixbuf_get_height (pixbuf);
  stride = gdk_pixbuf_get_rowstride (pixbuf);
  pixels = gdk_pixbuf_get_pixels (pixbuf);

  /* the memory layout in GdkPixbuf is R-G-B-A, we want:
   *  - B-G-R-A on little-endian platforms
   *  - A-R-G-B on big-endian platforms
   */
  for (h = 0; h < height; ++h) {
    p = pixels + (h * stride);
    for (w = 0; w < width; ++w) {
      guint8 tmp;

      /* R-G-B-A ==> B-G-R-A */
      tmp = p[0];
      p[0] = p[2];
      p[2] = tmp;

      if (G_BYTE_ORDER == G_BIG_ENDIAN) {
        /* B-G-R-A ==> A-R-G-B */
        /* we can probably assume sane alignment */
        *((guint32 *) p) = GUINT32_SWAP_LE_BE (*((guint32 *) p));
      }

      p += 4;
    }
  }

  /* assume we have row padding even for the last row */
  /* transfer ownership of pixbuf to the buffer */
  overlay->pixels = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
      pixels, height * stride, 0, height * stride, pixbuf,
      (GDestroyNotify) g_object_unref);

  video_meta = gst_buffer_add_video_meta (overlay->pixels,
      GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB,
      width, height);

  for (plane = 0; plane < video_meta->n_planes; ++plane)
    video_meta->stride[plane] = stride;

  overlay->update_composition = TRUE;

  GST_INFO_OBJECT (overlay, "Updated pixbuf, %d x %d", width, height);
}