Example #1
static void
gst_droidcamsrc_dev_buffers_released (void *user)
{
  GstDroidCamSrcDev *dev = (GstDroidCamSrcDev *) user;

  GST_FIXME_OBJECT (dev, "Not sure what to do here really");
}
static gboolean
do_seek (GstBaseSrc * bsrc, GstSegment * segment)
{
  gboolean reverse;
  GstClockTime position;
  GstMultiFileSrc *src;

  src = GST_MULTI_FILE_SRC (bsrc);

  segment->time = segment->start;
  position = segment->position;
  reverse = segment->rate < 0;

  if (reverse) {
    GST_FIXME_OBJECT (src, "Handle reverse playback");

    return FALSE;
  }

  /* now move to the position indicated */
  if (src->fps_n) {
    src->index = gst_util_uint64_scale (position,
        src->fps_n, src->fps_d * GST_SECOND);
  } else {
    src->index = 0;
    GST_WARNING_OBJECT (src, "No FPS set, can not seek");

    return FALSE;
  }

  return TRUE;
}
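The index computation in do_seek() is plain rational scaling of the seek position. A hedged worked example with hypothetical values, not taken from the element:

/* At 30000/1001 fps, a position of 2 s maps to frame
 * 2 * GST_SECOND * 30000 / (1001 * GST_SECOND) = 59.94 -> 59
 * (integer truncation). */
guint64 index = gst_util_uint64_scale (2 * GST_SECOND, 30000,
    1001 * (guint64) GST_SECOND);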
Example #3
void GSTGLAPI
_gst_gl_debug_callback (GLenum source, GLenum type, GLuint id, GLenum severity,
    GLsizei length, const gchar * message, gpointer user_data)
{
  GstGLContext *context = user_data;
  const gchar *severity_str = _debug_severity_to_string (severity);
  const gchar *source_str = _debug_source_to_string (source);
  const gchar *type_str = _debug_type_to_string (type);

  _init_debug ();

  switch (type) {
    case GL_DEBUG_TYPE_ERROR:
    case GL_DEBUG_TYPE_UNDEFINED_BEHAVIOUR:
      GST_ERROR_OBJECT (context, "%s: GL %s from %s id:%u, %s", severity_str,
          type_str, source_str, id, message);
      break;
    case GL_DEBUG_TYPE_DEPRECATED_BEHAVIOUR:
    case GL_DEBUG_TYPE_PORTABILITY:
      GST_FIXME_OBJECT (context, "%s: GL %s from %s id:%u, %s", severity_str,
          type_str, source_str, id, message);
      break;
    case GL_DEBUG_TYPE_PERFORMANCE:
      GST_CAT_DEBUG_OBJECT (gst_performance, context, "%s: GL %s from %s id:%u,"
          " %s", severity_str, type_str, source_str, id, message);
      break;
    default:
      GST_DEBUG_OBJECT (context, "%s: GL %s from %s id:%u, %s", severity_str,
          type_str, source_str, id, message);
      break;
  }
}
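For context, a callback with this signature is normally installed through the GL debug-output API. A minimal sketch, assuming a desktop GL 4.3 / KHR_debug context; GStreamer itself installs it through its GL function vtable rather than calling GL directly:

/* Hedged sketch, not from the surrounding file: enable debug output and
 * register the callback above with the context as user data. */
glEnable (GL_DEBUG_OUTPUT);
glEnable (GL_DEBUG_OUTPUT_SYNCHRONOUS);
glDebugMessageCallback ((GLDEBUGPROC) _gst_gl_debug_callback, context);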
Example #4
static void
gst_ivtc_flush (GstIvtc * ivtc)
{
  if (ivtc->n_fields > 0) {
    GST_FIXME_OBJECT (ivtc, "not sending flushed fields to srcpad");
  }

  gst_ivtc_retire_fields (ivtc, ivtc->n_fields);
}
Example #5
/**
 * gst_gl_shader_detach_unlocked:
 * @shader: a #GstGLShader
 * @stage: a #GstGLSLStage to detach
 *
 * Detaches @stage from @shader.  @stage must have been successfully attached
 * to @shader with gst_gl_shader_attach() or gst_gl_shader_attach_unlocked().
 *
 * Note: must be called in the GL thread
 *
 * Since: 1.8
 */
void
gst_gl_shader_detach_unlocked (GstGLShader * shader, GstGLSLStage * stage)
{
  guint stage_handle;
  GList *elem;

  g_return_if_fail (GST_IS_GL_SHADER (shader));
  g_return_if_fail (GST_IS_GLSL_STAGE (stage));

  if (!_gst_glsl_funcs_fill (&shader->priv->vtable, shader->context)) {
    GST_WARNING_OBJECT (shader, "Failed to retrieve required GLSL functions");
    return;
  }

  if (!shader->priv->program_handle)
    return;

  elem = g_list_find (shader->priv->stages, stage);
  if (!elem) {
    GST_FIXME_OBJECT (shader, "Could not find stage %p in shader %p", stage,
        shader);
    return;
  }

  stage_handle = gst_glsl_stage_get_handle (stage);
  if (!stage_handle) {
    GST_FIXME_OBJECT (shader, "Stage %p doesn't have a GL handle", stage);
    return;
  }

  if (shader->context->gl_vtable->IsProgram)
    g_assert (shader->context->gl_vtable->IsProgram (shader->priv->
            program_handle));
  if (shader->context->gl_vtable->IsShader)
    g_assert (shader->context->gl_vtable->IsShader (stage_handle));

  GST_LOG_OBJECT (shader, "detaching shader %i from program %i", stage_handle,
      (int) shader->priv->program_handle);
  shader->priv->vtable.DetachShader (shader->priv->program_handle,
      stage_handle);

  shader->priv->stages = g_list_delete_link (shader->priv->stages, elem);
  gst_object_unref (stage);
}
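A minimal usage sketch of the attach/detach pair, assuming shader and stage were created elsewhere and, per the note above, that this runs in the GL thread:

if (gst_gl_shader_attach_unlocked (shader, stage)) {
  /* ... link and use the program ... */
  gst_gl_shader_detach_unlocked (shader, stage);
}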
Example #6
static void
gst_droidcamsrc_dev_raw_image_notify_callback (void *user)
{
  GstDroidCamSrcDev *dev = (GstDroidCamSrcDev *) user;
  GstDroidCamSrc *src = GST_DROIDCAMSRC (GST_PAD_PARENT (dev->imgsrc->pad));

  GST_DEBUG_OBJECT (src, "dev raw image notify callback");

  GST_FIXME_OBJECT (src, "implement me");
}
Example #7
static void
gst_droidcamsrc_dev_postview_frame_callback (void *user,
    G_GNUC_UNUSED DroidMediaData * mem)
{
  GstDroidCamSrcDev *dev = (GstDroidCamSrcDev *) user;
  GstDroidCamSrc *src = GST_DROIDCAMSRC (GST_PAD_PARENT (dev->imgsrc->pad));

  GST_DEBUG_OBJECT (src, "dev postview frame callback");

  GST_FIXME_OBJECT (src, "implement me");
}
Example #8
static void
gst_droidcamsrc_dev_zoom_callback (void *user, G_GNUC_UNUSED int value,
    G_GNUC_UNUSED int arg)
{
  GstDroidCamSrcDev *dev = (GstDroidCamSrcDev *) user;
  GstDroidCamSrc *src = GST_DROIDCAMSRC (GST_PAD_PARENT (dev->imgsrc->pad));

  GST_DEBUG_OBJECT (src, "dev zoom callback");

  GST_FIXME_OBJECT (src, "implement me");
}
/**
 * gst_gl_upload_perform_with_gl_texture_upload_meta:
 * @upload: a #GstGLUpload
 * @meta: a #GstVideoGLTextureUploadMeta
 * @texture_id: resulting GL textures to place the data into.
 *
 * Uploads @meta into @texture_id.
 *
 * Returns: whether the upload was successful
 */
gboolean
gst_gl_upload_perform_with_gl_texture_upload_meta (GstGLUpload * upload,
    GstVideoGLTextureUploadMeta * meta, guint texture_id[4])
{
  gboolean ret;

  g_return_val_if_fail (upload != NULL, FALSE);
  g_return_val_if_fail (meta != NULL, FALSE);

  if (meta->texture_orientation !=
      GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_NORMAL)
    GST_FIXME_OBJECT (upload, "only x-normal,y-normal textures supported, "
        "the images will not appear the right way up");
  if (meta->texture_type[0] != GST_VIDEO_GL_TEXTURE_TYPE_RGBA) {
    GST_FIXME_OBJECT (upload, "only single rgba texture supported");
    return FALSE;
  }

  GST_OBJECT_LOCK (upload);

  upload->priv->meta = meta;
  if (!upload->priv->tex_id)
    gst_gl_context_gen_texture (upload->context, &upload->priv->tex_id,
        GST_VIDEO_FORMAT_RGBA, GST_VIDEO_INFO_WIDTH (&upload->in_info),
        GST_VIDEO_INFO_HEIGHT (&upload->in_info));

  GST_LOG ("Uploading with GLTextureUploadMeta with textures %i,%i,%i,%i",
      texture_id[0], texture_id[1], texture_id[2], texture_id[3]);

  gst_gl_context_thread_add (upload->context,
      (GstGLContextThreadFunc) _do_upload_with_meta, upload);

  ret = upload->priv->result;

  GST_OBJECT_UNLOCK (upload);

  return ret;
}
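A hedged usage sketch for the function above; my_rgba_tex is a hypothetical, already-allocated GL texture id, and only the first slot matters in the single-RGBA case this code supports:

guint texture_id[4] = { my_rgba_tex, 0, 0, 0 };

if (!gst_gl_upload_perform_with_gl_texture_upload_meta (upload, meta,
        texture_id))
  GST_WARNING ("GLTextureUploadMeta upload failed");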
Example #10
static gboolean
gst_vdp_h264_dec_set_format (GstVideoDecoder * video_decoder,
    GstVideoCodecState * state)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (video_decoder);

  if (h264_dec->input_state)
    gst_video_codec_state_unref (h264_dec->input_state);

  h264_dec->input_state = gst_video_codec_state_ref (state);

  GST_FIXME_OBJECT (video_decoder, "Do something when receiving input state ?");

  return TRUE;
}
Example #11
static void
gst_stream_set_stream_id (GstStream * stream, const gchar * stream_id)
{
  GST_OBJECT_LOCK (stream);
  g_assert (stream->stream_id == NULL);
  if (stream_id)
    stream->stream_id = g_strdup (stream_id);
  else {
    /* Create a random stream_id if NULL */
    GST_FIXME_OBJECT (stream, "Creating random stream-id, consider "
        "implementing a deterministic way of creating a stream-id");
    stream->stream_id =
        g_strdup_printf ("%08x%08x%08x%08x", g_random_int (), g_random_int (),
        g_random_int (), g_random_int ());
  }

  GST_OBJECT_UNLOCK (stream);
}
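The FIXME above asks for a deterministic stream-id. For comparison, source elements normally derive one from their pad and parent element with gst_pad_create_stream_id(); a hedged sketch, with pad and element assumed to exist in the caller:

gchar *stream_id = gst_pad_create_stream_id (pad, element, NULL);
/* ... use it, e.g. in the stream-start event ... */
g_free (stream_id);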
static void
check_all_streams_for_eos (GstDecodebin3 * dbin)
{
  GList *tmp;

  if (!all_inputs_are_eos (dbin))
    return;

  /* We know all streams are EOS, properly clean up everything */
  for (tmp = dbin->input_streams; tmp; tmp = tmp->next) {
    DecodebinInputStream *input = (DecodebinInputStream *) tmp->data;
    GstPad *peer = gst_pad_get_peer (input->srcpad);

    /* Send EOS and then remove elements */
    if (peer) {
      gst_pad_send_event (peer, gst_event_new_eos ());
      gst_object_unref (peer);
    }
    GST_FIXME_OBJECT (input->srcpad, "Remove input stream");
  }
}
static gboolean
gst_decklink_src_audio_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  gboolean res;

  GST_DEBUG_OBJECT (pad, "query: %" GST_PTR_FORMAT, query);

  switch (GST_QUERY_TYPE (query)) {
    /* FIXME: report live-ness and latency for audio too */
    case GST_QUERY_LATENCY:
      GST_FIXME_OBJECT (parent, "should report live-ness and audio latency");
      res = gst_pad_query_default (pad, parent, query);
      break;
    default:
      res = gst_pad_query_default (pad, parent, query);
      break;
  }

  return res;
}
Example #14
static gboolean
gst_vaapidecode_update_src_caps (GstVaapiDecode * decode)
{
  GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);
  GstPad *const srcpad = GST_VIDEO_DECODER_SRC_PAD (vdec);
  GstCaps *allowed;
  GstVideoCodecState *state, *ref_state;
  GstVaapiCapsFeature feature;
  GstCapsFeatures *features;
  GstCaps *allocation_caps;
  GstVideoInfo *vi;
  GstVideoFormat format;
  GstClockTime latency;
  gint fps_d, fps_n;
  guint width, height;
  const gchar *format_str, *feature_str;

  if (!decode->input_state)
    return FALSE;

  ref_state = decode->input_state;

  format = GST_VIDEO_INFO_FORMAT (&decode->decoded_info);
  allowed = gst_vaapidecode_get_allowed_srcpad_caps (decode);
  feature = gst_vaapi_find_preferred_caps_feature (srcpad, allowed, &format);
  gst_caps_unref (allowed);

  if (feature == GST_VAAPI_CAPS_FEATURE_NOT_NEGOTIATED)
    return FALSE;

#if (!USE_GLX && !USE_EGL)
  /* This is a very pathological situation. Should not happen. */
  if (feature == GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META)
    return FALSE;
#endif

  if ((feature == GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY ||
          feature == GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE)
      && format != GST_VIDEO_INFO_FORMAT (&decode->decoded_info)) {
    GST_FIXME_OBJECT (decode, "validate if driver can convert from %s to %s",
        gst_video_format_to_string (GST_VIDEO_INFO_FORMAT
            (&decode->decoded_info)), gst_video_format_to_string (format));
  }

  width = decode->display_width;
  height = decode->display_height;

  if (!width || !height) {
    width = GST_VIDEO_INFO_WIDTH (&ref_state->info);
    height = GST_VIDEO_INFO_HEIGHT (&ref_state->info);
  }

  state = gst_video_decoder_set_output_state (vdec, format, width, height,
      ref_state);
  if (!state)
    return FALSE;

  if (GST_VIDEO_INFO_WIDTH (&state->info) == 0
      || GST_VIDEO_INFO_HEIGHT (&state->info) == 0) {
    gst_video_codec_state_unref (state);
    return FALSE;
  }

  vi = &state->info;
  state->caps = gst_video_info_to_caps (vi);

  switch (feature) {
    case GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META:
    case GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE:{
      GstStructure *structure = gst_caps_get_structure (state->caps, 0);

      /* Remove chroma-site and colorimetry from src caps,
       * which is unnecessary on downstream if using VASurface
       */
      gst_structure_remove_fields (structure, "chroma-site", "colorimetry",
          NULL);

      feature_str = gst_vaapi_caps_feature_to_string (feature);
      features = gst_caps_features_new (feature_str, NULL);
      gst_caps_set_features (state->caps, 0, features);
      break;
    }
    default:
      break;
  }

  /* Allocation query is different from pad's caps */
  allocation_caps = NULL;
  if (GST_VIDEO_INFO_WIDTH (&decode->decoded_info) != width
      || GST_VIDEO_INFO_HEIGHT (&decode->decoded_info) != height) {
    allocation_caps = gst_caps_copy (state->caps);
    format_str = gst_video_format_to_string (format);
    gst_caps_set_simple (allocation_caps,
        "width", G_TYPE_INT, GST_VIDEO_INFO_WIDTH (&decode->decoded_info),
        "height", G_TYPE_INT, GST_VIDEO_INFO_HEIGHT (&decode->decoded_info),
        "format", G_TYPE_STRING, format_str, NULL);
    GST_INFO_OBJECT (decode, "new alloc caps = %" GST_PTR_FORMAT,
        allocation_caps);
  }
  gst_caps_replace (&state->allocation_caps, allocation_caps);
  if (allocation_caps)
    gst_caps_unref (allocation_caps);

  GST_INFO_OBJECT (decode, "new src caps = %" GST_PTR_FORMAT, state->caps);
  gst_caps_replace (&decode->srcpad_caps, state->caps);
  gst_video_codec_state_unref (state);

  fps_n = GST_VIDEO_INFO_FPS_N (vi);
  fps_d = GST_VIDEO_INFO_FPS_D (vi);
  if (fps_n <= 0 || fps_d <= 0) {
    GST_DEBUG_OBJECT (decode, "forcing 25/1 framerate for latency calculation");
    fps_n = 25;
    fps_d = 1;
  }

  /* For parsing/preparation purposes we'd need at least 1 frame
   * latency in general, with perfectly known unit boundaries (NALU,
   * AU), and up to 2 frames when we need to wait for the second frame
   * start to determine the first frame is complete */
  latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n);
  gst_video_decoder_set_latency (vdec, latency, latency);

  return TRUE;
}
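As a sanity check on the two-frame latency formula above (hedged arithmetic, not from the source):

/* With the 25/1 fallback:
 * gst_util_uint64_scale (2 * GST_SECOND, 1, 25) = 80 000 000 ns = 80 ms,
 * i.e. exactly two frame durations. */
GstClockTime latency = gst_util_uint64_scale (2 * GST_SECOND, 1, 25);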
static gboolean
gst_type_find_element_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  gboolean res = FALSE;
  GstTypeFindElement *typefind = GST_TYPE_FIND_ELEMENT (parent);

  GST_DEBUG_OBJECT (typefind, "got %s event in mode %d",
      GST_EVENT_TYPE_NAME (event), typefind->mode);

  switch (typefind->mode) {
    case MODE_TYPEFIND:
      switch (GST_EVENT_TYPE (event)) {
        case GST_EVENT_CAPS:
        {
          GstCaps *caps;

          /* Parse and push out our caps and data */
          gst_event_parse_caps (event, &caps);
          res = gst_type_find_element_setcaps (typefind, caps);

          gst_event_unref (event);
          break;
        }
        case GST_EVENT_GAP:
        {
          GST_FIXME_OBJECT (typefind,
              "GAP events during typefinding not handled properly");

          /* FIXME: These would need to be inserted in the stream at
           * the right position between buffers, but we combine all
           * buffers with a GstAdapter. Drop the GAP event for now,
           * which will only cause an implicit GAP between buffers.
           */
          gst_event_unref (event);
          res = TRUE;
          break;
        }
        case GST_EVENT_EOS:
        {
          GST_INFO_OBJECT (typefind, "Got EOS and no type found yet");
          gst_type_find_element_chain_do_typefinding (typefind, FALSE, TRUE);

          res = gst_pad_push_event (typefind->src, event);
          break;
        }
        case GST_EVENT_FLUSH_STOP:{
          GList *l;

          GST_OBJECT_LOCK (typefind);

          for (l = typefind->cached_events; l; l = l->next) {
            if (GST_EVENT_IS_STICKY (l->data) &&
                GST_EVENT_TYPE (l->data) != GST_EVENT_SEGMENT &&
                GST_EVENT_TYPE (l->data) != GST_EVENT_EOS) {
              gst_pad_store_sticky_event (typefind->src, l->data);
            }
            gst_event_unref (l->data);
          }

          g_list_free (typefind->cached_events);
          typefind->cached_events = NULL;
          gst_adapter_clear (typefind->adapter);
          GST_OBJECT_UNLOCK (typefind);
          /* fall through */
        }
        case GST_EVENT_FLUSH_START:
          res = gst_pad_push_event (typefind->src, event);
          break;
        default:
          /* Forward events that would happen before the caps event
           * directly instead of storing them. There's no reason not
           * to send them directly and we should only store events
           * for later sending that would need to come after the caps
           * event */
          if (GST_EVENT_TYPE (event) < GST_EVENT_CAPS) {
            res = gst_pad_push_event (typefind->src, event);
          } else {
            GST_DEBUG_OBJECT (typefind, "Saving %s event to send later",
                GST_EVENT_TYPE_NAME (event));
            GST_OBJECT_LOCK (typefind);
            typefind->cached_events =
                g_list_append (typefind->cached_events, event);
            GST_OBJECT_UNLOCK (typefind);
            res = TRUE;
          }
          break;
      }
      break;
    case MODE_NORMAL:
      res = gst_pad_push_event (typefind->src, event);
      break;
    case MODE_ERROR:
      break;
    default:
      g_assert_not_reached ();
  }
  return res;
}
/* FIXME : HACK, REMOVE, USE INPUT CHAINS */
static GstPadProbeReturn
parsebin_buffer_probe (GstPad * pad, GstPadProbeInfo * info,
    DecodebinInput * input)
{
  GstDecodebin3 *dbin = input->dbin;
  GList *tmp, *unused_slot = NULL;

  GST_FIXME_OBJECT (dbin, "Need a lock !");

  GST_DEBUG_OBJECT (pad, "Got a buffer ! UNBLOCK !");

  /* Any data out of the demuxer means it's not creating pads
   * any more right now */

  /* 1. Re-use existing streams if/when possible */
  GST_FIXME_OBJECT (dbin, "Re-use existing input streams if/when possible");

  /* 2. Remove unused streams (push EOS) */
  GST_DEBUG_OBJECT (dbin, "Removing unused streams");
  tmp = dbin->input_streams;
  while (tmp != NULL) {
    DecodebinInputStream *input_stream = (DecodebinInputStream *) tmp->data;
    GList *next = tmp->next;

    GST_DEBUG_OBJECT (dbin, "Checking input stream %p", input_stream);
    if (input_stream->input_buffer_probe_id) {
      GST_DEBUG_OBJECT (dbin,
          "Removing pad block on input %p pad %" GST_PTR_FORMAT, input_stream,
          input_stream->srcpad);
      gst_pad_remove_probe (input_stream->srcpad,
          input_stream->input_buffer_probe_id);
    }
    input_stream->input_buffer_probe_id = 0;

    if (input_stream->saw_eos) {
      remove_input_stream (dbin, input_stream);
      tmp = dbin->input_streams;
    } else
      tmp = next;
  }

  GST_DEBUG_OBJECT (dbin, "Creating new streams (if needed)");
  /* 3. Create new streams */
  for (tmp = input->pending_pads; tmp; tmp = tmp->next) {
    GstStream *stream;
    PendingPad *ppad = (PendingPad *) tmp->data;

    stream = gst_pad_get_stream (ppad->pad);
    if (stream == NULL) {
      GST_ERROR_OBJECT (dbin, "No stream for pad ????");
    } else {
      MultiQueueSlot *slot;
      DecodebinInputStream *input_stream;
      /* The remaining pads in pending_pads are the ones that require a new
       * input stream */
      input_stream = create_input_stream (dbin, stream, ppad->pad, ppad->input);
      /* See if we can link it straight away */
      input_stream->active_stream = stream;

      g_mutex_lock (&dbin->selection_lock);
      slot = get_slot_for_input (dbin, input_stream);
      link_input_to_slot (input_stream, slot);
      g_mutex_unlock (&dbin->selection_lock);

      /* Remove the buffer and event probe */
      gst_pad_remove_probe (ppad->pad, ppad->buffer_probe);
      gst_pad_remove_probe (ppad->pad, ppad->event_probe);
      g_free (ppad);
    }
  }

  g_list_free (input->pending_pads);
  input->pending_pads = NULL;

  /* Weed out unused multiqueue slots */
  g_mutex_lock (&dbin->selection_lock);
  for (tmp = dbin->slots; tmp; tmp = tmp->next) {
    MultiQueueSlot *slot = (MultiQueueSlot *) tmp->data;
    GST_LOG_OBJECT (dbin, "Slot %d input:%p", slot->id, slot->input);
    if (slot->input == NULL) {
      unused_slot =
          g_list_append (unused_slot, gst_object_ref (slot->sink_pad));
    }
  }
  g_mutex_unlock (&dbin->selection_lock);

  for (tmp = unused_slot; tmp; tmp = tmp->next) {
    GstPad *sink_pad = (GstPad *) tmp->data;
    GST_DEBUG_OBJECT (sink_pad, "Sending EOS to unused slot");
    gst_pad_send_event (sink_pad, gst_event_new_eos ());
  }

  if (unused_slot)
    g_list_free_full (unused_slot, (GDestroyNotify) gst_object_unref);

  return GST_PAD_PROBE_OK;
}
/* Probe on the output of a parser chain (the last
 * src pad) */
static GstPadProbeReturn
parse_chain_output_probe (GstPad * pad, GstPadProbeInfo * info,
    DecodebinInputStream * input)
{
  GstPadProbeReturn ret = GST_PAD_PROBE_OK;

  if (GST_IS_EVENT (GST_PAD_PROBE_INFO_DATA (info))) {
    GstEvent *ev = GST_PAD_PROBE_INFO_EVENT (info);

    GST_DEBUG_OBJECT (pad, "Got event %s", GST_EVENT_TYPE_NAME (ev));
    switch (GST_EVENT_TYPE (ev)) {
      case GST_EVENT_STREAM_START:
      {
        GstStream *stream = NULL;
        guint group_id = G_MAXUINT32;
        gst_event_parse_group_id (ev, &group_id);
        GST_DEBUG_OBJECT (pad, "Got stream-start, group_id:%d, input %p",
            group_id, input->input);
        if (set_input_group_id (input->input, &group_id)) {
          ev = gst_event_make_writable (ev);
          gst_event_set_group_id (ev, group_id);
          GST_PAD_PROBE_INFO_DATA (info) = ev;
        }
        input->saw_eos = FALSE;

        gst_event_parse_stream (ev, &stream);
        /* FIXME : Would we ever end up with a stream already set on the input ?? */
        if (stream) {
          if (input->active_stream != stream) {
            MultiQueueSlot *slot;
            if (input->active_stream)
              gst_object_unref (input->active_stream);
            input->active_stream = stream;
            /* We have the beginning of a stream, get a multiqueue slot and link to it */
            g_mutex_lock (&input->dbin->selection_lock);
            slot = get_slot_for_input (input->dbin, input);
            link_input_to_slot (input, slot);
            g_mutex_unlock (&input->dbin->selection_lock);
          } else
            gst_object_unref (stream);
        }
      }
        break;
      case GST_EVENT_CAPS:
      {
        GstCaps *caps = NULL;
        gst_event_parse_caps (ev, &caps);
        GST_DEBUG_OBJECT (pad, "caps %" GST_PTR_FORMAT, caps);
        if (caps && input->active_stream)
          gst_stream_set_caps (input->active_stream, caps);
      }
        break;
      case GST_EVENT_EOS:
        input->saw_eos = TRUE;
        if (all_inputs_are_eos (input->dbin)) {
          GST_DEBUG_OBJECT (pad, "real input pad, marking as EOS");
          check_all_streams_for_eos (input->dbin);
        } else {
          GstPad *peer = gst_pad_get_peer (input->srcpad);
          if (peer) {
            /* Send custom-eos event to multiqueue slot */
            GstStructure *s;
            GstEvent *event;

            GST_DEBUG_OBJECT (pad,
                "Got EOS end of input stream, post custom-eos");
            s = gst_structure_new_empty ("decodebin3-custom-eos");
            event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
            gst_pad_send_event (peer, event);
            gst_object_unref (peer);
          } else {
            GST_FIXME_OBJECT (pad, "No peer, what should we do ?");
          }
        }
        ret = GST_PAD_PROBE_DROP;
        break;
      case GST_EVENT_FLUSH_STOP:
        GST_DEBUG_OBJECT (pad, "Clear saw_eos flag");
        input->saw_eos = FALSE;
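        /* fall through */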
      default:
        break;
    }
  } else if (GST_IS_QUERY (GST_PAD_PROBE_INFO_DATA (info))) {
    GstQuery *q = GST_PAD_PROBE_INFO_QUERY (info);
    GST_DEBUG_OBJECT (pad, "Seeing query %s", GST_QUERY_TYPE_NAME (q));
    /* If we have a parser, we want to reply to the caps query */
    /* FIXME: Set a flag when the input stream is created for
     * streams where we shouldn't reply to these queries */
    if (GST_QUERY_TYPE (q) == GST_QUERY_CAPS
        && (info->type & GST_PAD_PROBE_TYPE_PULL)) {
      GstCaps *filter = NULL;
      GstCaps *allowed;
      gst_query_parse_caps (q, &filter);
      allowed = get_parser_caps_filter (input->dbin, filter);
      GST_DEBUG_OBJECT (pad,
          "Intercepting caps query, setting %" GST_PTR_FORMAT, allowed);
      gst_query_set_caps_result (q, allowed);
      gst_caps_unref (allowed);
      ret = GST_PAD_PROBE_HANDLED;
    } else if (GST_QUERY_TYPE (q) == GST_QUERY_ACCEPT_CAPS) {
      GstCaps *prop = NULL;
      gst_query_parse_accept_caps (q, &prop);
      /* Fast check against target caps */
      if (gst_caps_can_intersect (prop, input->dbin->caps))
        gst_query_set_accept_caps_result (q, TRUE);
      else {
        gboolean accepted = check_parser_caps_filter (input->dbin, prop);
        /* check against caps filter */
        gst_query_set_accept_caps_result (q, accepted);
        GST_DEBUG_OBJECT (pad, "ACCEPT_CAPS query, returning %d", accepted);
      }
      ret = GST_PAD_PROBE_HANDLED;
    }
  }

  return ret;
}
void
ges_base_xml_formatter_add_track_element (GESBaseXmlFormatter * self,
    GType track_element_type, const gchar * asset_id, const gchar * track_id,
    const gchar * timeline_obj_id, GstStructure * children_properties,
    GstStructure * properties, const gchar * metadatas, GError ** error)
{
  GESTrackElement *trackelement;

  GError *err = NULL;
  GESAsset *asset = NULL;
  GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self);

  if (priv->check_only)
    return;

  if (g_type_is_a (track_element_type, GES_TYPE_TRACK_ELEMENT) == FALSE) {
    GST_DEBUG_OBJECT (self, "%s is not a TrackElement, can not create it",
        g_type_name (track_element_type));
    goto out;
  }

  if (g_type_is_a (track_element_type, GES_TYPE_BASE_EFFECT) == FALSE) {
    GST_FIXME_OBJECT (self, "%s currently not supported",
        g_type_name (track_element_type));
    goto out;
  }

  asset = ges_asset_request (track_element_type, asset_id, &err);
  if (asset == NULL) {
    GST_DEBUG_OBJECT (self, "Can not create trackelement %s", asset_id);
    GST_FIXME_OBJECT (self, "Check if missing plugins etc %s",
        err ? err->message : "");

    goto out;
  }

  trackelement = GES_TRACK_ELEMENT (ges_asset_extract (asset, NULL));
  if (trackelement) {
    GESClip *clip;
    if (metadatas)
      ges_meta_container_add_metas_from_string (GES_META_CONTAINER
          (trackelement), metadatas);

    clip = g_hash_table_lookup (priv->containers, timeline_obj_id);
    if (clip) {
      _add_track_element (GES_FORMATTER (self), clip, trackelement, track_id,
          children_properties, properties);
    } else {
      PendingEffects *peffect;
      PendingClip *pend = g_hash_table_lookup (priv->clipid_pendings,
          timeline_obj_id);
      if (pend == NULL) {
        GST_WARNING_OBJECT (self, "No Clip with id: %s can not "
            "add TrackElement", timeline_obj_id);
        goto out;
      }

      peffect = g_slice_new0 (PendingEffects);

      peffect->trackelement = trackelement;
      peffect->track_id = g_strdup (track_id);
      peffect->properties = properties ? gst_structure_copy (properties) : NULL;
      peffect->children_properties = children_properties ?
          gst_structure_copy (children_properties) : NULL;

      pend->effects = g_list_append (pend->effects, peffect);
    }
    priv->current_track_element = trackelement;
  }

  ges_project_add_asset (GES_FORMATTER (self)->project, asset);

out:
  if (asset)
    gst_object_unref (asset);
  if (err)
    g_error_free (err);

  return;
}
Example #19
static gboolean
gst_interleave_sink_event (GstCollectPads * pads, GstCollectData * data,
    GstEvent * event, gpointer user_data)
{
  GstInterleave *self = GST_INTERLEAVE (user_data);
  gboolean ret = TRUE;

  GST_DEBUG ("Got %s event on pad %s:%s", GST_EVENT_TYPE_NAME (event),
      GST_DEBUG_PAD_NAME (data->pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      GST_OBJECT_LOCK (self);
      gst_event_replace (&self->pending_segment, NULL);
      GST_OBJECT_UNLOCK (self);
      break;
    case GST_EVENT_SEGMENT:
    {
      GST_OBJECT_LOCK (self);
      gst_event_replace (&self->pending_segment, event);
      GST_OBJECT_UNLOCK (self);
      break;
    }
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;
      GstAudioInfo info;
      GValue *val;
      guint channel;

      gst_event_parse_caps (event, &caps);

      if (!gst_audio_info_from_caps (&info, caps)) {
        GST_WARNING_OBJECT (self, "invalid sink caps");
        gst_event_unref (event);
        event = NULL;
        ret = FALSE;
        break;
      }

      if (self->channel_positions_from_input
          && GST_AUDIO_INFO_CHANNELS (&info) == 1) {
        channel = GST_INTERLEAVE_PAD_CAST (data->pad)->channel;
        val = g_value_array_get_nth (self->input_channel_positions, channel);
        g_value_set_enum (val, GST_AUDIO_INFO_POSITION (&info, 0));
      }

      if (!gst_pad_has_current_caps (data->pad))
        g_atomic_int_add (&self->configured_sinkpads_counter, 1);

      /* Last caps that are set on a sink pad are used as output caps */
      if (g_atomic_int_get (&self->configured_sinkpads_counter) ==
          self->channels) {
        ret = gst_interleave_sink_setcaps (self, data->pad, caps, &info);
        gst_event_unref (event);
        event = NULL;
      }
      break;
    }
    case GST_EVENT_TAG:
      GST_FIXME_OBJECT (self, "FIXME: merge tags and send after stream-start");
      break;
    default:
      break;
  }

  /* now GstCollectPads can take care of the rest, e.g. EOS */
  if (event != NULL)
    return gst_collect_pads_event_default (pads, data, event, FALSE);

  return ret;
}
Example #20
gboolean gst_imx_ipu_blitter_set_input_buffer(GstImxIpuBlitter *ipu_blitter, GstBuffer *input_buffer)
{
	GstImxPhysMemMeta *phys_mem_meta;

	g_assert(input_buffer != NULL);

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(input_buffer);

	/* Test if the input buffer uses DMA memory */
	if ((phys_mem_meta != NULL) && (phys_mem_meta->phys_addr != 0))
	{
		/* DMA memory present - the input buffer can be used as an actual input buffer */
		gst_imx_ipu_blitter_set_actual_input_buffer(ipu_blitter, gst_buffer_ref(input_buffer));

		GST_TRACE_OBJECT(ipu_blitter, "input buffer uses DMA memory - setting it as actual input buffer");
	}
	else
	{
		/* No DMA memory present; the input buffer needs to be copied to an internal
		 * temporary input buffer */

		GstBuffer *temp_input_buffer;
		GstFlowReturn flow_ret;

		GST_TRACE_OBJECT(ipu_blitter, "input buffer does not use DMA memory - need to copy it to an internal input DMA buffer");

		{
			/* The internal input buffer is the temp input frame's DMA memory.
			 * If it does not exist yet, it needs to be created here. The temp input
			 * frame is then mapped. */

			if (ipu_blitter->internal_bufferpool == NULL)
			{
				/* Internal bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input buffer */

				GstCaps *caps = gst_video_info_to_caps(&(ipu_blitter->input_video_info));

				ipu_blitter->internal_bufferpool = gst_imx_ipu_blitter_create_bufferpool(
					ipu_blitter,
					caps,
					ipu_blitter->input_video_info.size,
					2, 0,
					NULL,
					NULL
				);

				gst_caps_unref(caps);

				if (ipu_blitter->internal_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(ipu_blitter, "failed to create internal bufferpool");
					return FALSE;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(ipu_blitter->internal_bufferpool))
				gst_buffer_pool_set_active(ipu_blitter->internal_bufferpool, TRUE);
		}

		/* Create the internal input buffer */
		flow_ret = gst_buffer_pool_acquire_buffer(ipu_blitter->internal_bufferpool, &temp_input_buffer, NULL);
		if (flow_ret != GST_FLOW_OK)
		{
			GST_ERROR_OBJECT(ipu_blitter, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
			return FALSE;
		}

		{
			GstVideoFrame input_frame, temp_input_frame;

			gst_video_frame_map(&input_frame, &(ipu_blitter->input_video_info), input_buffer, GST_MAP_READ);
			gst_video_frame_map(&temp_input_frame, &(ipu_blitter->input_video_info), temp_input_buffer, GST_MAP_WRITE);

			/* Copy the input buffer's pixels to the temp input frame
			 * The gst_video_frame_copy() makes sure stride and plane offset values from both
			 * frames are respected */
			gst_video_frame_copy(&temp_input_frame, &input_frame);

			gst_video_frame_unmap(&temp_input_frame);
			gst_video_frame_unmap(&input_frame);
		}

		/* Finally, set the temp input buffer as the actual input buffer */
		gst_imx_ipu_blitter_set_actual_input_buffer(ipu_blitter, temp_input_buffer);
	}

	/* Configure interlacing */
	ipu_blitter->priv->task.input.deinterlace.enable = 0;
	if (ipu_blitter->deinterlace_mode != GST_IMX_IPU_BLITTER_DEINTERLACE_NONE)
	{
		switch (ipu_blitter->input_video_info.interlace_mode)
		{
			case GST_VIDEO_INTERLACE_MODE_INTERLEAVED:
				GST_TRACE_OBJECT(ipu_blitter, "input stream uses interlacing -> deinterlacing enabled");
				ipu_blitter->priv->task.input.deinterlace.enable = 1;
				break;
			case GST_VIDEO_INTERLACE_MODE_MIXED:
			{
				GstVideoMeta *video_meta;

				GST_TRACE_OBJECT(ipu_blitter, "input stream uses mixed interlacing -> need to check video metadata deinterlacing flag");

				video_meta = gst_buffer_get_video_meta(input_buffer);
				if (video_meta != NULL)
				{
					if (video_meta->flags & GST_VIDEO_FRAME_FLAG_INTERLACED)
					{
						GST_TRACE_OBJECT(ipu_blitter, "frame has video metadata and deinterlacing flag");
						ipu_blitter->priv->task.input.deinterlace.enable = 1;
					}
					else
						GST_TRACE_OBJECT(ipu_blitter, "frame has video metadata but no deinterlacing flag");
				}
				else
					GST_TRACE_OBJECT(ipu_blitter, "frame has no video metadata -> no deinterlacing done");

				break;
			}
			case GST_VIDEO_INTERLACE_MODE_PROGRESSIVE:
			{
				GST_TRACE_OBJECT(ipu_blitter, "input stream is progressive -> no deinterlacing necessary");
				break;
			}
			case GST_VIDEO_INTERLACE_MODE_FIELDS:
				GST_FIXME_OBJECT(ipu_blitter, "2-fields deinterlacing not supported (yet)");
				break;
			default:
				break;
		}
	}

	return TRUE;
}
Example #21
static gboolean gst_imx_ipu_blitter_set_input_frame(GstImxBlitter *blitter, GstBuffer *input_frame)
{
	GstImxIpuBlitter *ipu_blitter = GST_IMX_IPU_BLITTER(blitter);
	gst_buffer_replace(&(ipu_blitter->input_frame), input_frame);

	if (ipu_blitter->input_frame != NULL)
	{
		ipu_blitter->priv->main_task.input.deinterlace.enable = 0;

		if (ipu_blitter->deinterlacing_enabled)
		{
			switch (GST_VIDEO_INFO_INTERLACE_MODE(&(ipu_blitter->input_video_info)))
			{
				case GST_VIDEO_INTERLACE_MODE_INTERLEAVED:
					GST_LOG_OBJECT(ipu_blitter, "input stream uses interlacing -> deinterlacing enabled");
					ipu_blitter->priv->main_task.input.deinterlace.enable = 1;
					break;

				case GST_VIDEO_INTERLACE_MODE_MIXED:
				{
					if (GST_BUFFER_FLAG_IS_SET(input_frame, GST_VIDEO_BUFFER_FLAG_INTERLACED))
					{
						GST_LOG_OBJECT(ipu_blitter, "frame has deinterlacing flag");
						ipu_blitter->priv->main_task.input.deinterlace.enable = 1;
					}
					else
						GST_LOG_OBJECT(ipu_blitter, "frame has no deinterlacing flag");

					break;
				}

				case GST_VIDEO_INTERLACE_MODE_PROGRESSIVE:
					GST_LOG_OBJECT(ipu_blitter, "input stream is progressive -> no deinterlacing necessary");
					break;

				case GST_VIDEO_INTERLACE_MODE_FIELDS:
					GST_FIXME_OBJECT(ipu_blitter, "2-fields deinterlacing not supported (yet)");
					break;

				default:
					GST_LOG_OBJECT(ipu_blitter, "input stream uses unknown interlacing mode -> no deinterlacing performed");
			}
		}

		if (ipu_blitter->priv->main_task.input.deinterlace.enable)
		{
			if (GST_BUFFER_FLAG_IS_SET(input_frame, GST_VIDEO_BUFFER_FLAG_TFF))
			{
				GST_LOG_OBJECT(ipu_blitter, "interlaced with top field first");
				ipu_blitter->priv->main_task.input.deinterlace.field_fmt = IPU_DEINTERLACE_FIELD_TOP;
			}
			else
			{
				GST_LOG_OBJECT(ipu_blitter, "interlaced with bottom field first");
				ipu_blitter->priv->main_task.input.deinterlace.field_fmt = IPU_DEINTERLACE_FIELD_BOTTOM;
			}

			ipu_blitter->priv->main_task.input.deinterlace.motion = HIGH_MOTION;
		}
		else
			ipu_blitter->priv->main_task.input.deinterlace.motion = MED_MOTION;

		gst_imx_ipu_blitter_set_task_params(ipu_blitter, input_frame, &(ipu_blitter->priv->main_task), &(ipu_blitter->input_video_info), TRUE);

		if (ipu_blitter->use_entire_input_frame)
		{
			ipu_blitter->priv->main_task.input.crop.pos.x = 0;
			ipu_blitter->priv->main_task.input.crop.pos.y = 0;
			ipu_blitter->priv->main_task.input.crop.w     = GST_VIDEO_INFO_WIDTH(&(ipu_blitter->input_video_info));
			ipu_blitter->priv->main_task.input.crop.h     = GST_VIDEO_INFO_HEIGHT(&(ipu_blitter->input_video_info));
		}
	}

	return TRUE;
}
Example #22
static GstFlowReturn
gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
{
  GstFlowReturn res;
  GstBuffer *outbuf;
  GstV4l2Object *obj = pool->obj;
  GstClockTime timestamp;
  GstV4l2MemoryGroup *group;
  gint i;

  if ((res = gst_v4l2_buffer_pool_poll (pool)) != GST_FLOW_OK)
    goto poll_failed;

  GST_LOG_OBJECT (pool, "dequeueing a buffer");

  group = gst_v4l2_allocator_dqbuf (pool->vallocator);
  if (group == NULL)
    goto dqbuf_failed;

  /* get our GstBuffer with that index from the pool, if the buffer was
   * outstanding we have a serious problem.
   */
  outbuf = pool->buffers[group->buffer.index];
  if (outbuf == NULL)
    goto no_buffer;

  /* mark the buffer outstanding */
  pool->buffers[group->buffer.index] = NULL;
  if (g_atomic_int_dec_and_test (&pool->num_queued)) {
    GST_OBJECT_LOCK (pool);
    pool->empty = TRUE;
    GST_OBJECT_UNLOCK (pool);
  }

  timestamp = GST_TIMEVAL_TO_TIME (group->buffer.timestamp);

#ifndef GST_DISABLE_GST_DEBUG
  for (i = 0; i < group->n_mem; i++) {
    GST_LOG_OBJECT (pool,
        "dequeued buffer %p seq:%d (ix=%d), mem %p used %d, plane=%d, flags %08x, ts %"
        GST_TIME_FORMAT ", pool-queued=%d, buffer=%p", outbuf,
        group->buffer.sequence, group->buffer.index, group->mem[i],
        group->planes[i].bytesused, i, group->buffer.flags,
        GST_TIME_ARGS (timestamp), pool->num_queued, outbuf);
  }
#endif

  /* set top/bottom field first if v4l2_buffer has the information */
  switch (group->buffer.field) {
    case V4L2_FIELD_NONE:
      GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
      GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
      break;
    case V4L2_FIELD_INTERLACED_TB:
      GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
      GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
      break;
    case V4L2_FIELD_INTERLACED_BT:
      GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
      GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
      break;
    default:
      GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
      GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
      GST_FIXME_OBJECT (pool,
          "Unhandled enum v4l2_field %d - treating as progressive",
          group->buffer.field);
  }

  if (GST_VIDEO_INFO_FORMAT (&obj->info) == GST_VIDEO_FORMAT_ENCODED) {
    if (group->buffer.flags & V4L2_BUF_FLAG_KEYFRAME)
      GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
    else
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;

  *buffer = outbuf;

  return GST_FLOW_OK;

  /* ERRORS */
poll_failed:
  {
    GST_DEBUG_OBJECT (pool, "poll error %s", gst_flow_get_name (res));
    return res;
  }
dqbuf_failed:
  {
    return GST_FLOW_ERROR;
  }
no_buffer:
  {
    GST_ERROR_OBJECT (pool, "No free buffer found in the pool at index %d.",
        group->buffer.index);
    return GST_FLOW_ERROR;
  }
}
Example #23
static gboolean gst_imx_ipu_blitter_set_input_frame(GstImxBaseBlitter *base_blitter, GstBuffer *input_frame)
{
	GstImxIpuBlitter *ipu_blitter = GST_IMX_IPU_BLITTER(base_blitter);

	GST_IMX_FILL_IPU_TASK(ipu_blitter, input_frame, ipu_blitter->priv->task.input);

	ipu_blitter->current_frame = input_frame;
	ipu_blitter->priv->task.input.deinterlace.enable = 0;

	if (ipu_blitter->deinterlace_mode != GST_IMX_IPU_BLITTER_DEINTERLACE_NONE)
	{
		switch (ipu_blitter->input_video_info.interlace_mode)
		{
			case GST_VIDEO_INTERLACE_MODE_INTERLEAVED:
				GST_LOG_OBJECT(ipu_blitter, "input stream uses interlacing -> deinterlacing enabled");
				ipu_blitter->priv->task.input.deinterlace.enable = 1;
				break;

			case GST_VIDEO_INTERLACE_MODE_MIXED:
			{
				if (GST_BUFFER_FLAG_IS_SET(input_frame, GST_VIDEO_BUFFER_FLAG_INTERLACED))
				{
					GST_LOG_OBJECT(ipu_blitter, "frame has deinterlacing flag");
					ipu_blitter->priv->task.input.deinterlace.enable = 1;
				}
				else
					GST_LOG_OBJECT(ipu_blitter, "frame has no deinterlacing flag");

				break;
			}

			case GST_VIDEO_INTERLACE_MODE_PROGRESSIVE:
				GST_LOG_OBJECT(ipu_blitter, "input stream is progressive -> no deinterlacing necessary");
				break;

			case GST_VIDEO_INTERLACE_MODE_FIELDS:
				GST_FIXME_OBJECT(ipu_blitter, "2-fields deinterlacing not supported (yet)");
				break;

			default:
				GST_LOG_OBJECT(ipu_blitter, "input stream uses unknown interlacing mode -> no deinterlacing performed");
				break;
		}
	}

	ipu_blitter->priv->task.input.paddr_n = 0;

	if (ipu_blitter->priv->task.input.deinterlace.enable)
	{
		if (GST_BUFFER_FLAG_IS_SET(input_frame, GST_VIDEO_BUFFER_FLAG_TFF))
		{
			GST_LOG_OBJECT(ipu_blitter, "interlaced with top field first");
			ipu_blitter->priv->task.input.deinterlace.field_fmt = IPU_DEINTERLACE_FIELD_TOP;
		}
		else
		{
			GST_LOG_OBJECT(ipu_blitter, "interlaced with bottom field first");
			ipu_blitter->priv->task.input.deinterlace.field_fmt = IPU_DEINTERLACE_FIELD_BOTTOM;
		}
//		ipu_blitter->priv->task.input.deinterlace.field_fmt |= IPU_DEINTERLACE_RATE_MASK;
	}
	else
	{
		ipu_blitter->priv->task.input.deinterlace.motion = MED_MOTION;
	}

	return TRUE;
}