/* Called after videoaggregator fixates our caps */
static gboolean
_negotiated_caps (GstAggregator * agg, GstCaps * caps)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  GstCaps *in_caps;

  GST_LOG_OBJECT (mix, "Configured output caps %" GST_PTR_FORMAT, caps);

  if (GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps)
    if (!GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps (agg, caps))
      return FALSE;

  /* The output caps are fixed, so we can configure the view converter now */
  gst_gl_view_convert_set_context (mix->viewconvert,
      GST_GL_BASE_MIXER (mix)->context);

  in_caps = gst_video_info_to_caps (&mix->mix_info);
  gst_caps_set_features (in_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));
  gst_caps_set_simple (in_caps, "texture-target", G_TYPE_STRING,
      GST_GL_TEXTURE_TARGET_2D_STR, NULL);

  gst_gl_view_convert_set_caps (mix->viewconvert, in_caps, caps);

  return TRUE;
}
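For reference, the in_caps built above are plain raw-video caps derived from mix_info, retagged with the GLMemory caps feature and pinned to 2D textures. A minimal sketch of the same kind of caps spelled out as a string, with hypothetical fixed values that are not taken from the element:

/* Hypothetical sketch: GLMemory-backed raw video caps equivalent to what
 * the code above constructs, using made-up resolution and framerate. */
static GstCaps *
example_glmemory_caps (void)
{
  return gst_caps_from_string ("video/x-raw(memory:GLMemory), "
      "format=RGBA, width=1920, height=1080, framerate=30/1, "
      "texture-target=2D");
}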
static gboolean
gst_gl_video_mixer_process_textures (GstGLMixer * mix, GPtrArray * frames,
    guint out_tex)
{
  GstGLVideoMixer *video_mixer = GST_GL_VIDEO_MIXER (mix);

  video_mixer->input_frames = frames;

  gst_gl_context_use_fbo_v2 (GST_GL_BASE_MIXER (mix)->context,
      GST_VIDEO_INFO_WIDTH (&GST_VIDEO_AGGREGATOR (mix)->info),
      GST_VIDEO_INFO_HEIGHT (&GST_VIDEO_AGGREGATOR (mix)->info),
      mix->fbo, mix->depthbuffer,
      out_tex, gst_gl_video_mixer_callback, (gpointer) video_mixer);

  return TRUE;
}
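A subclass hands an implementation like the one above to the base class through the GstGLMixerClass vfunc table; a hedged sketch of that hookup in class_init (fragment only, helper name assumed):

/* Hypothetical class_init fragment: register the process_textures
 * implementation shown above with the GstGLMixer base class. */
static void
example_class_init_fragment (GstGLMixerClass * mixer_class)
{
  mixer_class->process_textures = gst_gl_video_mixer_process_textures;
}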
static void
_mixer_pad_get_output_size (GstGLVideoMixer * mix,
    GstGLVideoMixerPad * mix_pad, gint * width, gint * height)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mix);
  GstVideoAggregatorPad *vagg_pad = GST_VIDEO_AGGREGATOR_PAD (mix_pad);
  gint pad_width, pad_height;
  guint dar_n, dar_d;

  /* FIXME: Anything better we can do here? */
  if (!vagg_pad->info.finfo
      || vagg_pad->info.finfo->format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_DEBUG_OBJECT (mix_pad, "Have no caps yet");
    *width = 0;
    *height = 0;
    return;
  }

  pad_width =
      mix_pad->width <=
      0 ? GST_VIDEO_INFO_WIDTH (&vagg_pad->info) : mix_pad->width;
  pad_height =
      mix_pad->height <=
      0 ? GST_VIDEO_INFO_HEIGHT (&vagg_pad->info) : mix_pad->height;

  gst_video_calculate_display_ratio (&dar_n, &dar_d, pad_width, pad_height,
      GST_VIDEO_INFO_PAR_N (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_D (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_N (&vagg->info), GST_VIDEO_INFO_PAR_D (&vagg->info)
      );
  GST_LOG_OBJECT (mix_pad, "scaling %ux%u by %u/%u (%u/%u / %u/%u)", pad_width,
      pad_height, dar_n, dar_d, GST_VIDEO_INFO_PAR_N (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_D (&vagg_pad->info),
      GST_VIDEO_INFO_PAR_N (&vagg->info), GST_VIDEO_INFO_PAR_D (&vagg->info));

  if (pad_height % dar_n == 0) {
    pad_width = gst_util_uint64_scale_int (pad_height, dar_n, dar_d);
  } else if (pad_width % dar_d == 0) {
    pad_height = gst_util_uint64_scale_int (pad_width, dar_d, dar_n);
  } else {
    pad_width = gst_util_uint64_scale_int (pad_height, dar_n, dar_d);
  }

  if (width)
    *width = pad_width;
  if (height)
    *height = pad_height;
}
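The helper above preserves the pad's display aspect ratio while converting from the input pixel aspect ratio to the output's. The same arithmetic can be exercised standalone; a minimal sketch with hypothetical numbers (720x576 at 16/15 PAR rescaled for a 1/1 PAR output):

/* Minimal sketch: preserve the display aspect ratio when rescaling,
 * using the same width-first policy as _mixer_pad_get_output_size(). */
static void
example_scale_for_dar (void)
{
  guint dar_n, dar_d;
  guint width = 720, height = 576;

  if (!gst_video_calculate_display_ratio (&dar_n, &dar_d, width, height,
          16, 15, 1, 1))
    return;

  if (height % dar_n == 0)
    width = gst_util_uint64_scale_int (height, dar_n, dar_d);
  else if (width % dar_d == 0)
    height = gst_util_uint64_scale_int (width, dar_d, dar_n);
  else
    width = gst_util_uint64_scale_int (height, dar_n, dar_d);

  g_print ("scaled to %ux%u\n", width, height);   /* 768x576 for these inputs */
}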
Example #4
static gboolean
gst_gl_mixer_query_caps (GstPad * pad, GstAggregator * agg, GstQuery * query)
{
  GstCaps *filter, *caps;
  GstStructure *s;
  gint n;

  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);

  gst_query_parse_caps (query, &filter);

  if (GST_VIDEO_INFO_FORMAT (&vagg->info) != GST_VIDEO_FORMAT_UNKNOWN) {
    caps = gst_video_info_to_caps (&vagg->info);
  } else {
    caps = gst_pad_get_pad_template_caps (agg->srcpad);
  }

  caps = gst_caps_make_writable (caps);

  n = gst_caps_get_size (caps) - 1;
  for (; n >= 0; n--) {
    s = gst_caps_get_structure (caps, n);
    gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
        "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
    if (GST_VIDEO_INFO_FPS_D (&vagg->info) != 0) {
      gst_structure_set (s,
          "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    }
  }

  if (filter)
    caps = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);

  gst_query_set_caps_result (query, caps);
  gst_caps_unref (caps);

  return TRUE;
}
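A helper like this is typically dispatched from the aggregator's src_query vfunc; a hedged sketch of that wiring (function name hypothetical, chain-up to the parent class assumed):

/* Hypothetical sketch: route CAPS queries on the source pad to the helper
 * above and chain everything else up to the parent class. */
static gboolean
example_src_query (GstAggregator * agg, GstQuery * query)
{
  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:
      return gst_gl_mixer_query_caps (agg->srcpad, agg, query);
    default:
      return GST_AGGREGATOR_CLASS (parent_class)->src_query (agg, query);
  }
}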
/* opengl scene, params: input texture (not the output mixer->texture) */
static void
gst_gl_video_mixer_callback (gpointer stuff)
{
  GstGLVideoMixer *video_mixer = GST_GL_VIDEO_MIXER (stuff);
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (stuff);
  GstGLMixer *mixer = GST_GL_MIXER (video_mixer);
  GstGLFuncs *gl = GST_GL_BASE_MIXER (mixer)->context->gl_vtable;

  GLint attr_position_loc = 0;
  GLint attr_texture_loc = 0;
  guint out_width, out_height;

  guint count = 0;

  out_width = GST_VIDEO_INFO_WIDTH (&vagg->info);
  out_height = GST_VIDEO_INFO_HEIGHT (&vagg->info);

  gst_gl_context_clear_shader (GST_GL_BASE_MIXER (mixer)->context);
  gl->BindTexture (GL_TEXTURE_2D, 0);

  gl->Disable (GL_DEPTH_TEST);
  gl->Disable (GL_CULL_FACE);

  if (gl->GenVertexArrays) {
    if (!video_mixer->vao)
      gl->GenVertexArrays (1, &video_mixer->vao);
    gl->BindVertexArray (video_mixer->vao);
  }

  if (!_draw_background (video_mixer))
    return;

  gst_gl_shader_use (video_mixer->shader);

  attr_position_loc =
      gst_gl_shader_get_attribute_location (video_mixer->shader, "a_position");
  attr_texture_loc =
      gst_gl_shader_get_attribute_location (video_mixer->shader, "a_texCoord");

  gl->Enable (GL_BLEND);

  while (count < video_mixer->input_frames->len) {
    GstGLMixerFrameData *frame;
    GstGLVideoMixerPad *pad;
    GstVideoInfo *v_info;
    guint in_tex;
    guint in_width, in_height;

    /* *INDENT-OFF* */
    gfloat v_vertices[] = {
      -1.0,-1.0,-1.0f, 0.0f, 0.0f,
       1.0,-1.0,-1.0f, 1.0f, 0.0f,
       1.0, 1.0,-1.0f, 1.0f, 1.0f,
      -1.0, 1.0,-1.0f, 0.0f, 1.0f,
    };
    /* *INDENT-ON* */

    frame = g_ptr_array_index (video_mixer->input_frames, count);
    if (!frame) {
      GST_DEBUG ("skipping texture, null frame");
      count++;
      continue;
    }
    pad = (GstGLVideoMixerPad *) frame->pad;
    v_info = &GST_VIDEO_AGGREGATOR_PAD (pad)->info;
    in_width = GST_VIDEO_INFO_WIDTH (v_info);
    in_height = GST_VIDEO_INFO_HEIGHT (v_info);

    if (!frame->texture || in_width <= 0 || in_height <= 0
        || pad->alpha == 0.0f) {
      GST_DEBUG ("skipping texture:%u frame:%p width:%u height:%u alpha:%f",
          frame->texture, frame, in_width, in_height, pad->alpha);
      count++;
      continue;
    }

    in_tex = frame->texture;

    _init_vbo_indices (video_mixer);

    if (pad->geometry_change || !pad->vertex_buffer) {
      gint pad_width, pad_height;
      gfloat w, h;

      _mixer_pad_get_output_size (video_mixer, pad, &pad_width, &pad_height);

      w = ((gfloat) pad_width / (gfloat) out_width);
      h = ((gfloat) pad_height / (gfloat) out_height);

      /* top-left */
      v_vertices[0] = v_vertices[15] =
          2.0f * (gfloat) pad->xpos / (gfloat) out_width - 1.0f;
      /* bottom-left */
      v_vertices[1] = v_vertices[6] =
          2.0f * (gfloat) pad->ypos / (gfloat) out_height - 1.0f;
      /* top-right */
      v_vertices[5] = v_vertices[10] = v_vertices[0] + 2.0f * w;
      /* bottom-right */
      v_vertices[11] = v_vertices[16] = v_vertices[1] + 2.0f * h;
      GST_TRACE ("processing texture:%u dimensions:%ux%u, at %f,%f %fx%f with "
          "alpha:%f", in_tex, in_width, in_height, v_vertices[0], v_vertices[1],
          v_vertices[5], v_vertices[11], pad->alpha);

      if (!pad->vertex_buffer)
        gl->GenBuffers (1, &pad->vertex_buffer);

      gl->BindBuffer (GL_ARRAY_BUFFER, pad->vertex_buffer);

      gl->BufferData (GL_ARRAY_BUFFER, 4 * 5 * sizeof (GLfloat), v_vertices,
          GL_STATIC_DRAW);

      pad->geometry_change = FALSE;
    } else {
      gl->BindBuffer (GL_ARRAY_BUFFER, pad->vertex_buffer);
    }
    gl->BindBuffer (GL_ELEMENT_ARRAY_BUFFER, video_mixer->vbo_indices);

    gl->BlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    gl->BlendEquation (GL_FUNC_ADD);

    gl->ActiveTexture (GL_TEXTURE0);
    gl->BindTexture (GL_TEXTURE_2D, in_tex);
    gst_gl_shader_set_uniform_1i (video_mixer->shader, "texture", 0);
    gst_gl_shader_set_uniform_1f (video_mixer->shader, "alpha", pad->alpha);

    gl->EnableVertexAttribArray (attr_position_loc);
    gl->EnableVertexAttribArray (attr_texture_loc);

    gl->VertexAttribPointer (attr_position_loc, 3, GL_FLOAT,
        GL_FALSE, 5 * sizeof (GLfloat), (void *) 0);

    gl->VertexAttribPointer (attr_texture_loc, 2, GL_FLOAT,
        GL_FALSE, 5 * sizeof (GLfloat), (void *) (3 * sizeof (GLfloat)));

    gl->DrawElements (GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);

    ++count;
  }

  gl->DisableVertexAttribArray (attr_position_loc);
  gl->DisableVertexAttribArray (attr_texture_loc);

  if (gl->GenVertexArrays)
    gl->BindVertexArray (0);

  gl->BindBuffer (GL_ELEMENT_ARRAY_BUFFER, 0);
  gl->BindBuffer (GL_ARRAY_BUFFER, 0);
  gl->BindTexture (GL_TEXTURE_2D, 0);

  gl->Disable (GL_BLEND);

  gst_gl_context_clear_shader (GST_GL_BASE_MIXER (mixer)->context);
}
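_init_vbo_indices () is not shown here; for the DrawElements (GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0) call to cover the quad, the element buffer must describe two triangles over the four interleaved vertices. A hedged sketch of what that index setup could look like (helper name and signature assumed):

/* Hypothetical sketch: upload index data that draws the 4-vertex quad
 * above as two triangles (0-1-2 and 0-2-3). */
static void
example_init_quad_indices (GstGLFuncs * gl, GLuint * vbo_indices)
{
  static const GLushort indices[] = { 0, 1, 2, 0, 2, 3 };

  if (!*vbo_indices)
    gl->GenBuffers (1, vbo_indices);

  gl->BindBuffer (GL_ELEMENT_ARRAY_BUFFER, *vbo_indices);
  gl->BufferData (GL_ELEMENT_ARRAY_BUFFER, sizeof (indices), indices,
      GL_STATIC_DRAW);
}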
/* called with the object lock held */
static gboolean
gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mixer);
  GstBuffer *converted_buffer, *inbuf;
  GstVideoInfo *out_info = &vagg->info;
#ifndef G_DISABLE_ASSERT
  gint n;
#endif
  gint v, views;
  gint valid_views = 0;
  GList *walk;

  inbuf = gst_buffer_new ();
  walk = GST_ELEMENT (mixer)->sinkpads;
  while (walk) {
    GstGLStereoMixPad *pad = walk->data;
    GstMemory *in_mem;

    GST_LOG_OBJECT (mixer, "Handling frame %d", valid_views);

    if (!pad || !pad->current_buffer) {
      GST_DEBUG ("skipping texture, null frame");
      walk = g_list_next (walk);
      continue;
    }

    in_mem = gst_buffer_get_memory (pad->current_buffer, 0);

    GST_LOG_OBJECT (mixer,
        "Appending memory %" GST_PTR_FORMAT " to intermediate buffer", in_mem);
    /* Appending the memory to a second buffer locks it
     * exclusively a second time, which marks it for
     * copy-on-write. The ref keeps the memory alive, but we
     * also add a parent_buffer_meta to prevent the input
     * buffer from returning to any buffer pool it might
     * belong to.
     */
    gst_buffer_append_memory (inbuf, in_mem);
    /* Use parent buffer meta to keep input buffer alive */
    gst_buffer_add_parent_buffer_meta (inbuf, pad->current_buffer);

    valid_views++;
    walk = g_list_next (walk);
  }

  if (mixer->mix_info.views != valid_views) {
    GST_WARNING_OBJECT (mixer, "Not enough input views to process");
    gst_buffer_unref (inbuf);
    return FALSE;
  }

  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
      GST_VIDEO_MULTIVIEW_MODE_SEPARATED)
    views = out_info->views;
  else
    views = 1;

  if (gst_gl_view_convert_submit_input_buffer (mixer->viewconvert,
          FALSE, inbuf) != GST_FLOW_OK)
    return FALSE;

  /* Clear any existing buffers, just in case */
  gst_buffer_replace (&mixer->primary_out, NULL);
  gst_buffer_replace (&mixer->auxilliary_out, NULL);

  if (gst_gl_view_convert_get_output (mixer->viewconvert,
          &mixer->primary_out) != GST_FLOW_OK)
    return FALSE;

  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
      GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
    if (gst_gl_view_convert_get_output (mixer->viewconvert,
            &mixer->auxilliary_out) != GST_FLOW_OK)
      return FALSE;
  }

  if (mixer->primary_out == NULL)
    return FALSE;

  converted_buffer = mixer->primary_out;

#ifndef G_DISABLE_ASSERT
  n = gst_buffer_n_memory (converted_buffer);
  g_assert (n == GST_VIDEO_INFO_N_PLANES (out_info) * views);
#endif

  for (v = 0; v < views; v++) {
    gst_buffer_add_video_meta_full (converted_buffer, v,
        GST_VIDEO_INFO_FORMAT (out_info),
        GST_VIDEO_INFO_WIDTH (out_info),
        GST_VIDEO_INFO_HEIGHT (out_info),
        GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset, out_info->stride);
    if (mixer->auxilliary_out) {
      gst_buffer_add_video_meta_full (mixer->auxilliary_out, v,
          GST_VIDEO_INFO_FORMAT (out_info),
          GST_VIDEO_INFO_WIDTH (out_info),
          GST_VIDEO_INFO_HEIGHT (out_info),
          GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset,
          out_info->stride);
    }
  }

  return TRUE;
}
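The append-plus-parent-meta pattern in the loop above is useful on its own: the appended GstMemory keeps the pixel data alive, while the GstParentBufferMeta pins the originating buffer so it cannot return to its pool while the aggregate buffer exists. A minimal standalone sketch:

/* Minimal sketch: move the first memory of an input buffer into an
 * aggregate buffer while keeping the input buffer (and its pool) pinned. */
static void
example_append_and_pin (GstBuffer * aggregate, GstBuffer * input)
{
  GstMemory *mem = gst_buffer_get_memory (input, 0);

  gst_buffer_append_memory (aggregate, mem);
  gst_buffer_add_parent_buffer_meta (aggregate, input);
}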
Example #7
gboolean
gst_gl_mixer_process_textures (GstGLMixer * mix, GstBuffer * outbuf)
{
  guint i;
  GList *walk;
  guint out_tex;
  gboolean res = TRUE;
  guint array_index = 0;
  GstVideoFrame out_frame;
  gboolean out_gl_wrapped = FALSE;
  GstElement *element = GST_ELEMENT (mix);
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mix);
  GstGLMixerClass *mix_class = GST_GL_MIXER_GET_CLASS (mix);
  GstGLMixerPrivate *priv = mix->priv;

  GST_TRACE ("Processing buffers");

  if (!gst_video_frame_map (&out_frame, &vagg->info, outbuf,
          GST_MAP_WRITE | GST_MAP_GL)) {
    return FALSE;
  }

  if (gst_is_gl_memory (out_frame.map[0].memory)) {
    out_tex = *(guint *) out_frame.data[0];
  } else {
    GST_INFO ("Output Buffer does not contain correct memory, "
        "attempting to wrap for download");

    out_tex = mix->out_tex_id;

    if (!mix->download)
      mix->download = gst_gl_download_new (mix->context);

    gst_gl_download_set_format (mix->download, &out_frame.info);
    out_gl_wrapped = TRUE;
  }

  GST_OBJECT_LOCK (mix);
  walk = element->sinkpads;

  i = mix->frames->len;
  g_ptr_array_set_size (mix->frames, element->numsinkpads);
  for (; i < element->numsinkpads; i++)
    mix->frames->pdata[i] = g_slice_new0 (GstGLMixerFrameData);
  while (walk) {
    GstGLMixerPad *pad = GST_GL_MIXER_PAD (walk->data);
    GstVideoAggregatorPad *vaggpad = walk->data;
    GstGLMixerFrameData *frame;

    frame = g_ptr_array_index (mix->frames, array_index);
    frame->pad = pad;
    frame->texture = 0;

    walk = g_list_next (walk);

    if (vaggpad->buffer != NULL) {
      guint in_tex;

      if (!pad->upload) {
        pad->upload = gst_gl_upload_new (mix->context);

        gst_gl_upload_set_format (pad->upload, &vaggpad->info);
      }

      if (!gst_gl_upload_perform_with_buffer (pad->upload,
              vaggpad->buffer, &in_tex)) {
        ++array_index;
        pad->mapped = FALSE;
        continue;
      }
      pad->mapped = TRUE;

      frame->texture = in_tex;
    }
    ++array_index;
  }

  g_mutex_lock (&priv->gl_resource_lock);
  if (!priv->gl_resource_ready)
    g_cond_wait (&priv->gl_resource_cond, &priv->gl_resource_lock);

  if (!priv->gl_resource_ready) {
    g_mutex_unlock (&priv->gl_resource_lock);
    GST_ERROR_OBJECT (mix,
        "fbo used to render can't be created, do not run process_textures");
    res = FALSE;
    goto out;
  }

  mix_class->process_textures (mix, mix->frames, out_tex);

  g_mutex_unlock (&priv->gl_resource_lock);

  if (out_gl_wrapped) {
    if (!gst_gl_download_perform_with_data (mix->download, out_tex,
            out_frame.data)) {
      GST_ELEMENT_ERROR (mix, RESOURCE, NOT_FOUND, ("%s",
              "Failed to download video frame"), (NULL));
      res = FALSE;
      goto out;
    }
  }

out:
  i = 0;
  walk = GST_ELEMENT (mix)->sinkpads;
  while (walk) {
    GstGLMixerPad *pad = GST_GL_MIXER_PAD (walk->data);

    if (pad->mapped)
      gst_gl_upload_release_buffer (pad->upload);

    pad->mapped = FALSE;
    walk = g_list_next (walk);
    i++;
  }
  GST_OBJECT_UNLOCK (mix);

  gst_video_frame_unmap (&out_frame);

  return res;
}
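The out_tex = *(guint *) out_frame.data[0] line above relies on a GST_MAP_GL mapping exposing the texture id through the frame's data pointer rather than pixel data. A hedged sketch of that pattern in isolation (helper name hypothetical):

/* Hypothetical sketch: map a GL-backed buffer and read the texture id of
 * its first plane. */
static gboolean
example_get_texture_id (GstVideoInfo * info, GstBuffer * buffer,
    guint * tex_id)
{
  GstVideoFrame frame;

  if (!gst_video_frame_map (&frame, info, buffer, GST_MAP_READ | GST_MAP_GL))
    return FALSE;

  *tex_id = *(guint *) frame.data[0];
  gst_video_frame_unmap (&frame);

  return TRUE;
}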
Example #8
static gboolean
gst_gl_mixer_decide_allocation (GstGLMixer * mix, GstQuery * query)
{
  GstGLMixerClass *mixer_class = GST_GL_MIXER_GET_CLASS (mix);
  GstBufferPool *pool = NULL;
  GstStructure *config;
  GstCaps *caps;
  guint min, max, size;
  gboolean update_pool;
  GError *error = NULL;
  guint idx;
  guint out_width, out_height;
  GstGLContext *other_context = NULL;
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mix);

  gst_query_parse_allocation (query, &caps, NULL);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    update_pool = TRUE;
  } else {
    GstVideoInfo vinfo;

    gst_video_info_init (&vinfo);
    gst_video_info_from_caps (&vinfo, caps);
    size = vinfo.size;
    min = max = 0;
    update_pool = FALSE;
  }

  if (!gst_gl_ensure_display (mix, &mix->display))
    return FALSE;

  if (gst_query_find_allocation_meta (query,
          GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx)) {
    GstGLContext *context;
    const GstStructure *upload_meta_params;
    gpointer handle;
    gchar *type;
    gchar *apis;

    gst_query_parse_nth_allocation_meta (query, idx, &upload_meta_params);
    if (upload_meta_params) {
      if (gst_structure_get (upload_meta_params, "gst.gl.GstGLContext",
              GST_GL_TYPE_CONTEXT, &context, NULL) && context) {
        GstGLContext *old = mix->context;

        mix->context = context;
        if (old)
          gst_object_unref (old);
      } else if (gst_structure_get (upload_meta_params, "gst.gl.context.handle",
              G_TYPE_POINTER, &handle, "gst.gl.context.type", G_TYPE_STRING,
              &type, "gst.gl.context.apis", G_TYPE_STRING, &apis, NULL)
          && handle) {
        GstGLPlatform platform = GST_GL_PLATFORM_NONE;
        GstGLAPI gl_apis;

        GST_DEBUG ("got GL context handle 0x%p with type %s and apis %s",
            handle, type, apis);

        platform = gst_gl_platform_from_string (type);
        gl_apis = gst_gl_api_from_string (apis);

        if (gl_apis && platform)
          other_context =
              gst_gl_context_new_wrapped (mix->display, (guintptr) handle,
              platform, gl_apis);
      }
    }
  }

  if (!mix->context) {
    mix->context = gst_gl_context_new (mix->display);
    if (!gst_gl_context_create (mix->context, other_context, &error))
      goto context_error;
  }

  out_width = GST_VIDEO_INFO_WIDTH (&vagg->info);
  out_height = GST_VIDEO_INFO_HEIGHT (&vagg->info);

  g_mutex_lock (&mix->priv->gl_resource_lock);
  mix->priv->gl_resource_ready = FALSE;
  if (mix->fbo) {
    gst_gl_context_del_fbo (mix->context, mix->fbo, mix->depthbuffer);
    mix->fbo = 0;
    mix->depthbuffer = 0;
  }

  if (!gst_gl_context_gen_fbo (mix->context, out_width, out_height,
          &mix->fbo, &mix->depthbuffer)) {
    g_cond_signal (&mix->priv->gl_resource_cond);
    g_mutex_unlock (&mix->priv->gl_resource_lock);
    goto context_error;
  }

  if (mix->out_tex_id)
    gst_gl_context_del_texture (mix->context, &mix->out_tex_id);
  gst_gl_context_gen_texture (mix->context, &mix->out_tex_id,
      GST_VIDEO_FORMAT_RGBA, out_width, out_height);

  if (mixer_class->set_caps)
    mixer_class->set_caps (mix, caps);

  mix->priv->gl_resource_ready = TRUE;
  g_cond_signal (&mix->priv->gl_resource_cond);
  g_mutex_unlock (&mix->priv->gl_resource_lock);

  if (!pool)
    pool = gst_gl_buffer_pool_new (mix->context);

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);

  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  gst_buffer_pool_set_config (pool, config);

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return TRUE;

context_error:
  {
    GST_ELEMENT_ERROR (mix, RESOURCE, NOT_FOUND, ("%s", error->message),
        (NULL));
    return FALSE;
  }
}
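Stripped of the GL-specific FBO and texture setup, the allocation-query handling above follows the usual decide_allocation pattern: reuse the first proposed pool if one exists, otherwise create one, then push the final configuration back into the query. A condensed sketch using a plain video buffer pool instead of the GL pool:

/* Condensed sketch of the pool negotiation above, without the GL parts. */
static gboolean
example_decide_allocation (GstQuery * query)
{
  GstBufferPool *pool = NULL;
  GstStructure *config;
  GstCaps *caps;
  guint size, min, max;
  gboolean update_pool;

  gst_query_parse_allocation (query, &caps, NULL);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update_pool = TRUE;
  } else {
    GstVideoInfo vinfo;

    gst_video_info_from_caps (&vinfo, caps);
    size = vinfo.size;
    min = max = 0;
    update_pool = FALSE;
  }

  if (!pool)
    pool = gst_video_buffer_pool_new ();

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
  gst_buffer_pool_set_config (pool, config);

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return TRUE;
}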
Example #9
gboolean
gst_gl_mixer_process_textures (GstGLMixer * mix, GstBuffer * outbuf)
{
    guint i;
    GList *walk;
    guint out_tex, out_tex_target;
    gboolean res = TRUE;
    guint array_index = 0;
    GstVideoFrame out_frame;
    GstElement *element = GST_ELEMENT (mix);
    GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mix);
    GstGLMixerClass *mix_class = GST_GL_MIXER_GET_CLASS (mix);
    GstGLMixerPrivate *priv = mix->priv;
    gboolean to_download =
        gst_caps_features_is_equal (GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY,
                                    gst_caps_get_features (mix->out_caps, 0));
    GstMapFlags out_map_flags = GST_MAP_WRITE;

    GST_TRACE ("Processing buffers");

    to_download |= !gst_is_gl_memory (gst_buffer_peek_memory (outbuf, 0));

    if (!to_download)
        out_map_flags |= GST_MAP_GL;

    if (!gst_video_frame_map (&out_frame, &vagg->info, outbuf, out_map_flags)) {
        return FALSE;
    }

    if (!to_download) {
        out_tex = *(guint *) out_frame.data[0];
        out_tex_target =
            ((GstGLMemory *) gst_buffer_peek_memory (outbuf, 0))->tex_target;
    } else {
        GST_INFO ("Output Buffer does not contain correct memory, "
                  "attempting to wrap for download");

        if (!mix->download)
            mix->download = gst_gl_download_new (mix->context);

        gst_gl_download_set_format (mix->download, &out_frame.info);
        out_tex = mix->out_tex_id;
        out_tex_target = GL_TEXTURE_2D;
    }

    GST_OBJECT_LOCK (mix);
    walk = element->sinkpads;

    i = mix->frames->len;
    g_ptr_array_set_size (mix->frames, element->numsinkpads);
    for (; i < element->numsinkpads; i++)
        mix->frames->pdata[i] = g_slice_new0 (GstGLMixerFrameData);
    while (walk) {
        GstGLMixerPad *pad = GST_GL_MIXER_PAD (walk->data);
        GstGLMixerPadClass *pad_class = GST_GL_MIXER_PAD_GET_CLASS (pad);
        GstVideoAggregatorPad *vaggpad = walk->data;
        GstGLMixerFrameData *frame;

        frame = g_ptr_array_index (mix->frames, array_index);
        frame->pad = pad;
        frame->texture = 0;

        walk = g_list_next (walk);

        if (vaggpad->buffer != NULL) {
            g_assert (pad_class->upload_buffer);

            if (pad->gl_buffer)
                gst_buffer_unref (pad->gl_buffer);
            pad->gl_buffer = pad_class->upload_buffer (mix, frame, vaggpad->buffer);

            GST_DEBUG_OBJECT (pad,
                              "uploaded buffer %" GST_PTR_FORMAT " from buffer %" GST_PTR_FORMAT,
                              pad->gl_buffer, vaggpad->buffer);
        }

        ++array_index;
    }

    g_mutex_lock (&priv->gl_resource_lock);
    if (!priv->gl_resource_ready)
        g_cond_wait (&priv->gl_resource_cond, &priv->gl_resource_lock);

    if (!priv->gl_resource_ready) {
        g_mutex_unlock (&priv->gl_resource_lock);
        GST_ERROR_OBJECT (mix,
                          "fbo used to render can't be created, do not run process_textures");
        res = FALSE;
        goto out;
    }

    mix_class->process_textures (mix, mix->frames, out_tex);

    g_mutex_unlock (&priv->gl_resource_lock);

    if (to_download) {
        if (!gst_gl_download_perform_with_data (mix->download,
                                                out_tex, out_tex_target, out_frame.data)) {
            GST_ELEMENT_ERROR (mix, RESOURCE, NOT_FOUND, ("%s",
                               "Failed to download video frame"), (NULL));
            res = FALSE;
            goto out;
        }
    }

out:
    i = 0;
    walk = GST_ELEMENT (mix)->sinkpads;
    while (walk) {
        GstGLMixerPad *pad = GST_GL_MIXER_PAD (walk->data);

        if (pad->upload)
            gst_gl_upload_release_buffer (pad->upload);

        walk = g_list_next (walk);
        i++;
    }
    GST_OBJECT_UNLOCK (mix);

    gst_video_frame_unmap (&out_frame);

    return res;
}