Example #1
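/* DRI image loader callback for the Android platform: refresh the surface's
 * buffers and report which buffer images are available to the driver. */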
static int
droid_image_get_buffers(__DRIdrawable *driDrawable,
                  unsigned int format,
                  uint32_t *stamp,
                  void *loaderPrivate,
                  uint32_t buffer_mask,
                  struct __DRIimageList *images)
{
   struct dri2_egl_surface *dri2_surf = loaderPrivate;

   images->image_mask = 0;

   if (update_buffers(dri2_surf) < 0)
      return 0;

   if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
      /*
       * We don't support front buffers and GLES doesn't require them for
       * window surfaces, but some DRI drivers will request them anyway.
       * We just ignore such requests, as the other platform backends do.
       */
   }

   if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
      if (get_back_bo(dri2_surf) < 0)
         return 0;

      images->back = dri2_surf->dri_image;
      images->image_mask |= __DRI_IMAGE_BUFFER_BACK;
   }

   return 1;
}
Example #2
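/* DRI2 loader callback: refresh the surface, turn the requested attachments
 * into __DRIbuffers, and return the buffer array together with its size. */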
static __DRIbuffer *
droid_get_buffers_with_format(__DRIdrawable * driDrawable,
			     int *width, int *height,
			     unsigned int *attachments, int count,
			     int *out_count, void *loaderPrivate)
{
   struct dri2_egl_surface *dri2_surf = loaderPrivate;
   struct dri2_egl_display *dri2_dpy =
      dri2_egl_display(dri2_surf->base.Resource.Display);
   int i;

   if (update_buffers(dri2_surf) < 0)
      return NULL;

   dri2_surf->buffer_count =
      droid_get_buffers_parse_attachments(dri2_surf, attachments, count);

   if (width)
      *width = dri2_surf->base.Width;
   if (height)
      *height = dri2_surf->base.Height;

   *out_count = dri2_surf->buffer_count;

   return dri2_surf->buffers;
}
Example #3
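// Stop any active Xv video, drop all image buffers, ungrab the Xv port and
// reset the bitmap's buffer bookkeeping.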
int BC_Bitmap::delete_data()
{
//printf("BC_Bitmap::delete_data 1\n");
	if( last_pixmap_used && xv_portid >= 0 )
		XvStopVideo(top_level->display, xv_portid, last_pixmap);
	update_buffers(0);
	if( xv_portid >= 0 )
		XvUngrabPort(top_level->display, xv_portid, CurrentTime);
	last_pixmap_used = 0;
	active_bfr = 0;
	buffer_count = 0;
	max_buffer_count = 0;
	return 0;
}
Example #4
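// Register a sprite with the engine: claim the next sprite slot, create its
// texture from the given file, point the sprite at its region of the shared
// sprite_points array, then call update_buffers() for that slot.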
int Engine_register_sprite(Sprite *sprite, const char *filename)
{
  if (sprite_count >= MAX_SPRITES) {
    Log("ERROR: unable to register new sprite, max reached.");
    return 0;
  }

  int sprite_index = ++sprite_count;
  if (create_texture(filename, sprite_index) == -1) {
    Log("Unable to link sprite and texture");
    return 0;
  }

  // Give the current sprite a spot in the sprite points array.
  sprite->points = &sprite_points[sprite_index * SPRITE_SIZE];
  update_buffers(sprite_index);
  return 1;
}
Example #5
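// Return the active buffer, taking one from the avail list when possible or
// growing the buffer pool / allocating a new buffer otherwise.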
BC_BitmapImage *BC_Bitmap::cur_bfr()
{
	if( !active_bfr ) {
		avail_lock->lock("BC_Bitmap::cur_bfr 1");
		if( (!use_shm || top_level->is_running()) &&
		     (active_bfr=avail.first) != 0 )
			avail.remove_pointer(active_bfr);
		else {
			update_buffers(buffer_count+1, 0);
			if( (active_bfr=avail.first) != 0 )
				avail.remove_pointer(active_bfr);
			else
				active_bfr = new_buffer(type, -1);
		}
		avail_lock->unlock();
	}
	return active_bfr;
}
Example #6
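// Pick the bitmap image type (shared vs. unshared memory, Xv vs. plain X) and
// the initial buffer count, shrinking the maximum buffer count for large frames.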
int BC_Bitmap::allocate_data()
{
	int count = 1;
	max_buffer_count = MAX_BITMAP_BUFFERS;
	if(use_shm) { // Use shared memory.
		int bsz = best_buffer_size();
		if( bsz >= 0x800000 ) max_buffer_count = 2;
		else if( bsz >= 0x400000 ) max_buffer_count /= 8;
		else if( bsz >= 0x100000 ) max_buffer_count /= 4;
		else if( bsz >= 0x10000 ) max_buffer_count /= 2;
		type = hardware_scaling() ? bmXvShmImage : bmXShmImage;
		count = MIN_BITMAP_BUFFERS;
	}
	else // use unshared memory.
		type = hardware_scaling() ? bmXvImage : bmXImage;
	update_buffers(count);
	return 0;
}
Example #7
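/* Simpler variant of the DRI image loader callback: always fetch the back
 * buffer and report it as the only available image. */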
static int
droid_image_get_buffers(__DRIdrawable *driDrawable,
                  unsigned int format,
                  uint32_t *stamp,
                  void *loaderPrivate,
                  uint32_t buffer_mask,
                  struct __DRIimageList *images)
{
   struct dri2_egl_surface *dri2_surf = loaderPrivate;

   if (update_buffers(dri2_surf) < 0)
      return 0;

   if (get_back_bo(dri2_surf) < 0) {
      _eglError(EGL_BAD_PARAMETER, "get_back_bo");
      return 0;
   }

   images->image_mask = __DRI_IMAGE_BUFFER_BACK;
   images->back = dri2_surf->dri_image;

   return 1;
}
Example #8
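// Draw a Video3D node: update the stream buffers, warp each depth camera layer
// into per-layer render targets, then blend all layers in a full-screen pass.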
void Video3DUberShader::draw(RenderContext const& ctx,
                             std::string const& ksfile_name,
                             std::string const& material_name,
                             scm::math::mat4 const& model_matrix,
                             scm::math::mat4 const& normal_matrix,
                             Frustum const& /*frustum*/,
                             View const& view) const
{
  if (!GeometryDatabase::instance()->is_supported(ksfile_name) ||
      !MaterialDatabase::instance()->is_supported(material_name)) {
    gua::Logger::LOG_WARNING << "Video3DUberShader::draw(): No such video or material: " << ksfile_name << ", " << material_name << std::endl;
    return;
  }

  auto video3d_ressource = std::static_pointer_cast<Video3DRessource>(GeometryDatabase::instance()->lookup(ksfile_name));
  auto material          = MaterialDatabase::instance()->lookup(material_name);

  if (!video3d_ressource || !material) {
    gua::Logger::LOG_WARNING << "Video3DUberShader::draw(): Invalid video or material." << std::endl;
    return;
  }

  // update stream data
  video3d_ressource->update_buffers(ctx);

  // make sure ressources are on the GPU
  upload_to(ctx);
  {
    // single texture only
    scm::gl::context_all_guard guard(ctx.render_context);

    ctx.render_context->set_rasterizer_state(no_bfc_rasterizer_state_);
    ctx.render_context->set_depth_stencil_state(depth_stencil_state_warp_pass_);

    // set uniforms
    ctx.render_context->bind_texture(video3d_ressource->depth_array(ctx), nearest_sampler_state_, 0);
    get_program(warp_pass)->get_program(ctx)->uniform_sampler("depth_video3d_texture", 0);


    get_program(warp_pass)->set_uniform(ctx, normal_matrix, "gua_normal_matrix");
    get_program(warp_pass)->set_uniform(ctx, model_matrix, "gua_model_matrix");
    get_program(warp_pass)->set_uniform(ctx, int(1), "bbxclip");

    auto bbox(video3d_ressource->get_bounding_box());
    get_program(warp_pass)->set_uniform(ctx, bbox.min, "bbx_min");
    get_program(warp_pass)->set_uniform(ctx, bbox.max, "bbx_max");

    // pre passes
    for (unsigned layer = 0; layer != video3d_ressource->number_of_cameras(); ++layer)
    {
      // configure fbo
      warp_result_fbo_->clear_attachments();
      warp_result_fbo_->attach_depth_stencil_buffer(warp_depth_result_, 0, layer);
      warp_result_fbo_->attach_color_buffer(0, warp_color_result_, 0, layer);

      // bind and clear fbo
      ctx.render_context->set_frame_buffer(warp_result_fbo_);
      ctx.render_context->clear_depth_stencil_buffer(warp_result_fbo_);
      ctx.render_context->clear_color_buffer(warp_result_fbo_, 0, scm::math::vec4f(0.0f, 0.0f, 0.0f, 0.0f));
      ctx.render_context->set_viewport(scm::gl::viewport(scm::math::vec2ui(0,0), warp_color_result_->dimensions()));

      // set uniforms
      get_program(warp_pass)->set_uniform(ctx, video3d_ressource->calibration_file(layer).getTexSizeInvD(), "tex_size_inv");
      get_program(warp_pass)->set_uniform(ctx, int(layer), "layer");


      if (material && video3d_ressource)
      {
        get_program(warp_pass)->set_uniform(ctx, video3d_ressource->calibration_file(layer).getImageDToEyeD(), "image_d_to_eye_d");
        get_program(warp_pass)->set_uniform(ctx, video3d_ressource->calibration_file(layer).getEyeDToWorld(), "eye_d_to_world");
        get_program(warp_pass)->set_uniform(ctx, video3d_ressource->calibration_file(layer).getEyeDToEyeRGB(), "eye_d_to_eye_rgb");
        get_program(warp_pass)->set_uniform(ctx, video3d_ressource->calibration_file(layer).getEyeRGBToImageRGB(), "eye_rgb_to_image_rgb");

        ctx.render_context->bind_texture(video3d_ressource->cv_xyz(ctx,layer), linear_sampler_state_, 1);
        get_program(warp_pass)->get_program(ctx)->uniform_sampler("cv_xyz", 1);

        ctx.render_context->bind_texture(video3d_ressource->cv_uv(ctx,layer), linear_sampler_state_, 2);
        get_program(warp_pass)->get_program(ctx)->uniform_sampler("cv_uv", 2);

        get_program(warp_pass)->set_uniform(ctx, video3d_ressource->calibration_file(layer).cv_min_d, "cv_min_d");
        get_program(warp_pass)->set_uniform(ctx, video3d_ressource->calibration_file(layer).cv_max_d, "cv_max_d");

        get_program(warp_pass)->use(ctx);
        {
          video3d_ressource->draw(ctx);
        }
        get_program(warp_pass)->unuse(ctx);
      }

      ctx.render_context->reset_framebuffer();
    }
  }

  {
    // single texture only
    scm::gl::context_all_guard guard(ctx.render_context);

    ctx.render_context->set_depth_stencil_state(depth_stencil_state_warp_pass_);

    // second pass
    get_program(blend_pass)->use(ctx);
    {
      if (material && video3d_ressource)
      {
        set_uniform(ctx, material->get_id(), "gua_material_id");
        set_uniform(ctx, normal_matrix, "gua_normal_matrix");
        set_uniform(ctx, model_matrix, "gua_model_matrix");

        // needs to be multiplied with scene scaling
        set_uniform(ctx, 0.075f, "epsilon");
        set_uniform(ctx, int(video3d_ressource->number_of_cameras()), "numlayers");
        get_program(blend_pass)->set_uniform(ctx, int(material_name == default_video_material_name()), "using_default_video_material");
        get_program(blend_pass)->set_uniform(ctx, int(video3d_ressource->do_overwrite_normal()), "overwrite_normal");
        get_program(blend_pass)->set_uniform(ctx, video3d_ressource->get_overwrite_normal(), "o_normal");

        ctx.render_context->bind_texture(warp_color_result_, nearest_sampler_state_, 0);
        get_program(blend_pass)->get_program(ctx)->uniform_sampler("quality_texture", 0);

        ctx.render_context->bind_texture(warp_depth_result_, nearest_sampler_state_, 1);
        get_program(blend_pass)->get_program(ctx)->uniform_sampler("depth_texture", 1);

        ctx.render_context->bind_texture(video3d_ressource->color_array(ctx), linear_sampler_state_, 2);
        get_program(blend_pass)->get_program(ctx)->uniform_sampler("video_color_texture", 2);

        fullscreen_quad_->draw(ctx.render_context);
      }
    }
    get_program(blend_pass)->unuse(ctx);
  }
}
Example #9
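// Render all SPointsNode objects for the current view: push the per-view
// matrix package to each resource, update its buffers, and draw it with the
// material-dependent point shader.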
void SPointsRenderer::render(Pipeline& pipe,
                             PipelinePassDescription const& desc) {

  ///////////////////////////////////////////////////////////////////////////
  //  retrieve current view state
  ///////////////////////////////////////////////////////////////////////////
  auto& scene = *pipe.current_viewstate().scene;
  auto const& camera = pipe.current_viewstate().camera;
  // auto const& frustum = pipe.current_viewstate().frustum;
  auto& target = *pipe.current_viewstate().target;

  auto const& ctx(pipe.get_context());


  if (!initialized_) {
    initialized_ = true;
    points_rasterizer_state_ = ctx.render_device
      ->create_rasterizer_state(scm::gl::FILL_SOLID,
                                scm::gl::CULL_NONE,
                                scm::gl::ORIENT_CCW,
                                false,
                                false,
                                0.0,
                                false,
                                false,
                                scm::gl::point_raster_state(true));
  }

  auto objects(scene.nodes.find(std::type_index(typeid(node::SPointsNode))));
  int view_id(camera.config.get_view_id());


  if (objects != scene.nodes.end() && objects->second.size() > 0) {

    float last_known_point_size = std::numeric_limits<float>::max();
    for (auto& o : objects->second) {

      auto spoints_node(reinterpret_cast<node::SPointsNode*>(o));
      auto spoints_desc(spoints_node->get_spoints_description());

      if (!GeometryDatabase::instance()->contains(spoints_desc)) {
        gua::Logger::LOG_WARNING << "SPointsRenderer::draw(): No such spoints."
                                 << spoints_desc << ", " << std::endl;
        continue;
      }

      auto spoints_resource = std::static_pointer_cast<SPointsResource>(
          GeometryDatabase::instance()->lookup(spoints_desc));
      if (!spoints_resource) {
        gua::Logger::LOG_WARNING << "SPointsRenderer::draw(): Invalid spoints."
                                 << std::endl;
        continue;
      }


      auto const& model_matrix(spoints_node->get_cached_world_transform());
      auto normal_matrix(scm::math::transpose(
          scm::math::inverse(spoints_node->get_cached_world_transform())));
      auto view_matrix(pipe.current_viewstate().frustum.get_view());


      scm::math::mat4f mv_matrix = scm::math::mat4f(view_matrix) * scm::math::mat4f(model_matrix);
      scm::math::mat4f projection_matrix = scm::math::mat4f(pipe.current_viewstate().frustum.get_projection());



      const float scaling = scm::math::length(
          (model_matrix * view_matrix) * scm::math::vec4d(1.0, 0.0, 0.0, 0.0));


      spoints::matrix_package current_package;
      memcpy((char*) &current_package, (char*) mv_matrix.data_array, 16 * sizeof(float) );
      memcpy( ((char*) &current_package) +  16 * sizeof(float), (char*) projection_matrix.data_array, 16 * sizeof(float) );
      
      scm::math::vec2ui const& render_target_dims = camera.config.get_resolution();

      current_package.res_xy[0] = render_target_dims.x;
      current_package.res_xy[1] = render_target_dims.y;

      auto camera_id = pipe.current_viewstate().viewpoint_uuid;
      //auto view_direction = pipe.current_viewstate().view_direction;
      //std::size_t gua_view_id = (camera_id << 8) | (std::size_t(view_direction));

      bool is_camera = (!pipe.current_viewstate().shadow_mode);
      bool stereo_mode = (pipe.current_viewstate().camera.config.get_enable_stereo());
      std::size_t view_uuid = camera_id;

      spoints::camera_matrix_package cm_package;
      cm_package.k_package.is_camera = is_camera;
      cm_package.k_package.view_uuid = view_uuid;
      cm_package.k_package.stereo_mode = stereo_mode;
      cm_package.k_package.framecount = pipe.get_context().framecount;
      cm_package.k_package.render_context_id = pipe.get_context().id;
      cm_package.mat_package = current_package;

      spoints_resource->push_matrix_package(cm_package);
      spoints_resource->update_buffers(pipe.get_context(), pipe);
      //auto const& spoints_data = spointsdata_[spoints_resource->uuid()];

      // get material dependent shader
      std::shared_ptr<ShaderProgram> current_shader;

      MaterialShader* current_material =
          spoints_node->get_material()->get_shader();
      if (current_material) {

        auto shader_iterator = programs_.find(current_material);
        if (shader_iterator != programs_.end()) {
          current_shader = shader_iterator->second;
        } else {
          auto smap = global_substitution_map_;
          for (const auto& i : current_material->generate_substitution_map())
            smap[i.first] = i.second;

          current_shader = std::make_shared<ShaderProgram>();
          current_shader->set_shaders(
              program_stages_, std::list<std::string>(), false, smap);
          programs_[current_material] = current_shader;
        }
      } else {
        Logger::LOG_WARNING << "SPointsPass::render(): Cannot find material: "
                            << spoints_node->get_material()->get_shader_name()
                            << std::endl;
        // No shader was found for this material, so skip the node instead of
        // dereferencing a null shader below.
        continue;
      }

      current_shader->use(ctx);
    
      current_shader->set_uniform(
        ctx,
        scm::math::mat4f(model_matrix),
        "kinect_model_matrix");

      float const screen_space_point_size = spoints_node->get_screen_space_point_size();

      current_shader->set_uniform(
        ctx,
        screen_space_point_size,
        "point_size");

      bool write_depth = true;
      target.bind(ctx, write_depth);
      target.set_viewport(ctx);

      ctx.render_context->set_rasterizer_state(points_rasterizer_state_);
      ctx.render_context->apply();

      spoints_resource->draw(ctx);

      target.unbind(ctx);
    }

  
  }

}