Code example #1
File: mixer.c Project: Unr34ler/mesa
/**
 * Create a VdpVideoMixer.
 */
VdpStatus
vlVdpVideoMixerCreate(VdpDevice device,
                      uint32_t feature_count,
                      VdpVideoMixerFeature const *features,
                      uint32_t parameter_count,
                      VdpVideoMixerParameter const *parameters,
                      void const *const *parameter_values,
                      VdpVideoMixer *mixer)
{
   vlVdpVideoMixer *vmixer = NULL;
   VdpStatus ret;
   struct pipe_screen *screen;
   uint32_t max_2d_texture_level;
   unsigned max_size, i;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;
   screen = dev->vscreen->pscreen;

   vmixer = CALLOC(1, sizeof(vlVdpVideoMixer));
   if (!vmixer)
      return VDP_STATUS_RESOURCES;

   DeviceReference(&vmixer->device, dev);

   pipe_mutex_lock(dev->mutex);

   vl_compositor_init_state(&vmixer->cstate, dev->context);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, true, &vmixer->csc);
   if (!debug_get_bool_option("G3DVL_NO_CSC", FALSE))
      vl_compositor_set_csc_matrix(&vmixer->cstate, (const vl_csc_matrix *)&vmixer->csc);

   *mixer = vlAddDataHTAB(vmixer);
   if (*mixer == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   ret = VDP_STATUS_INVALID_VIDEO_MIXER_FEATURE;
   for (i = 0; i < feature_count; ++i) {
      switch (features[i]) {
      /* they are valid, but we don't support them */
      case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L1:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L2:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L3:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L4:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L5:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L6:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L7:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L8:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L9:
      case VDP_VIDEO_MIXER_FEATURE_INVERSE_TELECINE:
      case VDP_VIDEO_MIXER_FEATURE_LUMA_KEY:
         break;

      case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL:
         vmixer->deint.supported = true;
         break;

      case VDP_VIDEO_MIXER_FEATURE_SHARPNESS:
         vmixer->sharpness.supported = true;
         break;

      case VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION:
         vmixer->noise_reduction.supported = true;
         break;

      default: goto no_params;
      }
   }

   vmixer->chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
   ret = VDP_STATUS_INVALID_VIDEO_MIXER_PARAMETER;
   for (i = 0; i < parameter_count; ++i) {
      switch (parameters[i]) {
      case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH:
         vmixer->video_width = *(uint32_t*)parameter_values[i];
         break;
      case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT:
         vmixer->video_height = *(uint32_t*)parameter_values[i];
         break;
      case VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE:
         vmixer->chroma_format = ChromaToPipe(*(VdpChromaType*)parameter_values[i]);
         break;
      case VDP_VIDEO_MIXER_PARAMETER_LAYERS:
         vmixer->max_layers = *(uint32_t*)parameter_values[i];
         break;
      default: goto no_params;
      }
   }
   ret = VDP_STATUS_INVALID_VALUE;
   if (vmixer->max_layers > 4) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] Max layers > 4 not supported\n", vmixer->max_layers);
      goto no_params;
   }

   max_2d_texture_level = screen->get_param(screen, PIPE_CAP_MAX_TEXTURE_2D_LEVELS);
   max_size = pow(2, max_2d_texture_level-1);
   if (vmixer->video_width < 48 || vmixer->video_width > max_size) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u not valid for width\n",
                vmixer->video_width, max_size);
      goto no_params;
   }
   if (vmixer->video_height < 48 || vmixer->video_height > max_size) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u  not valid for height\n",
                vmixer->video_height, max_size);
      goto no_params;
   }
   vmixer->luma_key_min = 0.f;
   vmixer->luma_key_max = 1.f;
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;

no_params:
   vlRemoveDataHTAB(*mixer);

no_handle:
   vl_compositor_cleanup_state(&vmixer->cstate);
   pipe_mutex_unlock(dev->mutex);
   DeviceReference(&vmixer->device, NULL);
   FREE(vmixer);
   return ret;
}
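
Caller-side note: vlVdpVideoMixerCreate is the Gallium state-tracker implementation behind the public VdpVideoMixerCreate entry point. The following is a minimal, hypothetical sketch of how an application would reach it, assuming `device` and `get_proc_address` were obtained earlier from vdp_device_create_x11(); the helper name is made up for illustration.

#include <vdpau/vdpau.h>

/* Hypothetical caller-side helper: look up VdpVideoMixerCreate through
 * VdpGetProcAddress and create a mixer sized to the video stream. */
static VdpStatus
example_create_mixer(VdpDevice device, VdpGetProcAddress *get_proc_address,
                     uint32_t width, uint32_t height, VdpVideoMixer *mixer)
{
   VdpVideoMixerCreate *mixer_create;
   VdpStatus st = get_proc_address(device, VDP_FUNC_ID_VIDEO_MIXER_CREATE,
                                   (void **)&mixer_create);
   if (st != VDP_STATUS_OK)
      return st;

   /* One optional feature; the implementation above marks it as supported. */
   VdpVideoMixerFeature features[] = { VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION };

   /* Width, height and chroma type are validated in the parameter loop above. */
   VdpChromaType chroma = VDP_CHROMA_TYPE_420;
   VdpVideoMixerParameter params[] = {
      VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
      VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
      VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE
   };
   void const *param_values[] = { &width, &height, &chroma };

   return mixer_create(device, 1, features, 3, params, param_values, mixer);
}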
Code example #2
File: surface.c Project: Unr34ler/mesa
/**
 * Create a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceCreate(VdpDevice device, VdpChromaType chroma_type,
                        uint32_t width, uint32_t height,
                        VdpVideoSurface *surface)
{
   struct pipe_context *pipe;
   vlVdpSurface *p_surf;
   VdpStatus ret;

   if (!(width && height)) {
      ret = VDP_STATUS_INVALID_SIZE;
      goto inv_size;
   }

   p_surf = CALLOC(1, sizeof(vlVdpSurface));
   if (!p_surf) {
      ret = VDP_STATUS_RESOURCES;
      goto no_res;
   }

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev) {
      ret = VDP_STATUS_INVALID_HANDLE;
      goto inv_device;
   }

   DeviceReference(&p_surf->device, dev);
   pipe = dev->context;

   pipe_mutex_lock(dev->mutex);
   memset(&p_surf->templat, 0, sizeof(p_surf->templat));
   p_surf->templat.buffer_format = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERED_FORMAT
   );
   p_surf->templat.chroma_format = ChromaToPipe(chroma_type);
   p_surf->templat.width = width;
   p_surf->templat.height = height;
   p_surf->templat.interlaced = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERS_INTERLACED
   );
   if (p_surf->templat.buffer_format != PIPE_FORMAT_NONE)
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

   /* do not mandate early allocation of a video buffer */
   vlVdpVideoSurfaceClear(p_surf);
   pipe_mutex_unlock(dev->mutex);

   *surface = vlAddDataHTAB(p_surf);
   if (*surface == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   return VDP_STATUS_OK;

no_handle:
   if (p_surf->video_buffer)
      p_surf->video_buffer->destroy(p_surf->video_buffer);

inv_device:
   DeviceReference(&p_surf->device, NULL);
   FREE(p_surf);

no_res:
inv_size:
   return ret;
}
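
The corresponding public entry point for the function above is VdpVideoSurfaceCreate. A short, hedged sketch of a caller, reusing the same assumed `device` and `get_proc_address` from device setup:

#include <vdpau/vdpau.h>

/* Hypothetical sketch: create a 4:2:0 video surface through the public
 * VdpVideoSurfaceCreate entry point implemented above. */
static VdpStatus
example_create_video_surface(VdpDevice device, VdpGetProcAddress *get_proc_address,
                             uint32_t width, uint32_t height,
                             VdpVideoSurface *surface)
{
   VdpVideoSurfaceCreate *surface_create;
   VdpStatus st = get_proc_address(device, VDP_FUNC_ID_VIDEO_SURFACE_CREATE,
                                   (void **)&surface_create);
   if (st != VDP_STATUS_OK)
      return st;

   /* A zero width or height yields VDP_STATUS_INVALID_SIZE (checked above). */
   return surface_create(device, VDP_CHROMA_TYPE_420, width, height, surface);
}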
Code example #3
File: bitmap.c Project: ChristophHaag/mesa-mesa
/**
 * Create a VdpBitmapSurface.
 */
VdpStatus
vlVdpBitmapSurfaceCreate(VdpDevice device,
                         VdpRGBAFormat rgba_format,
                         uint32_t width, uint32_t height,
                         VdpBool frequently_accessed,
                         VdpBitmapSurface *surface)
{
   struct pipe_context *pipe;
   struct pipe_resource res_tmpl, *res;
   struct pipe_sampler_view sv_templ;
   VdpStatus ret;

   vlVdpBitmapSurface *vlsurface = NULL;

   if (!(width && height))
      return VDP_STATUS_INVALID_SIZE;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = dev->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   if (!surface)
      return VDP_STATUS_INVALID_POINTER;

   vlsurface = CALLOC(1, sizeof(vlVdpBitmapSurface));
   if (!vlsurface)
      return VDP_STATUS_RESOURCES;

   DeviceReference(&vlsurface->device, dev);

   memset(&res_tmpl, 0, sizeof(res_tmpl));
   res_tmpl.target = PIPE_TEXTURE_2D;
   res_tmpl.format = VdpFormatRGBAToPipe(rgba_format);
   res_tmpl.width0 = width;
   res_tmpl.height0 = height;
   res_tmpl.depth0 = 1;
   res_tmpl.array_size = 1;
   res_tmpl.bind = PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_RENDER_TARGET;
   res_tmpl.usage = frequently_accessed ? PIPE_USAGE_DYNAMIC : PIPE_USAGE_DEFAULT;

   mtx_lock(&dev->mutex);

   if (!CheckSurfaceParams(pipe->screen, &res_tmpl)) {
      ret = VDP_STATUS_RESOURCES;
      goto err_unlock;
   }

   res = pipe->screen->resource_create(pipe->screen, &res_tmpl);
   if (!res) {
      ret = VDP_STATUS_RESOURCES;
      goto err_unlock;
   }

   vlVdpDefaultSamplerViewTemplate(&sv_templ, res);
   vlsurface->sampler_view = pipe->create_sampler_view(pipe, res, &sv_templ);

   pipe_resource_reference(&res, NULL);

   if (!vlsurface->sampler_view) {
      ret = VDP_STATUS_RESOURCES;
      goto err_unlock;
   }

   mtx_unlock(&dev->mutex);

   *surface = vlAddDataHTAB(vlsurface);
   if (*surface == 0) {
      mtx_lock(&dev->mutex);
      ret = VDP_STATUS_ERROR;
      goto err_sampler;
   }

   return VDP_STATUS_OK;

err_sampler:
   pipe_sampler_view_reference(&vlsurface->sampler_view, NULL);
err_unlock:
   mtx_unlock(&dev->mutex);
   DeviceReference(&vlsurface->device, NULL);
   FREE(vlsurface);
   return ret;
}
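
Again a hedged caller-side sketch for the public VdpBitmapSurfaceCreate entry point; the RGBA format constant and the frequently_accessed choice are assumptions for illustration:

#include <vdpau/vdpau.h>

/* Hypothetical sketch: create a B8G8R8A8 bitmap surface. Passing VDP_TRUE for
 * frequently_accessed maps to PIPE_USAGE_DYNAMIC in the implementation above. */
static VdpStatus
example_create_bitmap(VdpDevice device, VdpGetProcAddress *get_proc_address,
                      uint32_t width, uint32_t height, VdpBitmapSurface *surface)
{
   VdpBitmapSurfaceCreate *bitmap_create;
   VdpStatus st = get_proc_address(device, VDP_FUNC_ID_BITMAP_SURFACE_CREATE,
                                   (void **)&bitmap_create);
   if (st != VDP_STATUS_OK)
      return st;

   return bitmap_create(device, VDP_RGBA_FORMAT_B8G8R8A8,
                        width, height, VDP_TRUE, surface);
}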
Code example #4
/**
 * Create a VdpOutputSurface.
 */
VdpStatus
vlVdpOutputSurfaceCreate(VdpDevice device,
                         VdpRGBAFormat rgba_format,
                         uint32_t width, uint32_t height,
                         VdpOutputSurface  *surface)
{
   struct pipe_context *pipe;
   struct pipe_resource res_tmpl, *res;
   struct pipe_sampler_view sv_templ;
   struct pipe_surface surf_templ;

   vlVdpOutputSurface *vlsurface = NULL;

   if (!(width && height))
      return VDP_STATUS_INVALID_SIZE;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = dev->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   vlsurface = CALLOC(1, sizeof(vlVdpOutputSurface));
   if (!vlsurface)
      return VDP_STATUS_RESOURCES;

   vlsurface->device = dev;

   memset(&res_tmpl, 0, sizeof(res_tmpl));

   res_tmpl.target = PIPE_TEXTURE_2D;
   res_tmpl.format = FormatRGBAToPipe(rgba_format);
   res_tmpl.width0 = width;
   res_tmpl.height0 = height;
   res_tmpl.depth0 = 1;
   res_tmpl.array_size = 1;
   res_tmpl.bind = PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_RENDER_TARGET;
   res_tmpl.usage = PIPE_USAGE_STATIC;

   pipe_mutex_lock(dev->mutex);
   res = pipe->screen->resource_create(pipe->screen, &res_tmpl);
   if (!res) {
      pipe_mutex_unlock(dev->mutex);
      FREE(vlsurface);
      return VDP_STATUS_ERROR;
   }

   vlVdpDefaultSamplerViewTemplate(&sv_templ, res);
   vlsurface->sampler_view = pipe->create_sampler_view(pipe, res, &sv_templ);
   if (!vlsurface->sampler_view) {
      pipe_resource_reference(&res, NULL);
      pipe_mutex_unlock(dev->mutex);
      FREE(vlsurface);
      return VDP_STATUS_ERROR;
   }

   memset(&surf_templ, 0, sizeof(surf_templ));
   surf_templ.format = res->format;
   vlsurface->surface = pipe->create_surface(pipe, res, &surf_templ);
   if (!vlsurface->surface) {
      pipe_sampler_view_reference(&vlsurface->sampler_view, NULL);
      pipe_resource_reference(&res, NULL);
      pipe_mutex_unlock(dev->mutex);
      FREE(vlsurface);
      return VDP_STATUS_ERROR;
   }

   *surface = vlAddDataHTAB(vlsurface);
   if (*surface == 0) {
      pipe_sampler_view_reference(&vlsurface->sampler_view, NULL);
      pipe_surface_reference(&vlsurface->surface, NULL);
      pipe_resource_reference(&res, NULL);
      pipe_mutex_unlock(dev->mutex);
      FREE(vlsurface);
      return VDP_STATUS_ERROR;
   }
   
   pipe_resource_reference(&res, NULL);

   vl_compositor_init_state(&vlsurface->cstate, pipe);
   vl_compositor_reset_dirty_area(&vlsurface->dirty_area);
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;
}
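
A matching hypothetical sketch for the public VdpOutputSurfaceCreate entry point, with the same assumed `device` and `get_proc_address`:

#include <vdpau/vdpau.h>

/* Hypothetical sketch: create an output surface for presentation through the
 * VdpOutputSurfaceCreate entry point implemented above. */
static VdpStatus
example_create_output(VdpDevice device, VdpGetProcAddress *get_proc_address,
                      uint32_t width, uint32_t height, VdpOutputSurface *surface)
{
   VdpOutputSurfaceCreate *output_create;
   VdpStatus st = get_proc_address(device, VDP_FUNC_ID_OUTPUT_SURFACE_CREATE,
                                   (void **)&output_create);
   if (st != VDP_STATUS_OK)
      return st;

   return output_create(device, VDP_RGBA_FORMAT_B8G8R8A8, width, height, surface);
}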
Code example #5
File: decode.c Project: CSRedRat/mesa-1
/**
 * Create a VdpDecoder.
 */
VdpStatus
vlVdpDecoderCreate(VdpDevice device,
                   VdpDecoderProfile profile,
                   uint32_t width, uint32_t height,
                   uint32_t max_references,
                   VdpDecoder *decoder)
{
   enum pipe_video_profile p_profile;
   struct pipe_context *pipe;
   struct pipe_screen *screen;
   vlVdpDevice *dev;
   vlVdpDecoder *vldecoder;
   VdpStatus ret;
   bool supported;

   if (!decoder)
      return VDP_STATUS_INVALID_POINTER;
   *decoder = 0;

   if (!(width && height))
      return VDP_STATUS_INVALID_VALUE;

   p_profile = ProfileToPipe(profile);
   if (p_profile == PIPE_VIDEO_PROFILE_UNKNOWN)
      return VDP_STATUS_INVALID_DECODER_PROFILE;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = dev->context;
   screen = dev->vscreen->pscreen;

   pipe_mutex_lock(dev->mutex);

   supported = screen->get_video_param
   (
      screen,
      p_profile,
      PIPE_VIDEO_CAP_SUPPORTED
   );
   if (!supported) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_INVALID_DECODER_PROFILE;
   }

   vldecoder = CALLOC(1,sizeof(vlVdpDecoder));
   if (!vldecoder) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_RESOURCES;
   }

   vldecoder->device = dev;

   vldecoder->decoder = pipe->create_video_decoder
   (
      pipe, p_profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CHROMA_FORMAT_420,
      width, height, max_references,
      false
   );

   if (!vldecoder->decoder) {
      ret = VDP_STATUS_ERROR;
      goto error_decoder;
   }

   *decoder = vlAddDataHTAB(vldecoder);
   if (*decoder == 0) {
      ret = VDP_STATUS_ERROR;
      goto error_handle;
   }
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;

error_handle:
   vldecoder->decoder->destroy(vldecoder->decoder);

error_decoder:
   pipe_mutex_unlock(dev->mutex);
   FREE(vldecoder);
   return ret;
}
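
Finally, a hedged sketch for the public VdpDecoderCreate entry point; the H.264 profile and the reference-frame count are assumptions for illustration, not values taken from the driver code above:

#include <vdpau/vdpau.h>

/* Hypothetical sketch: create an H.264 decoder. The profile is mapped to a
 * pipe_video_profile by ProfileToPipe() in the implementation above. */
static VdpStatus
example_create_decoder(VdpDevice device, VdpGetProcAddress *get_proc_address,
                       uint32_t width, uint32_t height, VdpDecoder *decoder)
{
   VdpDecoderCreate *decoder_create;
   VdpStatus st = get_proc_address(device, VDP_FUNC_ID_DECODER_CREATE,
                                   (void **)&decoder_create);
   if (st != VDP_STATUS_OK)
      return st;

   /* 16 reference frames covers the H.264 worst case for typical levels. */
   return decoder_create(device, VDP_DECODER_PROFILE_H264_HIGH,
                         width, height, 16, decoder);
}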