Example #1
File: presentation.c Project: etnaviv/mesa
/**
 * Create a VdpPresentationQueue.
 */
VdpStatus
vlVdpPresentationQueueCreate(VdpDevice device,
                             VdpPresentationQueueTarget presentation_queue_target,
                             VdpPresentationQueue *presentation_queue)
{
   vlVdpPresentationQueue *pq = NULL;
   VdpStatus ret;

   if (!presentation_queue)
      return VDP_STATUS_INVALID_POINTER;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   vlVdpPresentationQueueTarget *pqt = vlGetDataHTAB(presentation_queue_target);
   if (!pqt)
      return VDP_STATUS_INVALID_HANDLE;

   if (dev != pqt->device)
      return VDP_STATUS_HANDLE_DEVICE_MISMATCH;

   pq = CALLOC(1, sizeof(vlVdpPresentationQueue));
   if (!pq)
      return VDP_STATUS_RESOURCES;

   DeviceReference(&pq->device, dev);
   pq->drawable = pqt->drawable;

   pipe_mutex_lock(dev->mutex);
   if (!vl_compositor_init_state(&pq->cstate, dev->context)) {
      pipe_mutex_unlock(dev->mutex);
      ret = VDP_STATUS_ERROR;
      goto no_compositor;
   }
   pipe_mutex_unlock(dev->mutex);

   *presentation_queue = vlAddDataHTAB(pq);
   if (*presentation_queue == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   return VDP_STATUS_OK;

no_handle:
no_compositor:
   DeviceReference(&pq->device, NULL);
   FREE(pq);
   return ret;
}
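
A client application never calls vlVdpPresentationQueueCreate directly; it resolves the public VdpPresentationQueueCreate entry point through VdpGetProcAddress, and Mesa's dispatch table routes that call to the function above. Below is a minimal, hypothetical client-side sketch (not part of the Mesa source shown), assuming the standard vdpau/vdpau.h header and that device, target and get_proc_address were obtained earlier, e.g. from vdp_device_create_x11().

#include <vdpau/vdpau.h>

/* Hypothetical client-side sketch: create a presentation queue through the
 * public API; the driver dispatches to vlVdpPresentationQueueCreate above. */
static VdpPresentationQueue
create_queue(VdpDevice device, VdpPresentationQueueTarget target,
             VdpGetProcAddress *get_proc_address)
{
   VdpPresentationQueueCreate *pq_create;
   VdpPresentationQueue queue;
   VdpStatus st;

   /* Resolve the entry point exported by the driver. */
   st = get_proc_address(device, VDP_FUNC_ID_PRESENTATION_QUEUE_CREATE,
                         (void **)&pq_create);
   if (st != VDP_STATUS_OK)
      return VDP_INVALID_HANDLE;

   st = pq_create(device, target, &queue);
   return st == VDP_STATUS_OK ? queue : VDP_INVALID_HANDLE;
}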
Example #2
/**
 * Destroy a VdpBitmapSurface.
 */
VdpStatus
vlVdpBitmapSurfaceDestroy(VdpBitmapSurface surface)
{
   vlVdpBitmapSurface *vlsurface;

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   mtx_lock(&vlsurface->device->mutex);
   pipe_sampler_view_reference(&vlsurface->sampler_view, NULL);
   mtx_unlock(&vlsurface->device->mutex);

   vlRemoveDataHTAB(surface);
   DeviceReference(&vlsurface->device, NULL);
   FREE(vlsurface);

   return VDP_STATUS_OK;
}
Example #3
File: presentation.c Project: etnaviv/mesa
/**
 * Destroy a VdpPresentationQueue.
 */
VdpStatus
vlVdpPresentationQueueDestroy(VdpPresentationQueue presentation_queue)
{
   vlVdpPresentationQueue *pq;

   pq = vlGetDataHTAB(presentation_queue);
   if (!pq)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(pq->device->mutex);
   vl_compositor_cleanup_state(&pq->cstate);
   pipe_mutex_unlock(pq->device->mutex);

   vlRemoveDataHTAB(presentation_queue);
   DeviceReference(&pq->device, NULL);
   FREE(pq);

   return VDP_STATUS_OK;
}
Example #4
/**
 * Destroy a VdpDecoder.
 */
VdpStatus
vlVdpDecoderDestroy(VdpDecoder decoder)
{
   vlVdpDecoder *vldecoder;

   vldecoder = (vlVdpDecoder *)vlGetDataHTAB(decoder);
   if (!vldecoder)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(vldecoder->mutex);
   vldecoder->decoder->destroy(vldecoder->decoder);
   pipe_mutex_unlock(vldecoder->mutex);
   pipe_mutex_destroy(vldecoder->mutex);

   vlRemoveDataHTAB(decoder);
   DeviceReference(&vldecoder->device, NULL);
   FREE(vldecoder);

   return VDP_STATUS_OK;
}
Example #5
File: surface.c Project: Unr34ler/mesa
/**
 * Destroy a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceDestroy(VdpVideoSurface surface)
{
   vlVdpSurface *p_surf;

   p_surf = (vlVdpSurface *)vlGetDataHTAB((vlHandle)surface);
   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(p_surf->device->mutex);
   if (p_surf->video_buffer)
      p_surf->video_buffer->destroy(p_surf->video_buffer);
   pipe_mutex_unlock(p_surf->device->mutex);

   vlRemoveDataHTAB(surface);
   DeviceReference(&p_surf->device, NULL);
   FREE(p_surf);

   return VDP_STATUS_OK;
}
Example #6
File: mixer.c Project: Unr34ler/mesa
/**
 * Destroy a VdpVideoMixer.
 */
VdpStatus
vlVdpVideoMixerDestroy(VdpVideoMixer mixer)
{
   vlVdpVideoMixer *vmixer;

   vmixer = vlGetDataHTAB(mixer);
   if (!vmixer)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(vmixer->device->mutex);

   vlVdpResolveDelayedRendering(vmixer->device, NULL, NULL);

   vlRemoveDataHTAB(mixer);

   vl_compositor_cleanup_state(&vmixer->cstate);

   if (vmixer->deint.filter) {
      vl_deint_filter_cleanup(vmixer->deint.filter);
      FREE(vmixer->deint.filter);
   }

   if (vmixer->noise_reduction.filter) {
      vl_median_filter_cleanup(vmixer->noise_reduction.filter);
      FREE(vmixer->noise_reduction.filter);
   }

   if (vmixer->sharpness.filter) {
      vl_matrix_filter_cleanup(vmixer->sharpness.filter);
      FREE(vmixer->sharpness.filter);
   }
   pipe_mutex_unlock(vmixer->device->mutex);
   DeviceReference(&vmixer->device, NULL);

   FREE(vmixer);

   return VDP_STATUS_OK;
}
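
All of the destroy paths above follow the same shape: look the handle up, take the device mutex while tearing down pipe objects, remove the handle, drop the device reference, free the wrapper. On the application side this simply means destroying objects before the device that owns them, since each wrapper holds a device reference. A minimal, hypothetical client-side sketch of that ordering, assuming the destroy function pointers were resolved earlier via VdpGetProcAddress (VDP_FUNC_ID_VIDEO_MIXER_DESTROY and friends):

#include <vdpau/vdpau.h>

/* Hypothetical client-side teardown order; the device goes last. */
static void
teardown(VdpVideoMixerDestroy *mixer_destroy, VdpVideoMixer mixer,
         VdpVideoSurfaceDestroy *surface_destroy, VdpVideoSurface surface,
         VdpDecoderDestroy *decoder_destroy, VdpDecoder decoder,
         VdpDeviceDestroy *device_destroy, VdpDevice device)
{
   mixer_destroy(mixer);        /* dispatches to vlVdpVideoMixerDestroy */
   surface_destroy(surface);    /* dispatches to vlVdpVideoSurfaceDestroy */
   decoder_destroy(decoder);    /* dispatches to vlVdpDecoderDestroy */
   device_destroy(device);      /* last: the wrappers' references are released */
}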
Example #7
/**
 * Create a VdpDecoder.
 */
VdpStatus
vlVdpDecoderCreate(VdpDevice device,
                   VdpDecoderProfile profile,
                   uint32_t width, uint32_t height,
                   uint32_t max_references,
                   VdpDecoder *decoder)
{
   struct pipe_video_codec templat = {};
   struct pipe_context *pipe;
   struct pipe_screen *screen;
   vlVdpDevice *dev;
   vlVdpDecoder *vldecoder;
   VdpStatus ret;
   bool supported;
   uint32_t maxwidth, maxheight;

   if (!decoder)
      return VDP_STATUS_INVALID_POINTER;
   *decoder = 0;

   if (!(width && height))
      return VDP_STATUS_INVALID_VALUE;

   templat.profile = ProfileToPipe(profile);
   if (templat.profile == PIPE_VIDEO_PROFILE_UNKNOWN)
      return VDP_STATUS_INVALID_DECODER_PROFILE;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = dev->context;
   screen = dev->vscreen->pscreen;

   pipe_mutex_lock(dev->mutex);

   supported = screen->get_video_param
   (
      screen,
      templat.profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_SUPPORTED
   );
   if (!supported) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_INVALID_DECODER_PROFILE;
   }

   maxwidth = screen->get_video_param
   (
      screen,
      templat.profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_MAX_WIDTH
   );
   maxheight = screen->get_video_param
   (
      screen,
      templat.profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_MAX_HEIGHT
   );
   if (width > maxwidth || height > maxheight) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_INVALID_SIZE;
   }

   vldecoder = CALLOC(1, sizeof(vlVdpDecoder));
   if (!vldecoder) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_RESOURCES;
   }

   DeviceReference(&vldecoder->device, dev);

   templat.entrypoint = PIPE_VIDEO_ENTRYPOINT_BITSTREAM;
   templat.chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
   templat.width = width;
   templat.height = height;
   templat.max_references = max_references;

   if (u_reduce_video_profile(templat.profile) ==
       PIPE_VIDEO_FORMAT_MPEG4_AVC)
      templat.level = u_get_h264_level(templat.width, templat.height,
                            &templat.max_references);

   vldecoder->decoder = pipe->create_video_codec(pipe, &templat);

   if (!vldecoder->decoder) {
      ret = VDP_STATUS_ERROR;
      goto error_decoder;
   }

   *decoder = vlAddDataHTAB(vldecoder);
   if (*decoder == 0) {
      ret = VDP_STATUS_ERROR;
      goto error_handle;
   }

   pipe_mutex_init(vldecoder->mutex);
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;

error_handle:
   vldecoder->decoder->destroy(vldecoder->decoder);

error_decoder:
   pipe_mutex_unlock(dev->mutex);
   DeviceReference(&vldecoder->device, NULL);
   FREE(vldecoder);
   return ret;
}
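
On the application side the matching call is VdpDecoderCreate, with the profile taken from the VDP_DECODER_PROFILE_* enums that ProfileToPipe translates. A minimal, hypothetical sketch (not from the Mesa source above), assuming an H.264 High profile stream and a function pointer already resolved via VdpGetProcAddress(device, VDP_FUNC_ID_DECODER_CREATE, ...); in a real player max_references would come from the stream's SPS.

#include <vdpau/vdpau.h>

/* Hypothetical client-side sketch: decoder for a 1920x1088 H.264 stream. */
static VdpStatus
create_h264_decoder(VdpDecoderCreate *decoder_create, VdpDevice device,
                    VdpDecoder *decoder)
{
   const uint32_t width = 1920, height = 1088;
   const uint32_t max_references = 4; /* assumed DPB size for this example */

   return decoder_create(device, VDP_DECODER_PROFILE_H264_HIGH,
                         width, height, max_references, decoder);
}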
Example #8
File: mixer.c Project: Unr34ler/mesa
/**
 * Create a VdpVideoMixer.
 */
VdpStatus
vlVdpVideoMixerCreate(VdpDevice device,
                      uint32_t feature_count,
                      VdpVideoMixerFeature const *features,
                      uint32_t parameter_count,
                      VdpVideoMixerParameter const *parameters,
                      void const *const *parameter_values,
                      VdpVideoMixer *mixer)
{
   vlVdpVideoMixer *vmixer = NULL;
   VdpStatus ret;
   struct pipe_screen *screen;
   uint32_t max_2d_texture_level;
   unsigned max_size, i;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;
   screen = dev->vscreen->pscreen;

   vmixer = CALLOC(1, sizeof(vlVdpVideoMixer));
   if (!vmixer)
      return VDP_STATUS_RESOURCES;

   DeviceReference(&vmixer->device, dev);

   pipe_mutex_lock(dev->mutex);

   vl_compositor_init_state(&vmixer->cstate, dev->context);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, true, &vmixer->csc);
   if (!debug_get_bool_option("G3DVL_NO_CSC", FALSE))
      vl_compositor_set_csc_matrix(&vmixer->cstate, (const vl_csc_matrix *)&vmixer->csc);

   *mixer = vlAddDataHTAB(vmixer);
   if (*mixer == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   ret = VDP_STATUS_INVALID_VIDEO_MIXER_FEATURE;
   for (i = 0; i < feature_count; ++i) {
      switch (features[i]) {
      /* they are valid, but we don't support them */
      case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L1:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L2:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L3:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L4:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L5:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L6:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L7:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L8:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L9:
      case VDP_VIDEO_MIXER_FEATURE_INVERSE_TELECINE:
      case VDP_VIDEO_MIXER_FEATURE_LUMA_KEY:
         break;

      case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL:
         vmixer->deint.supported = true;
         break;

      case VDP_VIDEO_MIXER_FEATURE_SHARPNESS:
         vmixer->sharpness.supported = true;
         break;

      case VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION:
         vmixer->noise_reduction.supported = true;
         break;

      default: goto no_params;
      }
   }

   vmixer->chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
   ret = VDP_STATUS_INVALID_VIDEO_MIXER_PARAMETER;
   for (i = 0; i < parameter_count; ++i) {
      switch (parameters[i]) {
      case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH:
         vmixer->video_width = *(uint32_t*)parameter_values[i];
         break;
      case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT:
         vmixer->video_height = *(uint32_t*)parameter_values[i];
         break;
      case VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE:
         vmixer->chroma_format = ChromaToPipe(*(VdpChromaType*)parameter_values[i]);
         break;
      case VDP_VIDEO_MIXER_PARAMETER_LAYERS:
         vmixer->max_layers = *(uint32_t*)parameter_values[i];
         break;
      default: goto no_params;
      }
   }
   ret = VDP_STATUS_INVALID_VALUE;
   if (vmixer->max_layers > 4) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] Max layers > 4 not supported\n", vmixer->max_layers);
      goto no_params;
   }

   max_2d_texture_level = screen->get_param(screen, PIPE_CAP_MAX_TEXTURE_2D_LEVELS);
   max_size = pow(2, max_2d_texture_level-1);
   if (vmixer->video_width < 48 || vmixer->video_width > max_size) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u not valid for width\n",
                vmixer->video_width, max_size);
      goto no_params;
   }
   if (vmixer->video_height < 48 || vmixer->video_height > max_size) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u  not valid for height\n",
                vmixer->video_height, max_size);
      goto no_params;
   }
   vmixer->luma_key_min = 0.f;
   vmixer->luma_key_max = 1.f;
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;

no_params:
   vlRemoveDataHTAB(*mixer);

no_handle:
   vl_compositor_cleanup_state(&vmixer->cstate);
   pipe_mutex_unlock(dev->mutex);
   DeviceReference(&vmixer->device, NULL);
   FREE(vmixer);
   return ret;
}
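
The feature and parameter loops above mirror the parallel arrays a client passes to VdpVideoMixerCreate: features it wants enabled, plus the mandatory surface width, height and chroma type parameters. A minimal, hypothetical client-side sketch, assuming a 1280x720 4:2:0 source and a function pointer resolved via VDP_FUNC_ID_VIDEO_MIXER_CREATE:

#include <vdpau/vdpau.h>

/* Hypothetical client-side sketch: request temporal deinterlacing and pass
 * the video surface dimensions and chroma type as creation parameters. */
static VdpStatus
create_mixer(VdpVideoMixerCreate *mixer_create, VdpDevice device,
             VdpVideoMixer *mixer)
{
   uint32_t width = 1280, height = 720;
   VdpChromaType chroma = VDP_CHROMA_TYPE_420;

   VdpVideoMixerFeature features[] = {
      VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL,
   };
   VdpVideoMixerParameter parameters[] = {
      VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
      VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
      VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE,
   };
   void const *parameter_values[] = { &width, &height, &chroma };

   return mixer_create(device, 1, features,
                       3, parameters, parameter_values, mixer);
}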
Example #9
File: surface.c Project: Unr34ler/mesa
/**
 * Create a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceCreate(VdpDevice device, VdpChromaType chroma_type,
                        uint32_t width, uint32_t height,
                        VdpVideoSurface *surface)
{
   struct pipe_context *pipe;
   vlVdpSurface *p_surf;
   VdpStatus ret;

   if (!(width && height)) {
      ret = VDP_STATUS_INVALID_SIZE;
      goto inv_size;
   }

   p_surf = CALLOC(1, sizeof(vlVdpSurface));
   if (!p_surf) {
      ret = VDP_STATUS_RESOURCES;
      goto no_res;
   }

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev) {
      ret = VDP_STATUS_INVALID_HANDLE;
      goto inv_device;
   }

   DeviceReference(&p_surf->device, dev);
   pipe = dev->context;

   pipe_mutex_lock(dev->mutex);
   memset(&p_surf->templat, 0, sizeof(p_surf->templat));
   p_surf->templat.buffer_format = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERED_FORMAT
   );
   p_surf->templat.chroma_format = ChromaToPipe(chroma_type);
   p_surf->templat.width = width;
   p_surf->templat.height = height;
   p_surf->templat.interlaced = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERS_INTERLACED
   );
   if (p_surf->templat.buffer_format != PIPE_FORMAT_NONE)
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

   /* do not mandate early allocation of a video buffer */
   vlVdpVideoSurfaceClear(p_surf);
   pipe_mutex_unlock(dev->mutex);

   *surface = vlAddDataHTAB(p_surf);
   if (*surface == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   return VDP_STATUS_OK;

no_handle:
   if (p_surf->video_buffer)
      p_surf->video_buffer->destroy(p_surf->video_buffer);

inv_device:
   DeviceReference(&p_surf->device, NULL);
   FREE(p_surf);

no_res:
inv_size:
   return ret;
}
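
From the application side this is a plain VdpVideoSurfaceCreate call; the chroma type is translated to templat.chroma_format by ChromaToPipe. A minimal, hypothetical sketch, assuming the function pointer was resolved for VDP_FUNC_ID_VIDEO_SURFACE_CREATE:

#include <vdpau/vdpau.h>

/* Hypothetical client-side sketch: a 4:2:0 surface for a 1920x1080 frame. */
static VdpStatus
create_video_surface(VdpVideoSurfaceCreate *surface_create, VdpDevice device,
                     VdpVideoSurface *surface)
{
   return surface_create(device, VDP_CHROMA_TYPE_420, 1920, 1080, surface);
}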
Example #10
/**
 * Create a VdpBitmapSurface.
 */
VdpStatus
vlVdpBitmapSurfaceCreate(VdpDevice device,
                         VdpRGBAFormat rgba_format,
                         uint32_t width, uint32_t height,
                         VdpBool frequently_accessed,
                         VdpBitmapSurface *surface)
{
   struct pipe_context *pipe;
   struct pipe_resource res_tmpl, *res;
   struct pipe_sampler_view sv_templ;
   VdpStatus ret;

   vlVdpBitmapSurface *vlsurface = NULL;

   if (!(width && height))
      return VDP_STATUS_INVALID_SIZE;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = dev->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   if (!surface)
      return VDP_STATUS_INVALID_POINTER;

   vlsurface = CALLOC(1, sizeof(vlVdpBitmapSurface));
   if (!vlsurface)
      return VDP_STATUS_RESOURCES;

   DeviceReference(&vlsurface->device, dev);

   memset(&res_tmpl, 0, sizeof(res_tmpl));
   res_tmpl.target = PIPE_TEXTURE_2D;
   res_tmpl.format = VdpFormatRGBAToPipe(rgba_format);
   res_tmpl.width0 = width;
   res_tmpl.height0 = height;
   res_tmpl.depth0 = 1;
   res_tmpl.array_size = 1;
   res_tmpl.bind = PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_RENDER_TARGET;
   res_tmpl.usage = frequently_accessed ? PIPE_USAGE_DYNAMIC : PIPE_USAGE_DEFAULT;

   mtx_lock(&dev->mutex);

   if (!CheckSurfaceParams(pipe->screen, &res_tmpl)) {
      ret = VDP_STATUS_RESOURCES;
      goto err_unlock;
   }

   res = pipe->screen->resource_create(pipe->screen, &res_tmpl);
   if (!res) {
      ret = VDP_STATUS_RESOURCES;
      goto err_unlock;
   }

   vlVdpDefaultSamplerViewTemplate(&sv_templ, res);
   vlsurface->sampler_view = pipe->create_sampler_view(pipe, res, &sv_templ);

   pipe_resource_reference(&res, NULL);

   if (!vlsurface->sampler_view) {
      ret = VDP_STATUS_RESOURCES;
      goto err_unlock;
   }

   mtx_unlock(&dev->mutex);

   *surface = vlAddDataHTAB(vlsurface);
   if (*surface == 0) {
      mtx_lock(&dev->mutex);
      ret = VDP_STATUS_ERROR;
      goto err_sampler;
   }

   return VDP_STATUS_OK;

err_sampler:
   pipe_sampler_view_reference(&vlsurface->sampler_view, NULL);
err_unlock:
   mtx_unlock(&dev->mutex);
   DeviceReference(&vlsurface->device, NULL);
   FREE(vlsurface);
   return ret;
}
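
The matching client-side call passes the RGBA format and the frequently_accessed hint, which the code above maps to PIPE_USAGE_DYNAMIC versus PIPE_USAGE_DEFAULT. A minimal, hypothetical sketch, again assuming a pointer resolved through VdpGetProcAddress (VDP_FUNC_ID_BITMAP_SURFACE_CREATE); the 256x64 OSD size is purely illustrative.

#include <vdpau/vdpau.h>

/* Hypothetical client-side sketch: a small, frequently updated OSD bitmap. */
static VdpStatus
create_osd_bitmap(VdpBitmapSurfaceCreate *bitmap_create, VdpDevice device,
                  VdpBitmapSurface *bitmap)
{
   /* frequently_accessed = VDP_TRUE selects PIPE_USAGE_DYNAMIC above. */
   return bitmap_create(device, VDP_RGBA_FORMAT_B8G8R8A8,
                        256, 64, VDP_TRUE, bitmap);
}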