Example #1
VAStatus
vlVaQueryConfigEntrypoints(VADriverContextP ctx, VAProfile profile,
                           VAEntrypoint *entrypoint_list, int *num_entrypoints)
{
   struct pipe_screen *pscreen;
   enum pipe_video_profile p;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   *num_entrypoints = 0;

   if (profile == VAProfileNone) {
      entrypoint_list[(*num_entrypoints)++] = VAEntrypointVideoProc;
      return VA_STATUS_SUCCESS;
   }

   p = ProfileToPipe(profile);
   if (p == PIPE_VIDEO_PROFILE_UNKNOWN)
      return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;

   pscreen = VL_VA_PSCREEN(ctx);
   if (pscreen->get_video_param(pscreen, p, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                PIPE_VIDEO_CAP_SUPPORTED))
      entrypoint_list[(*num_entrypoints)++] = VAEntrypointVLD;

   if (pscreen->get_video_param(pscreen, p, PIPE_VIDEO_ENTRYPOINT_ENCODE,
                                PIPE_VIDEO_CAP_SUPPORTED))
      entrypoint_list[(*num_entrypoints)++] = VAEntrypointEncSlice;

   if (*num_entrypoints == 0)
      return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;

   return VA_STATUS_SUCCESS;
}
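On the application side, this driver hook is reached through libva's public vaQueryConfigEntrypoints(). Below is a minimal client-side sketch; the display initialization and the helper name are assumptions for illustration, not part of the driver code above.

#include <stdio.h>
#include <stdlib.h>
#include <va/va.h>

/* Hypothetical helper: list the entrypoints the driver reports for a profile.
 * Assumes `dpy` was already set up with vaGetDisplay()/vaInitialize(). */
static void print_entrypoints(VADisplay dpy, VAProfile profile)
{
   int max = vaMaxNumEntrypoints(dpy);      /* upper bound for the list size */
   VAEntrypoint *list = malloc(max * sizeof(*list));
   int num = 0;

   if (list && vaQueryConfigEntrypoints(dpy, profile, list, &num) == VA_STATUS_SUCCESS) {
      for (int i = 0; i < num; i++)
         printf("profile %d: entrypoint %d\n", profile, list[i]);
   }
   free(list);
}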
Example #2
VAStatus
vlVaCreateConfig(VADriverContextP ctx, VAProfile profile, VAEntrypoint entrypoint,
                 VAConfigAttrib *attrib_list, int num_attribs, VAConfigID *config_id)
{
   struct pipe_screen *pscreen;
   enum pipe_video_profile p;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   if (profile == VAProfileNone && entrypoint == VAEntrypointVideoProc) {
      *config_id = PIPE_VIDEO_PROFILE_UNKNOWN;
      return VA_STATUS_SUCCESS;
   }

   p = ProfileToPipe(profile);
   if (p == PIPE_VIDEO_PROFILE_UNKNOWN)
      return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;

   pscreen = VL_VA_PSCREEN(ctx);
   if (!pscreen->get_video_param(pscreen, p, PIPE_VIDEO_ENTRYPOINT_BITSTREAM, PIPE_VIDEO_CAP_SUPPORTED))
      return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;

   if (entrypoint != VAEntrypointVLD)
      return VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;

   *config_id = p;

   return VA_STATUS_SUCCESS;
}
Example #3
File: query.c Project: airlied/mesa
/**
 * Query the implementation's VdpDecoder capabilities.
 */
VdpStatus
vlVdpDecoderQueryCapabilities(VdpDevice device, VdpDecoderProfile profile,
                              VdpBool *is_supported, uint32_t *max_level, uint32_t *max_macroblocks,
                              uint32_t *max_width, uint32_t *max_height)
{
   vlVdpDevice *dev;
   struct pipe_screen *pscreen;
   enum pipe_video_profile p_profile;

   if (!(is_supported && max_level && max_macroblocks && max_width && max_height))
      return VDP_STATUS_INVALID_POINTER;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pscreen = dev->vscreen->pscreen;
   if (!pscreen)
      return VDP_STATUS_RESOURCES;

   p_profile = ProfileToPipe(profile);
   if (p_profile == PIPE_VIDEO_PROFILE_UNKNOWN)	{
      *is_supported = false;
      return VDP_STATUS_OK;
   }

   pipe_mutex_lock(dev->mutex);
   *is_supported = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                            PIPE_VIDEO_CAP_SUPPORTED);
   if (*is_supported) {
      *max_width = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                            PIPE_VIDEO_CAP_MAX_WIDTH);
      *max_height = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                             PIPE_VIDEO_CAP_MAX_HEIGHT);
      *max_level = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                            PIPE_VIDEO_CAP_MAX_LEVEL);
      *max_macroblocks = (*max_width/16)*(*max_height/16);
   } else {
      *max_width = 0;
      *max_height = 0;
      *max_level = 0;
      *max_macroblocks = 0;
   }
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;
}
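From a client, this is the VdpDecoderQueryCapabilities entry point, normally obtained with VdpGetProcAddress(). A hedged sketch follows; the device creation and the proc-address lookup (VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES) are assumed to have happened already.

#include <stdio.h>
#include <vdpau/vdpau.h>

/* Hypothetical usage sketch: `query_caps` is assumed to have been obtained via
 * get_proc_address(device, VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES, ...). */
static void report_h264_caps(VdpDevice device,
                             VdpDecoderQueryCapabilities *query_caps)
{
   VdpBool supported = VDP_FALSE;
   uint32_t max_level, max_macroblocks, max_width, max_height;

   if (query_caps(device, VDP_DECODER_PROFILE_H264_HIGH, &supported,
                  &max_level, &max_macroblocks,
                  &max_width, &max_height) == VDP_STATUS_OK && supported)
      printf("H.264 High: up to %ux%u, level %u, %u macroblocks\n",
             max_width, max_height, max_level, max_macroblocks);
}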
Example #4
/**
 * Create a VdpDecoder.
 */
VdpStatus
vlVdpDecoderCreate(VdpDevice device,
                   VdpDecoderProfile profile,
                   uint32_t width, uint32_t height,
                   uint32_t max_references,
                   VdpDecoder *decoder)
{
   struct pipe_video_codec templat = {};
   struct pipe_context *pipe;
   struct pipe_screen *screen;
   vlVdpDevice *dev;
   vlVdpDecoder *vldecoder;
   VdpStatus ret;
   bool supported;
   uint32_t maxwidth, maxheight;

   if (!decoder)
      return VDP_STATUS_INVALID_POINTER;
   *decoder = 0;

   if (!(width && height))
      return VDP_STATUS_INVALID_VALUE;

   templat.profile = ProfileToPipe(profile);
   if (templat.profile == PIPE_VIDEO_PROFILE_UNKNOWN)
      return VDP_STATUS_INVALID_DECODER_PROFILE;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = dev->context;
   screen = dev->vscreen->pscreen;

   pipe_mutex_lock(dev->mutex);

   supported = screen->get_video_param
   (
      screen,
      templat.profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_SUPPORTED
   );
   if (!supported) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_INVALID_DECODER_PROFILE;
   }

   maxwidth = screen->get_video_param
   (
      screen,
      templat.profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_MAX_WIDTH
   );
   maxheight = screen->get_video_param
   (
      screen,
      templat.profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_MAX_HEIGHT
   );
   if (width > maxwidth || height > maxheight) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_INVALID_SIZE;
   }

   vldecoder = CALLOC(1, sizeof(vlVdpDecoder));
   if (!vldecoder) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_RESOURCES;
   }

   DeviceReference(&vldecoder->device, dev);

   templat.entrypoint = PIPE_VIDEO_ENTRYPOINT_BITSTREAM;
   templat.chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
   templat.width = width;
   templat.height = height;
   templat.max_references = max_references;

   if (u_reduce_video_profile(templat.profile) ==
       PIPE_VIDEO_FORMAT_MPEG4_AVC)
      templat.level = u_get_h264_level(templat.width, templat.height,
                                       &templat.max_references);

   vldecoder->decoder = pipe->create_video_codec(pipe, &templat);

   if (!vldecoder->decoder) {
      ret = VDP_STATUS_ERROR;
      goto error_decoder;
   }

   *decoder = vlAddDataHTAB(vldecoder);
   if (*decoder == 0) {
      ret = VDP_STATUS_ERROR;
      goto error_handle;
   }

   pipe_mutex_init(vldecoder->mutex);
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;

error_handle:
   vldecoder->decoder->destroy(vldecoder->decoder);

error_decoder:
   pipe_mutex_unlock(dev->mutex);
   DeviceReference(&vldecoder->device, NULL);
   FREE(vldecoder);
   return ret;
}
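The matching client call is VdpDecoderCreate. A minimal sketch, again assuming the function pointer was already fetched via VdpGetProcAddress() with VDP_FUNC_ID_DECODER_CREATE; the resolution and reference count are illustrative values only.

#include <vdpau/vdpau.h>

/* Hypothetical sketch: create a 1080p H.264 decoder with up to 4 reference
 * frames. `decoder_create` is assumed to come from VdpGetProcAddress() with
 * VDP_FUNC_ID_DECODER_CREATE. */
static VdpStatus make_decoder(VdpDevice device, VdpDecoderCreate *decoder_create,
                              VdpDecoder *decoder)
{
   return decoder_create(device, VDP_DECODER_PROFILE_H264_HIGH,
                         1920, 1080, /* width, height */
                         4,          /* max_references */
                         decoder);
}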
Example #5
PUBLIC
Status XvMCCreateContext(Display *dpy, XvPortID port, int surface_type_id,
                         int width, int height, int flags, XvMCContext *context)
{
   bool found_port;
   int scrn = 0;
   int chroma_format = 0;
   int mc_type = 0;
   int surface_flags = 0;
   unsigned short subpic_max_w = 0;
   unsigned short subpic_max_h = 0;
   Status ret;
   struct vl_screen *vscreen;
   struct pipe_context *pipe;
   struct pipe_video_codec templat = {0};
   XvMCContextPrivate *context_priv;
   vl_csc_matrix csc;

   XVMC_MSG(XVMC_TRACE, "[XvMC] Creating context %p.\n", context);

   assert(dpy);

   if (!context)
      return XvMCBadContext;

   ret = Validate(dpy, port, surface_type_id, width, height, flags,
                  &found_port, &scrn, &chroma_format, &mc_type, &surface_flags,
                  &subpic_max_w, &subpic_max_h);

   /* Success and XvBadPort have the same value */
   if (ret != Success || !found_port)
      return ret;

   /* XXX: Current limits */
   if (chroma_format != XVMC_CHROMA_FORMAT_420) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Cannot decode requested surface type. Unsupported chroma format.\n");
      return BadImplementation;
   }
   if ((mc_type & ~XVMC_IDCT) != (XVMC_MOCOMP | XVMC_MPEG_2)) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Cannot decode requested surface type. Non-MPEG2/Mocomp/iDCT acceleration unsupported.\n");
      return BadImplementation;
   }
   if (surface_flags & XVMC_INTRA_UNSIGNED) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Cannot decode requested surface type. Unsigned intra unsupported.\n");
      return BadImplementation;
   }

   context_priv = CALLOC(1, sizeof(XvMCContextPrivate));
   if (!context_priv)
      return BadAlloc;

   /* TODO: Reuse screen if process creates another context */
   vscreen = vl_screen_create(dpy, scrn);

   if (!vscreen) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL screen.\n");
      FREE(context_priv);
      return BadAlloc;
   }

   pipe = vscreen->pscreen->context_create(vscreen->pscreen, vscreen, 0);
   if (!pipe) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL context.\n");
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }

   templat.profile = ProfileToPipe(mc_type);
   templat.entrypoint = (mc_type & XVMC_IDCT) ? PIPE_VIDEO_ENTRYPOINT_IDCT : PIPE_VIDEO_ENTRYPOINT_MC;
   templat.chroma_format = FormatToPipe(chroma_format);
   templat.width = width;
   templat.height = height;
   templat.max_references = 2;
   templat.expect_chunked_decode = true;

   context_priv->decoder = pipe->create_video_codec(pipe, &templat);

   if (!context_priv->decoder) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL decoder.\n");
      pipe->destroy(pipe);
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }

   if (!vl_compositor_init(&context_priv->compositor, pipe)) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL compositor.\n");
      context_priv->decoder->destroy(context_priv->decoder);
      pipe->destroy(pipe);
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }

   if (!vl_compositor_init_state(&context_priv->cstate, pipe)) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL compositor state.\n");
      vl_compositor_cleanup(&context_priv->compositor);
      context_priv->decoder->destroy(context_priv->decoder);
      pipe->destroy(pipe);
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }


   context_priv->color_standard =
      debug_get_bool_option("G3DVL_NO_CSC", FALSE) ?
      VL_CSC_COLOR_STANDARD_IDENTITY : VL_CSC_COLOR_STANDARD_BT_601;
   context_priv->procamp = vl_default_procamp;

   vl_csc_get_matrix
   (
      context_priv->color_standard,
      &context_priv->procamp, true, &csc
   );
   vl_compositor_set_csc_matrix(&context_priv->cstate, (const vl_csc_matrix *)&csc);

   context_priv->vscreen = vscreen;
   context_priv->pipe = pipe;
   context_priv->subpicture_max_width = subpic_max_w;
   context_priv->subpicture_max_height = subpic_max_h;

   context->context_id = XAllocID(dpy);
   context->surface_type_id = surface_type_id;
   context->width = width;
   context->height = height;
   context->flags = flags;
   context->port = port;
   context->privData = context_priv;

   SyncHandle();

   XVMC_MSG(XVMC_TRACE, "[XvMC] Context %p created.\n", context);

   return Success;
}
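Applications reach this implementation through the standard XvMC entry point of the same name. A minimal sketch; the port and surface_type_id discovery (e.g. via XvQueryAdaptors()/XvMCListSurfaceTypes()) is assumed to have happened beforehand, and the 720x576 MPEG-2 size is just an illustrative choice.

#include <X11/extensions/XvMClib.h>

/* Hypothetical sketch: `port` and `surface_type_id` are assumed to have been
 * discovered from the Xv adaptor and surface-type lists beforehand. */
static Status make_mpeg2_context(Display *dpy, XvPortID port, int surface_type_id,
                                 XvMCContext *ctx)
{
   /* 720x576 MPEG-2 context, direct rendering requested. */
   return XvMCCreateContext(dpy, port, surface_type_id,
                            720, 576, XVMC_DIRECT, ctx);
}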
Example #6
VAStatus
vlVaCreateConfig(VADriverContextP ctx, VAProfile profile, VAEntrypoint entrypoint,
                 VAConfigAttrib *attrib_list, int num_attribs, VAConfigID *config_id)
{
   vlVaDriver *drv;
   vlVaConfig *config;
   struct pipe_screen *pscreen;
   enum pipe_video_profile p;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);

   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   config = CALLOC(1, sizeof(vlVaConfig));
   if (!config)
      return VA_STATUS_ERROR_ALLOCATION_FAILED;

   if (profile == VAProfileNone && entrypoint == VAEntrypointVideoProc) {
      config->entrypoint = VAEntrypointVideoProc;
      config->profile = PIPE_VIDEO_PROFILE_UNKNOWN;
      for (int i = 0; i < num_attribs; i++) {
         if (attrib_list[i].type == VAConfigAttribRTFormat) {
            if (attrib_list[i].value & (VA_RT_FORMAT_YUV420 | VA_RT_FORMAT_RGB32)) {
               config->rt_format = attrib_list[i].value;
            } else {
               FREE(config);
               return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
            }
         }
      }

      /* Default value if not specified in the input attributes. */
      if (!config->rt_format)
         config->rt_format = VA_RT_FORMAT_YUV420 | VA_RT_FORMAT_RGB32;

      pipe_mutex_lock(drv->mutex);
      *config_id = handle_table_add(drv->htab, config);
      pipe_mutex_unlock(drv->mutex);
      return VA_STATUS_SUCCESS;
   }

   p = ProfileToPipe(profile);
   if (p == PIPE_VIDEO_PROFILE_UNKNOWN) {
      FREE(config);
      return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
   }

   pscreen = VL_VA_PSCREEN(ctx);

   switch (entrypoint) {
   case VAEntrypointVLD:
      if (!pscreen->get_video_param(pscreen, p, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                    PIPE_VIDEO_CAP_SUPPORTED)) {
         FREE(config);
         return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
      }

      config->entrypoint = PIPE_VIDEO_ENTRYPOINT_BITSTREAM;
      break;

   case VAEntrypointEncSlice:
      if (!pscreen->get_video_param(pscreen, p, PIPE_VIDEO_ENTRYPOINT_ENCODE,
                                    PIPE_VIDEO_CAP_SUPPORTED)) {
         FREE(config);
         return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
      }

      config->entrypoint = PIPE_VIDEO_ENTRYPOINT_ENCODE;
      break;

   default:
      FREE(config);
      return VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
   }

   config->profile = p;

   for (int i = 0; i < num_attribs; i++) {
      if (attrib_list[i].type == VAConfigAttribRateControl) {
         if (attrib_list[i].value == VA_RC_CBR)
            config->rc = PIPE_H264_ENC_RATE_CONTROL_METHOD_CONSTANT;
         else if (attrib_list[i].value == VA_RC_VBR)
            config->rc = PIPE_H264_ENC_RATE_CONTROL_METHOD_VARIABLE;
         else
            config->rc = PIPE_H264_ENC_RATE_CONTROL_METHOD_DISABLE;
      }
      if (attrib_list[i].type == VAConfigAttribRTFormat) {
         if (attrib_list[i].value == VA_RT_FORMAT_YUV420) {
            config->rt_format = attrib_list[i].value;
         } else {
            FREE(config);
            return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
         }
      }
   }

   /* Default value if not specified in the input attributes. */
   if (!config->rt_format)
      config->rt_format = VA_RT_FORMAT_YUV420;

   pipe_mutex_lock(drv->mutex);
   *config_id = handle_table_add(drv->htab, config);
   pipe_mutex_unlock(drv->mutex);

   return VA_STATUS_SUCCESS;
}
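Client code exercises this path through libva's vaCreateConfig(). Below is a minimal sketch requesting an H.264 decode config with 4:2:0 surfaces; the profile choice and helper name are assumptions for illustration, not something mandated by the driver code above.

#include <va/va.h>

/* Hypothetical sketch: create an H.264 decode config that requests 4:2:0
 * render-target surfaces. Assumes `dpy` was initialized with vaInitialize(). */
static VAStatus make_decode_config(VADisplay dpy, VAConfigID *config_id)
{
   VAConfigAttrib attrib = {
      .type  = VAConfigAttribRTFormat,
      .value = VA_RT_FORMAT_YUV420,
   };

   return vaCreateConfig(dpy, VAProfileH264Main, VAEntrypointVLD,
                         &attrib, 1, config_id);
}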
Example #7
/**
 * Create a VdpDecoder.
 */
VdpStatus
vlVdpDecoderCreate(VdpDevice device,
                   VdpDecoderProfile profile,
                   uint32_t width, uint32_t height,
                   uint32_t max_references,
                   VdpDecoder *decoder)
{
   enum pipe_video_profile p_profile;
   struct pipe_context *pipe;
   struct pipe_screen *screen;
   vlVdpDevice *dev;
   vlVdpDecoder *vldecoder;
   VdpStatus ret;
   bool supported;

   if (!decoder)
      return VDP_STATUS_INVALID_POINTER;
   *decoder = 0;

   if (!(width && height))
      return VDP_STATUS_INVALID_VALUE;

   p_profile = ProfileToPipe(profile);
   if (p_profile == PIPE_VIDEO_PROFILE_UNKNOWN)
      return VDP_STATUS_INVALID_DECODER_PROFILE;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = dev->context;
   screen = dev->vscreen->pscreen;

   pipe_mutex_lock(dev->mutex);

   supported = screen->get_video_param
   (
      screen,
      p_profile,
      PIPE_VIDEO_CAP_SUPPORTED
   );
   if (!supported) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_INVALID_DECODER_PROFILE;
   }

   vldecoder = CALLOC(1, sizeof(vlVdpDecoder));
   if (!vldecoder) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_RESOURCES;
   }

   vldecoder->device = dev;

   vldecoder->decoder = pipe->create_video_decoder
   (
      pipe, p_profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CHROMA_FORMAT_420,
      width, height, max_references,
      false
   );

   if (!vldecoder->decoder) {
      ret = VDP_STATUS_ERROR;
      goto error_decoder;
   }

   *decoder = vlAddDataHTAB(vldecoder);
   if (*decoder == 0) {
      ret = VDP_STATUS_ERROR;
      goto error_handle;
   }
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;

error_handle:
   vldecoder->decoder->destroy(vldecoder->decoder);

error_decoder:
   pipe_mutex_unlock(dev->mutex);
   FREE(vldecoder);
   return ret;
}