Example #1
static OMX_ERRORTYPE vid_dec_Destructor(OMX_COMPONENTTYPE *comp)
{
   vid_dec_PrivateType* priv = comp->pComponentPrivate;
   int i;

   if (priv->ports) {
      for (i = 0; i < priv->sPortTypesParam[OMX_PortDomainVideo].nPorts; ++i) {
         if (priv->ports[i])
            priv->ports[i]->PortDestructor(priv->ports[i]);
      }
      FREE(priv->ports);
      priv->ports = NULL;
   }

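   /* The compositor and its state were created on this pipe context, so
    * clean them up before destroying the context itself. */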
   if (priv->pipe) {
      vl_compositor_cleanup_state(&priv->cstate);
      vl_compositor_cleanup(&priv->compositor);
      priv->pipe->destroy(priv->pipe);
   }

   if (priv->screen)
      omx_put_screen();

   return omx_workaround_Destructor(comp);
}
Example #2
PUBLIC
Status XvMCDestroyContext(Display *dpy, XvMCContext *context)
{
   struct vl_screen *vscreen;
   struct vl_context *vctx;
   XvMCContextPrivate *context_priv;

   XVMC_MSG(XVMC_TRACE, "[XvMC] Destroying context %p.\n", context);

   assert(dpy);

   if (!context || !context->privData)
      return XvMCBadContext;

   context_priv = context->privData;
   vctx = context_priv->vctx;
   vscreen = vctx->vscreen;
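   /* Tear down in reverse order of creation: drawable surface, decoder,
    * compositor, video context, then the screen. */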
   pipe_surface_reference(&context_priv->drawable_surface, NULL);
   context_priv->decoder->destroy(context_priv->decoder);
   vl_compositor_cleanup(&context_priv->compositor);
   vl_video_destroy(vctx);
   vl_screen_destroy(vscreen);
   FREE(context_priv);
   context->privData = NULL;

   XVMC_MSG(XVMC_TRACE, "[XvMC] Context %p destroyed.\n", context);

   return Success;
}
Example #3
PUBLIC
Status XvMCDestroyContext(Display *dpy, XvMCContext *context)
{
   XvMCContextPrivate *context_priv;

   XVMC_MSG(XVMC_TRACE, "[XvMC] Destroying context %p.\n", context);

   assert(dpy);

   if (!context || !context->privData)
      return XvMCBadContext;

   context_priv = context->privData;
   context_priv->decoder->destroy(context_priv->decoder);
   vl_compositor_cleanup_state(&context_priv->cstate);
   vl_compositor_cleanup(&context_priv->compositor);
   context_priv->pipe->destroy(context_priv->pipe);
   vl_screen_destroy(context_priv->vscreen);
   FREE(context_priv);
   context->privData = NULL;

   XVMC_MSG(XVMC_TRACE, "[XvMC] Context %p destroyed.\n", context);

   return Success;
}
Example #4
static OMX_ERRORTYPE h264d_prc_allocate_resources(void *ap_obj, OMX_U32 a_pid)
{
   vid_dec_PrivateType *priv = ap_obj;
   struct pipe_screen *screen;
   vl_csc_matrix csc;

   assert(priv);

   priv->screen = omx_get_screen();
   if (!priv->screen)
      return OMX_ErrorInsufficientResources;

   screen = priv->screen->pscreen;
   priv->pipe = screen->context_create(screen, priv->screen, 0);
   if (!priv->pipe)
      return OMX_ErrorInsufficientResources;

   if (!vl_compositor_init(&priv->compositor, priv->pipe)) {
      priv->pipe->destroy(priv->pipe);
      priv->pipe = NULL;
      return OMX_ErrorInsufficientResources;
   }

   if (!vl_compositor_init_state(&priv->cstate, priv->pipe)) {
      vl_compositor_cleanup(&priv->compositor);
      priv->pipe->destroy(priv->pipe);
      priv->pipe = NULL;
      return OMX_ErrorInsufficientResources;
   }

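   /* Program a default BT.601 conversion matrix into the compositor state */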
   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, true, &csc);
   if (!vl_compositor_set_csc_matrix(&priv->cstate, (const vl_csc_matrix *)&csc, 1.0f, 0.0f)) {
      vl_compositor_cleanup(&priv->compositor);
      priv->pipe->destroy(priv->pipe);
      priv->pipe = NULL;
      return OMX_ErrorInsufficientResources;
   }

   LIST_INITHEAD(&priv->codec_data.h264.dpb_list);

   priv->video_buffer_map = util_hash_table_create(handle_hash, handle_compare);

   return OMX_ErrorNone;
}
Example #5
VdpStatus
vlVdpPresentationQueueDestroy(VdpPresentationQueue presentation_queue)
{
   vlVdpPresentationQueue *pq;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Destroying PresentationQueue\n");

   pq = vlGetDataHTAB(presentation_queue);
   if (!pq)
      return VDP_STATUS_INVALID_HANDLE;

   vl_compositor_cleanup(&pq->compositor);

   vlRemoveDataHTAB(presentation_queue);
   FREE(pq);

   return VDP_STATUS_OK;
}
Example #6
VAStatus
vlVaTerminate(VADriverContextP ctx)
{
   vlVaDriver *drv;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = ctx->pDriverData;
   vl_compositor_cleanup_state(&drv->cstate);
   vl_compositor_cleanup(&drv->compositor);
   drv->pipe->destroy(drv->pipe);
   drv->vscreen->destroy(drv->vscreen);
   handle_table_destroy(drv->htab);
   FREE(drv);

   return VA_STATUS_SUCCESS;
}
Example #7
static OMX_ERRORTYPE h264d_prc_deallocate_resources(void *ap_obj)
{
   vid_dec_PrivateType *priv = ap_obj;
   assert(priv);

   /* Clear hash table */
   util_hash_table_foreach(priv->video_buffer_map,
                            &hash_table_clear_item_callback,
                            NULL);
   util_hash_table_destroy(priv->video_buffer_map);

   if (priv->pipe) {
      vl_compositor_cleanup_state(&priv->cstate);
      vl_compositor_cleanup(&priv->compositor);
      priv->pipe->destroy(priv->pipe);
   }

   if (priv->screen)
      omx_put_screen();

   return OMX_ErrorNone;
}
Example #8
static OMX_ERRORTYPE vid_dec_Constructor(OMX_COMPONENTTYPE *comp, OMX_STRING name)
{
   vid_dec_PrivateType *priv;
   omx_base_video_PortType *port;
   struct pipe_screen *screen;
   OMX_ERRORTYPE r;
   int i;

   assert(!comp->pComponentPrivate);

   priv = comp->pComponentPrivate = CALLOC(1, sizeof(vid_dec_PrivateType));
   if (!priv)
      return OMX_ErrorInsufficientResources;

   r = omx_base_filter_Constructor(comp, name);
   if (r)
      return r;

   priv->profile = PIPE_VIDEO_PROFILE_UNKNOWN;

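   /* Map the component name onto the video profile it decodes */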
   if (!strcmp(name, OMX_VID_DEC_MPEG2_NAME))
      priv->profile = PIPE_VIDEO_PROFILE_MPEG2_MAIN;

   if (!strcmp(name, OMX_VID_DEC_AVC_NAME))
      priv->profile = PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH;

   if (!strcmp(name, OMX_VID_DEC_HEVC_NAME))
      priv->profile = PIPE_VIDEO_PROFILE_HEVC_MAIN;

   priv->BufferMgmtCallback = vid_dec_FrameDecoded;
   priv->messageHandler = vid_dec_MessageHandler;
   priv->destructor = vid_dec_Destructor;

   comp->SetParameter = vid_dec_SetParameter;
   comp->GetParameter = vid_dec_GetParameter;

   priv->screen = omx_get_screen();
   if (!priv->screen)
      return OMX_ErrorInsufficientResources;

   screen = priv->screen->pscreen;
   priv->pipe = screen->context_create(screen, NULL, 0);
   if (!priv->pipe)
      return OMX_ErrorInsufficientResources;

   if (!vl_compositor_init(&priv->compositor, priv->pipe)) {
      priv->pipe->destroy(priv->pipe);
      priv->pipe = NULL;
      return OMX_ErrorInsufficientResources;
   }

   if (!vl_compositor_init_state(&priv->cstate, priv->pipe)) {
      vl_compositor_cleanup(&priv->compositor);
      priv->pipe->destroy(priv->pipe);
      priv->pipe = NULL;
      return OMX_ErrorInsufficientResources;
   }

   priv->sPortTypesParam[OMX_PortDomainVideo].nStartPortNumber = 0;
   priv->sPortTypesParam[OMX_PortDomainVideo].nPorts = 2;
   priv->ports = CALLOC(2, sizeof(omx_base_PortType *));
   if (!priv->ports)
      return OMX_ErrorInsufficientResources;

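   /* Construct both ports: index 0 as the input port (i == 0), index 1 as
    * the output port. */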
   for (i = 0; i < 2; ++i) {
      priv->ports[i] = CALLOC(1, sizeof(omx_base_video_PortType));
      if (!priv->ports[i])
         return OMX_ErrorInsufficientResources;

      base_video_port_Constructor(comp, &priv->ports[i], i, i == 0);
   }

   port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_INPUTPORT_INDEX];
   strcpy(port->sPortParam.format.video.cMIMEType, "video/MPEG2");
   port->sPortParam.nBufferCountMin = 8;
   port->sPortParam.nBufferCountActual = 8;
   port->sPortParam.nBufferSize = DEFAULT_OUT_BUFFER_SIZE;
   port->sPortParam.format.video.nFrameWidth = 176;
   port->sPortParam.format.video.nFrameHeight = 144;
   port->sPortParam.format.video.eCompressionFormat = OMX_VIDEO_CodingMPEG2;
   port->sVideoParam.eCompressionFormat = OMX_VIDEO_CodingMPEG2;
   port->Port_SendBufferFunction = vid_dec_DecodeBuffer;
   port->Port_FreeBuffer = vid_dec_FreeDecBuffer;

   port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_OUTPUTPORT_INDEX];
   port->sPortParam.nBufferCountActual = 8;
   port->sPortParam.nBufferCountMin = 4;
   port->sPortParam.format.video.nFrameWidth = 176;
   port->sPortParam.format.video.nFrameHeight = 144;
   port->sPortParam.format.video.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
   port->sVideoParam.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;

   return OMX_ErrorNone;
}
Example #9
PUBLIC VAStatus
VA_DRIVER_INIT_FUNC(VADriverContextP ctx)
{
   vlVaDriver *drv;
   struct drm_state *drm_info;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = CALLOC(1, sizeof(vlVaDriver));
   if (!drv)
      return VA_STATUS_ERROR_ALLOCATION_FAILED;

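   /* Pick the window-system screen from the display type: try DRI3 then
    * DRI2 for X11, or a DRM screen for Wayland and headless DRM displays. */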
   switch (ctx->display_type) {
   case VA_DISPLAY_ANDROID:
      FREE(drv);
      return VA_STATUS_ERROR_UNIMPLEMENTED;
   case VA_DISPLAY_GLX:
   case VA_DISPLAY_X11:
#if defined(HAVE_DRI3)
      drv->vscreen = vl_dri3_screen_create(ctx->native_dpy, ctx->x11_screen);
#endif
      if (!drv->vscreen)
         drv->vscreen = vl_dri2_screen_create(ctx->native_dpy, ctx->x11_screen);
      if (!drv->vscreen)
         goto error_screen;
      break;
   case VA_DISPLAY_WAYLAND:
   case VA_DISPLAY_DRM:
   case VA_DISPLAY_DRM_RENDERNODES: {
      drm_info = (struct drm_state *) ctx->drm_state;

      if (!drm_info || drm_info->fd < 0) {
         FREE(drv);
         return VA_STATUS_ERROR_INVALID_PARAMETER;
      }

      drv->vscreen = vl_drm_screen_create(drm_info->fd);
      if (!drv->vscreen)
         goto error_screen;
      }
      break;
   default:
      FREE(drv);
      return VA_STATUS_ERROR_INVALID_DISPLAY;
   }

   drv->pipe = drv->vscreen->pscreen->context_create(drv->vscreen->pscreen,
                                                     drv->vscreen, 0);
   if (!drv->pipe)
      goto error_pipe;

   drv->htab = handle_table_create();
   if (!drv->htab)
      goto error_htab;

   if (!vl_compositor_init(&drv->compositor, drv->pipe))
      goto error_compositor;
   if (!vl_compositor_init_state(&drv->cstate, drv->pipe))
      goto error_compositor_state;

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, true, &drv->csc);
   if (!vl_compositor_set_csc_matrix(&drv->cstate, (const vl_csc_matrix *)&drv->csc, 1.0f, 0.0f))
      goto error_csc_matrix;
   pipe_mutex_init(drv->mutex);

   ctx->pDriverData = (void *)drv;
   ctx->version_major = 0;
   ctx->version_minor = 1;
   *ctx->vtable = vtable;
   *ctx->vtable_vpp = vtable_vpp;
   ctx->max_profiles = PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH - PIPE_VIDEO_PROFILE_UNKNOWN;
   ctx->max_entrypoints = 1;
   ctx->max_attributes = 1;
   ctx->max_image_formats = VL_VA_MAX_IMAGE_FORMATS;
   ctx->max_subpic_formats = 1;
   ctx->max_display_attributes = 1;
   ctx->str_vendor = "mesa gallium vaapi";

   return VA_STATUS_SUCCESS;

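/* Error paths: each label falls through to the next, releasing everything
 * that was set up before the failure point. */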
error_csc_matrix:
   vl_compositor_cleanup_state(&drv->cstate);

error_compositor_state:
   vl_compositor_cleanup(&drv->compositor);

error_compositor:
   handle_table_destroy(drv->htab);

error_htab:
   drv->pipe->destroy(drv->pipe);

error_pipe:
   drv->vscreen->destroy(drv->vscreen);

error_screen:
   FREE(drv);
   return VA_STATUS_ERROR_ALLOCATION_FAILED;
}
Example #10
PUBLIC
Status XvMCCreateContext(Display *dpy, XvPortID port, int surface_type_id,
                         int width, int height, int flags, XvMCContext *context)
{
   bool found_port;
   int scrn = 0;
   int chroma_format = 0;
   int mc_type = 0;
   int surface_flags = 0;
   unsigned short subpic_max_w = 0;
   unsigned short subpic_max_h = 0;
   Status ret;
   struct vl_screen *vscreen;
   struct pipe_context *pipe;
   struct pipe_video_codec templat = {0};
   XvMCContextPrivate *context_priv;
   vl_csc_matrix csc;

   XVMC_MSG(XVMC_TRACE, "[XvMC] Creating context %p.\n", context);

   assert(dpy);

   if (!context)
      return XvMCBadContext;

   ret = Validate(dpy, port, surface_type_id, width, height, flags,
                  &found_port, &scrn, &chroma_format, &mc_type, &surface_flags,
                  &subpic_max_w, &subpic_max_h);

   /* Success and XvBadPort have the same value */
   if (ret != Success || !found_port)
      return ret;

   /* XXX: Current limits */
   if (chroma_format != XVMC_CHROMA_FORMAT_420) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Cannot decode requested surface type. Unsupported chroma format.\n");
      return BadImplementation;
   }
   if ((mc_type & ~XVMC_IDCT) != (XVMC_MOCOMP | XVMC_MPEG_2)) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Cannot decode requested surface type. Non-MPEG2/Mocomp/iDCT acceleration unsupported.\n");
      return BadImplementation;
   }
   if (surface_flags & XVMC_INTRA_UNSIGNED) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Cannot decode requested surface type. Unsigned intra unsupported.\n");
      return BadImplementation;
   }

   context_priv = CALLOC(1, sizeof(XvMCContextPrivate));
   if (!context_priv)
      return BadAlloc;

   /* TODO: Reuse screen if process creates another context */
   vscreen = vl_screen_create(dpy, scrn);

   if (!vscreen) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL screen.\n");
      FREE(context_priv);
      return BadAlloc;
   }

   pipe = vscreen->pscreen->context_create(vscreen->pscreen, vscreen, 0);
   if (!pipe) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL context.\n");
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }

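   /* Describe the decoder to create: MPEG-2 at the requested size, using the
    * IDCT entry point when the surface type asks for it, MC otherwise. */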
   templat.profile = ProfileToPipe(mc_type);
   templat.entrypoint = (mc_type & XVMC_IDCT) ? PIPE_VIDEO_ENTRYPOINT_IDCT : PIPE_VIDEO_ENTRYPOINT_MC;
   templat.chroma_format = FormatToPipe(chroma_format);
   templat.width = width;
   templat.height = height;
   templat.max_references = 2;
   templat.expect_chunked_decode = true;

   context_priv->decoder = pipe->create_video_codec(pipe, &templat);

   if (!context_priv->decoder) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL decoder.\n");
      pipe->destroy(pipe);
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }

   if (!vl_compositor_init(&context_priv->compositor, pipe)) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL compositor.\n");
      context_priv->decoder->destroy(context_priv->decoder);
      pipe->destroy(pipe);
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }

   if (!vl_compositor_init_state(&context_priv->cstate, pipe)) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL compositor state.\n");
      vl_compositor_cleanup(&context_priv->compositor);
      context_priv->decoder->destroy(context_priv->decoder);
      pipe->destroy(pipe);
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }


   context_priv->color_standard =
      debug_get_bool_option("G3DVL_NO_CSC", FALSE) ?
      VL_CSC_COLOR_STANDARD_IDENTITY : VL_CSC_COLOR_STANDARD_BT_601;
   context_priv->procamp = vl_default_procamp;

   vl_csc_get_matrix(context_priv->color_standard,
                     &context_priv->procamp, true, &csc);
   vl_compositor_set_csc_matrix(&context_priv->cstate, (const vl_csc_matrix *)&csc);

   context_priv->vscreen = vscreen;
   context_priv->pipe = pipe;
   context_priv->subpicture_max_width = subpic_max_w;
   context_priv->subpicture_max_height = subpic_max_h;

   context->context_id = XAllocID(dpy);
   context->surface_type_id = surface_type_id;
   context->width = width;
   context->height = height;
   context->flags = flags;
   context->port = port;
   context->privData = context_priv;

   SyncHandle();

   XVMC_MSG(XVMC_TRACE, "[XvMC] Context %p created.\n", context);

   return Success;
}