Example #1
bool
vl_compositor_init_state(struct vl_compositor_state *s, struct pipe_context *pipe)
{
   vl_csc_matrix csc_matrix;

   assert(s);

   memset(s, 0, sizeof(*s));

   s->pipe = pipe;

   s->clear_color.f[0] = s->clear_color.f[1] = 0.0f;
   s->clear_color.f[2] = s->clear_color.f[3] = 0.0f;

   /*
    * Create our fragment shader's constant buffer
    * Const buffer contains the color conversion matrix and bias vectors
    */
   /* XXX: Create with IMMUTABLE/STATIC... although it does change every once in a long while... */
   s->csc_matrix = pipe_buffer_create
   (
      pipe->screen,
      PIPE_BIND_CONSTANT_BUFFER,
      PIPE_USAGE_STATIC,
      sizeof(csc_matrix)
   );

   vl_compositor_clear_layers(s);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY, NULL, true, &csc_matrix);
   vl_compositor_set_csc_matrix(s, (const vl_csc_matrix *)&csc_matrix);

   return true;
}
Example #2
/**
 * Generate a color space conversion matrix.
 */
VdpStatus
vlVdpGenerateCSCMatrix(VdpProcamp *procamp,
                       VdpColorStandard standard,
                       VdpCSCMatrix *csc_matrix)
{
   enum VL_CSC_COLOR_STANDARD vl_std;
   struct vl_procamp camp;

   if (!csc_matrix)
      return VDP_STATUS_INVALID_POINTER;

   switch (standard) {
      case VDP_COLOR_STANDARD_ITUR_BT_601: vl_std = VL_CSC_COLOR_STANDARD_BT_601; break;
      case VDP_COLOR_STANDARD_ITUR_BT_709: vl_std = VL_CSC_COLOR_STANDARD_BT_709; break;
      case VDP_COLOR_STANDARD_SMPTE_240M:  vl_std = VL_CSC_COLOR_STANDARD_SMPTE_240M; break;
      default: return VDP_STATUS_INVALID_COLOR_STANDARD;
   }

   if (procamp) {
      if (procamp->struct_version > VDP_PROCAMP_VERSION)
         return VDP_STATUS_INVALID_STRUCT_VERSION;
      camp.brightness = procamp->brightness;
      camp.contrast = procamp->contrast;
      camp.saturation = procamp->saturation;
      camp.hue = procamp->hue;
   }

   vl_csc_get_matrix(vl_std, procamp ? &camp : NULL, true, csc_matrix);
   return VDP_STATUS_OK;
}
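For reference, a caller could drive this entry point as in the sketch below. This is a minimal sketch, not part of the original listing: the neutral procamp values and the direct call to vlVdpGenerateCSCMatrix (rather than dispatching through VdpGetProcAddress) are assumptions made for illustration.

/* Sketch: request a BT.709 conversion matrix with a neutral procamp. */
VdpProcamp procamp;
VdpCSCMatrix matrix;
VdpStatus st;

procamp.struct_version = VDP_PROCAMP_VERSION;
procamp.brightness = 0.0f;   /* no luma offset */
procamp.contrast   = 1.0f;   /* unity gain */
procamp.saturation = 1.0f;
procamp.hue        = 0.0f;

st = vlVdpGenerateCSCMatrix(&procamp, VDP_COLOR_STANDARD_ITUR_BT_709, &matrix);
if (st != VDP_STATUS_OK) {
   /* VDP_STATUS_INVALID_POINTER, _INVALID_COLOR_STANDARD or _INVALID_STRUCT_VERSION */
}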
Example #3
PUBLIC
Status XvMCSetAttribute(Display *dpy, XvMCContext *context, Atom attribute, int value)
{
   XvMCContextPrivate *context_priv;
   const char *attr;
   vl_csc_matrix csc;

   assert(dpy);

   if (!context || !context->privData)
      return XvMCBadContext;

   context_priv = context->privData;

   attr = XGetAtomName(dpy, attribute);
   if (!attr)
      return XvMCBadContext;

   if (strcmp(attr, XV_BRIGHTNESS) == 0)
      context_priv->procamp.brightness = value / 1000.0f;
   else if (strcmp(attr, XV_CONTRAST) == 0)
      context_priv->procamp.contrast = value / 1000.0f + 1.0f;
   else if (strcmp(attr, XV_SATURATION) == 0)
      context_priv->procamp.saturation = value / 1000.0f + 1.0f;
   else if (strcmp(attr, XV_HUE) == 0)
      context_priv->procamp.hue = value / 1000.0f;
   else if (strcmp(attr, XV_COLORSPACE) == 0)
      context_priv->color_standard = value ?
         VL_CSC_COLOR_STANDARD_BT_601 :
         VL_CSC_COLOR_STANDARD_BT_709;
   else
      return BadName;

   vl_csc_get_matrix
   (
      context_priv->color_standard,
      &context_priv->procamp, true, &csc
   );
   vl_compositor_set_csc_matrix(&context_priv->cstate, (const vl_csc_matrix *)&csc);

   XVMC_MSG(XVMC_TRACE, "[XvMC] Set attribute %s to value %d.\n", attr, value);

   return Success;
}
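A client-side usage sketch for this attribute path (hypothetical values; the attribute name string "XV_BRIGHTNESS" is assumed to match the driver's XV_BRIGHTNESS macro). Since the driver divides the value by 1000, passing 500 raises brightness by +0.5 on an already created context:

/* Sketch: adjust brightness on an existing XvMC context. */
Atom attr = XInternAtom(dpy, "XV_BRIGHTNESS", False);
Status st = XvMCSetAttribute(dpy, &context, attr, 500);
if (st != Success) {
   /* BadName for unknown attributes, XvMCBadContext for an invalid context */
}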
Example #4
static OMX_ERRORTYPE h264d_prc_allocate_resources(void *ap_obj, OMX_U32 a_pid)
{
   vid_dec_PrivateType *priv = ap_obj;
   struct pipe_screen *screen;
   vl_csc_matrix csc;

   assert(priv);

   priv->screen = omx_get_screen();
   if (!priv->screen)
      return OMX_ErrorInsufficientResources;

   screen = priv->screen->pscreen;
   priv->pipe = screen->context_create(screen, priv->screen, 0);
   if (!priv->pipe)
      return OMX_ErrorInsufficientResources;

   if (!vl_compositor_init(&priv->compositor, priv->pipe)) {
      priv->pipe->destroy(priv->pipe);
      priv->pipe = NULL;
      return OMX_ErrorInsufficientResources;
   }

   if (!vl_compositor_init_state(&priv->cstate, priv->pipe)) {
      vl_compositor_cleanup(&priv->compositor);
      priv->pipe->destroy(priv->pipe);
      priv->pipe = NULL;
      return OMX_ErrorInsufficientResources;
   }

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, true, &csc);
   if (!vl_compositor_set_csc_matrix(&priv->cstate, (const vl_csc_matrix *)&csc, 1.0f, 0.0f)) {
      vl_compositor_cleanup(&priv->compositor);
      priv->pipe->destroy(priv->pipe);
      priv->pipe = NULL;
      return OMX_ErrorInsufficientResources;
   }

   LIST_INITHEAD(&priv->codec_data.h264.dpb_list);

   priv->video_buffer_map = util_hash_table_create(handle_hash, handle_compare);

   return OMX_ErrorNone;
}
Example #5
PUBLIC VAStatus
VA_DRIVER_INIT_FUNC(VADriverContextP ctx)
{
   vlVaDriver *drv;
   struct drm_state *drm_info;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = CALLOC(1, sizeof(vlVaDriver));
   if (!drv)
      return VA_STATUS_ERROR_ALLOCATION_FAILED;

   switch (ctx->display_type) {
   case VA_DISPLAY_ANDROID:
      FREE(drv);
      return VA_STATUS_ERROR_UNIMPLEMENTED;
   case VA_DISPLAY_GLX:
   case VA_DISPLAY_X11:
#if defined(HAVE_DRI3)
      drv->vscreen = vl_dri3_screen_create(ctx->native_dpy, ctx->x11_screen);
#endif
      if (!drv->vscreen)
         drv->vscreen = vl_dri2_screen_create(ctx->native_dpy, ctx->x11_screen);
      if (!drv->vscreen)
         goto error_screen;
      break;
   case VA_DISPLAY_WAYLAND:
   case VA_DISPLAY_DRM:
   case VA_DISPLAY_DRM_RENDERNODES: {
      drm_info = (struct drm_state *) ctx->drm_state;

      if (!drm_info || drm_info->fd < 0) {
         FREE(drv);
         return VA_STATUS_ERROR_INVALID_PARAMETER;
      }

      drv->vscreen = vl_drm_screen_create(drm_info->fd);
      if (!drv->vscreen)
         goto error_screen;
      }
      break;
   default:
      FREE(drv);
      return VA_STATUS_ERROR_INVALID_DISPLAY;
   }

   drv->pipe = drv->vscreen->pscreen->context_create(drv->vscreen->pscreen,
                                                     drv->vscreen, 0);
   if (!drv->pipe)
      goto error_pipe;

   drv->htab = handle_table_create();
   if (!drv->htab)
      goto error_htab;

   if (!vl_compositor_init(&drv->compositor, drv->pipe))
      goto error_compositor;
   if (!vl_compositor_init_state(&drv->cstate, drv->pipe))
      goto error_compositor_state;

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, true, &drv->csc);
   if (!vl_compositor_set_csc_matrix(&drv->cstate, (const vl_csc_matrix *)&drv->csc, 1.0f, 0.0f))
      goto error_csc_matrix;
   pipe_mutex_init(drv->mutex);

   ctx->pDriverData = (void *)drv;
   ctx->version_major = 0;
   ctx->version_minor = 1;
   *ctx->vtable = vtable;
   *ctx->vtable_vpp = vtable_vpp;
   ctx->max_profiles = PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH - PIPE_VIDEO_PROFILE_UNKNOWN;
   ctx->max_entrypoints = 1;
   ctx->max_attributes = 1;
   ctx->max_image_formats = VL_VA_MAX_IMAGE_FORMATS;
   ctx->max_subpic_formats = 1;
   ctx->max_display_attributes = 1;
   ctx->str_vendor = "mesa gallium vaapi";

   return VA_STATUS_SUCCESS;

error_csc_matrix:
   vl_compositor_cleanup_state(&drv->cstate);

error_compositor_state:
   vl_compositor_cleanup(&drv->compositor);

error_compositor:
   handle_table_destroy(drv->htab);

error_htab:
   drv->pipe->destroy(drv->pipe);

error_pipe:
   drv->vscreen->destroy(drv->vscreen);

error_screen:
   FREE(drv);
   return VA_STATUS_ERROR_ALLOCATION_FAILED;
}
Example #6
/**
 * Copy image data from application memory in a specific YCbCr format to
 * a VdpOutputSurface.
 */
VdpStatus
vlVdpOutputSurfacePutBitsYCbCr(VdpOutputSurface surface,
                               VdpYCbCrFormat source_ycbcr_format,
                               void const *const *source_data,
                               uint32_t const *source_pitches,
                               VdpRect const *destination_rect,
                               VdpCSCMatrix const *csc_matrix)
{
   vlVdpOutputSurface *vlsurface;
   struct vl_compositor *compositor;
   struct vl_compositor_state *cstate;

   struct pipe_context *pipe;
   enum pipe_format format;
   struct pipe_video_buffer vtmpl, *vbuffer;
   struct u_rect dst_rect;
   struct pipe_sampler_view **sampler_views;

   unsigned i;

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;


   pipe = vlsurface->device->context;
   compositor = &vlsurface->device->compositor;
   cstate = &vlsurface->cstate;

   format = FormatYCBCRToPipe(source_ycbcr_format);
   if (format == PIPE_FORMAT_NONE)
       return VDP_STATUS_INVALID_Y_CB_CR_FORMAT;

   if (!source_data || !source_pitches)
       return VDP_STATUS_INVALID_POINTER;

   pipe_mutex_lock(vlsurface->device->mutex);
   vlVdpResolveDelayedRendering(vlsurface->device, NULL, NULL);
   memset(&vtmpl, 0, sizeof(vtmpl));
   vtmpl.buffer_format = format;
   vtmpl.chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;

   if (destination_rect) {
      vtmpl.width = abs(destination_rect->x0-destination_rect->x1);
      vtmpl.height = abs(destination_rect->y0-destination_rect->y1);
   } else {
      vtmpl.width = vlsurface->surface->texture->width0;
      vtmpl.height = vlsurface->surface->texture->height0;
   }

   vbuffer = pipe->create_video_buffer(pipe, &vtmpl);
   if (!vbuffer) {
      pipe_mutex_unlock(vlsurface->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   sampler_views = vbuffer->get_sampler_view_planes(vbuffer);
   if (!sampler_views) {
      vbuffer->destroy(vbuffer);
      pipe_mutex_unlock(vlsurface->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   for (i = 0; i < 3; ++i) {
      struct pipe_sampler_view *sv = sampler_views[i];
      if (!sv) continue;

      struct pipe_box dst_box = {
         0, 0, 0,
         sv->texture->width0, sv->texture->height0, 1
      };

      pipe->transfer_inline_write(pipe, sv->texture, 0, PIPE_TRANSFER_WRITE, &dst_box,
                                  source_data[i], source_pitches[i], 0);
   }

   if (!csc_matrix) {
      vl_csc_matrix csc;
      vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, 1, &csc);
      vl_compositor_set_csc_matrix(cstate, (const vl_csc_matrix*)&csc);
   } else {
      vl_compositor_set_csc_matrix(cstate, csc_matrix);
   }

   vl_compositor_clear_layers(cstate);
   vl_compositor_set_buffer_layer(cstate, compositor, 0, vbuffer, NULL, NULL, VL_COMPOSITOR_WEAVE);
   vl_compositor_set_layer_dst_area(cstate, 0, RectToPipe(destination_rect, &dst_rect));
   vl_compositor_render(cstate, compositor, vlsurface->surface, NULL);

   vbuffer->destroy(vbuffer);
   pipe_mutex_unlock(vlsurface->device->mutex);

   return VDP_STATUS_OK;
}
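A minimal caller sketch for this entry point, assuming a valid VdpOutputSurface handle and a YV12 frame already in system memory; the plane pointers, pitches and dimensions below are placeholders, and passing NULL for the matrix falls back to the BT.601 default shown in the code above:

/* Sketch: upload one YV12 frame (planes ordered Y, V, U for this format). */
uint32_t width = 1920, height = 1080;
uint8_t *y_plane = NULL, *v_plane = NULL, *u_plane = NULL;   /* filled by the decoder/app */
uint32_t pitches[3] = { width, width / 2, width / 2 };
void const *data[3] = { y_plane, v_plane, u_plane };
VdpRect dst = { 0, 0, width, height };

VdpStatus st = vlVdpOutputSurfacePutBitsYCbCr(surface, VDP_YCBCR_FORMAT_YV12,
                                              data, pitches, &dst, NULL);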
Example #7
/**
 * Set attribute values.
 */
VdpStatus
vlVdpVideoMixerSetAttributeValues(VdpVideoMixer mixer,
                                  uint32_t attribute_count,
                                  VdpVideoMixerAttribute const *attributes,
                                  void const *const *attribute_values)
{
   const VdpColor *background_color;
   union pipe_color_union color;
   const float *vdp_csc;
   float val;
   unsigned i;
   VdpStatus ret;

   if (!(attributes && attribute_values))
      return VDP_STATUS_INVALID_POINTER;

   vlVdpVideoMixer *vmixer = vlGetDataHTAB(mixer);
   if (!vmixer)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(vmixer->device->mutex);
   for (i = 0; i < attribute_count; ++i) {
      switch (attributes[i]) {
      case VDP_VIDEO_MIXER_ATTRIBUTE_BACKGROUND_COLOR:
         background_color = attribute_values[i];
         color.f[0] = background_color->red;
         color.f[1] = background_color->green;
         color.f[2] = background_color->blue;
         color.f[3] = background_color->alpha;
         vl_compositor_set_clear_color(&vmixer->cstate, &color);
         break;
      case VDP_VIDEO_MIXER_ATTRIBUTE_CSC_MATRIX:
         vdp_csc = attribute_values[i];
         vmixer->custom_csc = !!vdp_csc;
         if (!vdp_csc)
            vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, 1, &vmixer->csc);
         else
            memcpy(vmixer->csc, vdp_csc, sizeof(vl_csc_matrix));
         if (!debug_get_bool_option("G3DVL_NO_CSC", FALSE))
            vl_compositor_set_csc_matrix(&vmixer->cstate, (const vl_csc_matrix *)&vmixer->csc);
         break;

      case VDP_VIDEO_MIXER_ATTRIBUTE_NOISE_REDUCTION_LEVEL:

         val = *(float*)attribute_values[i];
         if (val < 0.f || val > 1.f) {
            ret = VDP_STATUS_INVALID_VALUE;
            goto fail;
         }

         vmixer->noise_reduction.level = val * 10;
         vlVdpVideoMixerUpdateNoiseReductionFilter(vmixer);
         break;

      case VDP_VIDEO_MIXER_ATTRIBUTE_LUMA_KEY_MIN_LUMA:
         val = *(float*)attribute_values[i];
         if (val < 0.f || val > 1.f) {
            ret = VDP_STATUS_INVALID_VALUE;
            goto fail;
         }
         vmixer->luma_key_min = val;
         break;
      case VDP_VIDEO_MIXER_ATTRIBUTE_LUMA_KEY_MAX_LUMA:
         val = *(float*)attribute_values[i];
         if (val < 0.f || val > 1.f) {
            ret = VDP_STATUS_INVALID_VALUE;
            goto fail;
         }
         vmixer->luma_key_max = val;
         break;

      case VDP_VIDEO_MIXER_ATTRIBUTE_SHARPNESS_LEVEL:

         val = *(float*)attribute_values[i];
         if (val < -1.f || val > 1.f) {
            ret = VDP_STATUS_INVALID_VALUE;
            goto fail;
         }

         vmixer->sharpness.value = val;
         vlVdpVideoMixerUpdateSharpnessFilter(vmixer);
         break;

      case VDP_VIDEO_MIXER_ATTRIBUTE_SKIP_CHROMA_DEINTERLACE:
         if (*(uint8_t*)attribute_values[i] > 1) {
            ret = VDP_STATUS_INVALID_VALUE;
            goto fail;
         }
         vmixer->skip_chroma_deint = *(uint8_t*)attribute_values[i];
         vlVdpVideoMixerUpdateDeinterlaceFilter(vmixer);
         break;
      default:
         ret = VDP_STATUS_INVALID_VIDEO_MIXER_ATTRIBUTE;
         goto fail;
      }
   }
   pipe_mutex_unlock(vmixer->device->mutex);

   return VDP_STATUS_OK;
fail:
   pipe_mutex_unlock(vmixer->device->mutex);
   return ret;
}
Example #8
/**
 * Create a VdpVideoMixer.
 */
VdpStatus
vlVdpVideoMixerCreate(VdpDevice device,
                      uint32_t feature_count,
                      VdpVideoMixerFeature const *features,
                      uint32_t parameter_count,
                      VdpVideoMixerParameter const *parameters,
                      void const *const *parameter_values,
                      VdpVideoMixer *mixer)
{
   vlVdpVideoMixer *vmixer = NULL;
   VdpStatus ret;
   struct pipe_screen *screen;
   uint32_t max_2d_texture_level;
   unsigned max_size, i;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;
   screen = dev->vscreen->pscreen;

   vmixer = CALLOC(1, sizeof(vlVdpVideoMixer));
   if (!vmixer)
      return VDP_STATUS_RESOURCES;

   DeviceReference(&vmixer->device, dev);

   pipe_mutex_lock(dev->mutex);

   vl_compositor_init_state(&vmixer->cstate, dev->context);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, true, &vmixer->csc);
   if (!debug_get_bool_option("G3DVL_NO_CSC", FALSE))
      vl_compositor_set_csc_matrix(&vmixer->cstate, (const vl_csc_matrix *)&vmixer->csc);

   *mixer = vlAddDataHTAB(vmixer);
   if (*mixer == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   ret = VDP_STATUS_INVALID_VIDEO_MIXER_FEATURE;
   for (i = 0; i < feature_count; ++i) {
      switch (features[i]) {
      /* they are valid, but we don't support them */
      case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L1:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L2:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L3:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L4:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L5:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L6:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L7:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L8:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L9:
      case VDP_VIDEO_MIXER_FEATURE_INVERSE_TELECINE:
      case VDP_VIDEO_MIXER_FEATURE_LUMA_KEY:
         break;

      case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL:
         vmixer->deint.supported = true;
         break;

      case VDP_VIDEO_MIXER_FEATURE_SHARPNESS:
         vmixer->sharpness.supported = true;
         break;

      case VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION:
         vmixer->noise_reduction.supported = true;
         break;

      default: goto no_params;
      }
   }

   vmixer->chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
   ret = VDP_STATUS_INVALID_VIDEO_MIXER_PARAMETER;
   for (i = 0; i < parameter_count; ++i) {
      switch (parameters[i]) {
      case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH:
         vmixer->video_width = *(uint32_t*)parameter_values[i];
         break;
      case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT:
         vmixer->video_height = *(uint32_t*)parameter_values[i];
         break;
      case VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE:
         vmixer->chroma_format = ChromaToPipe(*(VdpChromaType*)parameter_values[i]);
         break;
      case VDP_VIDEO_MIXER_PARAMETER_LAYERS:
         vmixer->max_layers = *(uint32_t*)parameter_values[i];
         break;
      default: goto no_params;
      }
   }
   ret = VDP_STATUS_INVALID_VALUE;
   if (vmixer->max_layers > 4) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] Max layers %u not supported (limit is 4)\n", vmixer->max_layers);
      goto no_params;
   }

   max_2d_texture_level = screen->get_param(screen, PIPE_CAP_MAX_TEXTURE_2D_LEVELS);
   max_size = pow(2, max_2d_texture_level-1);
   if (vmixer->video_width < 48 || vmixer->video_width > max_size) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u not valid for width\n",
                vmixer->video_width, max_size);
      goto no_params;
   }
   if (vmixer->video_height < 48 || vmixer->video_height > max_size) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u not valid for height\n",
                vmixer->video_height, max_size);
      goto no_params;
   }
   vmixer->luma_key_min = 0.f;
   vmixer->luma_key_max = 1.f;
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;

no_params:
   vlRemoveDataHTAB(*mixer);

no_handle:
   vl_compositor_cleanup_state(&vmixer->cstate);
   pipe_mutex_unlock(dev->mutex);
   DeviceReference(&vmixer->device, NULL);
   FREE(vmixer);
   return ret;
}
Example #9
PUBLIC
Status XvMCCreateContext(Display *dpy, XvPortID port, int surface_type_id,
                         int width, int height, int flags, XvMCContext *context)
{
   bool found_port;
   int scrn = 0;
   int chroma_format = 0;
   int mc_type = 0;
   int surface_flags = 0;
   unsigned short subpic_max_w = 0;
   unsigned short subpic_max_h = 0;
   Status ret;
   struct vl_screen *vscreen;
   struct pipe_context *pipe;
   struct pipe_video_codec templat = {0};
   XvMCContextPrivate *context_priv;
   vl_csc_matrix csc;

   XVMC_MSG(XVMC_TRACE, "[XvMC] Creating context %p.\n", context);

   assert(dpy);

   if (!context)
      return XvMCBadContext;

   ret = Validate(dpy, port, surface_type_id, width, height, flags,
                  &found_port, &scrn, &chroma_format, &mc_type, &surface_flags,
                  &subpic_max_w, &subpic_max_h);

   /* Success and XvBadPort have the same value */
   if (ret != Success || !found_port)
      return ret;

   /* XXX: Current limits */
   if (chroma_format != XVMC_CHROMA_FORMAT_420) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Cannot decode requested surface type. Unsupported chroma format.\n");
      return BadImplementation;
   }
   if ((mc_type & ~XVMC_IDCT) != (XVMC_MOCOMP | XVMC_MPEG_2)) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Cannot decode requested surface type. Non-MPEG2/Mocomp/iDCT acceleration unsupported.\n");
      return BadImplementation;
   }
   if (surface_flags & XVMC_INTRA_UNSIGNED) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Cannot decode requested surface type. Unsigned intra unsupported.\n");
      return BadImplementation;
   }

   context_priv = CALLOC(1, sizeof(XvMCContextPrivate));
   if (!context_priv)
      return BadAlloc;

   /* TODO: Reuse screen if process creates another context */
   vscreen = vl_screen_create(dpy, scrn);

   if (!vscreen) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL screen.\n");
      FREE(context_priv);
      return BadAlloc;
   }

   pipe = vscreen->pscreen->context_create(vscreen->pscreen, vscreen, 0);
   if (!pipe) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL context.\n");
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }

   templat.profile = ProfileToPipe(mc_type);
   templat.entrypoint = (mc_type & XVMC_IDCT) ? PIPE_VIDEO_ENTRYPOINT_IDCT : PIPE_VIDEO_ENTRYPOINT_MC;
   templat.chroma_format = FormatToPipe(chroma_format);
   templat.width = width;
   templat.height = height;
   templat.max_references = 2;
   templat.expect_chunked_decode = true;

   context_priv->decoder = pipe->create_video_codec(pipe, &templat);

   if (!context_priv->decoder) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL decoder.\n");
      pipe->destroy(pipe);
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }

   if (!vl_compositor_init(&context_priv->compositor, pipe)) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL compositor.\n");
      context_priv->decoder->destroy(context_priv->decoder);
      pipe->destroy(pipe);
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }

   if (!vl_compositor_init_state(&context_priv->cstate, pipe)) {
      XVMC_MSG(XVMC_ERR, "[XvMC] Could not create VL compositor state.\n");
      vl_compositor_cleanup(&context_priv->compositor);
      context_priv->decoder->destroy(context_priv->decoder);
      pipe->destroy(pipe);
      vl_screen_destroy(vscreen);
      FREE(context_priv);
      return BadAlloc;
   }


   context_priv->color_standard =
      debug_get_bool_option("G3DVL_NO_CSC", FALSE) ?
      VL_CSC_COLOR_STANDARD_IDENTITY : VL_CSC_COLOR_STANDARD_BT_601;
   context_priv->procamp = vl_default_procamp;

   vl_csc_get_matrix
   (
      context_priv->color_standard,
      &context_priv->procamp, true, &csc
   );
   vl_compositor_set_csc_matrix(&context_priv->cstate, (const vl_csc_matrix *)&csc);

   context_priv->vscreen = vscreen;
   context_priv->pipe = pipe;
   context_priv->subpicture_max_width = subpic_max_w;
   context_priv->subpicture_max_height = subpic_max_h;

   context->context_id = XAllocID(dpy);
   context->surface_type_id = surface_type_id;
   context->width = width;
   context->height = height;
   context->flags = flags;
   context->port = port;
   context->privData = context_priv;

   SyncHandle();

   XVMC_MSG(XVMC_TRACE, "[XvMC] Context %p created.\n", context);

   return Success;
}
Example #10
static bool
init_buffers(struct vl_compositor *c)
{
   struct fragment_shader_consts fsc;

   assert(c);
	
   /*
    * Create our vertex buffer and vertex buffer element
    * VB contains 4 vertices that render a quad covering the entire window
    * to display a rendered surface
    * Quad is rendered as a tri strip
    */
   c->vertex_bufs[0].stride = sizeof(struct vertex2f);
   c->vertex_bufs[0].max_index = 3;
   c->vertex_bufs[0].buffer_offset = 0;
   c->vertex_bufs[0].buffer = pipe_buffer_create
   (
      c->pipe->screen,
      1,
      PIPE_BUFFER_USAGE_VERTEX,
      sizeof(struct vertex2f) * 4
   );

   memcpy
   (
      pipe_buffer_map(c->pipe->screen, c->vertex_bufs[0].buffer, PIPE_BUFFER_USAGE_CPU_WRITE),
      surface_verts,
      sizeof(struct vertex2f) * 4
   );

   pipe_buffer_unmap(c->pipe->screen, c->vertex_bufs[0].buffer);

   c->vertex_elems[0].src_offset = 0;
   c->vertex_elems[0].vertex_buffer_index = 0;
   c->vertex_elems[0].nr_components = 2;
   c->vertex_elems[0].src_format = PIPE_FORMAT_R32G32_FLOAT;

   /*
    * Create our texcoord buffer and texcoord buffer element
    * Texcoord buffer contains the TCs for mapping the rendered surface to the 4 vertices
    */
   c->vertex_bufs[1].stride = sizeof(struct vertex2f);
   c->vertex_bufs[1].max_index = 3;
   c->vertex_bufs[1].buffer_offset = 0;
   c->vertex_bufs[1].buffer = pipe_buffer_create
   (
      c->pipe->screen,
      1,
      PIPE_BUFFER_USAGE_VERTEX,
      sizeof(struct vertex2f) * 4
   );

   memcpy
   (
      pipe_buffer_map(c->pipe->screen, c->vertex_bufs[1].buffer, PIPE_BUFFER_USAGE_CPU_WRITE),
      surface_texcoords,
      sizeof(struct vertex2f) * 4
   );

   pipe_buffer_unmap(c->pipe->screen, c->vertex_bufs[1].buffer);

   c->vertex_elems[1].src_offset = 0;
   c->vertex_elems[1].vertex_buffer_index = 1;
   c->vertex_elems[1].nr_components = 2;
   c->vertex_elems[1].src_format = PIPE_FORMAT_R32G32_FLOAT;

   /*
    * Create our vertex shader's constant buffer
    * Const buffer contains scaling and translation vectors
    */
   c->vs_const_buf.buffer = pipe_buffer_create
   (
      c->pipe->screen,
      1,
      PIPE_BUFFER_USAGE_CONSTANT | PIPE_BUFFER_USAGE_DISCARD,
      sizeof(struct vertex_shader_consts)
   );

   /*
    * Create our fragment shader's constant buffer
    * Const buffer contains the color conversion matrix and bias vectors
    */
   c->fs_const_buf.buffer = pipe_buffer_create
   (
      c->pipe->screen,
      1,
      PIPE_BUFFER_USAGE_CONSTANT,
      sizeof(struct fragment_shader_consts)
   );

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY, NULL, true, fsc.matrix);

   vl_compositor_set_csc_matrix(c, fsc.matrix);

   return true;
}