Example #1
/**
 * Create a VdpVideoMixer.
 */
VdpStatus
vlVdpVideoMixerCreate(VdpDevice device,
                      uint32_t feature_count,
                      VdpVideoMixerFeature const *features,
                      uint32_t parameter_count,
                      VdpVideoMixerParameter const *parameters,
                      void const *const *parameter_values,
                      VdpVideoMixer *mixer)
{
   vlVdpVideoMixer *vmixer = NULL;
   VdpStatus ret;
   struct pipe_screen *screen;
   uint32_t max_2d_texture_level;
   unsigned max_size, i;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;
   screen = dev->vscreen->pscreen;

   vmixer = CALLOC(1, sizeof(vlVdpVideoMixer));
   if (!vmixer)
      return VDP_STATUS_RESOURCES;

   DeviceReference(&vmixer->device, dev);

   pipe_mutex_lock(dev->mutex);

   vl_compositor_init_state(&vmixer->cstate, dev->context);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, true, &vmixer->csc);
   if (!debug_get_bool_option("G3DVL_NO_CSC", FALSE))
      vl_compositor_set_csc_matrix(&vmixer->cstate, (const vl_csc_matrix *)&vmixer->csc);

   *mixer = vlAddDataHTAB(vmixer);
   if (*mixer == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   ret = VDP_STATUS_INVALID_VIDEO_MIXER_FEATURE;
   for (i = 0; i < feature_count; ++i) {
      switch (features[i]) {
      /* they are valid, but we don't support them */
      case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L1:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L2:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L3:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L4:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L5:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L6:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L7:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L8:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L9:
      case VDP_VIDEO_MIXER_FEATURE_INVERSE_TELECINE:
      case VDP_VIDEO_MIXER_FEATURE_LUMA_KEY:
         break;

      case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL:
         vmixer->deint.supported = true;
         break;

      case VDP_VIDEO_MIXER_FEATURE_SHARPNESS:
         vmixer->sharpness.supported = true;
         break;

      case VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION:
         vmixer->noise_reduction.supported = true;
         break;

      default: goto no_params;
      }
   }

   vmixer->chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
   ret = VDP_STATUS_INVALID_VIDEO_MIXER_PARAMETER;
   for (i = 0; i < parameter_count; ++i) {
      switch (parameters[i]) {
      case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH:
         vmixer->video_width = *(uint32_t*)parameter_values[i];
         break;
      case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT:
         vmixer->video_height = *(uint32_t*)parameter_values[i];
         break;
      case VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE:
         vmixer->chroma_format = ChromaToPipe(*(VdpChromaType*)parameter_values[i]);
         break;
      case VDP_VIDEO_MIXER_PARAMETER_LAYERS:
         vmixer->max_layers = *(uint32_t*)parameter_values[i];
         break;
      default: goto no_params;
      }
   }
   ret = VDP_STATUS_INVALID_VALUE;
   if (vmixer->max_layers > 4) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] Max layers %u > 4 not supported\n", vmixer->max_layers);
      goto no_params;
   }

   max_2d_texture_level = screen->get_param(screen, PIPE_CAP_MAX_TEXTURE_2D_LEVELS);
   max_size = pow(2, max_2d_texture_level-1);
   if (vmixer->video_width < 48 || vmixer->video_width > max_size) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u not valid for width\n",
                vmixer->video_width, max_size);
      goto no_params;
   }
   if (vmixer->video_height < 48 || vmixer->video_height > max_size) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u not valid for height\n",
                vmixer->video_height, max_size);
      goto no_params;
   }
   vmixer->luma_key_min = 0.f;
   vmixer->luma_key_max = 1.f;
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;

no_params:
   vlRemoveDataHTAB(*mixer);

no_handle:
   vl_compositor_cleanup_state(&vmixer->cstate);
   pipe_mutex_unlock(dev->mutex);
   DeviceReference(&vmixer->device, NULL);
   FREE(vmixer);
   return ret;
}
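
A minimal caller-side sketch of how an application might reach this entry point through the public VDPAU API; it is not part of the driver. The helper name create_example_mixer is hypothetical, and device together with the mixer_create pointer (looked up via VdpGetProcAddress with VDP_FUNC_ID_VIDEO_MIXER_CREATE) is assumed to exist already.

#include <vdpau/vdpau.h>

static VdpStatus
create_example_mixer(VdpDevice device, VdpVideoMixerCreate *mixer_create,
                     VdpVideoMixer *mixer)
{
   /* One feature the driver implements and one it merely tolerates. */
   VdpVideoMixerFeature features[] = {
      VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL,
      VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L1
   };
   uint32_t width = 1920, height = 1080, layers = 0;
   VdpChromaType chroma = VDP_CHROMA_TYPE_420;
   VdpVideoMixerParameter params[] = {
      VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
      VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
      VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE,
      VDP_VIDEO_MIXER_PARAMETER_LAYERS
   };
   void const *param_values[] = { &width, &height, &chroma, &layers };

   /* width/height must fall inside the 48..max_size window checked above
    * and layers must not exceed 4, or the driver rejects the request. */
   return mixer_create(device, 2, features, 4, params, param_values, mixer);
}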
Example #2
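/**
 * Create an array of video surfaces.
 */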
VAStatus
vlVaCreateSurfaces2(VADriverContextP ctx, unsigned int format,
                    unsigned int width, unsigned int height,
                    VASurfaceID *surfaces, unsigned int num_surfaces,
                    VASurfaceAttrib *attrib_list, unsigned int num_attribs)
{
    vlVaDriver *drv;
    VASurfaceAttribExternalBuffers *memory_attribute;
    struct pipe_video_buffer templat;
    struct pipe_screen *pscreen;
    int i;
    int memory_type;
    int expected_fourcc;
    VAStatus vaStatus;

    if (!ctx)
       return VA_STATUS_ERROR_INVALID_CONTEXT;

    if (!(width && height))
       return VA_STATUS_ERROR_INVALID_IMAGE_FORMAT;

    drv = VL_VA_DRIVER(ctx);

    if (!drv)
        return VA_STATUS_ERROR_INVALID_CONTEXT;

    pscreen = VL_VA_PSCREEN(ctx);

    if (!pscreen)
        return VA_STATUS_ERROR_INVALID_CONTEXT;

    /* Default. */
    memory_attribute = NULL;
    memory_type = VA_SURFACE_ATTRIB_MEM_TYPE_VA;
    expected_fourcc = 0;

    for (i = 0; i < num_attribs && attrib_list; i++) {
        if ((attrib_list[i].type == VASurfaceAttribPixelFormat) &&
            (attrib_list[i].flags & VA_SURFACE_ATTRIB_SETTABLE)) {
            if (attrib_list[i].value.type != VAGenericValueTypeInteger)
                return VA_STATUS_ERROR_INVALID_PARAMETER;
            expected_fourcc = attrib_list[i].value.value.i;
        }

        if ((attrib_list[i].type == VASurfaceAttribMemoryType) &&
            (attrib_list[i].flags & VA_SURFACE_ATTRIB_SETTABLE)) {

            if (attrib_list[i].value.type != VAGenericValueTypeInteger)
                return VA_STATUS_ERROR_INVALID_PARAMETER;

            switch (attrib_list[i].value.value.i) {
                case VA_SURFACE_ATTRIB_MEM_TYPE_VA:
                case VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME:
                   memory_type = attrib_list[i].value.value.i;
                   break;
                default:
                   return VA_STATUS_ERROR_UNSUPPORTED_MEMORY_TYPE;
            }
        }

        if ((attrib_list[i].type == VASurfaceAttribExternalBufferDescriptor) &&
            (attrib_list[i].flags == VA_SURFACE_ATTRIB_SETTABLE)) {
            if (attrib_list[i].value.type != VAGenericValueTypePointer)
                return VA_STATUS_ERROR_INVALID_PARAMETER;
            memory_attribute = (VASurfaceAttribExternalBuffers *)attrib_list[i].value.value.p;
        }
    }

    if (VA_RT_FORMAT_YUV420 != format &&
        VA_RT_FORMAT_YUV422 != format &&
        VA_RT_FORMAT_YUV444 != format &&
        VA_RT_FORMAT_RGB32  != format) {
        return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
    }

    switch (memory_type) {
        case VA_SURFACE_ATTRIB_MEM_TYPE_VA:
            break;
        case VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME:
            if (!memory_attribute)
               return VA_STATUS_ERROR_INVALID_PARAMETER;

            expected_fourcc = memory_attribute->pixel_format;
            break;
        default:
            assert(0);
    }

    memset(&templat, 0, sizeof(templat));

    if (expected_fourcc) {
        templat.buffer_format = VaFourccToPipeFormat(expected_fourcc);
        templat.interlaced = 0;
    } else {
        templat.buffer_format = pscreen->get_video_param
        (
           pscreen,
           PIPE_VIDEO_PROFILE_UNKNOWN,
           PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
           PIPE_VIDEO_CAP_PREFERED_FORMAT
        );
        templat.interlaced = pscreen->get_video_param
        (
           pscreen,
           PIPE_VIDEO_PROFILE_UNKNOWN,
           PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
           PIPE_VIDEO_CAP_PREFERS_INTERLACED
        );
    }

    templat.chroma_format = ChromaToPipe(format);

    templat.width = width;
    templat.height = height;

    /* VA_INVALID_ID is 0xffffffff, so filling every byte with 0xff leaves
     * each element equal to VA_INVALID_ID. */
    memset(surfaces, VA_INVALID_ID, num_surfaces * sizeof(VASurfaceID));

    for (i = 0; i < num_surfaces; i++) {
        vlVaSurface *surf = CALLOC(1, sizeof(vlVaSurface));
        if (!surf)
            goto no_res;

        surf->templat = templat;

        switch (memory_type) {
            case VA_SURFACE_ATTRIB_MEM_TYPE_VA:
                surf->buffer = drv->pipe->create_video_buffer(drv->pipe, &templat);
                if (!surf->buffer)
                    goto no_res;
                util_dynarray_init(&surf->subpics);
                surfaces[i] = handle_table_add(drv->htab, surf);
                break;
            case VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME:
                vaStatus = suface_from_external_memory(ctx, surf, memory_attribute, i, surfaces, &templat);
                if (vaStatus != VA_STATUS_SUCCESS)
                    goto no_res;
                break;
            default:
                assert(0);
        }
    }

    return VA_STATUS_SUCCESS;

no_res:
   if (i)
      vlVaDestroySurfaces(ctx, surfaces, i);

   return VA_STATUS_ERROR_ALLOCATION_FAILED;
}
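
A brief client-side sketch of the libva call that ends up in this hook; create_nv12_surfaces is a hypothetical helper and dpy is assumed to be an already initialized VADisplay.

#include <va/va.h>

static VAStatus
create_nv12_surfaces(VADisplay dpy, VASurfaceID out[4])
{
    /* Request NV12 explicitly through a settable pixel-format attribute. */
    VASurfaceAttrib attrib = {
        .type = VASurfaceAttribPixelFormat,
        .flags = VA_SURFACE_ATTRIB_SETTABLE,
        .value = { .type = VAGenericValueTypeInteger,
                   .value = { .i = VA_FOURCC_NV12 } }
    };

    /* Four 1280x720 surfaces backed by driver-allocated memory
     * (VA_SURFACE_ATTRIB_MEM_TYPE_VA, the default above). */
    return vaCreateSurfaces(dpy, VA_RT_FORMAT_YUV420, 1280, 720,
                            out, 4, &attrib, 1);
}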
Example #3
/**
 * Create a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceCreate(VdpDevice device, VdpChromaType chroma_type,
                        uint32_t width, uint32_t height,
                        VdpVideoSurface *surface)
{
   struct pipe_context *pipe;
   vlVdpSurface *p_surf;
   VdpStatus ret;

   if (!(width && height)) {
      ret = VDP_STATUS_INVALID_SIZE;
      goto inv_size;
   }

   p_surf = CALLOC(1, sizeof(vlVdpSurface));
   if (!p_surf) {
      ret = VDP_STATUS_RESOURCES;
      goto no_res;
   }

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev) {
      ret = VDP_STATUS_INVALID_HANDLE;
      goto inv_device;
   }

   DeviceReference(&p_surf->device, dev);
   pipe = dev->context;

   pipe_mutex_lock(dev->mutex);
   memset(&p_surf->templat, 0, sizeof(p_surf->templat));
   p_surf->templat.buffer_format = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERED_FORMAT
   );
   p_surf->templat.chroma_format = ChromaToPipe(chroma_type);
   p_surf->templat.width = width;
   p_surf->templat.height = height;
   p_surf->templat.interlaced = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERS_INTERLACED
   );
   if (p_surf->templat.buffer_format != PIPE_FORMAT_NONE)
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

   /* do not mandate early allocation of a video buffer */
   vlVdpVideoSurfaceClear(p_surf);
   pipe_mutex_unlock(dev->mutex);

   *surface = vlAddDataHTAB(p_surf);
   if (*surface == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   return VDP_STATUS_OK;

no_handle:
   if (p_surf->video_buffer)
      p_surf->video_buffer->destroy(p_surf->video_buffer);

inv_device:
   DeviceReference(&p_surf->device, NULL);
   FREE(p_surf);

no_res:
inv_size:
   return ret;
}
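
A matching caller-side sketch for the surface path; create_example_surface is a hypothetical helper, and device plus the surface_create pointer (looked up with VDP_FUNC_ID_VIDEO_SURFACE_CREATE) are assumed to exist already.

#include <vdpau/vdpau.h>

static VdpStatus
create_example_surface(VdpDevice device, VdpVideoSurfaceCreate *surface_create,
                       VdpVideoSurface *surface)
{
   /* The driver may defer the pipe buffer allocation when the screen
    * reports no preferred format; the returned handle is still valid. */
   return surface_create(device, VDP_CHROMA_TYPE_420, 1920, 1080, surface);
}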