/**
 * Query the implementation's VdpOutputSurface capabilities.
 *
 * Reports whether the given RGBA format is renderable/sampleable and the
 * maximum surface dimensions derived from the 2D texture level count.
 */
VdpStatus
vlVdpOutputSurfaceQueryCapabilities(VdpDevice device, VdpRGBAFormat surface_rgba_format,
                                    VdpBool *is_supported, uint32_t *max_width, uint32_t *max_height)
{
   vlVdpDevice *dev;
   struct pipe_screen *pscreen;
   enum pipe_format format;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pscreen = dev->vscreen->pscreen;
   if (!pscreen)
      return VDP_STATUS_RESOURCES;

   /* A8 is rejected: it cannot be used as a render target here. */
   format = FormatRGBAToPipe(surface_rgba_format);
   if (format == PIPE_FORMAT_NONE || format == PIPE_FORMAT_A8_UNORM)
      return VDP_STATUS_INVALID_RGBA_FORMAT;

   if (!(is_supported && max_width && max_height))
      return VDP_STATUS_INVALID_POINTER;

   pipe_mutex_lock(dev->mutex);
   *is_supported = pscreen->is_format_supported
   (
      pscreen, format, PIPE_TEXTURE_3D, 1,
      PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_RENDER_TARGET
   );
   if (*is_supported) {
      uint32_t max_2d_texture_level = pscreen->get_param(
         pscreen, PIPE_CAP_MAX_TEXTURE_2D_LEVELS);

      if (!max_2d_texture_level) {
         pipe_mutex_unlock(dev->mutex);
         return VDP_STATUS_ERROR;
      }

      /* Max dimension is 2^(levels-1); use an integer shift instead of
         pow() — exact, and avoids float rounding plus the libm dependency. */
      *max_width = *max_height = 1u << (max_2d_texture_level - 1);
   } else {
      *max_width = 0;
      *max_height = 0;
   }
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;
}
/**
 * Query the implementation's VdpDecoder capabilities.
 *
 * Reports whether the profile is decodable and, if so, its maximum level,
 * dimensions and the macroblock capacity implied by those dimensions.
 */
VdpStatus
vlVdpDecoderQueryCapabilities(VdpDevice device, VdpDecoderProfile profile,
                              VdpBool *is_supported, uint32_t *max_level, uint32_t *max_macroblocks,
                              uint32_t *max_width, uint32_t *max_height)
{
   vlVdpDevice *dev;
   struct pipe_screen *screen;
   enum pipe_video_profile prof;

   /* All result pointers must be valid before any work is done. */
   if (!(is_supported && max_level && max_macroblocks && max_width && max_height))
      return VDP_STATUS_INVALID_POINTER;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   screen = dev->vscreen->pscreen;
   if (!screen)
      return VDP_STATUS_RESOURCES;

   /* Profiles we cannot map are simply reported as unsupported. */
   prof = ProfileToPipe(profile);
   if (prof == PIPE_VIDEO_PROFILE_UNKNOWN) {
      *is_supported = false;
      return VDP_STATUS_OK;
   }

   pipe_mutex_lock(dev->mutex);
   *is_supported = screen->get_video_param(screen, prof, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                           PIPE_VIDEO_CAP_SUPPORTED);
   if (!*is_supported) {
      *max_width = 0;
      *max_height = 0;
      *max_level = 0;
      *max_macroblocks = 0;
   } else {
      *max_width = screen->get_video_param(screen, prof, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                           PIPE_VIDEO_CAP_MAX_WIDTH);
      *max_height = screen->get_video_param(screen, prof, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                            PIPE_VIDEO_CAP_MAX_HEIGHT);
      *max_level = screen->get_video_param(screen, prof, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                           PIPE_VIDEO_CAP_MAX_LEVEL);
      /* Capacity in 16x16 macroblocks follows from the maximum dimensions. */
      *max_macroblocks = (*max_width / 16) * (*max_height / 16);
   }
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;
}
/**
 * Retrieve the parameters used to create a VdpOutputSurface.
 *
 * @return VDP_STATUS_INVALID_HANDLE for an unknown surface,
 *         VDP_STATUS_INVALID_POINTER if any result pointer is NULL.
 */
VdpStatus
vlVdpOutputSurfaceGetParameters(VdpOutputSurface surface, VdpRGBAFormat *rgba_format,
                                uint32_t *width, uint32_t *height)
{
   vlVdpOutputSurface *vlsurface;

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   /* Validate result pointers, consistent with the other GetParameters
      entry points, instead of crashing on a NULL dereference. */
   if (!(rgba_format && width && height))
      return VDP_STATUS_INVALID_POINTER;

   *rgba_format = PipeToFormatRGBA(vlsurface->sampler_view->texture->format);
   *width = vlsurface->sampler_view->texture->width0;
   *height = vlsurface->sampler_view->texture->height0;

   return VDP_STATUS_OK;
}
/**
 * Interop to mesa state tracker
 *
 * Returns the pipe_video_buffer backing the surface, lazily creating it
 * from the stored template when none exists yet.
 */
struct pipe_video_buffer *
vlVdpVideoSurfaceGallium(VdpVideoSurface surface)
{
   vlVdpSurface *surf = vlGetDataHTAB(surface);

   if (!surf)
      return NULL;

   pipe_mutex_lock(surf->device->mutex);
   if (!surf->video_buffer) {
      struct pipe_context *context = surf->device->context;

      /* try to create a video buffer if we don't already have one */
      surf->video_buffer = context->create_video_buffer(context, &surf->templat);
   }
   pipe_mutex_unlock(surf->device->mutex);

   return surf->video_buffer;
}
/**
 * Query the implementation's capability to perform a PutBits operation using
 * application data in a specific YCbCr/YUB format.
 */
VdpStatus
vlVdpOutputSurfaceQueryPutBitsYCbCrCapabilities(VdpDevice device, VdpRGBAFormat surface_rgba_format,
                                                VdpYCbCrFormat bits_ycbcr_format,
                                                VdpBool *is_supported)
{
   vlVdpDevice *dev;
   struct pipe_screen *pscreen;
   enum pipe_format rgba_format, ycbcr_format;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pscreen = dev->vscreen->pscreen;
   if (!pscreen)
      return VDP_STATUS_ERROR;

   /* A8 cannot serve as a render target, so it is rejected here. */
   rgba_format = VdpFormatRGBAToPipe(surface_rgba_format);
   if (rgba_format == PIPE_FORMAT_NONE || rgba_format == PIPE_FORMAT_A8_UNORM)
      return VDP_STATUS_INVALID_RGBA_FORMAT;

   ycbcr_format = FormatYCBCRToPipe(bits_ycbcr_format);
   if (ycbcr_format == PIPE_FORMAT_NONE)
      /* Bug fix: a bad YCbCr format must report INVALID_Y_CB_CR_FORMAT,
         not INVALID_INDEXED_FORMAT. */
      return VDP_STATUS_INVALID_Y_CB_CR_FORMAT;

   if (!is_supported)
      return VDP_STATUS_INVALID_POINTER;

   pipe_mutex_lock(dev->mutex);
   /* Both the RGBA destination and the YCbCr source format must be usable. */
   *is_supported = pscreen->is_format_supported
   (
      pscreen, rgba_format, PIPE_TEXTURE_2D, 1,
      PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_RENDER_TARGET
   );

   *is_supported &= pscreen->is_video_format_supported
   (
      pscreen, ycbcr_format,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM
   );
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;
}
/**
 * Destroy a VdpDecoder.
 */
VdpStatus
vlVdpDecoderDestroy(VdpDecoder decoder)
{
   vlVdpDecoder *vldecoder = (vlVdpDecoder *)vlGetDataHTAB(decoder);

   if (!vldecoder)
      return VDP_STATUS_INVALID_HANDLE;

   /* Tear down the pipe decoder while holding the device lock. */
   pipe_mutex_lock(vldecoder->device->mutex);
   vldecoder->decoder->destroy(vldecoder->decoder);
   pipe_mutex_unlock(vldecoder->device->mutex);

   vlRemoveDataHTAB(decoder);
   FREE(vldecoder);

   return VDP_STATUS_OK;
}
/**
 * Retrieve the parameters used to create a VdpDecoder.
 *
 * @return VDP_STATUS_INVALID_HANDLE for an unknown decoder,
 *         VDP_STATUS_INVALID_POINTER if any result pointer is NULL.
 */
VdpStatus
vlVdpDecoderGetParameters(VdpDecoder decoder,
                          VdpDecoderProfile *profile,
                          uint32_t *width,
                          uint32_t *height)
{
   vlVdpDecoder *vldecoder;

   vldecoder = (vlVdpDecoder *)vlGetDataHTAB(decoder);
   if (!vldecoder)
      return VDP_STATUS_INVALID_HANDLE;

   /* Validate result pointers, consistent with the other GetParameters
      entry points, instead of crashing on a NULL dereference. */
   if (!(profile && width && height))
      return VDP_STATUS_INVALID_POINTER;

   *profile = PipeToProfile(vldecoder->decoder->profile);
   *width = vldecoder->decoder->width;
   *height = vldecoder->decoder->height;

   return VDP_STATUS_OK;
}
/**
 * Destroy a VdpPresentationQueue, releasing its compositor.
 */
VdpStatus
vlVdpPresentationQueueDestroy(VdpPresentationQueue presentation_queue)
{
   vlVdpPresentationQueue *queue;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Destroying PresentationQueue\n");

   queue = vlGetDataHTAB(presentation_queue);
   if (!queue)
      return VDP_STATUS_INVALID_HANDLE;

   /* Release compositor resources before dropping the handle. */
   vl_compositor_cleanup(&queue->compositor);

   vlRemoveDataHTAB(presentation_queue);
   FREE(queue);

   return VDP_STATUS_OK;
}
/**
 * Destroy a VdpPresentationQueue.
 */
VdpStatus
vlVdpPresentationQueueDestroy(VdpPresentationQueue presentation_queue)
{
   vlVdpPresentationQueue *queue = vlGetDataHTAB(presentation_queue);

   if (!queue)
      return VDP_STATUS_INVALID_HANDLE;

   /* Compositor state cleanup needs the device lock. */
   pipe_mutex_lock(queue->device->mutex);
   vl_compositor_cleanup_state(&queue->cstate);
   pipe_mutex_unlock(queue->device->mutex);

   vlRemoveDataHTAB(presentation_queue);
   FREE(queue);

   return VDP_STATUS_OK;
}
/**
 * Set the background color of a presentation queue's compositor.
 */
VdpStatus
vlVdpPresentationQueueSetBackgroundColor(VdpPresentationQueue presentation_queue,
                                         VdpColor *const background_color)
{
   vlVdpPresentationQueue *queue;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Setting Background Color\n");

   if (!background_color)
      return VDP_STATUS_INVALID_POINTER;

   queue = vlGetDataHTAB(presentation_queue);
   if (!queue)
      return VDP_STATUS_INVALID_HANDLE;

   /* VdpColor is four consecutive floats, so it is passed as float[4]. */
   vl_compositor_set_clear_color(&queue->compositor, (float*)background_color);

   return VDP_STATUS_OK;
}
/**
 * Destroy a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceDestroy(VdpVideoSurface surface)
{
   vlVdpSurface *surf = (vlVdpSurface *)vlGetDataHTAB((vlHandle)surface);

   if (!surf)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(surf->device->mutex);
   /* The video buffer may never have been created (lazy allocation). */
   if (surf->video_buffer)
      surf->video_buffer->destroy(surf->video_buffer);
   pipe_mutex_unlock(surf->device->mutex);

   vlRemoveDataHTAB(surface);
   FREE(surf);

   return VDP_STATUS_OK;
}
/**
 * Retrieve the presentation queue's "current" time.
 */
VdpStatus
vlVdpPresentationQueueGetTime(VdpPresentationQueue presentation_queue,
                              VdpTime *current_time)
{
   vlVdpPresentationQueue *queue;

   if (!current_time)
      return VDP_STATUS_INVALID_POINTER;

   queue = vlGetDataHTAB(presentation_queue);
   if (!queue)
      return VDP_STATUS_INVALID_HANDLE;

   /* Timestamps are taken from the window-system screen of this queue. */
   pipe_mutex_lock(queue->device->mutex);
   *current_time = vl_screen_get_timestamp(queue->device->vscreen, queue->drawable);
   pipe_mutex_unlock(queue->device->mutex);

   return VDP_STATUS_OK;
}
/**
 * Destroy a VdpBitmapSurface.
 */
VdpStatus
vlVdpBitmapSurfaceDestroy(VdpBitmapSurface surface)
{
   vlVdpBitmapSurface *surf = vlGetDataHTAB(surface);

   if (!surf)
      return VDP_STATUS_INVALID_HANDLE;

   /* Dropping the sampler view releases the backing texture. */
   mtx_lock(&surf->device->mutex);
   pipe_sampler_view_reference(&surf->sampler_view, NULL);
   mtx_unlock(&surf->device->mutex);

   vlRemoveDataHTAB(surface);

   /* Release the device reference last; may free the device itself. */
   DeviceReference(&surf->device, NULL);
   FREE(surf);

   return VDP_STATUS_OK;
}
/**
 * Destroy a VdpOutputSurface.
 */
VdpStatus
vlVdpOutputSurfaceDestroy(VdpOutputSurface surface)
{
   vlVdpOutputSurface *surf;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Destroying output surface\n");

   surf = vlGetDataHTAB(surface);
   if (!surf)
      return VDP_STATUS_INVALID_HANDLE;

   /* Release both views of the texture; the resource dies with them. */
   pipe_surface_reference(&surf->surface, NULL);
   pipe_sampler_view_reference(&surf->sampler_view, NULL);

   vlRemoveDataHTAB(surface);
   FREE(surf);

   return VDP_STATUS_OK;
}
/**
 * Query the implementation's supported for a specific parameter.
 *
 * min_value/max_value are written as uint32_t for all parameters handled
 * here; CHROMA_TYPE (and anything unknown) has no meaningful range and is
 * rejected with VDP_STATUS_INVALID_VIDEO_MIXER_PARAMETER.
 */
VdpStatus
vlVdpVideoMixerQueryParameterValueRange(VdpDevice device, VdpVideoMixerParameter parameter,
                                        void *min_value, void *max_value)
{
   vlVdpDevice *dev = vlGetDataHTAB(device);
   struct pipe_screen *screen;

   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;
   if (!(min_value && max_value))
      return VDP_STATUS_INVALID_POINTER;

   pipe_mutex_lock(dev->mutex);
   screen = dev->vscreen->pscreen;
   switch (parameter) {
   case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH:
      /* Fixed lower bound; upper bound comes from the decoder caps. */
      *(uint32_t*)min_value = 48;
      *(uint32_t*)max_value = screen->get_video_param(screen, PIPE_VIDEO_PROFILE_UNKNOWN,
                                                      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                                      PIPE_VIDEO_CAP_MAX_WIDTH);
      break;
   case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT:
      *(uint32_t*)min_value = 48;
      *(uint32_t*)max_value = screen->get_video_param(screen, PIPE_VIDEO_PROFILE_UNKNOWN,
                                                      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                                      PIPE_VIDEO_CAP_MAX_HEIGHT);
      break;
   case VDP_VIDEO_MIXER_PARAMETER_LAYERS:
      /* Up to 4 extra RGBA layers besides background and video. */
      *(uint32_t*)min_value = 0;
      *(uint32_t*)max_value = 4;
      break;

   case VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE:
   default:
      /* Unlock before bailing out — lock is already held here. */
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_INVALID_VIDEO_MIXER_PARAMETER;
   }
   pipe_mutex_unlock(dev->mutex);
   return VDP_STATUS_OK;
}
/**
 * Look up the video buffer backing a reference surface.
 *
 * @param handle     surface handle; VDP_INVALID_HANDLE means "not used"
 * @param ref_frame  receives the surface's video buffer, or NULL when unused
 * @return VDP_STATUS_OK, or VDP_STATUS_INVALID_HANDLE when the handle is
 *         unknown or the surface has no decoded buffer yet
 */
static VdpStatus
vlVdpGetReferenceFrame(VdpVideoSurface handle, struct pipe_video_buffer **ref_frame)
{
   vlVdpSurface *surface;

   /* if the surface equals VDP_INVALID_HANDLE, it is not used */
   if (handle == VDP_INVALID_HANDLE) {
      *ref_frame = NULL;
      return VDP_STATUS_OK;
   }

   surface = vlGetDataHTAB(handle);
   if (!surface)
      return VDP_STATUS_INVALID_HANDLE;

   *ref_frame = surface->video_buffer;
   if (!*ref_frame)
      return VDP_STATUS_INVALID_HANDLE;

   return VDP_STATUS_OK;
}
/**
 * Copy image data from a VdpOutputSurface to application memory in the
 * surface's native format.
 *
 * NOTE(review): destination_data/destination_pitches are dereferenced
 * without a NULL check — presumably callers guarantee them; verify.
 */
VdpStatus
vlVdpOutputSurfaceGetBitsNative(VdpOutputSurface surface,
                                VdpRect const *source_rect,
                                void *const *destination_data,
                                uint32_t const *destination_pitches)
{
   vlVdpOutputSurface *vlsurface;
   struct pipe_context *pipe;
   struct pipe_resource *res;
   struct pipe_box box;
   struct pipe_transfer *transfer;
   uint8_t *map;

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = vlsurface->device->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(vlsurface->device->mutex);
   /* Make sure any delayed compositing into this device is flushed first. */
   vlVdpResolveDelayedRendering(vlsurface->device, NULL, NULL);

   res = vlsurface->sampler_view->texture;
   box = RectToPipeBox(source_rect, res);
   map = pipe->transfer_map(pipe, res, 0, PIPE_TRANSFER_READ, &box, &transfer);
   if (!map) {
      pipe_mutex_unlock(vlsurface->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   /* Output surfaces are single-plane RGBA: only the first pointer/pitch
      of the destination arrays is used. */
   util_copy_rect(*destination_data, res->format, *destination_pitches, 0, 0,
                  box.width, box.height, map, transfer->stride, 0, 0);

   pipe_transfer_unmap(pipe, transfer);
   pipe_mutex_unlock(vlsurface->device->mutex);

   return VDP_STATUS_OK;
}
/**
 * Destroy a VdpVideoMixer.
 *
 * Flushes pending delayed rendering, drops the handle, frees the
 * compositor state and any optional filters, then releases the device.
 */
VdpStatus
vlVdpVideoMixerDestroy(VdpVideoMixer mixer)
{
   vlVdpVideoMixer *vmixer;

   vmixer = vlGetDataHTAB(mixer);
   if (!vmixer)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(vmixer->device->mutex);
   /* Finish any rendering that still references this mixer's state. */
   vlVdpResolveDelayedRendering(vmixer->device, NULL, NULL);

   vlRemoveDataHTAB(mixer);

   vl_compositor_cleanup_state(&vmixer->cstate);

   /* Post-processing filters are only allocated when enabled. */
   if (vmixer->deint.filter) {
      vl_deint_filter_cleanup(vmixer->deint.filter);
      FREE(vmixer->deint.filter);
   }

   if (vmixer->noise_reduction.filter) {
      vl_median_filter_cleanup(vmixer->noise_reduction.filter);
      FREE(vmixer->noise_reduction.filter);
   }

   if (vmixer->sharpness.filter) {
      vl_matrix_filter_cleanup(vmixer->sharpness.filter);
      FREE(vmixer->sharpness.filter);
   }

   pipe_mutex_unlock(vmixer->device->mutex);
   /* Drop the device reference last; this may free the device itself. */
   DeviceReference(&vmixer->device, NULL);

   FREE(vmixer);

   return VDP_STATUS_OK;
}
/**
 * Retrieve the parameters used to create a VdpBitmapSurface.
 */
VdpStatus
vlVdpBitmapSurfaceGetParameters(VdpBitmapSurface surface, VdpRGBAFormat *rgba_format,
                                uint32_t *width, uint32_t *height, VdpBool *frequently_accessed)
{
   vlVdpBitmapSurface *surf;
   struct pipe_resource *tex;

   surf = vlGetDataHTAB(surface);
   if (!surf)
      return VDP_STATUS_INVALID_HANDLE;

   if (!(rgba_format && width && height && frequently_accessed))
      return VDP_STATUS_INVALID_POINTER;

   tex = surf->sampler_view->texture;

   *rgba_format = PipeToFormatRGBA(tex->format);
   *width = tex->width0;
   *height = tex->height0;
   /* DYNAMIC usage was chosen at creation time for frequently
      accessed surfaces, so it round-trips back here. */
   *frequently_accessed = tex->usage == PIPE_USAGE_DYNAMIC;

   return VDP_STATUS_OK;
}
/**
 * Retrieve the parameters used to create a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceGetParameters(VdpVideoSurface surface, VdpChromaType *chroma_type,
                               uint32_t *width, uint32_t *height)
{
   vlVdpSurface *surf;

   if (!(width && height && chroma_type))
      return VDP_STATUS_INVALID_POINTER;

   surf = vlGetDataHTAB(surface);
   if (!surf)
      return VDP_STATUS_INVALID_HANDLE;

   if (!surf->video_buffer) {
      /* No buffer allocated yet — report the creation template instead. */
      *width = surf->templat.width;
      *height = surf->templat.height;
      *chroma_type = PipeToChroma(surf->templat.chroma_format);
   } else {
      *width = surf->video_buffer->width;
      *height = surf->video_buffer->height;
      *chroma_type = PipeToChroma(surf->video_buffer->chroma_format);
   }

   return VDP_STATUS_OK;
}
/** * Perform a video post-processing and compositing operation. */ VdpStatus vlVdpVideoMixerRender(VdpVideoMixer mixer, VdpOutputSurface background_surface, VdpRect const *background_source_rect, VdpVideoMixerPictureStructure current_picture_structure, uint32_t video_surface_past_count, VdpVideoSurface const *video_surface_past, VdpVideoSurface video_surface_current, uint32_t video_surface_future_count, VdpVideoSurface const *video_surface_future, VdpRect const *video_source_rect, VdpOutputSurface destination_surface, VdpRect const *destination_rect, VdpRect const *destination_video_rect, uint32_t layer_count, VdpLayer const *layers) { enum vl_compositor_deinterlace deinterlace; struct u_rect rect, clip, *prect; unsigned i, layer = 0; struct pipe_video_buffer *video_buffer; vlVdpVideoMixer *vmixer; vlVdpSurface *surf; vlVdpOutputSurface *dst, *bg = NULL; struct vl_compositor *compositor; vmixer = vlGetDataHTAB(mixer); if (!vmixer) return VDP_STATUS_INVALID_HANDLE; compositor = &vmixer->device->compositor; surf = vlGetDataHTAB(video_surface_current); if (!surf) return VDP_STATUS_INVALID_HANDLE; video_buffer = surf->video_buffer; if (surf->device != vmixer->device) return VDP_STATUS_HANDLE_DEVICE_MISMATCH; if (vmixer->video_width > video_buffer->width || vmixer->video_height > video_buffer->height || vmixer->chroma_format != video_buffer->chroma_format) return VDP_STATUS_INVALID_SIZE; if (layer_count > vmixer->max_layers) return VDP_STATUS_INVALID_VALUE; dst = vlGetDataHTAB(destination_surface); if (!dst) return VDP_STATUS_INVALID_HANDLE; if (background_surface != VDP_INVALID_HANDLE) { bg = vlGetDataHTAB(background_surface); if (!bg) return VDP_STATUS_INVALID_HANDLE; } pipe_mutex_lock(vmixer->device->mutex); vlVdpResolveDelayedRendering(vmixer->device, NULL, NULL); vl_compositor_clear_layers(&vmixer->cstate); if (bg) vl_compositor_set_rgba_layer(&vmixer->cstate, compositor, layer++, bg->sampler_view, RectToPipe(background_source_rect, &rect), NULL, NULL); switch 
(current_picture_structure) { case VDP_VIDEO_MIXER_PICTURE_STRUCTURE_TOP_FIELD: deinterlace = VL_COMPOSITOR_BOB_TOP; break; case VDP_VIDEO_MIXER_PICTURE_STRUCTURE_BOTTOM_FIELD: deinterlace = VL_COMPOSITOR_BOB_BOTTOM; break; case VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME: deinterlace = VL_COMPOSITOR_WEAVE; break; default: pipe_mutex_unlock(vmixer->device->mutex); return VDP_STATUS_INVALID_VIDEO_MIXER_PICTURE_STRUCTURE; } if (deinterlace != VL_COMPOSITOR_WEAVE && vmixer->deint.enabled && video_surface_past_count > 1 && video_surface_future_count > 0) { vlVdpSurface *prevprev = vlGetDataHTAB(video_surface_past[1]); vlVdpSurface *prev = vlGetDataHTAB(video_surface_past[0]); vlVdpSurface *next = vlGetDataHTAB(video_surface_future[0]); if (prevprev && prev && next && vl_deint_filter_check_buffers(vmixer->deint.filter, prevprev->video_buffer, prev->video_buffer, surf->video_buffer, next->video_buffer)) { vl_deint_filter_render(vmixer->deint.filter, prevprev->video_buffer, prev->video_buffer, surf->video_buffer, next->video_buffer, deinterlace == VL_COMPOSITOR_BOB_BOTTOM); deinterlace = VL_COMPOSITOR_WEAVE; video_buffer = vmixer->deint.filter->video_buffer; } } prect = RectToPipe(video_source_rect, &rect); if (!prect) { rect.x0 = 0; rect.y0 = 0; rect.x1 = surf->templat.width; rect.y1 = surf->templat.height; prect = ▭ } vl_compositor_set_buffer_layer(&vmixer->cstate, compositor, layer, video_buffer, prect, NULL, deinterlace); vl_compositor_set_layer_dst_area(&vmixer->cstate, layer++, RectToPipe(destination_video_rect, &rect)); for (i = 0; i < layer_count; ++i) { vlVdpOutputSurface *src = vlGetDataHTAB(layers->source_surface); if (!src) { pipe_mutex_unlock(vmixer->device->mutex); return VDP_STATUS_INVALID_HANDLE; } assert(layers->struct_version == VDP_LAYER_VERSION); vl_compositor_set_rgba_layer(&vmixer->cstate, compositor, layer, src->sampler_view, RectToPipe(layers->source_rect, &rect), NULL, NULL); vl_compositor_set_layer_dst_area(&vmixer->cstate, layer++, 
RectToPipe(layers->destination_rect, &rect)); ++layers; } vl_compositor_set_dst_clip(&vmixer->cstate, RectToPipe(destination_rect, &clip)); if (!vmixer->noise_reduction.filter && !vmixer->sharpness.filter) vlVdpSave4DelayedRendering(vmixer->device, destination_surface, &vmixer->cstate); else { vl_compositor_render(&vmixer->cstate, compositor, dst->surface, &dst->dirty_area, true); /* applying the noise reduction after scaling is actually not very clever, but currently we should avoid to copy around the image data once more. */ if (vmixer->noise_reduction.filter) vl_median_filter_render(vmixer->noise_reduction.filter, dst->sampler_view, dst->surface); if (vmixer->sharpness.filter) vl_matrix_filter_render(vmixer->sharpness.filter, dst->sampler_view, dst->surface); } pipe_mutex_unlock(vmixer->device->mutex); return VDP_STATUS_OK; }
/**
 * Decode a compressed field/frame and render the result into a VdpVideoSurface.
 *
 * Recreates the surface's video buffer if its format or interlacing does
 * not match what the decoder needs, translates the codec-specific picture
 * info, then runs the begin/decode/end frame sequence under the decoder's
 * own mutex.
 */
VdpStatus
vlVdpDecoderRender(VdpDecoder decoder,
                   VdpVideoSurface target,
                   VdpPictureInfo const *picture_info,
                   uint32_t bitstream_buffer_count,
                   VdpBitstreamBuffer const *bitstream_buffers)
{
   /* One spare slot so a VC-1 start code can be prepended if needed. */
   const void * buffers[bitstream_buffer_count + 1];
   unsigned sizes[bitstream_buffer_count + 1];
   vlVdpDecoder *vldecoder;
   vlVdpSurface *vlsurf;
   VdpStatus ret;
   struct pipe_screen *screen;
   struct pipe_video_codec *dec;
   bool buffer_support[2];
   unsigned i;
   struct pipe_h264_sps sps = {};
   struct pipe_h264_pps pps = { &sps };
   union {
      struct pipe_picture_desc base;
      struct pipe_mpeg12_picture_desc mpeg12;
      struct pipe_mpeg4_picture_desc mpeg4;
      struct pipe_vc1_picture_desc vc1;
      struct pipe_h264_picture_desc h264;
   } desc;

   if (!(picture_info && bitstream_buffers))
      return VDP_STATUS_INVALID_POINTER;

   vldecoder = (vlVdpDecoder *)vlGetDataHTAB(decoder);
   if (!vldecoder)
      return VDP_STATUS_INVALID_HANDLE;
   dec = vldecoder->decoder;
   screen = dec->context->screen;

   vlsurf = (vlVdpSurface *)vlGetDataHTAB(target);
   if (!vlsurf)
      return VDP_STATUS_INVALID_HANDLE;

   if (vlsurf->device != vldecoder->device)
      return VDP_STATUS_HANDLE_DEVICE_MISMATCH;

   if (vlsurf->video_buffer != NULL &&
       vlsurf->video_buffer->chroma_format != dec->chroma_format)
      // TODO: Recreate decoder with correct chroma
      return VDP_STATUS_INVALID_CHROMA_TYPE;

   /* Which buffer layouts (progressive/interlaced) does the driver accept? */
   buffer_support[0] = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                               PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE);
   buffer_support[1] = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                               PIPE_VIDEO_CAP_SUPPORTS_INTERLACED);

   /* Recreate the surface's buffer if missing or unusable for this decoder. */
   if (vlsurf->video_buffer == NULL ||
       !screen->is_video_format_supported(screen, vlsurf->video_buffer->buffer_format,
                                          dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM) ||
       !buffer_support[vlsurf->video_buffer->interlaced]) {

      pipe_mutex_lock(vlsurf->device->mutex);

      /* destroy the old one */
      if (vlsurf->video_buffer)
         vlsurf->video_buffer->destroy(vlsurf->video_buffer);

      /* set the buffer format to the prefered one */
      vlsurf->templat.buffer_format = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                                              PIPE_VIDEO_CAP_PREFERED_FORMAT);

      /* also set interlacing to decoders preferences */
      vlsurf->templat.interlaced = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                                           PIPE_VIDEO_CAP_PREFERS_INTERLACED);

      /* and recreate the video buffer */
      vlsurf->video_buffer = dec->context->create_video_buffer(dec->context, &vlsurf->templat);

      /* still no luck? get me out of here... */
      if (!vlsurf->video_buffer) {
         pipe_mutex_unlock(vlsurf->device->mutex);
         return VDP_STATUS_NO_IMPLEMENTATION;
      }
      vlVdpVideoSurfaceClear(vlsurf);
      pipe_mutex_unlock(vlsurf->device->mutex);
   }

   /* Gather the application's bitstream chunks into parallel arrays. */
   for (i = 0; i < bitstream_buffer_count; ++i) {
      buffers[i] = bitstream_buffers[i].bitstream;
      sizes[i] = bitstream_buffers[i].bitstream_bytes;
   }

   memset(&desc, 0, sizeof(desc));
   desc.base.profile = dec->profile;
   /* Translate the VDPAU picture info into the pipe description. */
   switch (u_reduce_video_profile(dec->profile)) {
   case PIPE_VIDEO_FORMAT_MPEG12:
      ret = vlVdpDecoderRenderMpeg12(&desc.mpeg12, (VdpPictureInfoMPEG1Or2 *)picture_info);
      break;
   case PIPE_VIDEO_FORMAT_MPEG4:
      ret = vlVdpDecoderRenderMpeg4(&desc.mpeg4, (VdpPictureInfoMPEG4Part2 *)picture_info);
      break;
   case PIPE_VIDEO_FORMAT_VC1:
      if (dec->profile == PIPE_VIDEO_PROFILE_VC1_ADVANCED)
         /* may grow the buffer list by one entry, using the spare slot */
         vlVdpDecoderFixVC1Startcode(&bitstream_buffer_count, buffers, sizes);
      ret = vlVdpDecoderRenderVC1(&desc.vc1, (VdpPictureInfoVC1 *)picture_info);
      break;
   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      desc.h264.pps = &pps;
      ret = vlVdpDecoderRenderH264(&desc.h264, (VdpPictureInfoH264 *)picture_info);
      break;
   default:
      return VDP_STATUS_INVALID_DECODER_PROFILE;
   }
   if (ret != VDP_STATUS_OK)
      return ret;

   /* The actual decode is serialized on the decoder's own mutex. */
   pipe_mutex_lock(vldecoder->mutex);
   dec->begin_frame(dec, vlsurf->video_buffer, &desc.base);
   dec->decode_bitstream(dec, vlsurf->video_buffer, &desc.base,
                         bitstream_buffer_count, buffers, sizes);
   dec->end_frame(dec, vlsurf->video_buffer, &desc.base);
   pipe_mutex_unlock(vldecoder->mutex);
   return ret;
}
/**
 * Create a VdpDecoder.
 *
 * Validates the profile and size against the driver's capabilities, then
 * creates the pipe video codec; error paths unwind via goto cleanup.
 */
VdpStatus
vlVdpDecoderCreate(VdpDevice device, VdpDecoderProfile profile,
                   uint32_t width, uint32_t height, uint32_t max_references,
                   VdpDecoder *decoder)
{
   struct pipe_video_codec templat = {};
   struct pipe_context *pipe;
   struct pipe_screen *screen;
   vlVdpDevice *dev;
   vlVdpDecoder *vldecoder;
   VdpStatus ret;
   bool supported;
   uint32_t maxwidth, maxheight;

   if (!decoder)
      return VDP_STATUS_INVALID_POINTER;
   *decoder = 0;

   if (!(width && height))
      return VDP_STATUS_INVALID_VALUE;

   templat.profile = ProfileToPipe(profile);
   if (templat.profile == PIPE_VIDEO_PROFILE_UNKNOWN)
      return VDP_STATUS_INVALID_DECODER_PROFILE;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = dev->context;
   screen = dev->vscreen->pscreen;

   pipe_mutex_lock(dev->mutex);

   supported = screen->get_video_param
   (
      screen,
      templat.profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_SUPPORTED
   );
   if (!supported) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_INVALID_DECODER_PROFILE;
   }

   /* Reject sizes beyond what the driver reports for this profile. */
   maxwidth = screen->get_video_param
   (
      screen,
      templat.profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_MAX_WIDTH
   );
   maxheight = screen->get_video_param
   (
      screen,
      templat.profile,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_MAX_HEIGHT
   );
   if (width > maxwidth || height > maxheight) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_INVALID_SIZE;
   }

   vldecoder = CALLOC(1,sizeof(vlVdpDecoder));
   if (!vldecoder) {
      pipe_mutex_unlock(dev->mutex);
      return VDP_STATUS_RESOURCES;
   }

   DeviceReference(&vldecoder->device, dev);

   templat.entrypoint = PIPE_VIDEO_ENTRYPOINT_BITSTREAM;
   templat.chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
   templat.width = width;
   templat.height = height;
   templat.max_references = max_references;

   /* H.264 needs a level covering the requested size and reference count;
      u_get_h264_level may also adjust max_references. */
   if (u_reduce_video_profile(templat.profile) ==
       PIPE_VIDEO_FORMAT_MPEG4_AVC)
      templat.level = u_get_h264_level(templat.width, templat.height,
                            &templat.max_references);

   vldecoder->decoder = pipe->create_video_codec(pipe, &templat);

   if (!vldecoder->decoder) {
      ret = VDP_STATUS_ERROR;
      goto error_decoder;
   }

   *decoder = vlAddDataHTAB(vldecoder);
   if (*decoder == 0) {
      ret = VDP_STATUS_ERROR;
      goto error_handle;
   }

   pipe_mutex_init(vldecoder->mutex);
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;

error_handle:
   vldecoder->decoder->destroy(vldecoder->decoder);

error_decoder:
   pipe_mutex_unlock(dev->mutex);
   DeviceReference(&vldecoder->device, NULL);
   FREE(vldecoder);
   return ret;
}
/** * Create a VdpOutputSurface. */ VdpStatus vlVdpOutputSurfaceCreate(VdpDevice device, VdpRGBAFormat rgba_format, uint32_t width, uint32_t height, VdpOutputSurface *surface) { struct pipe_context *pipe; struct pipe_resource res_tmpl, *res; struct pipe_sampler_view sv_templ; struct pipe_surface surf_templ; vlVdpOutputSurface *vlsurface = NULL; if (!(width && height)) return VDP_STATUS_INVALID_SIZE; vlVdpDevice *dev = vlGetDataHTAB(device); if (!dev) return VDP_STATUS_INVALID_HANDLE; pipe = dev->context; if (!pipe) return VDP_STATUS_INVALID_HANDLE; vlsurface = CALLOC(1, sizeof(vlVdpOutputSurface)); if (!vlsurface) return VDP_STATUS_RESOURCES; vlsurface->device = dev; memset(&res_tmpl, 0, sizeof(res_tmpl)); res_tmpl.target = PIPE_TEXTURE_2D; res_tmpl.format = FormatRGBAToPipe(rgba_format); res_tmpl.width0 = width; res_tmpl.height0 = height; res_tmpl.depth0 = 1; res_tmpl.array_size = 1; res_tmpl.bind = PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_RENDER_TARGET; res_tmpl.usage = PIPE_USAGE_STATIC; pipe_mutex_lock(dev->mutex); res = pipe->screen->resource_create(pipe->screen, &res_tmpl); if (!res) { pipe_mutex_unlock(dev->mutex); FREE(dev); return VDP_STATUS_ERROR; } vlVdpDefaultSamplerViewTemplate(&sv_templ, res); vlsurface->sampler_view = pipe->create_sampler_view(pipe, res, &sv_templ); if (!vlsurface->sampler_view) { pipe_resource_reference(&res, NULL); pipe_mutex_unlock(dev->mutex); FREE(dev); return VDP_STATUS_ERROR; } memset(&surf_templ, 0, sizeof(surf_templ)); surf_templ.format = res->format; vlsurface->surface = pipe->create_surface(pipe, res, &surf_templ); if (!vlsurface->surface) { pipe_resource_reference(&res, NULL); pipe_mutex_unlock(dev->mutex); FREE(dev); return VDP_STATUS_ERROR; } *surface = vlAddDataHTAB(vlsurface); if (*surface == 0) { pipe_resource_reference(&res, NULL); pipe_mutex_unlock(dev->mutex); FREE(dev); return VDP_STATUS_ERROR; } pipe_resource_reference(&res, NULL); vl_compositor_init_state(&vlsurface->cstate, pipe); 
vl_compositor_reset_dirty_area(&vlsurface->dirty_area); pipe_mutex_unlock(dev->mutex); return VDP_STATUS_OK; }
/**
 * Copy image data from application memory in a specific YCbCr format to
 * a VdpOutputSurface.
 *
 * Uploads the planes into a temporary video buffer and color-converts it
 * onto the surface with the compositor (BT.601 when no matrix is given).
 */
VdpStatus
vlVdpOutputSurfacePutBitsYCbCr(VdpOutputSurface surface,
                               VdpYCbCrFormat source_ycbcr_format,
                               void const *const *source_data,
                               uint32_t const *source_pitches,
                               VdpRect const *destination_rect,
                               VdpCSCMatrix const *csc_matrix)
{
   vlVdpOutputSurface *vlsurface;
   struct vl_compositor *compositor;
   struct vl_compositor_state *cstate;
   struct pipe_context *pipe;
   enum pipe_format format;
   struct pipe_video_buffer vtmpl, *vbuffer;
   struct u_rect dst_rect;
   struct pipe_sampler_view **sampler_views;
   unsigned i;

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = vlsurface->device->context;
   compositor = &vlsurface->device->compositor;
   cstate = &vlsurface->cstate;

   format = FormatYCBCRToPipe(source_ycbcr_format);
   if (format == PIPE_FORMAT_NONE)
      return VDP_STATUS_INVALID_Y_CB_CR_FORMAT;

   if (!source_data || !source_pitches)
      return VDP_STATUS_INVALID_POINTER;

   pipe_mutex_lock(vlsurface->device->mutex);
   vlVdpResolveDelayedRendering(vlsurface->device, NULL, NULL);

   /* Size the temporary buffer to the destination rect (or whole surface). */
   memset(&vtmpl, 0, sizeof(vtmpl));
   vtmpl.buffer_format = format;
   vtmpl.chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;

   if (destination_rect) {
      vtmpl.width = abs(destination_rect->x0-destination_rect->x1);
      vtmpl.height = abs(destination_rect->y0-destination_rect->y1);
   } else {
      vtmpl.width = vlsurface->surface->texture->width0;
      vtmpl.height = vlsurface->surface->texture->height0;
   }

   vbuffer = pipe->create_video_buffer(pipe, &vtmpl);
   if (!vbuffer) {
      pipe_mutex_unlock(vlsurface->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   sampler_views = vbuffer->get_sampler_view_planes(vbuffer);
   if (!sampler_views) {
      vbuffer->destroy(vbuffer);
      pipe_mutex_unlock(vlsurface->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   /* Upload each of the (up to three) planes the buffer exposes. */
   for (i = 0; i < 3; ++i) {
      struct pipe_sampler_view *sv = sampler_views[i];
      if (!sv) continue;

      struct pipe_box dst_box = {
         0, 0, 0,
         sv->texture->width0, sv->texture->height0, 1
      };

      pipe->transfer_inline_write(pipe, sv->texture, 0, PIPE_TRANSFER_WRITE, &dst_box,
                                  source_data[i], source_pitches[i], 0);
   }

   /* Without an explicit matrix, fall back to BT.601 full-procamp defaults. */
   if (!csc_matrix) {
      vl_csc_matrix csc;
      vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, 1, &csc);
      vl_compositor_set_csc_matrix(cstate, (const vl_csc_matrix*)&csc);
   } else {
      vl_compositor_set_csc_matrix(cstate, csc_matrix);
   }

   vl_compositor_clear_layers(cstate);
   vl_compositor_set_buffer_layer(cstate, compositor, 0, vbuffer, NULL, NULL, VL_COMPOSITOR_WEAVE);
   vl_compositor_set_layer_dst_area(cstate, 0, RectToPipe(destination_rect, &dst_rect));
   vl_compositor_render(cstate, compositor, vlsurface->surface, NULL);

   vbuffer->destroy(vbuffer);
   pipe_mutex_unlock(vlsurface->device->mutex);

   return VDP_STATUS_OK;
}
/**
 * Copy image data from application memory in a specific indexed format to
 * a VdpOutputSurface.
 *
 * The indexed pixels and the color lookup table are each uploaded into a
 * staging texture, then the compositor's palette layer expands them into
 * RGBA on the destination surface.
 */
VdpStatus
vlVdpOutputSurfacePutBitsIndexed(VdpOutputSurface surface,
                                 VdpIndexedFormat source_indexed_format,
                                 void const *const *source_data,
                                 uint32_t const *source_pitch,
                                 VdpRect const *destination_rect,
                                 VdpColorTableFormat color_table_format,
                                 void const *color_table)
{
   vlVdpOutputSurface *vlsurface;
   struct pipe_context *context;
   struct vl_compositor *compositor;
   struct vl_compositor_state *cstate;

   enum pipe_format index_format;
   enum pipe_format colortbl_format;

   struct pipe_resource *res, res_tmpl;
   struct pipe_sampler_view sv_tmpl;
   struct pipe_sampler_view *sv_idx = NULL, *sv_tbl = NULL;

   struct pipe_box box;
   struct u_rect dst_rect;

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   context = vlsurface->device->context;
   compositor = &vlsurface->device->compositor;
   cstate = &vlsurface->cstate;

   /* Validate each format before the corresponding pointer so the more
    * specific format error codes take precedence over INVALID_POINTER. */
   index_format = FormatIndexedToPipe(source_indexed_format);
   if (index_format == PIPE_FORMAT_NONE)
      return VDP_STATUS_INVALID_INDEXED_FORMAT;

   if (!source_data || !source_pitch)
      return VDP_STATUS_INVALID_POINTER;

   colortbl_format = FormatColorTableToPipe(color_table_format);
   if (colortbl_format == PIPE_FORMAT_NONE)
      return VDP_STATUS_INVALID_COLOR_TABLE_FORMAT;

   if (!color_table)
      return VDP_STATUS_INVALID_POINTER;

   /* Staging texture for the index data, sized to the destination
    * rectangle, or to the whole surface when no rectangle is given. */
   memset(&res_tmpl, 0, sizeof(res_tmpl));
   res_tmpl.target = PIPE_TEXTURE_2D;
   res_tmpl.format = index_format;

   if (destination_rect) {
      res_tmpl.width0 = abs(destination_rect->x0-destination_rect->x1);
      res_tmpl.height0 = abs(destination_rect->y0-destination_rect->y1);
   } else {
      res_tmpl.width0 = vlsurface->surface->texture->width0;
      res_tmpl.height0 = vlsurface->surface->texture->height0;
   }
   res_tmpl.depth0 = 1;
   res_tmpl.array_size = 1;
   res_tmpl.usage = PIPE_USAGE_STAGING;
   res_tmpl.bind = PIPE_BIND_SAMPLER_VIEW;

   /* Everything below touches device state; hold the device mutex and
    * release it on every exit path. */
   pipe_mutex_lock(vlsurface->device->mutex);
   vlVdpResolveDelayedRendering(vlsurface->device, NULL, NULL);

   res = context->screen->resource_create(context->screen, &res_tmpl);
   if (!res)
      goto error_resource;

   box.x = box.y = box.z = 0;
   box.width = res->width0;
   box.height = res->height0;
   box.depth = res->depth0;

   /* Upload the index plane from application memory. */
   context->transfer_inline_write(context, res, 0, PIPE_TRANSFER_WRITE, &box,
                                  source_data[0], source_pitch[0],
                                  source_pitch[0] * res->height0);

   memset(&sv_tmpl, 0, sizeof(sv_tmpl));
   u_sampler_view_default_template(&sv_tmpl, res, res->format);

   /* The sampler view keeps its own reference; drop ours immediately. */
   sv_idx = context->create_sampler_view(context, res, &sv_tmpl);
   pipe_resource_reference(&res, NULL);

   if (!sv_idx)
      goto error_resource;

   /* 1D texture holding the palette: one entry per representable index
    * value, derived from the bit width of the index format's first
    * component. */
   memset(&res_tmpl, 0, sizeof(res_tmpl));
   res_tmpl.target = PIPE_TEXTURE_1D;
   res_tmpl.format = colortbl_format;
   res_tmpl.width0 = 1 << util_format_get_component_bits(
      index_format, UTIL_FORMAT_COLORSPACE_RGB, 0);
   res_tmpl.height0 = 1;
   res_tmpl.depth0 = 1;
   res_tmpl.array_size = 1;
   res_tmpl.usage = PIPE_USAGE_STAGING;
   res_tmpl.bind = PIPE_BIND_SAMPLER_VIEW;

   res = context->screen->resource_create(context->screen, &res_tmpl);
   if (!res)
      goto error_resource;

   box.x = box.y = box.z = 0;
   box.width = res->width0;
   box.height = res->height0;
   box.depth = res->depth0;

   /* Upload the color table. */
   context->transfer_inline_write(context, res, 0, PIPE_TRANSFER_WRITE, &box, color_table,
                                  util_format_get_stride(colortbl_format, res->width0), 0);

   memset(&sv_tmpl, 0, sizeof(sv_tmpl));
   u_sampler_view_default_template(&sv_tmpl, res, res->format);

   sv_tbl = context->create_sampler_view(context, res, &sv_tmpl);
   pipe_resource_reference(&res, NULL);

   if (!sv_tbl)
      goto error_resource;

   /* Render the indexed data through the compositor's palette layer. */
   vl_compositor_clear_layers(cstate);
   vl_compositor_set_palette_layer(cstate, compositor, 0, sv_idx, sv_tbl, NULL, NULL, false);
   vl_compositor_set_layer_dst_area(cstate, 0, RectToPipe(destination_rect, &dst_rect));
   vl_compositor_render(cstate, compositor, vlsurface->surface, NULL);

   pipe_sampler_view_reference(&sv_idx, NULL);
   pipe_sampler_view_reference(&sv_tbl, NULL);

   pipe_mutex_unlock(vlsurface->device->mutex);

   return VDP_STATUS_OK;

error_resource:
   /* Both views start out NULL, so unreferencing whichever was never
    * created is harmless. */
   pipe_sampler_view_reference(&sv_idx, NULL);
   pipe_sampler_view_reference(&sv_tbl, NULL);
   pipe_mutex_unlock(vlsurface->device->mutex);
   return VDP_STATUS_RESOURCES;
}
/**
 * Export one plane of a VdpVideoSurface as a DMA-buf descriptor.
 *
 * On success, result->handle receives the exported fd along with the
 * plane's size, offset and stride; on any failure, result is zeroed
 * with handle set to -1.
 */
VdpStatus vlVdpVideoSurfaceDMABuf(VdpVideoSurface surface,
                                  VdpVideoSurfacePlane plane,
                                  struct VdpSurfaceDMABufDesc *result)
{
   vlVdpSurface *p_surf = vlGetDataHTAB(surface);
   struct pipe_surface *plane_surf;
   struct pipe_screen *screen;
   struct winsys_handle handle;

   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

   if (plane > 3)
      return VDP_STATUS_INVALID_VALUE;

   if (!result)
      return VDP_STATUS_INVALID_POINTER;

   /* Make the failure state visible up front. */
   memset(result, 0, sizeof(*result));
   result->handle = -1;

   pipe_mutex_lock(p_surf->device->mutex);

   /* Allocate the video buffer lazily if it doesn't exist yet. */
   if (!p_surf->video_buffer) {
      struct pipe_context *pipe = p_surf->device->context;
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);
   }

   /* Only interlaced NV12 buffers satisfy the interop requirements. */
   if (!p_surf->video_buffer ||
       !p_surf->video_buffer->interlaced ||
       p_surf->video_buffer->buffer_format != PIPE_FORMAT_NV12) {
      pipe_mutex_unlock(p_surf->device->mutex);
      return VDP_STATUS_NO_IMPLEMENTATION;
   }

   plane_surf = p_surf->video_buffer->get_surfaces(p_surf->video_buffer)[plane];
   pipe_mutex_unlock(p_surf->device->mutex);

   if (!plane_surf)
      return VDP_STATUS_RESOURCES;

   memset(&handle, 0, sizeof(handle));
   handle.type = DRM_API_HANDLE_TYPE_FD;
   handle.layer = plane_surf->u.tex.first_layer;

   screen = plane_surf->texture->screen;
   if (!screen->resource_get_handle(screen, plane_surf->texture, &handle,
                                    PIPE_HANDLE_USAGE_READ_WRITE))
      return VDP_STATUS_NO_IMPLEMENTATION;

   result->handle = handle.handle;
   result->width = plane_surf->width;
   result->height = plane_surf->height;
   result->offset = handle.offset;
   result->stride = handle.stride;

   /* NV12: R8 for the luma plane, R8G8 for the interleaved chroma plane. */
   result->format = (plane_surf->format == PIPE_FORMAT_R8_UNORM)
      ? VDP_RGBA_FORMAT_R8 : VDP_RGBA_FORMAT_R8G8;

   return VDP_STATUS_OK;
}
/**
 * Create a VdpVideoSurface.
 *
 * Queries the driver's preferred decode buffer format and interlacing
 * mode, and attempts an early video-buffer allocation; the allocation
 * is optional and may be deferred until first use (video_buffer may
 * legitimately stay NULL here).
 */
VdpStatus
vlVdpVideoSurfaceCreate(VdpDevice device, VdpChromaType chroma_type,
                        uint32_t width, uint32_t height,
                        VdpVideoSurface *surface)
{
   struct pipe_context *pipe;
   vlVdpSurface *p_surf;
   VdpStatus ret;

   if (!(width && height)) {
      ret = VDP_STATUS_INVALID_SIZE;
      goto inv_size;
   }

   p_surf = CALLOC(1, sizeof(vlVdpSurface));
   if (!p_surf) {
      ret = VDP_STATUS_RESOURCES;
      goto no_res;
   }

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev) {
      ret = VDP_STATUS_INVALID_HANDLE;
      goto inv_device;
   }

   DeviceReference(&p_surf->device, dev);
   pipe = dev->context;

   pipe_mutex_lock(dev->mutex);
   memset(&p_surf->templat, 0, sizeof(p_surf->templat));
   /* Ask the driver what decode target format it prefers; FORMAT_NONE
    * means no early allocation is possible. */
   p_surf->templat.buffer_format = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERED_FORMAT
   );
   p_surf->templat.chroma_format = ChromaToPipe(chroma_type);
   p_surf->templat.width = width;
   p_surf->templat.height = height;
   p_surf->templat.interlaced = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERS_INTERLACED
   );
   if (p_surf->templat.buffer_format != PIPE_FORMAT_NONE)
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

   /* do not mandate early allocation of a video buffer */
   vlVdpVideoSurfaceClear(p_surf);
   pipe_mutex_unlock(dev->mutex);

   *surface = vlAddDataHTAB(p_surf);
   if (*surface == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   return VDP_STATUS_OK;

no_handle:
   /* Fix: video_buffer may be NULL here (driver reported FORMAT_NONE or
    * the allocation failed), so guard before calling destroy. */
   if (p_surf->video_buffer)
      p_surf->video_buffer->destroy(p_surf->video_buffer);

inv_device:
   DeviceReference(&p_surf->device, NULL);
   FREE(p_surf);

no_res:
inv_size:
   return ret;
}
/**
 * Set attribute values.
 *
 * Applies each (attribute, value) pair in order under the device mutex.
 * On the first invalid attribute or out-of-range value the function
 * stops and returns an error; attributes applied before that point
 * remain in effect.
 */
VdpStatus
vlVdpVideoMixerSetAttributeValues(VdpVideoMixer mixer,
                                  uint32_t attribute_count,
                                  VdpVideoMixerAttribute const *attributes,
                                  void const *const *attribute_values)
{
   const VdpColor *background_color;
   union pipe_color_union color;
   const float *vdp_csc;
   float val;
   unsigned i;
   VdpStatus ret;

   if (!(attributes && attribute_values))
      return VDP_STATUS_INVALID_POINTER;

   vlVdpVideoMixer *vmixer = vlGetDataHTAB(mixer);
   if (!vmixer)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_mutex_lock(vmixer->device->mutex);
   for (i = 0; i < attribute_count; ++i) {
      switch (attributes[i]) {
      case VDP_VIDEO_MIXER_ATTRIBUTE_BACKGROUND_COLOR:
         /* Copy the VdpColor into the compositor's clear color. */
         background_color = attribute_values[i];
         color.f[0] = background_color->red;
         color.f[1] = background_color->green;
         color.f[2] = background_color->blue;
         color.f[3] = background_color->alpha;
         vl_compositor_set_clear_color(&vmixer->cstate, &color);
         break;
      case VDP_VIDEO_MIXER_ATTRIBUTE_CSC_MATRIX:
         /* A NULL matrix resets to the default BT.601 conversion. */
         vdp_csc = attribute_values[i];
         vmixer->custom_csc = !!vdp_csc;
         if (!vdp_csc)
            vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, 1, &vmixer->csc);
         else
            memcpy(vmixer->csc, vdp_csc, sizeof(vl_csc_matrix));
         /* G3DVL_NO_CSC is a debug switch that keeps the new matrix
          * stored but not applied to the compositor. */
         if (!debug_get_bool_option("G3DVL_NO_CSC", FALSE))
            vl_compositor_set_csc_matrix(&vmixer->cstate, (const vl_csc_matrix *)&vmixer->csc);
         break;
      case VDP_VIDEO_MIXER_ATTRIBUTE_NOISE_REDUCTION_LEVEL:
         /* VDPAU range [0,1] is mapped onto the filter's 0-10 scale. */
         val = *(float*)attribute_values[i];
         if (val < 0.f || val > 1.f) {
            ret = VDP_STATUS_INVALID_VALUE;
            goto fail;
         }
         vmixer->noise_reduction.level = val * 10;
         vlVdpVideoMixerUpdateNoiseReductionFilter(vmixer);
         break;
      case VDP_VIDEO_MIXER_ATTRIBUTE_LUMA_KEY_MIN_LUMA:
         val = *(float*)attribute_values[i];
         if (val < 0.f || val > 1.f) {
            ret = VDP_STATUS_INVALID_VALUE;
            goto fail;
         }
         vmixer->luma_key_min = val;
         break;
      case VDP_VIDEO_MIXER_ATTRIBUTE_LUMA_KEY_MAX_LUMA:
         val = *(float*)attribute_values[i];
         if (val < 0.f || val > 1.f) {
            ret = VDP_STATUS_INVALID_VALUE;
            goto fail;
         }
         vmixer->luma_key_max = val;
         break;
      case VDP_VIDEO_MIXER_ATTRIBUTE_SHARPNESS_LEVEL:
         /* Sharpness is signed: negative values blur, positive sharpen. */
         val = *(float*)attribute_values[i];
         if (val < -1.f || val > 1.f) {
            ret = VDP_STATUS_INVALID_VALUE;
            goto fail;
         }
         vmixer->sharpness.value = val;
         vlVdpVideoMixerUpdateSharpnessFilter(vmixer);
         break;
      case VDP_VIDEO_MIXER_ATTRIBUTE_SKIP_CHROMA_DEINTERLACE:
         /* Boolean attribute passed as uint8_t; only 0 or 1 accepted. */
         if (*(uint8_t*)attribute_values[i] > 1) {
            ret = VDP_STATUS_INVALID_VALUE;
            goto fail;
         }
         vmixer->skip_chroma_deint = *(uint8_t*)attribute_values[i];
         vlVdpVideoMixerUpdateDeinterlaceFilter(vmixer);
         break;
      default:
         ret = VDP_STATUS_INVALID_VIDEO_MIXER_ATTRIBUTE;
         goto fail;
      }
   }
   pipe_mutex_unlock(vmixer->device->mutex);

   return VDP_STATUS_OK;

fail:
   pipe_mutex_unlock(vmixer->device->mutex);
   return ret;
}
/** * Create a VdpVideoMixer. */ VdpStatus vlVdpVideoMixerCreate(VdpDevice device, uint32_t feature_count, VdpVideoMixerFeature const *features, uint32_t parameter_count, VdpVideoMixerParameter const *parameters, void const *const *parameter_values, VdpVideoMixer *mixer) { vlVdpVideoMixer *vmixer = NULL; VdpStatus ret; struct pipe_screen *screen; uint32_t max_2d_texture_level; unsigned max_size, i; vlVdpDevice *dev = vlGetDataHTAB(device); if (!dev) return VDP_STATUS_INVALID_HANDLE; screen = dev->vscreen->pscreen; vmixer = CALLOC(1, sizeof(vlVdpVideoMixer)); if (!vmixer) return VDP_STATUS_RESOURCES; DeviceReference(&vmixer->device, dev); pipe_mutex_lock(dev->mutex); vl_compositor_init_state(&vmixer->cstate, dev->context); vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, true, &vmixer->csc); if (!debug_get_bool_option("G3DVL_NO_CSC", FALSE)) vl_compositor_set_csc_matrix(&vmixer->cstate, (const vl_csc_matrix *)&vmixer->csc); *mixer = vlAddDataHTAB(vmixer); if (*mixer == 0) { ret = VDP_STATUS_ERROR; goto no_handle; } ret = VDP_STATUS_INVALID_VIDEO_MIXER_FEATURE; for (i = 0; i < feature_count; ++i) { switch (features[i]) { /* they are valid, but we doesn't support them */ case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL: case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L1: case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L2: case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L3: case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L4: case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L5: case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L6: case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L7: case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L8: case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L9: case VDP_VIDEO_MIXER_FEATURE_INVERSE_TELECINE: case VDP_VIDEO_MIXER_FEATURE_LUMA_KEY: break; case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL: vmixer->deint.supported = true; break; case VDP_VIDEO_MIXER_FEATURE_SHARPNESS: vmixer->sharpness.supported = true; 
break; case VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION: vmixer->noise_reduction.supported = true; break; default: goto no_params; } } vmixer->chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420; ret = VDP_STATUS_INVALID_VIDEO_MIXER_PARAMETER; for (i = 0; i < parameter_count; ++i) { switch (parameters[i]) { case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH: vmixer->video_width = *(uint32_t*)parameter_values[i]; break; case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT: vmixer->video_height = *(uint32_t*)parameter_values[i]; break; case VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE: vmixer->chroma_format = ChromaToPipe(*(VdpChromaType*)parameter_values[i]); break; case VDP_VIDEO_MIXER_PARAMETER_LAYERS: vmixer->max_layers = *(uint32_t*)parameter_values[i]; break; default: goto no_params; } } ret = VDP_STATUS_INVALID_VALUE; if (vmixer->max_layers > 4) { VDPAU_MSG(VDPAU_WARN, "[VDPAU] Max layers > 4 not supported\n", vmixer->max_layers); goto no_params; } max_2d_texture_level = screen->get_param(screen, PIPE_CAP_MAX_TEXTURE_2D_LEVELS); max_size = pow(2, max_2d_texture_level-1); if (vmixer->video_width < 48 || vmixer->video_width > max_size) { VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u not valid for width\n", vmixer->video_width, max_size); goto no_params; } if (vmixer->video_height < 48 || vmixer->video_height > max_size) { VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u not valid for height\n", vmixer->video_height, max_size); goto no_params; } vmixer->luma_key_min = 0.f; vmixer->luma_key_max = 1.f; pipe_mutex_unlock(dev->mutex); return VDP_STATUS_OK; no_params: vlRemoveDataHTAB(*mixer); no_handle: vl_compositor_cleanup_state(&vmixer->cstate); pipe_mutex_unlock(dev->mutex); DeviceReference(&vmixer->device, NULL); FREE(vmixer); return ret; }