/**
 * Query the implementation's VdpVideoSurface GetBits/PutBits capabilities.
 */
VdpStatus
vlVdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities(VdpDevice device,
                                                  VdpChromaType surface_chroma_type,
                                                  VdpYCbCrFormat bits_ycbcr_format,
                                                  VdpBool *is_supported)
{
   vlVdpDevice *dev;
   struct pipe_screen *pscreen;

   if (!is_supported)
      return VDP_STATUS_INVALID_POINTER;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pscreen = dev->vscreen->pscreen;
   if (!pscreen)
      return VDP_STATUS_RESOURCES;

   pipe_mutex_lock(dev->mutex);

   switch (bits_ycbcr_format) {
   case VDP_YCBCR_FORMAT_NV12:
   case VDP_YCBCR_FORMAT_YV12:
      *is_supported = surface_chroma_type == VDP_CHROMA_TYPE_420;
      break;

   case VDP_YCBCR_FORMAT_UYVY:
   case VDP_YCBCR_FORMAT_YUYV:
      *is_supported = surface_chroma_type == VDP_CHROMA_TYPE_422;
      break;

   case VDP_YCBCR_FORMAT_Y8U8V8A8:
   case VDP_YCBCR_FORMAT_V8U8Y8A8:
      *is_supported = surface_chroma_type == VDP_CHROMA_TYPE_444;
      break;

   default:
      *is_supported = false;
      break;
   }

   *is_supported &= pscreen->is_video_format_supported
   (
      pscreen,
      FormatYCBCRToPipe(bits_ycbcr_format),
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM
   );
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;
}
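/* Illustrative sketch (not part of the state tracker, kept disabled): how a
 * client application might reach the query above through libvdpau's dispatch
 * table.  Assumes <vdpau/vdpau.h>; the VdpFuncId and function-typedef
 * spellings are taken from that header and should be double-checked.  Error
 * handling is trimmed for brevity.
 */
#if 0
static VdpBool
client_can_put_nv12_bits(VdpDevice device, VdpGetProcAddress *get_proc_address)
{
   VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *query;
   VdpBool supported = VDP_FALSE;

   if (get_proc_address(device,
                        VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                        (void**)&query) != VDP_STATUS_OK)
      return VDP_FALSE;

   /* NV12 bits into a 4:2:0 chroma surface is the common case for
    * software-decoded video. */
   if (query(device, VDP_CHROMA_TYPE_420, VDP_YCBCR_FORMAT_NV12,
             &supported) != VDP_STATUS_OK)
      return VDP_FALSE;

   return supported;
}
#endif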
/**
 * Query the implementation's capability to perform a PutBits operation using
 * application data in a specific YCbCr/YUV format.
 */
VdpStatus
vlVdpOutputSurfaceQueryPutBitsYCbCrCapabilities(VdpDevice device, VdpRGBAFormat surface_rgba_format,
                                                VdpYCbCrFormat bits_ycbcr_format,
                                                VdpBool *is_supported)
{
   vlVdpDevice *dev;
   struct pipe_screen *pscreen;
   enum pipe_format rgba_format, ycbcr_format;

   dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pscreen = dev->vscreen->pscreen;
   if (!pscreen)
      return VDP_STATUS_ERROR;

   rgba_format = VdpFormatRGBAToPipe(surface_rgba_format);
   if (rgba_format == PIPE_FORMAT_NONE ||
       rgba_format == PIPE_FORMAT_A8_UNORM)
      return VDP_STATUS_INVALID_RGBA_FORMAT;

   ycbcr_format = FormatYCBCRToPipe(bits_ycbcr_format);
   if (ycbcr_format == PIPE_FORMAT_NONE)
      return VDP_STATUS_INVALID_Y_CB_CR_FORMAT;

   if (!is_supported)
      return VDP_STATUS_INVALID_POINTER;

   pipe_mutex_lock(dev->mutex);

   *is_supported = pscreen->is_format_supported
   (
      pscreen, rgba_format, PIPE_TEXTURE_2D, 1,
      PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_RENDER_TARGET
   );

   *is_supported &= pscreen->is_video_format_supported
   (
      pscreen, ycbcr_format,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM
   );

   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;
}
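/* Illustrative sketch (disabled): the matching client-side check before
 * uploading YCbCr data into an RGBA output surface.  Names are assumed to
 * match vdpau.h; the B8G8R8A8/YV12 pairing is just an example, not a
 * requirement of the API.
 */
#if 0
static VdpBool
client_can_put_yv12_into_bgra(VdpDevice device, VdpGetProcAddress *get_proc_address)
{
   VdpOutputSurfaceQueryPutBitsYCbCrCapabilities *query;
   VdpBool supported = VDP_FALSE;

   if (get_proc_address(device,
                        VDP_FUNC_ID_OUTPUT_SURFACE_QUERY_PUT_BITS_Y_CB_CR_CAPABILITIES,
                        (void**)&query) != VDP_STATUS_OK)
      return VDP_FALSE;

   if (query(device, VDP_RGBA_FORMAT_B8G8R8A8, VDP_YCBCR_FORMAT_YV12,
             &supported) != VDP_STATUS_OK)
      return VDP_FALSE;

   return supported;
}
#endif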
/**
 * Copy image data from application memory in a specific YCbCr format to
 * a VdpOutputSurface.
 */
VdpStatus
vlVdpOutputSurfacePutBitsYCbCr(VdpOutputSurface surface,
                               VdpYCbCrFormat source_ycbcr_format,
                               void const *const *source_data,
                               uint32_t const *source_pitches,
                               VdpRect const *destination_rect,
                               VdpCSCMatrix const *csc_matrix)
{
   vlVdpOutputSurface *vlsurface;
   struct vl_compositor *compositor;
   struct vl_compositor_state *cstate;
   struct pipe_context *pipe;
   enum pipe_format format;
   struct pipe_video_buffer vtmpl, *vbuffer;
   struct u_rect dst_rect;
   struct pipe_sampler_view **sampler_views;
   unsigned i;

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = vlsurface->device->context;
   compositor = &vlsurface->device->compositor;
   cstate = &vlsurface->cstate;

   format = FormatYCBCRToPipe(source_ycbcr_format);
   if (format == PIPE_FORMAT_NONE)
      return VDP_STATUS_INVALID_Y_CB_CR_FORMAT;

   if (!source_data || !source_pitches)
      return VDP_STATUS_INVALID_POINTER;

   pipe_mutex_lock(vlsurface->device->mutex);
   vlVdpResolveDelayedRendering(vlsurface->device, NULL, NULL);

   memset(&vtmpl, 0, sizeof(vtmpl));
   vtmpl.buffer_format = format;
   vtmpl.chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;

   if (destination_rect) {
      vtmpl.width = abs(destination_rect->x0 - destination_rect->x1);
      vtmpl.height = abs(destination_rect->y0 - destination_rect->y1);
   } else {
      vtmpl.width = vlsurface->surface->texture->width0;
      vtmpl.height = vlsurface->surface->texture->height0;
   }

   vbuffer = pipe->create_video_buffer(pipe, &vtmpl);
   if (!vbuffer) {
      pipe_mutex_unlock(vlsurface->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   sampler_views = vbuffer->get_sampler_view_planes(vbuffer);
   if (!sampler_views) {
      vbuffer->destroy(vbuffer);
      pipe_mutex_unlock(vlsurface->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   for (i = 0; i < 3; ++i) {
      struct pipe_sampler_view *sv = sampler_views[i];
      if (!sv)
         continue;

      struct pipe_box dst_box = { 0, 0, 0, sv->texture->width0, sv->texture->height0, 1 };
      pipe->transfer_inline_write(pipe, sv->texture, 0, PIPE_TRANSFER_WRITE, &dst_box,
                                  source_data[i], source_pitches[i], 0);
   }

   if (!csc_matrix) {
      vl_csc_matrix csc;
      vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, 1, &csc);
      vl_compositor_set_csc_matrix(cstate, (const vl_csc_matrix*)&csc);
   } else {
      vl_compositor_set_csc_matrix(cstate, csc_matrix);
   }

   vl_compositor_clear_layers(cstate);
   vl_compositor_set_buffer_layer(cstate, compositor, 0, vbuffer, NULL, NULL, VL_COMPOSITOR_WEAVE);
   vl_compositor_set_layer_dst_area(cstate, 0, RectToPipe(destination_rect, &dst_rect));
   vl_compositor_render(cstate, compositor, vlsurface->surface, NULL);

   vbuffer->destroy(vbuffer);

   pipe_mutex_unlock(vlsurface->device->mutex);

   return VDP_STATUS_OK;
}
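/* Illustrative sketch (disabled): uploading one NV12 frame into a
 * VdpOutputSurface through the entry point above.  `put_bits` is assumed to
 * have been fetched with VdpGetProcAddress; passing a NULL csc_matrix selects
 * the BT.601 default applied by the implementation.
 */
#if 0
static VdpStatus
client_upload_nv12(VdpOutputSurfacePutBitsYCbCr *put_bits,
                   VdpOutputSurface surface,
                   const uint8_t *luma, const uint8_t *chroma,
                   uint32_t width, uint32_t height)
{
   /* NV12: plane 0 is full-resolution luma, plane 1 is interleaved CbCr at
    * half vertical resolution but the same byte pitch. */
   void const *data[2]    = { luma, chroma };
   uint32_t    pitches[2] = { width, width };
   VdpRect     dst        = { 0, 0, width, height };

   return put_bits(surface, VDP_YCBCR_FORMAT_NV12, data, pitches, &dst, NULL);
}
#endif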
/**
 * Copy image data from application memory in a specific YCbCr format to
 * a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfacePutBitsYCbCr(VdpVideoSurface surface,
                              VdpYCbCrFormat source_ycbcr_format,
                              void const *const *source_data,
                              uint32_t const *source_pitches)
{
   enum pipe_format pformat = FormatYCBCRToPipe(source_ycbcr_format);
   struct pipe_context *pipe;
   struct pipe_sampler_view **sampler_views;
   unsigned i, j;

   vlVdpSurface *p_surf = vlGetDataHTAB(surface);
   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = p_surf->device->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   if (!source_data || !source_pitches)
      return VDP_STATUS_INVALID_POINTER;

   pipe_mutex_lock(p_surf->device->mutex);
   if (p_surf->video_buffer == NULL ||
       pformat != p_surf->video_buffer->buffer_format) {

      /* destroy the old one */
      if (p_surf->video_buffer)
         p_surf->video_buffer->destroy(p_surf->video_buffer);

      /* adjust the template parameters */
      p_surf->templat.buffer_format = pformat;

      /* and try to create the video buffer with the new format */
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

      /* still no luck? Then the requested format isn't supported. */
      if (!p_surf->video_buffer) {
         pipe_mutex_unlock(p_surf->device->mutex);
         return VDP_STATUS_NO_IMPLEMENTATION;
      }
      vlVdpVideoSurfaceClear(p_surf);
   }

   sampler_views = p_surf->video_buffer->get_sampler_view_planes(p_surf->video_buffer);
   if (!sampler_views) {
      pipe_mutex_unlock(p_surf->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   for (i = 0; i < 3; ++i) {
      unsigned width, height;
      struct pipe_sampler_view *sv = sampler_views[i];
      if (!sv || !source_pitches[i])
         continue;

      vlVdpVideoSurfaceSize(p_surf, i, &width, &height);

      for (j = 0; j < sv->texture->array_size; ++j) {
         struct pipe_box dst_box = { 0, 0, j, width, height, 1 };

         pipe->transfer_inline_write(pipe, sv->texture, 0,
                                     PIPE_TRANSFER_WRITE, &dst_box,
                                     source_data[i] + source_pitches[i] * j,
                                     source_pitches[i] * sv->texture->array_size,
                                     0);
      }
   }
   pipe_mutex_unlock(p_surf->device->mutex);

   return VDP_STATUS_OK;
}
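/* Illustrative sketch (disabled): feeding a software-decoded YV12 frame into
 * a VdpVideoSurface via the entry point above.  `put_bits` is assumed to have
 * been fetched with VdpGetProcAddress; the surface is assumed to already have
 * the matching width and height.
 */
#if 0
static VdpStatus
client_upload_yv12(VdpVideoSurfacePutBitsYCbCr *put_bits,
                   VdpVideoSurface surface,
                   const uint8_t *y, const uint8_t *v, const uint8_t *u,
                   uint32_t width)
{
   /* YV12: full-resolution Y plane followed by quarter-resolution V and U
    * planes (note the V-before-U ordering). */
   void const *data[3]    = { y, v, u };
   uint32_t    pitches[3] = { width, width / 2, width / 2 };

   return put_bits(surface, VDP_YCBCR_FORMAT_YV12, data, pitches);
}
#endif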
/**
 * Copy image data from a VdpVideoSurface to application memory in a specified
 * YCbCr format.
 */
VdpStatus
vlVdpVideoSurfaceGetBitsYCbCr(VdpVideoSurface surface,
                              VdpYCbCrFormat destination_ycbcr_format,
                              void *const *destination_data,
                              uint32_t const *destination_pitches)
{
   vlVdpSurface *vlsurface;
   struct pipe_context *pipe;
   enum pipe_format format, buffer_format;
   struct pipe_sampler_view **sampler_views;
   enum getbits_conversion conversion = CONVERSION_NONE;
   unsigned i, j;

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = vlsurface->device->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   if (!destination_data || !destination_pitches)
      return VDP_STATUS_INVALID_POINTER;

   format = FormatYCBCRToPipe(destination_ycbcr_format);
   if (format == PIPE_FORMAT_NONE)
      return VDP_STATUS_INVALID_Y_CB_CR_FORMAT;

   if (vlsurface->video_buffer == NULL)
      return VDP_STATUS_INVALID_VALUE;

   buffer_format = vlsurface->video_buffer->buffer_format;
   if (format != buffer_format) {
      if (format == PIPE_FORMAT_YV12 && buffer_format == PIPE_FORMAT_NV12)
         conversion = CONVERSION_NV12_TO_YV12;
      else if (format == PIPE_FORMAT_NV12 && buffer_format == PIPE_FORMAT_YV12)
         conversion = CONVERSION_YV12_TO_NV12;
      else if ((format == PIPE_FORMAT_YUYV && buffer_format == PIPE_FORMAT_UYVY) ||
               (format == PIPE_FORMAT_UYVY && buffer_format == PIPE_FORMAT_YUYV))
         conversion = CONVERSION_SWAP_YUYV_UYVY;
      else
         return VDP_STATUS_NO_IMPLEMENTATION;
   }

   pipe_mutex_lock(vlsurface->device->mutex);
   sampler_views = vlsurface->video_buffer->get_sampler_view_planes(vlsurface->video_buffer);
   if (!sampler_views) {
      pipe_mutex_unlock(vlsurface->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   for (i = 0; i < 3; ++i) {
      unsigned width, height;
      struct pipe_sampler_view *sv = sampler_views[i];
      if (!sv)
         continue;

      vlVdpVideoSurfaceSize(vlsurface, i, &width, &height);

      for (j = 0; j < sv->texture->array_size; ++j) {
         struct pipe_box box = { 0, 0, j, width, height, 1 };
         struct pipe_transfer *transfer;
         uint8_t *map;

         map = pipe->transfer_map(pipe, sv->texture, 0,
                                  PIPE_TRANSFER_READ, &box, &transfer);
         if (!map) {
            pipe_mutex_unlock(vlsurface->device->mutex);
            return VDP_STATUS_RESOURCES;
         }

         if (conversion == CONVERSION_NV12_TO_YV12 && i == 1) {
            u_copy_nv12_to_yv12(destination_data, destination_pitches,
                                i, j, transfer->stride, sv->texture->array_size,
                                map, box.width, box.height);
         } else if (conversion == CONVERSION_YV12_TO_NV12 && i > 0) {
            u_copy_yv12_to_nv12(destination_data, destination_pitches,
                                i, j, transfer->stride, sv->texture->array_size,
                                map, box.width, box.height);
         } else if (conversion == CONVERSION_SWAP_YUYV_UYVY) {
            u_copy_swap422_packed(destination_data, destination_pitches,
                                  i, j, transfer->stride, sv->texture->array_size,
                                  map, box.width, box.height);
         } else {
            util_copy_rect(destination_data[i] + destination_pitches[i] * j, sv->texture->format,
                           destination_pitches[i] * sv->texture->array_size, 0, 0,
                           box.width, box.height, map, transfer->stride, 0, 0);
         }

         pipe_transfer_unmap(pipe, transfer);
      }
   }
   pipe_mutex_unlock(vlsurface->device->mutex);

   return VDP_STATUS_OK;
}
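/* Illustrative sketch (disabled): reading a decoded frame back into
 * client-allocated NV12 buffers through the entry point above.  `get_bits`
 * is assumed to have been fetched with VdpGetProcAddress; if the surface is
 * stored internally as YV12, the conversion path above repacks it to NV12.
 */
#if 0
static VdpStatus
client_read_back_nv12(VdpVideoSurfaceGetBitsYCbCr *get_bits,
                      VdpVideoSurface surface,
                      uint8_t *luma, uint8_t *chroma, uint32_t width)
{
   void    *data[2]    = { luma, chroma };
   uint32_t pitches[2] = { width, width };

   return get_bits(surface, VDP_YCBCR_FORMAT_NV12, data, pitches);
}
#endif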