Example No. 1
/**
 * Create a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceCreate(VdpDevice device, VdpChromaType chroma_type,
                        uint32_t width, uint32_t height,
                        VdpVideoSurface *surface)
{
   struct pipe_context *pipe;
   vlVdpSurface *p_surf;
   VdpStatus ret;

   if (!(width && height)) {
      ret = VDP_STATUS_INVALID_SIZE;
      goto inv_size;
   }

   p_surf = CALLOC(1, sizeof(vlVdpSurface));
   if (!p_surf) {
      ret = VDP_STATUS_RESOURCES;
      goto no_res;
   }

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev) {
      ret = VDP_STATUS_INVALID_HANDLE;
      goto inv_device;
   }

   DeviceReference(&p_surf->device, dev);
   pipe = dev->context;

   pipe_mutex_lock(dev->mutex);
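   /* Fill the buffer template with the driver's preferred buffer format and
    * interlacing before attempting an early allocation. */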
   memset(&p_surf->templat, 0, sizeof(p_surf->templat));
   p_surf->templat.buffer_format = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERED_FORMAT
   );
   p_surf->templat.chroma_format = ChromaToPipe(chroma_type);
   p_surf->templat.width = width;
   p_surf->templat.height = height;
   p_surf->templat.interlaced = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERS_INTERLACED
   );
   if (p_surf->templat.buffer_format != PIPE_FORMAT_NONE)
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

   /* early allocation of a video buffer is not mandated, so it may still be NULL here */
   vlVdpVideoSurfaceClear(p_surf);
   pipe_mutex_unlock(dev->mutex);

   *surface = vlAddDataHTAB(p_surf);
   if (*surface == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   return VDP_STATUS_OK;

no_handle:
   if (p_surf->video_buffer)
      p_surf->video_buffer->destroy(p_surf->video_buffer);

inv_device:
   DeviceReference(&p_surf->device, NULL);
   FREE(p_surf);

no_res:
inv_size:
   return ret;
}
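For reference, a minimal caller sketch follows, assuming the VDPAU state-tracker headers and the entry points shown above are in scope; the helper name and the 4:2:0 chroma choice are illustrative, and the surface would later be released through the matching vlVdpVideoSurfaceDestroy().

/* Hypothetical helper: create a 4:2:0 video surface of the given size. */
static VdpStatus
create_420_surface(VdpDevice device, uint32_t width, uint32_t height,
                   VdpVideoSurface *out)
{
   VdpStatus st = vlVdpVideoSurfaceCreate(device, VDP_CHROMA_TYPE_420,
                                          width, height, out);
   if (st != VDP_STATUS_OK)
      *out = VDP_INVALID_HANDLE;   /* e.g. VDP_STATUS_INVALID_SIZE for 0x0 */
   return st;
}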
Example No. 2
/**
 * Decode a compressed field/frame and render the result into a VdpVideoSurface.
 */
VdpStatus
vlVdpDecoderRender(VdpDecoder decoder,
                   VdpVideoSurface target,
                   VdpPictureInfo const *picture_info,
                   uint32_t bitstream_buffer_count,
                   VdpBitstreamBuffer const *bitstream_buffers)
{
   const void * buffers[bitstream_buffer_count + 1];
   unsigned sizes[bitstream_buffer_count + 1];
   vlVdpDecoder *vldecoder;
   vlVdpSurface *vlsurf;
   VdpStatus ret;
   struct pipe_screen *screen;
   struct pipe_video_codec *dec;
   bool buffer_support[2];
   unsigned i;
   struct pipe_h264_sps sps = {};
   struct pipe_h264_pps pps = { &sps };
   union {
      struct pipe_picture_desc base;
      struct pipe_mpeg12_picture_desc mpeg12;
      struct pipe_mpeg4_picture_desc mpeg4;
      struct pipe_vc1_picture_desc vc1;
      struct pipe_h264_picture_desc h264;
   } desc;

   if (!(picture_info && bitstream_buffers))
      return VDP_STATUS_INVALID_POINTER;

   vldecoder = (vlVdpDecoder *)vlGetDataHTAB(decoder);
   if (!vldecoder)
      return VDP_STATUS_INVALID_HANDLE;
   dec = vldecoder->decoder;
   screen = dec->context->screen;

   vlsurf = (vlVdpSurface *)vlGetDataHTAB(target);
   if (!vlsurf)
      return VDP_STATUS_INVALID_HANDLE;

   if (vlsurf->device != vldecoder->device)
      return VDP_STATUS_HANDLE_DEVICE_MISMATCH;

   if (vlsurf->video_buffer != NULL && vlsurf->video_buffer->chroma_format != dec->chroma_format)
      // TODO: Recreate decoder with correct chroma
      return VDP_STATUS_INVALID_CHROMA_TYPE;

   /* Check whether the driver can decode into progressive and/or interlaced
    * buffers for this profile. */
   buffer_support[0] = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                               PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE);
   buffer_support[1] = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                               PIPE_VIDEO_CAP_SUPPORTS_INTERLACED);

   if (vlsurf->video_buffer == NULL ||
       !screen->is_video_format_supported(screen, vlsurf->video_buffer->buffer_format,
                                          dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM) ||
       !buffer_support[vlsurf->video_buffer->interlaced]) {

      pipe_mutex_lock(vlsurf->device->mutex);

      /* destroy the old one */
      if (vlsurf->video_buffer)
         vlsurf->video_buffer->destroy(vlsurf->video_buffer);

      /* set the buffer format to the preferred one */
      vlsurf->templat.buffer_format = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                                              PIPE_VIDEO_CAP_PREFERED_FORMAT);

      /* also set interlacing to the decoder's preference */
      vlsurf->templat.interlaced = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                                           PIPE_VIDEO_CAP_PREFERS_INTERLACED);

      /* and recreate the video buffer */
      vlsurf->video_buffer = dec->context->create_video_buffer(dec->context, &vlsurf->templat);

      /* still no luck? get me out of here... */
      if (!vlsurf->video_buffer) {
         pipe_mutex_unlock(vlsurf->device->mutex);
         return VDP_STATUS_NO_IMPLEMENTATION;
      }
      vlVdpVideoSurfaceClear(vlsurf);
      pipe_mutex_unlock(vlsurf->device->mutex);
   }

   /* Gather the bitstream pointers and sizes into the flat arrays
    * the pipe decoder expects. */
   for (i = 0; i < bitstream_buffer_count; ++i) {
      buffers[i] = bitstream_buffers[i].bitstream;
      sizes[i] = bitstream_buffers[i].bitstream_bytes;
   }

   /* Translate the VDPAU picture info into the codec-specific pipe descriptor. */
   memset(&desc, 0, sizeof(desc));
   desc.base.profile = dec->profile;
   switch (u_reduce_video_profile(dec->profile)) {
   case PIPE_VIDEO_FORMAT_MPEG12:
      ret = vlVdpDecoderRenderMpeg12(&desc.mpeg12, (VdpPictureInfoMPEG1Or2 *)picture_info);
      break;
   case PIPE_VIDEO_FORMAT_MPEG4:
      ret = vlVdpDecoderRenderMpeg4(&desc.mpeg4, (VdpPictureInfoMPEG4Part2 *)picture_info);
      break;
   case PIPE_VIDEO_FORMAT_VC1:
      if (dec->profile == PIPE_VIDEO_PROFILE_VC1_ADVANCED)
         vlVdpDecoderFixVC1Startcode(&bitstream_buffer_count, buffers, sizes);
      ret = vlVdpDecoderRenderVC1(&desc.vc1, (VdpPictureInfoVC1 *)picture_info);
      break;
   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      desc.h264.pps = &pps;
      ret = vlVdpDecoderRenderH264(&desc.h264, (VdpPictureInfoH264 *)picture_info);
      break;
   default:
      return VDP_STATUS_INVALID_DECODER_PROFILE;
   }

   if (ret != VDP_STATUS_OK)
      return ret;

   pipe_mutex_lock(vldecoder->mutex);
   dec->begin_frame(dec, vlsurf->video_buffer, &desc.base);
   dec->decode_bitstream(dec, vlsurf->video_buffer, &desc.base, bitstream_buffer_count, buffers, sizes);
   dec->end_frame(dec, vlsurf->video_buffer, &desc.base);
   pipe_mutex_unlock(vldecoder->mutex);
   return ret;
}
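A minimal submission sketch, assuming decoder and target were created earlier for an MPEG-2 profile and that data/size describe one complete compressed picture; the helper name and the few picture_info fields filled in are illustrative, the rest would normally come from the parsed sequence and picture headers.

/* Hypothetical helper: decode one compressed MPEG-2 picture into target. */
static VdpStatus
decode_one_picture(VdpDecoder decoder, VdpVideoSurface target,
                   void const *data, uint32_t size)
{
   VdpPictureInfoMPEG1Or2 info;
   VdpBitstreamBuffer buf;

   memset(&info, 0, sizeof(info));
   info.picture_coding_type = 1;                /* I picture, illustrative */
   info.forward_reference   = VDP_INVALID_HANDLE;
   info.backward_reference  = VDP_INVALID_HANDLE;
   /* ... f_code, quantizer matrices, slice_count, etc. from the headers ... */

   buf.struct_version  = VDP_BITSTREAM_BUFFER_VERSION;
   buf.bitstream       = data;
   buf.bitstream_bytes = size;

   return vlVdpDecoderRender(decoder, target, &info, 1, &buf);
}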
Example No. 3
/**
 * Copy image data from application memory in a specific YCbCr format to
 * a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfacePutBitsYCbCr(VdpVideoSurface surface,
                              VdpYCbCrFormat source_ycbcr_format,
                              void const *const *source_data,
                              uint32_t const *source_pitches)
{
   enum pipe_format pformat = FormatYCBCRToPipe(source_ycbcr_format);
   struct pipe_context *pipe;
   struct pipe_sampler_view **sampler_views;
   unsigned i, j;

   vlVdpSurface *p_surf = vlGetDataHTAB(surface);
   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = p_surf->device->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   if (!source_data || !source_pitches)
      return VDP_STATUS_INVALID_POINTER;

   pipe_mutex_lock(p_surf->device->mutex);
   if (p_surf->video_buffer == NULL || pformat != p_surf->video_buffer->buffer_format) {

      /* destroy the old one */
      if (p_surf->video_buffer)
         p_surf->video_buffer->destroy(p_surf->video_buffer);

      /* adjust the template parameters */
      p_surf->templat.buffer_format = pformat;

      /* and try to create the video buffer with the new format */
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

      /* still no luck? OK, forget it, we don't support it */
      if (!p_surf->video_buffer) {
         pipe_mutex_unlock(p_surf->device->mutex);
         return VDP_STATUS_NO_IMPLEMENTATION;
      }
      vlVdpVideoSurfaceClear(p_surf);
   }

   sampler_views = p_surf->video_buffer->get_sampler_view_planes(p_surf->video_buffer);
   if (!sampler_views) {
      pipe_mutex_unlock(p_surf->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   /* Upload each plane; for interlaced buffers each field goes into its own
    * array layer. */
   for (i = 0; i < 3; ++i) {
      unsigned width, height;
      struct pipe_sampler_view *sv = sampler_views[i];
      if (!sv || !source_pitches[i]) continue;

      vlVdpVideoSurfaceSize(p_surf, i, &width, &height);

      for (j = 0; j < sv->texture->array_size; ++j) {
         struct pipe_box dst_box = {
            0, 0, j,
            width, height, 1
         };

         pipe->transfer_inline_write(pipe, sv->texture, 0,
                                     PIPE_TRANSFER_WRITE, &dst_box,
                                     source_data[i] + source_pitches[i] * j,
                                     source_pitches[i] * sv->texture->array_size,
                                     0);
      }
   }
   pipe_mutex_unlock(p_surf->device->mutex);

   return VDP_STATUS_OK;
}
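A minimal upload sketch, assuming surface is a valid handle and y_plane/uv_plane point at tightly packed NV12 data (a full-resolution luma plane plus an interleaved CbCr plane at half vertical resolution); the helper name and parameters are illustrative.

/* Hypothetical helper: upload one tightly packed NV12 frame. */
static VdpStatus
upload_nv12(VdpVideoSurface surface, void const *y_plane,
            void const *uv_plane, uint32_t width)
{
   void const *planes[2]  = { y_plane, uv_plane };
   uint32_t    pitches[2] = { width, width };   /* bytes per row of each plane */

   return vlVdpVideoSurfacePutBitsYCbCr(surface, VDP_YCBCR_FORMAT_NV12,
                                        planes, pitches);
}

For three-plane formats such as YV12, a third plane pointer and pitch would be passed instead, matching the three-plane loop above.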