Example #1
static void
vlVdpDecoderFixVC1Startcode(uint32_t *num_buffers, const void *buffers[], unsigned sizes[])
{
   static const uint8_t vc1_startcode[] = { 0x00, 0x00, 0x01, 0x0D };
   struct vl_vlc vlc;
   unsigned i;

   /* search the first 64 bytes for a startcode */
   vl_vlc_init(&vlc, *num_buffers, buffers, sizes);
   while (vl_vlc_search_byte(&vlc, 64*8, 0x00) && vl_vlc_bits_left(&vlc) >= 32) {
      uint32_t value = vl_vlc_peekbits(&vlc, 32);
      if (value == 0x0000010D ||
          value == 0x0000010C ||
          value == 0x0000010B)
         return;
      vl_vlc_eatbits(&vlc, 8);
   }

   /* none found, so add one manually */
   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Manually adding VC-1 startcode\n");
   for (i = *num_buffers; i > 0; --i) {
      buffers[i] = buffers[i - 1];
      sizes[i] = sizes[i - 1];
   }
   ++(*num_buffers);
   buffers[0] = vc1_startcode;
   sizes[0] = 4;
}
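For readers unfamiliar with the vl_vlc bit reader, the loop above is simply scanning the first 64 bytes of the bitstream for the byte pattern 00 00 01 followed by one of the three values it tests. A minimal flat-buffer sketch of the same idea (has_vc1_startcode, buf and len are hypothetical names, not part of the driver code):

#include <stdint.h>

/* Hypothetical helper: return 1 if one of the startcodes checked above
 * (00 00 01 0D/0C/0B) appears within the first 64 bytes of a flat buffer. */
static int
has_vc1_startcode(const uint8_t *buf, unsigned len)
{
   unsigned i, end = len < 64 ? len : 64;

   for (i = 0; i + 4 <= end; ++i) {
      if (buf[i] == 0x00 && buf[i + 1] == 0x00 && buf[i + 2] == 0x01 &&
          (buf[i + 3] == 0x0D || buf[i + 3] == 0x0C || buf[i + 3] == 0x0B))
         return 1;
   }
   return 0;
}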
Example #2
static VdpStatus
vlVdpDecoderRenderH264(struct pipe_h264_picture_desc *picture,
                       VdpPictureInfoH264 *picture_info)
{
   unsigned i;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoding H264\n");

   picture->slice_count = picture_info->slice_count;
   picture->field_order_cnt[0] = picture_info->field_order_cnt[0];
   picture->field_order_cnt[1] = picture_info->field_order_cnt[1];
   picture->is_reference = picture_info->is_reference;
   picture->frame_num = picture_info->frame_num;
   picture->field_pic_flag = picture_info->field_pic_flag;
   picture->bottom_field_flag = picture_info->bottom_field_flag;
   picture->num_ref_frames = picture_info->num_ref_frames;
   picture->mb_adaptive_frame_field_flag = picture_info->mb_adaptive_frame_field_flag;
   picture->constrained_intra_pred_flag = picture_info->constrained_intra_pred_flag;
   picture->weighted_pred_flag = picture_info->weighted_pred_flag;
   picture->weighted_bipred_idc = picture_info->weighted_bipred_idc;
   picture->frame_mbs_only_flag = picture_info->frame_mbs_only_flag;
   picture->transform_8x8_mode_flag = picture_info->transform_8x8_mode_flag;
   picture->chroma_qp_index_offset = picture_info->chroma_qp_index_offset;
   picture->second_chroma_qp_index_offset = picture_info->second_chroma_qp_index_offset;
   picture->pic_init_qp_minus26 = picture_info->pic_init_qp_minus26;
   picture->num_ref_idx_l0_active_minus1 = picture_info->num_ref_idx_l0_active_minus1;
   picture->num_ref_idx_l1_active_minus1 = picture_info->num_ref_idx_l1_active_minus1;
   picture->log2_max_frame_num_minus4 = picture_info->log2_max_frame_num_minus4;
   picture->pic_order_cnt_type = picture_info->pic_order_cnt_type;
   picture->log2_max_pic_order_cnt_lsb_minus4 = picture_info->log2_max_pic_order_cnt_lsb_minus4;
   picture->delta_pic_order_always_zero_flag = picture_info->delta_pic_order_always_zero_flag;
   picture->direct_8x8_inference_flag = picture_info->direct_8x8_inference_flag;
   picture->entropy_coding_mode_flag = picture_info->entropy_coding_mode_flag;
   picture->pic_order_present_flag = picture_info->pic_order_present_flag;
   picture->deblocking_filter_control_present_flag = picture_info->deblocking_filter_control_present_flag;
   picture->redundant_pic_cnt_present_flag = picture_info->redundant_pic_cnt_present_flag;

   memcpy(picture->scaling_lists_4x4, picture_info->scaling_lists_4x4, 6*16);
   memcpy(picture->scaling_lists_8x8, picture_info->scaling_lists_8x8, 2*64);

   for (i = 0; i < 16; ++i) {
      VdpStatus ret = vlVdpGetReferenceFrame
      (
         picture_info->referenceFrames[i].surface,
         &picture->ref[i]
      );
      if (ret != VDP_STATUS_OK)
         return ret;

      picture->is_long_term[i] = picture_info->referenceFrames[i].is_long_term;
      picture->top_is_reference[i] = picture_info->referenceFrames[i].top_is_reference;
      picture->bottom_is_reference[i] = picture_info->referenceFrames[i].bottom_is_reference;
      picture->field_order_cnt_list[i][0] = picture_info->referenceFrames[i].field_order_cnt[0];
      picture->field_order_cnt_list[i][1] = picture_info->referenceFrames[i].field_order_cnt[1];
      picture->frame_num_list[i] = picture_info->referenceFrames[i].frame_idx;
   }

   return VDP_STATUS_OK;
}
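On the caller side, the referenceFrames array walked by this loop always holds 16 entries; per the VDPAU API, unused DPB slots are marked with VDP_INVALID_HANDLE so that vlVdpGetReferenceFrame can treat them as empty. A minimal sketch of that caller-side initialisation (init_reference_frames is a hypothetical helper name):

#include <vdpau/vdpau.h>

/* Hypothetical caller-side helper: mark all 16 DPB slots as unused before
 * filling in the frames that are actually referenced. */
static void
init_reference_frames(VdpPictureInfoH264 *info)
{
   unsigned i;

   for (i = 0; i < 16; ++i) {
      info->referenceFrames[i].surface = VDP_INVALID_HANDLE;
      info->referenceFrames[i].is_long_term = VDP_FALSE;
      info->referenceFrames[i].top_is_reference = VDP_FALSE;
      info->referenceFrames[i].bottom_is_reference = VDP_FALSE;
      info->referenceFrames[i].field_order_cnt[0] = 0;
      info->referenceFrames[i].field_order_cnt[1] = 0;
      info->referenceFrames[i].frame_idx = 0;
   }
}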
Example #3
VdpStatus
vlVdpPresentationQueueDisplay(VdpPresentationQueue presentation_queue,
                              VdpOutputSurface surface,
                              uint32_t clip_width,
                              uint32_t clip_height,
                              VdpTime  earliest_presentation_time)
{
   static int dump_window = -1;

   vlVdpPresentationQueue *pq;
   vlVdpOutputSurface *surf;
   struct pipe_surface *drawable_surface;

   pq = vlGetDataHTAB(presentation_queue);
   if (!pq)
      return VDP_STATUS_INVALID_HANDLE;

   drawable_surface = vl_drawable_surface_get(pq->device->context, pq->drawable);
   if (!drawable_surface)
      return VDP_STATUS_INVALID_HANDLE;

   surf = vlGetDataHTAB(surface);
   if (!surf)
      return VDP_STATUS_INVALID_HANDLE;

   vl_compositor_clear_layers(&pq->compositor);
   vl_compositor_set_rgba_layer(&pq->compositor, 0, surf->sampler_view, NULL, NULL);
   vl_compositor_render(&pq->compositor, PIPE_MPEG12_PICTURE_TYPE_FRAME,
                        drawable_surface, NULL, NULL);

   pq->device->context->pipe->screen->flush_frontbuffer
   (
      pq->device->context->pipe->screen,
      drawable_surface->texture,
      0, 0,
      vl_contextprivate_get(pq->device->context, drawable_surface)
   );

   if(dump_window == -1) {
      dump_window = debug_get_num_option("VDPAU_DUMP", 0);
   }

   if(dump_window) {
      static unsigned int framenum = 0;
      char cmd[256];

      sprintf(cmd, "xwd -id %d -out vdpau_frame_%08d.xwd", (int)pq->drawable, ++framenum);
      if (system(cmd) != 0)
         VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Dumping surface %d failed.\n", surface);
   }
   
   pipe_surface_reference(&drawable_surface, NULL);

   return VDP_STATUS_OK;
}
Example #4
/**
 * Composite a sub-rectangle of a VdpOutputSurface into a sub-rectangle of
 * another VdpOutputSurface; Output Surface object VdpOutputSurface.
 */
VdpStatus
vlVdpOutputSurfaceRenderOutputSurface(VdpOutputSurface destination_surface,
                                      VdpRect const *destination_rect,
                                      VdpOutputSurface source_surface,
                                      VdpRect const *source_rect,
                                      VdpColor const *colors,
                                      VdpOutputSurfaceRenderBlendState const *blend_state,
                                      uint32_t flags)
{
   vlVdpOutputSurface *dst_vlsurface;
   vlVdpOutputSurface *src_vlsurface;

   struct pipe_context *context;
   struct vl_compositor *compositor;

   struct pipe_video_rect src_rect;
   struct pipe_video_rect dst_rect;

   void *blend;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Composing output surfaces\n");

   dst_vlsurface = vlGetDataHTAB(destination_surface);
   if (!dst_vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   src_vlsurface = vlGetDataHTAB(source_surface);
   if (!src_vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   if (dst_vlsurface->device != src_vlsurface->device)
      return VDP_STATUS_HANDLE_DEVICE_MISMATCH;

   context = dst_vlsurface->device->context->pipe;
   compositor = &dst_vlsurface->device->compositor;

   blend = BlenderToPipe(context, blend_state);

   vl_compositor_clear_layers(compositor);
   vl_compositor_set_layer_blend(compositor, 0, blend, false);
   vl_compositor_set_rgba_layer(compositor, 0, src_vlsurface->sampler_view,
                                RectToPipe(source_rect, &src_rect), NULL);
   vl_compositor_render(compositor, dst_vlsurface->surface,
                        RectToPipe(destination_rect, &dst_rect), NULL, false);

   context->delete_blend_state(context, blend);

   return VDP_STATUS_OK;
}
Example #5
static VdpStatus
vlVdpDecoderRenderVC1(struct pipe_vc1_picture_desc *picture,
                      VdpPictureInfoVC1 *picture_info)
{
   VdpStatus r;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoding VC-1\n");

   r = vlVdpGetReferenceFrame(picture_info->forward_reference, &picture->ref[0]);
   if (r != VDP_STATUS_OK)
      return r;

   r = vlVdpGetReferenceFrame(picture_info->backward_reference, &picture->ref[1]);
   if (r != VDP_STATUS_OK)
      return r;

   picture->slice_count = picture_info->slice_count;
   picture->picture_type = picture_info->picture_type;
   picture->frame_coding_mode = picture_info->frame_coding_mode;
   picture->postprocflag = picture_info->postprocflag;
   picture->pulldown = picture_info->pulldown;
   picture->interlace = picture_info->interlace;
   picture->tfcntrflag = picture_info->tfcntrflag;
   picture->finterpflag = picture_info->finterpflag;
   picture->psf = picture_info->psf;
   picture->dquant = picture_info->dquant;
   picture->panscan_flag = picture_info->panscan_flag;
   picture->refdist_flag = picture_info->refdist_flag;
   picture->quantizer = picture_info->quantizer;
   picture->extended_mv = picture_info->extended_mv;
   picture->extended_dmv = picture_info->extended_dmv;
   picture->overlap = picture_info->overlap;
   picture->vstransform = picture_info->vstransform;
   picture->loopfilter = picture_info->loopfilter;
   picture->fastuvmc = picture_info->fastuvmc;
   picture->range_mapy_flag = picture_info->range_mapy_flag;
   picture->range_mapy = picture_info->range_mapy;
   picture->range_mapuv_flag = picture_info->range_mapuv_flag;
   picture->range_mapuv = picture_info->range_mapuv;
   picture->multires = picture_info->multires;
   picture->syncmarker = picture_info->syncmarker;
   picture->rangered = picture_info->rangered;
   picture->maxbframes = picture_info->maxbframes;
   picture->deblockEnable = picture_info->deblockEnable;
   picture->pquant = picture_info->pquant;

   return VDP_STATUS_OK;
}
Example #6
VdpStatus
vlVdpPresentationQueueCreate(VdpDevice device,
                             VdpPresentationQueueTarget presentation_queue_target,
                             VdpPresentationQueue *presentation_queue)
{
   vlVdpPresentationQueue *pq = NULL;
   VdpStatus ret;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Creating PresentationQueue\n");

   if (!presentation_queue)
      return VDP_STATUS_INVALID_POINTER;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   vlVdpPresentationQueueTarget *pqt = vlGetDataHTAB(presentation_queue_target);
   if (!pqt)
      return VDP_STATUS_INVALID_HANDLE;

   if (dev != pqt->device)
      return VDP_STATUS_HANDLE_DEVICE_MISMATCH;

   pq = CALLOC(1, sizeof(vlVdpPresentationQueue));
   if (!pq)
      return VDP_STATUS_RESOURCES;

   pq->device = dev;
   pq->drawable = pqt->drawable;
   
   if (!vl_compositor_init(&pq->compositor, dev->context->pipe)) {
      ret = VDP_STATUS_ERROR;
      goto no_compositor;
   }

   *presentation_queue = vlAddDataHTAB(pq);
   if (*presentation_queue == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   return VDP_STATUS_OK;
no_handle:
no_compositor:
   FREE(pq);
   return ret;
}
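For context, applications never call the vlVdp* symbols directly; they fetch the corresponding entry points through VdpGetProcAddress. A rough sketch of creating an X11-backed presentation queue, with all error handling omitted (create_queue and its local variables are assumptions, not driver code):

#include <vdpau/vdpau.h>
#include <vdpau/vdpau_x11.h>

/* Hypothetical application-side setup: create a device for the X display,
 * wrap the window in a presentation queue target, then create the queue. */
static VdpPresentationQueue
create_queue(Display *dpy, int screen, Drawable win)
{
   VdpDevice dev;
   VdpGetProcAddress *get_proc;
   VdpPresentationQueueTargetCreateX11 *target_create;
   VdpPresentationQueueCreate *queue_create;
   VdpPresentationQueueTarget target;
   VdpPresentationQueue queue;

   vdp_device_create_x11(dpy, screen, &dev, &get_proc);
   get_proc(dev, VDP_FUNC_ID_PRESENTATION_QUEUE_TARGET_CREATE_X11,
            (void **)&target_create);
   get_proc(dev, VDP_FUNC_ID_PRESENTATION_QUEUE_CREATE,
            (void **)&queue_create);

   target_create(dev, win, &target);
   queue_create(dev, target, &queue);
   return queue;
}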
Example #7
VdpStatus
vlVdpPresentationQueueDestroy(VdpPresentationQueue presentation_queue)
{
   vlVdpPresentationQueue *pq;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Destroying PresentationQueue\n");

   pq = vlGetDataHTAB(presentation_queue);
   if (!pq)
      return VDP_STATUS_INVALID_HANDLE;

   vl_compositor_cleanup(&pq->compositor);

   vlRemoveDataHTAB(presentation_queue);
   FREE(pq);

   return VDP_STATUS_OK;
}
Example #8
VdpStatus
vlVdpPresentationQueueSetBackgroundColor(VdpPresentationQueue presentation_queue,
                                         VdpColor *const background_color)
{
   vlVdpPresentationQueue *pq;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Setting Background Color\n");

   if (!background_color)
      return VDP_STATUS_INVALID_POINTER;

   pq = vlGetDataHTAB(presentation_queue);
   if (!pq)
      return VDP_STATUS_INVALID_HANDLE;

   vl_compositor_set_clear_color(&pq->compositor, (float*)background_color);

   return VDP_STATUS_OK;
}
Example #9
/**
 * Retrieve the parameters used to create a VdpOutputSurface.
 */
VdpStatus
vlVdpOutputSurfaceGetParameters(VdpOutputSurface surface,
                                VdpRGBAFormat *rgba_format,
                                uint32_t *width, uint32_t *height)
{
   vlVdpOutputSurface *vlsurface;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Getting output surface parameters\n");
        
   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   *rgba_format = PipeToFormatRGBA(vlsurface->sampler_view->texture->format);
   *width = vlsurface->sampler_view->texture->width0;
   *height = vlsurface->sampler_view->texture->height0;

   return VDP_STATUS_OK;
}
Example #10
/**
 * Destroy a VdpOutputSurface.
 */
VdpStatus
vlVdpOutputSurfaceDestroy(VdpOutputSurface surface)
{
   vlVdpOutputSurface *vlsurface;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Destroying output surface\n");

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   pipe_surface_reference(&vlsurface->surface, NULL);
   pipe_sampler_view_reference(&vlsurface->sampler_view, NULL);

   vlRemoveDataHTAB(surface);
   FREE(vlsurface);

   return VDP_STATUS_OK;
}
Example #11
/**
 * Decode an MPEG-4 video.
 */
static VdpStatus
vlVdpDecoderRenderMpeg4(struct pipe_mpeg4_picture_desc *picture,
                        VdpPictureInfoMPEG4Part2 *picture_info)
{
   VdpStatus r;
   unsigned i;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoding MPEG4\n");

   r = vlVdpGetReferenceFrame(picture_info->forward_reference, &picture->ref[0]);
   if (r != VDP_STATUS_OK)
      return r;

   r = vlVdpGetReferenceFrame(picture_info->backward_reference, &picture->ref[1]);
   if (r != VDP_STATUS_OK)
      return r;

   for (i = 0; i < 2; ++i) {
      picture->trd[i] = picture_info->trd[i];
      picture->trb[i] = picture_info->trb[i];
   }
   picture->vop_time_increment_resolution = picture_info->vop_time_increment_resolution;
   picture->vop_coding_type = picture_info->vop_coding_type;
   picture->vop_fcode_forward = picture_info->vop_fcode_forward;
   picture->vop_fcode_backward = picture_info->vop_fcode_backward;
   picture->resync_marker_disable = picture_info->resync_marker_disable;
   picture->interlaced = picture_info->interlaced;
   picture->quant_type = picture_info->quant_type;
   picture->quarter_sample = picture_info->quarter_sample;
   picture->short_video_header = picture_info->short_video_header;
   picture->rounding_control = picture_info->rounding_control;
   picture->alternate_vertical_scan_flag = picture_info->alternate_vertical_scan_flag;
   picture->top_field_first = picture_info->top_field_first;
   picture->intra_matrix = picture_info->intra_quantizer_matrix;
   picture->non_intra_matrix = picture_info->non_intra_quantizer_matrix;

   return VDP_STATUS_OK;
}
Example #12
/**
 * Decode an MPEG-1/2 video.
 */
static VdpStatus
vlVdpDecoderRenderMpeg12(struct pipe_mpeg12_picture_desc *picture,
                         VdpPictureInfoMPEG1Or2 *picture_info)
{
   VdpStatus r;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoding MPEG12\n");

   r = vlVdpGetReferenceFrame(picture_info->forward_reference, &picture->ref[0]);
   if (r != VDP_STATUS_OK)
      return r;

   r = vlVdpGetReferenceFrame(picture_info->backward_reference, &picture->ref[1]);
   if (r != VDP_STATUS_OK)
      return r;

   picture->picture_coding_type = picture_info->picture_coding_type;
   picture->picture_structure = picture_info->picture_structure;
   picture->frame_pred_frame_dct = picture_info->frame_pred_frame_dct;
   picture->q_scale_type = picture_info->q_scale_type;
   picture->alternate_scan = picture_info->alternate_scan;
   picture->intra_vlc_format = picture_info->intra_vlc_format;
   picture->concealment_motion_vectors = picture_info->concealment_motion_vectors;
   picture->intra_dc_precision = picture_info->intra_dc_precision;
   picture->f_code[0][0] = picture_info->f_code[0][0] - 1;
   picture->f_code[0][1] = picture_info->f_code[0][1] - 1;
   picture->f_code[1][0] = picture_info->f_code[1][0] - 1;
   picture->f_code[1][1] = picture_info->f_code[1][1] - 1;
   picture->num_slices = picture_info->slice_count;
   picture->top_field_first = picture_info->top_field_first;
   picture->full_pel_forward_vector = picture_info->full_pel_forward_vector;
   picture->full_pel_backward_vector = picture_info->full_pel_backward_vector;
   picture->intra_matrix = picture_info->intra_quantizer_matrix;
   picture->non_intra_matrix = picture_info->non_intra_quantizer_matrix;

   return VDP_STATUS_OK;
}
Example #13
/**
 * Create a VdpVideoMixer.
 */
VdpStatus
vlVdpVideoMixerCreate(VdpDevice device,
                      uint32_t feature_count,
                      VdpVideoMixerFeature const *features,
                      uint32_t parameter_count,
                      VdpVideoMixerParameter const *parameters,
                      void const *const *parameter_values,
                      VdpVideoMixer *mixer)
{
   vlVdpVideoMixer *vmixer = NULL;
   VdpStatus ret;
   struct pipe_screen *screen;
   uint32_t max_2d_texture_level;
   unsigned max_size, i;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;
   screen = dev->vscreen->pscreen;

   vmixer = CALLOC(1, sizeof(vlVdpVideoMixer));
   if (!vmixer)
      return VDP_STATUS_RESOURCES;

   DeviceReference(&vmixer->device, dev);

   pipe_mutex_lock(dev->mutex);

   vl_compositor_init_state(&vmixer->cstate, dev->context);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_601, NULL, true, &vmixer->csc);
   if (!debug_get_bool_option("G3DVL_NO_CSC", FALSE))
      vl_compositor_set_csc_matrix(&vmixer->cstate, (const vl_csc_matrix *)&vmixer->csc);

   *mixer = vlAddDataHTAB(vmixer);
   if (*mixer == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   ret = VDP_STATUS_INVALID_VIDEO_MIXER_FEATURE;
   for (i = 0; i < feature_count; ++i) {
      switch (features[i]) {
      /* they are valid, but we don't support them */
      case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L1:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L2:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L3:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L4:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L5:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L6:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L7:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L8:
      case VDP_VIDEO_MIXER_FEATURE_HIGH_QUALITY_SCALING_L9:
      case VDP_VIDEO_MIXER_FEATURE_INVERSE_TELECINE:
      case VDP_VIDEO_MIXER_FEATURE_LUMA_KEY:
         break;

      case VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL:
         vmixer->deint.supported = true;
         break;

      case VDP_VIDEO_MIXER_FEATURE_SHARPNESS:
         vmixer->sharpness.supported = true;
         break;

      case VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION:
         vmixer->noise_reduction.supported = true;
         break;

      default: goto no_params;
      }
   }

   vmixer->chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
   ret = VDP_STATUS_INVALID_VIDEO_MIXER_PARAMETER;
   for (i = 0; i < parameter_count; ++i) {
      switch (parameters[i]) {
      case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH:
         vmixer->video_width = *(uint32_t*)parameter_values[i];
         break;
      case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT:
         vmixer->video_height = *(uint32_t*)parameter_values[i];
         break;
      case VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE:
         vmixer->chroma_format = ChromaToPipe(*(VdpChromaType*)parameter_values[i]);
         break;
      case VDP_VIDEO_MIXER_PARAMETER_LAYERS:
         vmixer->max_layers = *(uint32_t*)parameter_values[i];
         break;
      default: goto no_params;
      }
   }
   ret = VDP_STATUS_INVALID_VALUE;
   if (vmixer->max_layers > 4) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] Max layers > 4 not supported\n", vmixer->max_layers);
      goto no_params;
   }

   max_2d_texture_level = screen->get_param(screen, PIPE_CAP_MAX_TEXTURE_2D_LEVELS);
   max_size = pow(2, max_2d_texture_level-1);
   if (vmixer->video_width < 48 || vmixer->video_width > max_size) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u not valid for width\n",
                vmixer->video_width, max_size);
      goto no_params;
   }
   if (vmixer->video_height < 48 || vmixer->video_height > max_size) {
      VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u  not valid for height\n",
                vmixer->video_height, max_size);
      goto no_params;
   }
   vmixer->luma_key_min = 0.f;
   vmixer->luma_key_max = 1.f;
   pipe_mutex_unlock(dev->mutex);

   return VDP_STATUS_OK;

no_params:
   vlRemoveDataHTAB(*mixer);

no_handle:
   vl_compositor_cleanup_state(&vmixer->cstate);
   pipe_mutex_unlock(dev->mutex);
   DeviceReference(&vmixer->device, NULL);
   FREE(vmixer);
   return ret;
}
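From the application side, the feature, parameter and value arrays validated above are passed as parallel lists. A minimal caller-side sketch (create_mixer is a hypothetical wrapper, and mixer_create is assumed to have been obtained with VdpGetProcAddress):

#include <vdpau/vdpau.h>

/* Hypothetical caller-side wrapper around the entry point implemented above;
 * width/height must satisfy the 48..max_size check performed by the driver. */
static VdpStatus
create_mixer(VdpVideoMixerCreate *mixer_create, VdpDevice dev,
             uint32_t width, uint32_t height, VdpVideoMixer *mixer)
{
   VdpVideoMixerFeature features[] = {
      VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL,
      VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION
   };
   VdpVideoMixerParameter params[] = {
      VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
      VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
      VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE
   };
   VdpChromaType chroma = VDP_CHROMA_TYPE_420;
   void const *values[] = { &width, &height, &chroma };

   return mixer_create(dev, 2, features, 3, params, values, mixer);
}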
Example #14
/**
 * Enter a surface into the presentation queue.
 */
VdpStatus
vlVdpPresentationQueueDisplay(VdpPresentationQueue presentation_queue,
                              VdpOutputSurface surface,
                              uint32_t clip_width,
                              uint32_t clip_height,
                              VdpTime  earliest_presentation_time)
{
   static int dump_window = -1;

   vlVdpPresentationQueue *pq;
   vlVdpOutputSurface *surf;

   struct pipe_context *pipe;
   struct pipe_resource *tex;
   struct pipe_surface surf_templ, *surf_draw;
   struct u_rect src_rect, dst_clip, *dirty_area;

   struct vl_compositor *compositor;
   struct vl_compositor_state *cstate;

   pq = vlGetDataHTAB(presentation_queue);
   if (!pq)
      return VDP_STATUS_INVALID_HANDLE;

   surf = vlGetDataHTAB(surface);
   if (!surf)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = pq->device->context;
   compositor = &pq->device->compositor;
   cstate = &pq->cstate;

   pipe_mutex_lock(pq->device->mutex);
   tex = vl_screen_texture_from_drawable(pq->device->vscreen, pq->drawable);
   if (!tex) {
      pipe_mutex_unlock(pq->device->mutex);
      return VDP_STATUS_INVALID_HANDLE;
   }

   dirty_area = vl_screen_get_dirty_area(pq->device->vscreen);

   memset(&surf_templ, 0, sizeof(surf_templ));
   surf_templ.format = tex->format;
   surf_templ.usage = PIPE_BIND_RENDER_TARGET;
   surf_draw = pipe->create_surface(pipe, tex, &surf_templ);

   surf->timestamp = (vlVdpTime)earliest_presentation_time;

   dst_clip.x0 = 0;
   dst_clip.y0 = 0;
   dst_clip.x1 = clip_width ? clip_width : surf_draw->width;
   dst_clip.y1 = clip_height ? clip_height : surf_draw->height;

   if (pq->device->delayed_rendering.surface == surface &&
       dst_clip.x1 == surf_draw->width && dst_clip.y1 == surf_draw->height) {

      // TODO: we correctly support the clipping here, but not the pq background color in the clipped area....
      cstate = pq->device->delayed_rendering.cstate;
      vl_compositor_set_dst_clip(cstate, &dst_clip);
      vlVdpResolveDelayedRendering(pq->device, surf_draw, dirty_area);

   } else {
      vlVdpResolveDelayedRendering(pq->device, NULL, NULL);

      src_rect.x0 = 0;
      src_rect.y0 = 0;
      src_rect.x1 = surf_draw->width;
      src_rect.y1 = surf_draw->height;

      vl_compositor_clear_layers(cstate);
      vl_compositor_set_rgba_layer(cstate, compositor, 0, surf->sampler_view, &src_rect, NULL, NULL);
      vl_compositor_set_dst_clip(cstate, &dst_clip);
      vl_compositor_render(cstate, compositor, surf_draw, dirty_area);
   }

   vl_screen_set_next_timestamp(pq->device->vscreen, earliest_presentation_time);
   pipe->screen->flush_frontbuffer
   (
      pipe->screen, tex, 0, 0,
      vl_screen_get_private(pq->device->vscreen)
   );

   pipe->screen->fence_reference(pipe->screen, &surf->fence, NULL);
   pipe->flush(pipe, &surf->fence);

   if (dump_window == -1) {
      dump_window = debug_get_num_option("VDPAU_DUMP", 0);
   }

   if (dump_window) {
      static unsigned int framenum = 0;
      char cmd[256];

      if (framenum) {
         sprintf(cmd, "xwd -id %d -silent -out vdpau_frame_%08d.xwd", (int)pq->drawable, framenum);
         if (system(cmd) != 0)
            VDPAU_MSG(VDPAU_ERR, "[VDPAU] Dumping surface %d failed.\n", surface);
      }
      framenum++;
   }

   pipe_resource_reference(&tex, NULL);
   pipe_surface_reference(&surf_draw, NULL);
   pipe_mutex_unlock(pq->device->mutex);

   return VDP_STATUS_OK;
}
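Viewed from the application, this entry point takes the queue handle, the surface, an optional clip size and a target presentation time. A minimal caller-side sketch (present_now is hypothetical; queue_display is assumed to have been fetched with VdpGetProcAddress):

#include <vdpau/vdpau.h>

/* Hypothetical caller-side helper: zero clip dimensions fall back to the
 * full drawable size (see the dst_clip handling above), and a timestamp in
 * the past asks for presentation as soon as possible. */
static VdpStatus
present_now(VdpPresentationQueueDisplay *queue_display,
            VdpPresentationQueue queue, VdpOutputSurface surface)
{
   return queue_display(queue, surface, 0, 0, 0);
}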
Example #15
/**
 * Create a VdpOutputSurface.
 */
VdpStatus
vlVdpOutputSurfaceCreate(VdpDevice device,
                         VdpRGBAFormat rgba_format,
                         uint32_t width, uint32_t height,
                         VdpOutputSurface  *surface)
{
   struct pipe_context *pipe;
   struct pipe_resource res_tmpl, *res;
   struct pipe_sampler_view sv_templ;
   struct pipe_surface surf_templ;

   vlVdpOutputSurface *vlsurface = NULL;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Creating output surface\n");
   if (!(width && height))
      return VDP_STATUS_INVALID_SIZE;

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = dev->context->pipe;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   vlsurface = CALLOC(1, sizeof(vlVdpOutputSurface));
   if (!vlsurface)
      return VDP_STATUS_RESOURCES;

   vlsurface->device = dev;

   memset(&res_tmpl, 0, sizeof(res_tmpl));

   res_tmpl.target = PIPE_TEXTURE_2D;
   res_tmpl.format = FormatRGBAToPipe(rgba_format);
   res_tmpl.width0 = width;
   res_tmpl.height0 = height;
   res_tmpl.depth0 = 1;
   res_tmpl.array_size = 1;
   res_tmpl.bind = PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_RENDER_TARGET;
   res_tmpl.usage = PIPE_USAGE_STATIC;

   res = pipe->screen->resource_create(pipe->screen, &res_tmpl);
   if (!res) {
      FREE(vlsurface);
      return VDP_STATUS_ERROR;
   }

   memset(&sv_templ, 0, sizeof(sv_templ));
   u_sampler_view_default_template(&sv_templ, res, res->format);
   vlsurface->sampler_view = pipe->create_sampler_view(pipe, res, &sv_templ);
   if (!vlsurface->sampler_view) {
      pipe_resource_reference(&res, NULL);
      FREE(vlsurface);
      return VDP_STATUS_ERROR;
   }

   memset(&surf_templ, 0, sizeof(surf_templ));
   surf_templ.format = res->format;
   surf_templ.usage = PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_RENDER_TARGET;
   vlsurface->surface = pipe->create_surface(pipe, res, &surf_templ);
   if (!vlsurface->surface) {
      pipe_resource_reference(&res, NULL);
      FREE(vlsurface);
      return VDP_STATUS_ERROR;
   }

   *surface = vlAddDataHTAB(vlsurface);
   if (*surface == 0) {
      pipe_resource_reference(&res, NULL);
      FREE(vlsurface);
      return VDP_STATUS_ERROR;
   }
   
   pipe_resource_reference(&res, NULL);

   vl_compositor_reset_dirty_area(&vlsurface->dirty_area);

   return VDP_STATUS_OK;
}
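On the application side the same entry point is reached through VdpGetProcAddress with VDP_FUNC_ID_OUTPUT_SURFACE_CREATE. A minimal caller-side sketch (create_rgba_surface is a hypothetical wrapper name):

#include <vdpau/vdpau.h>

/* Hypothetical caller-side wrapper: a zero width or height is rejected by
 * the driver above with VDP_STATUS_INVALID_SIZE. */
static VdpStatus
create_rgba_surface(VdpOutputSurfaceCreate *out_create, VdpDevice dev,
                    uint32_t width, uint32_t height, VdpOutputSurface *surface)
{
   return out_create(dev, VDP_RGBA_FORMAT_B8G8R8A8, width, height, surface);
}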
Example #16
/**
 * Copy image data from application memory in a specific indexed format to
 * a VdpOutputSurface.
 */
VdpStatus
vlVdpOutputSurfacePutBitsIndexed(VdpOutputSurface surface,
                                 VdpIndexedFormat source_indexed_format,
                                 void const *const *source_data,
                                 uint32_t const *source_pitch,
                                 VdpRect const *destination_rect,
                                 VdpColorTableFormat color_table_format,
                                 void const *color_table)
{
   vlVdpOutputSurface *vlsurface;
   struct pipe_context *context;
   struct vl_compositor *compositor;

   enum pipe_format index_format;
   enum pipe_format colortbl_format;

   struct pipe_resource *res, res_tmpl;
   struct pipe_sampler_view sv_tmpl;
   struct pipe_sampler_view *sv_idx = NULL, *sv_tbl = NULL;

   struct pipe_box box;
   struct pipe_video_rect dst_rect;

   VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Uploading indexed output surface\n");

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   context = vlsurface->device->context->pipe;
   compositor = &vlsurface->device->compositor;

   index_format = FormatIndexedToPipe(source_indexed_format);
   if (index_format == PIPE_FORMAT_NONE)
      return VDP_STATUS_INVALID_INDEXED_FORMAT;

   if (!source_data || !source_pitch)
      return VDP_STATUS_INVALID_POINTER;

   colortbl_format = FormatColorTableToPipe(color_table_format);
   if (colortbl_format == PIPE_FORMAT_NONE)
      return VDP_STATUS_INVALID_COLOR_TABLE_FORMAT;

   if (!color_table)
      return VDP_STATUS_INVALID_POINTER;

   memset(&res_tmpl, 0, sizeof(res_tmpl));
   res_tmpl.target = PIPE_TEXTURE_2D;
   res_tmpl.format = index_format;

   if (destination_rect) {
      res_tmpl.width0 = abs(destination_rect->x0-destination_rect->x1);
      res_tmpl.height0 = abs(destination_rect->y0-destination_rect->y1);
   } else {
      res_tmpl.width0 = vlsurface->surface->texture->width0;
      res_tmpl.height0 = vlsurface->surface->texture->height0;
   }
   res_tmpl.depth0 = 1;
   res_tmpl.array_size = 1;
   res_tmpl.usage = PIPE_USAGE_STAGING;
   res_tmpl.bind = PIPE_BIND_SAMPLER_VIEW;

   res = context->screen->resource_create(context->screen, &res_tmpl);
   if (!res)
      goto error_resource;

   box.x = box.y = box.z = 0;
   box.width = res->width0;
   box.height = res->height0;
   box.depth = res->depth0;

   context->transfer_inline_write(context, res, 0, PIPE_TRANSFER_WRITE, &box,
                                  source_data[0], source_pitch[0],
                                  source_pitch[0] * res->height0);

   memset(&sv_tmpl, 0, sizeof(sv_tmpl));
   u_sampler_view_default_template(&sv_tmpl, res, res->format);

   sv_idx = context->create_sampler_view(context, res, &sv_tmpl);
   pipe_resource_reference(&res, NULL);

   if (!sv_idx)
      goto error_resource;

   memset(&res_tmpl, 0, sizeof(res_tmpl));
   res_tmpl.target = PIPE_TEXTURE_1D;
   res_tmpl.format = colortbl_format;
   res_tmpl.width0 = 1 << util_format_get_component_bits(
      index_format, UTIL_FORMAT_COLORSPACE_RGB, 0);
   res_tmpl.height0 = 1;
   res_tmpl.depth0 = 1;
   res_tmpl.array_size = 1;
   res_tmpl.usage = PIPE_USAGE_STAGING;
   res_tmpl.bind = PIPE_BIND_SAMPLER_VIEW;

   res = context->screen->resource_create(context->screen, &res_tmpl);
   if (!res)
      goto error_resource;

   box.x = box.y = box.z = 0;
   box.width = res->width0;
   box.height = res->height0;
   box.depth = res->depth0;

   context->transfer_inline_write(context, res, 0, PIPE_TRANSFER_WRITE, &box, color_table,
                                  util_format_get_stride(colortbl_format, res->width0), 0);

   memset(&sv_tmpl, 0, sizeof(sv_tmpl));
   u_sampler_view_default_template(&sv_tmpl, res, res->format);

   sv_tbl = context->create_sampler_view(context, res, &sv_tmpl);
   pipe_resource_reference(&res, NULL);

   if (!sv_tbl)
      goto error_resource;

   vl_compositor_clear_layers(compositor);
   vl_compositor_set_palette_layer(compositor, 0, sv_idx, sv_tbl, NULL, NULL, false);
   vl_compositor_render(compositor, vlsurface->surface,
                        RectToPipe(destination_rect, &dst_rect), NULL, NULL);

   pipe_sampler_view_reference(&sv_idx, NULL);
   pipe_sampler_view_reference(&sv_tbl, NULL);

   return VDP_STATUS_OK;

error_resource:
   pipe_sampler_view_reference(&sv_idx, NULL);
   pipe_sampler_view_reference(&sv_tbl, NULL);
   return VDP_STATUS_RESOURCES;
}
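To illustrate the upload path above from the caller's perspective, the sketch below pushes an 8x8 I8A8 bitmap whose pixels all use palette entry 0, together with a 256-entry B8G8R8X8 colour table. upload_indexed_square is hypothetical, put_bits is assumed to come from VdpGetProcAddress, and the per-pixel index/alpha byte order is an assumption:

#include <vdpau/vdpau.h>

/* Hypothetical caller-side sketch: 8x8 bitmap in I8A8, assumed here to be
 * one index byte followed by one alpha byte per pixel. */
static VdpStatus
upload_indexed_square(VdpOutputSurfacePutBitsIndexed *put_bits,
                      VdpOutputSurface surface)
{
   uint8_t pixels[8 * 8 * 2];
   uint32_t palette[256] = { 0x00FF0000 }; /* B8G8R8X8 table; only entry 0 is used */
   void const *planes[1] = { pixels };
   uint32_t pitches[1] = { 8 * 2 };
   VdpRect dst = { 0, 0, 8, 8 };
   unsigned i;

   for (i = 0; i < 8 * 8; ++i) {
      pixels[i * 2 + 0] = 0;    /* palette index */
      pixels[i * 2 + 1] = 0xFF; /* alpha */
   }

   return put_bits(surface, VDP_INDEXED_FORMAT_I8A8, planes, pitches, &dst,
                   VDP_COLOR_TABLE_FORMAT_B8G8R8X8, palette);
}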