/* destroy a plane */
static void xilinx_drm_plane_destroy(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	unsigned int i;

	xilinx_drm_plane_dpms(base_plane, DRM_MODE_DPMS_OFF);

	plane->manager->planes[plane->id] = NULL;

	drm_plane_cleanup(base_plane);

	for (i = 0; i < MAX_NUM_SUB_PLANES; i++)
		if (plane->dma[i].chan)
			dma_release_channel(plane->dma[i].chan);

	if (plane->manager->osd) {
		xilinx_osd_layer_disable(plane->osd_layer);
		xilinx_osd_layer_put(plane->osd_layer);
	}

	if (plane->manager->dp_sub) {
		xilinx_drm_dp_sub_layer_disable(plane->manager->dp_sub,
						plane->dp_layer);
		xilinx_drm_dp_sub_layer_put(plane->manager->dp_sub,
					    plane->dp_layer);
	}
}
/* apply mode to plane pipe */
void xilinx_drm_plane_commit(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct dma_async_tx_descriptor *desc;
	enum dma_ctrl_flags flags;
	unsigned int i;

	DRM_DEBUG_KMS("plane->id: %d\n", plane->id);

	for (i = 0; i < MAX_NUM_SUB_PLANES; i++) {
		struct xilinx_drm_plane_dma *dma = &plane->dma[i];

		if (dma->chan && dma->is_active) {
			flags = DMA_CTRL_ACK | DMA_PREP_INTERRUPT;
			desc = dmaengine_prep_interleaved_dma(dma->chan,
							      &dma->xt,
							      flags);
			if (!desc) {
				DRM_ERROR("failed to prepare DMA descriptor\n");
				return;
			}

			dmaengine_submit(desc);

			dma_async_issue_pending(dma->chan);
		}
	}
}
static void xilinx_drm_plane_set_zpos(struct drm_plane *base_plane,
				      unsigned int zpos)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct xilinx_drm_plane_manager *manager = plane->manager;
	bool update = false;
	int i;

	if (plane->zpos == zpos)
		return;

	for (i = 0; i < manager->num_planes; i++) {
		if (manager->planes[i] != plane &&
		    manager->planes[i]->prio == zpos) {
			update = true;
			break;
		}
	}

	plane->zpos = zpos;

	if (update) {
		xilinx_drm_plane_update_prio(manager);
	} else {
		plane->prio = zpos;
		xilinx_osd_layer_set_priority(plane->osd_layer, plane->prio);
	}
}
/* apply mode to plane pipe */
void xilinx_drm_plane_commit(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct dma_async_tx_descriptor *desc;
	uint32_t height = plane->vdma.dma_config.vsize;
	int pitch = plane->vdma.dma_config.stride;
	size_t offset;

	DRM_DEBUG_KMS("plane->id: %d\n", plane->id);

	/* one frame is 'height' lines of 'pitch' bytes each */
	offset = plane->x * plane->bpp + plane->y * pitch;
	desc = dmaengine_prep_slave_single(plane->vdma.chan,
					   plane->paddr + offset,
					   height * pitch,
					   DMA_MEM_TO_DEV, 0);
	if (!desc) {
		DRM_ERROR("failed to prepare DMA descriptor\n");
		return;
	}

	/* submit vdma desc */
	dmaengine_submit(desc);

	/* start vdma with new mode */
	dma_async_issue_pending(plane->vdma.chan);
}
void xilinx_drm_plane_set_alpha(struct drm_plane *base_plane,
				unsigned int alpha)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);

	if (plane->alpha == alpha)
		return;

	plane->alpha = alpha;

	/* FIXME: use global alpha for now */
	xilinx_osd_layer_set_alpha(plane->osd_layer, 1, plane->alpha);
}
/* attach plane properties */
static void xilinx_drm_plane_attach_property(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct xilinx_drm_plane_manager *manager = plane->manager;

	if (manager->zpos_prop)
		drm_object_attach_property(&base_plane->base,
					   manager->zpos_prop,
					   plane->id);

	if (manager->alpha_prop)
		drm_object_attach_property(&base_plane->base,
					   manager->alpha_prop,
					   manager->default_alpha);
}
static void xilinx_drm_plane_set_alpha(struct drm_plane *base_plane,
				       unsigned int alpha)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct xilinx_drm_plane_manager *manager = plane->manager;

	if (plane->alpha == alpha)
		return;

	plane->alpha = alpha;

	if (plane->osd_layer)
		xilinx_osd_layer_set_alpha(plane->osd_layer, plane->alpha);
	else if (manager->dp_sub)
		xilinx_drm_dp_sub_set_alpha(manager->dp_sub, plane->alpha);
}
static void xilinx_drm_plane_enable_alpha(struct drm_plane *base_plane,
					  bool enable)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct xilinx_drm_plane_manager *manager = plane->manager;

	if (plane->alpha_enable == enable)
		return;

	plane->alpha_enable = enable;

	if (plane->osd_layer)
		xilinx_osd_layer_enable_alpha(plane->osd_layer, enable);
	else if (manager->dp_sub)
		xilinx_drm_dp_sub_enable_alpha(manager->dp_sub, enable);
}
/* destroy a plane */
static void xilinx_drm_plane_destroy(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);

	xilinx_drm_plane_dpms(base_plane, DRM_MODE_DPMS_OFF);

	plane->manager->planes[plane->id] = NULL;

	drm_plane_cleanup(base_plane);

	dma_release_channel(plane->dma.chan);

	if (plane->manager->osd) {
		xilinx_osd_layer_disable(plane->osd_layer);
		xilinx_osd_layer_put(plane->osd_layer);
	}
}
/* set property of a plane */
static int xilinx_drm_plane_set_property(struct drm_plane *base_plane,
					 struct drm_property *property,
					 uint64_t val)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct xilinx_drm_plane_manager *manager = plane->manager;

	if (property == manager->zpos_prop)
		xilinx_drm_plane_set_zpos(base_plane, val);
	else if (property == manager->alpha_prop)
		xilinx_drm_plane_set_alpha(base_plane, val);
	else
		return -EINVAL;

	drm_object_property_set_value(&base_plane->base, property, val);

	return 0;
}
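/*
 * For reference, a minimal sketch of how the zpos/alpha range properties
 * consumed above could be created on the manager side. The helper name
 * xilinx_drm_plane_create_property() and the manager->drm field are
 * assumptions made for illustration; only drm_property_create_range() is a
 * standard DRM API here.
 */
static void xilinx_drm_plane_create_property(struct xilinx_drm_plane_manager *manager)
{
	/* z-position ranges over the available plane slots */
	manager->zpos_prop = drm_property_create_range(manager->drm, 0, "zpos",
						       0,
						       manager->num_planes - 1);

	/* alpha ranges from fully transparent to the hardware maximum */
	manager->alpha_prop = drm_property_create_range(manager->drm, 0,
							"alpha", 0,
							manager->default_alpha);
}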
/* apply mode to plane pipe */
void xilinx_drm_plane_commit(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct dma_async_tx_descriptor *desc;
	enum dma_ctrl_flags flags;

	DRM_DEBUG_KMS("plane->id: %d\n", plane->id);

	flags = DMA_CTRL_ACK | DMA_PREP_INTERRUPT;
	desc = dmaengine_prep_interleaved_dma(plane->dma.chan, &plane->dma.xt,
					      flags);
	if (!desc) {
		DRM_ERROR("failed to prepare DMA descriptor\n");
		return;
	}

	/* submit dma desc */
	dmaengine_submit(desc);

	/* start dma with new mode */
	dma_async_issue_pending(plane->dma.chan);
}
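/*
 * A minimal sketch of how these helpers would typically be wired into the
 * legacy (pre-atomic) plane vtable. The wrapper names
 * xilinx_drm_plane_update() and xilinx_drm_plane_disable_plane() are assumed
 * here and are not shown in this file; only the drm_plane_funcs callbacks
 * themselves are standard DRM.
 */
static struct drm_plane_funcs xilinx_drm_plane_funcs = {
	.update_plane	= xilinx_drm_plane_update,
	.disable_plane	= xilinx_drm_plane_disable_plane,
	.destroy	= xilinx_drm_plane_destroy,
	.set_property	= xilinx_drm_plane_set_property,
};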
/* attach plane properties */
static void xilinx_drm_plane_attach_property(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct xilinx_drm_plane_manager *manager = plane->manager;

	if (manager->zpos_prop)
		drm_object_attach_property(&base_plane->base,
					   manager->zpos_prop,
					   plane->id);

	if (manager->alpha_prop) {
		if (manager->dp_sub && !plane->primary)
			return;

		drm_object_attach_property(&base_plane->base,
					   manager->alpha_prop,
					   manager->default_alpha);
		drm_object_attach_property(&base_plane->base,
					   manager->alpha_enable_prop, false);
	}

	plane->alpha_enable = true;
}
/* set plane dpms */
void xilinx_drm_plane_dpms(struct drm_plane *base_plane, int dpms)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct xilinx_drm_plane_manager *manager = plane->manager;
	unsigned int i;

	DRM_DEBUG_KMS("plane->id: %d\n", plane->id);
	DRM_DEBUG_KMS("dpms: %d -> %d\n", plane->dpms, dpms);

	if (plane->dpms == dpms)
		return;

	plane->dpms = dpms;
	switch (dpms) {
	case DRM_MODE_DPMS_ON:
		if (manager->dp_sub) {
			if (plane->primary) {
				xilinx_drm_dp_sub_enable_alpha(manager->dp_sub,
							       plane->alpha_enable);
				xilinx_drm_dp_sub_set_alpha(manager->dp_sub,
							    plane->alpha);
			}
			xilinx_drm_dp_sub_layer_enable(manager->dp_sub,
						       plane->dp_layer);
		}

		/* start dma engine */
		for (i = 0; i < MAX_NUM_SUB_PLANES; i++)
			if (plane->dma[i].chan && plane->dma[i].is_active)
				dma_async_issue_pending(plane->dma[i].chan);

		if (plane->rgb2yuv)
			xilinx_rgb2yuv_enable(plane->rgb2yuv);

		if (plane->cresample)
			xilinx_cresample_enable(plane->cresample);

		/* enable osd */
		if (manager->osd) {
			xilinx_osd_disable_rue(manager->osd);

			xilinx_osd_layer_set_priority(plane->osd_layer,
						      plane->prio);
			xilinx_osd_layer_enable_alpha(plane->osd_layer,
						      plane->alpha_enable);
			xilinx_osd_layer_set_alpha(plane->osd_layer,
						   plane->alpha);
			xilinx_osd_layer_enable(plane->osd_layer);

			xilinx_osd_enable_rue(manager->osd);
		}
		break;
	default:
		/* disable/reset osd */
		if (manager->osd) {
			xilinx_osd_disable_rue(manager->osd);

			xilinx_osd_layer_set_dimension(plane->osd_layer,
						       0, 0, 0, 0);
			xilinx_osd_layer_disable(plane->osd_layer);

			xilinx_osd_enable_rue(manager->osd);
		}

		if (plane->cresample) {
			xilinx_cresample_disable(plane->cresample);
			xilinx_cresample_reset(plane->cresample);
		}

		if (plane->rgb2yuv) {
			xilinx_rgb2yuv_disable(plane->rgb2yuv);
			xilinx_rgb2yuv_reset(plane->rgb2yuv);
		}

		/* stop dma engine and release descriptors */
		for (i = 0; i < MAX_NUM_SUB_PLANES; i++) {
			if (plane->dma[i].chan && plane->dma[i].is_active) {
				dmaengine_terminate_all(plane->dma[i].chan);
				plane->dma[i].is_active = false;
			}
		}

		if (manager->dp_sub)
			xilinx_drm_dp_sub_layer_disable(manager->dp_sub,
							plane->dp_layer);
		break;
	}
}
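/*
 * Note on the xilinx_osd_disable_rue()/xilinx_osd_enable_rue() bracketing
 * used above and in the OSD paths below: RUE is understood here as the OSD
 * core's register update enable control, so changes to layer priority, alpha
 * and dimensions are batched and latched together when RUE is re-enabled,
 * rather than taking effect register by register mid-frame.
 */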
/* get the plane format */
uint32_t xilinx_drm_plane_get_format(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);

	return plane->format;
}
/**
 * xilinx_drm_plane_get_align - Get the alignment value for pitch
 * @base_plane: Base drm plane object
 *
 * Get the alignment value for pitch from the dma device
 *
 * Return: The alignment value if successful, or the error code.
 */
unsigned int xilinx_drm_plane_get_align(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);

	return 1 << plane->dma[0].chan->device->copy_align;
}
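/*
 * A small illustrative sketch (not part of this driver) of how the returned
 * alignment would typically be applied when computing a framebuffer pitch.
 * The helper name and the width/cpp parameters are assumptions; ALIGN() is
 * the standard kernel round-up macro.
 */
static inline unsigned int xilinx_drm_plane_example_pitch(struct drm_plane *base_plane,
							  unsigned int width,
							  unsigned int cpp)
{
	unsigned int align = xilinx_drm_plane_get_align(base_plane);

	/* round the raw line size up to the DMA engine's copy alignment */
	return ALIGN(width * cpp, align);
}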
/* set plane dpms */
void xilinx_drm_plane_dpms(struct drm_plane *base_plane, int dpms)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct xilinx_drm_plane_manager *manager = plane->manager;

	DRM_DEBUG_KMS("plane->id: %d\n", plane->id);
	DRM_DEBUG_KMS("dpms: %d -> %d\n", plane->dpms, dpms);

	if (plane->dpms == dpms)
		return;

	plane->dpms = dpms;
	switch (dpms) {
	case DRM_MODE_DPMS_ON:
		/* start dma engine */
		dma_async_issue_pending(plane->dma.chan);

		if (plane->rgb2yuv)
			xilinx_rgb2yuv_enable(plane->rgb2yuv);

		if (plane->cresample)
			xilinx_cresample_enable(plane->cresample);

		/* enable osd */
		if (manager->osd) {
			xilinx_osd_disable_rue(manager->osd);

			xilinx_osd_layer_set_priority(plane->osd_layer,
						      plane->prio);
			xilinx_osd_layer_set_alpha(plane->osd_layer, 1,
						   plane->alpha);
			xilinx_osd_layer_enable(plane->osd_layer);

			if (plane->priv) {
				/* set background color as black */
				xilinx_osd_set_color(manager->osd, 0x0, 0x0,
						     0x0);
				xilinx_osd_enable(manager->osd);
			}

			xilinx_osd_enable_rue(manager->osd);
		}
		break;
	default:
		/* disable/reset osd */
		if (manager->osd) {
			xilinx_osd_disable_rue(manager->osd);

			xilinx_osd_layer_set_dimension(plane->osd_layer,
						       0, 0, 0, 0);
			xilinx_osd_layer_disable(plane->osd_layer);

			if (plane->priv)
				xilinx_osd_reset(manager->osd);

			xilinx_osd_enable_rue(manager->osd);
		}

		if (plane->cresample) {
			xilinx_cresample_disable(plane->cresample);
			xilinx_cresample_reset(plane->cresample);
		}

		if (plane->rgb2yuv) {
			xilinx_rgb2yuv_disable(plane->rgb2yuv);
			xilinx_rgb2yuv_reset(plane->rgb2yuv);
		}

		/* stop dma engine and release descriptors */
		dmaengine_terminate_all(plane->dma.chan);
		break;
	}
}
/* get a plane max width */
int xilinx_drm_plane_get_max_width(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);

	return plane->manager->max_width;
}
/* mode set a plane */
int xilinx_drm_plane_mode_set(struct drm_plane *base_plane,
			      struct drm_framebuffer *fb,
			      int crtc_x, int crtc_y,
			      unsigned int crtc_w, unsigned int crtc_h,
			      uint32_t src_x, uint32_t src_y,
			      uint32_t src_w, uint32_t src_h)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct drm_gem_cma_object *obj;
	size_t offset;

	DRM_DEBUG_KMS("plane->id: %d\n", plane->id);

	if (fb->pixel_format != plane->format) {
		DRM_ERROR("unsupported pixel format %08x\n", fb->pixel_format);
		return -EINVAL;
	}

	/* configure cresample */
	if (plane->cresample)
		xilinx_cresample_configure(plane->cresample, crtc_w, crtc_h);

	/* configure rgb2yuv */
	if (plane->rgb2yuv)
		xilinx_rgb2yuv_configure(plane->rgb2yuv, crtc_w, crtc_h);

	obj = drm_fb_cma_get_gem_obj(fb, 0);
	if (!obj) {
		DRM_ERROR("failed to get a gem obj for fb\n");
		return -EINVAL;
	}

	DRM_DEBUG_KMS("h: %d(%d), v: %d(%d), paddr: %p\n",
		      src_w, crtc_x, src_h, crtc_y, (void *)obj->paddr);
	DRM_DEBUG_KMS("bpp: %d\n", fb->bits_per_pixel / 8);

	/* configure dma desc */
	plane->dma.xt.numf = src_h;
	plane->dma.sgl[0].size = src_w * fb->bits_per_pixel / 8;
	plane->dma.sgl[0].icg = fb->pitches[0] - plane->dma.sgl[0].size;
	offset = src_x * fb->bits_per_pixel / 8 + src_y * fb->pitches[0];
	plane->dma.xt.src_start = obj->paddr + offset;
	plane->dma.xt.frame_size = 1;
	plane->dma.xt.dir = DMA_MEM_TO_DEV;
	plane->dma.xt.src_sgl = true;
	plane->dma.xt.dst_sgl = false;

	/* set OSD dimensions */
	if (plane->manager->osd) {
		xilinx_osd_disable_rue(plane->manager->osd);

		/* if a plane is private, it's for crtc */
		if (plane->priv)
			xilinx_osd_set_dimension(plane->manager->osd,
						 crtc_w, crtc_h);

		xilinx_osd_layer_set_dimension(plane->osd_layer, crtc_x, crtc_y,
					       src_w, src_h);

		xilinx_osd_enable_rue(plane->manager->osd);
	}

	return 0;
}
/* get the max alpha value */
unsigned int xilinx_drm_plane_get_max_alpha(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);

	return plane->manager->default_alpha;
}
/* get the default z-position value which is the plane id */
unsigned int xilinx_drm_plane_get_default_zpos(struct drm_plane *base_plane)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);

	return plane->id;
}
/* mode set a plane */
int xilinx_drm_plane_mode_set(struct drm_plane *base_plane,
			      struct drm_framebuffer *fb,
			      int crtc_x, int crtc_y,
			      unsigned int crtc_w, unsigned int crtc_h,
			      uint32_t src_x, uint32_t src_y,
			      uint32_t src_w, uint32_t src_h)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct drm_gem_cma_object *obj;
	size_t offset;
	unsigned int hsub, vsub, i;

	DRM_DEBUG_KMS("plane->id: %d\n", plane->id);

	if (fb->pixel_format != plane->format) {
		DRM_ERROR("unsupported pixel format %08x\n", fb->pixel_format);
		return -EINVAL;
	}

	/* configure cresample */
	if (plane->cresample)
		xilinx_cresample_configure(plane->cresample, crtc_w, crtc_h);

	/* configure rgb2yuv */
	if (plane->rgb2yuv)
		xilinx_rgb2yuv_configure(plane->rgb2yuv, crtc_w, crtc_h);

	DRM_DEBUG_KMS("h: %d(%d), v: %d(%d)\n", src_w, crtc_x, src_h, crtc_y);
	DRM_DEBUG_KMS("bpp: %d\n", fb->bits_per_pixel / 8);

	hsub = drm_format_horz_chroma_subsampling(fb->pixel_format);
	vsub = drm_format_vert_chroma_subsampling(fb->pixel_format);

	for (i = 0; i < drm_format_num_planes(fb->pixel_format); i++) {
		unsigned int width = src_w / (i ? hsub : 1);
		unsigned int height = src_h / (i ? vsub : 1);
		unsigned int cpp = drm_format_plane_cpp(fb->pixel_format, i);

		obj = xilinx_drm_fb_get_gem_obj(fb, i);
		if (!obj) {
			DRM_ERROR("failed to get a gem obj for fb\n");
			return -EINVAL;
		}

		plane->dma[i].xt.numf = height;
		plane->dma[i].sgl[0].size = width * cpp;
		plane->dma[i].sgl[0].icg = fb->pitches[i] -
					   plane->dma[i].sgl[0].size;
		offset = src_x * cpp + src_y * fb->pitches[i];
		offset += fb->offsets[i];
		plane->dma[i].xt.src_start = obj->paddr + offset;
		plane->dma[i].xt.frame_size = 1;
		plane->dma[i].xt.dir = DMA_MEM_TO_DEV;
		plane->dma[i].xt.src_sgl = true;
		plane->dma[i].xt.dst_sgl = false;
		plane->dma[i].is_active = true;
	}

	for (; i < MAX_NUM_SUB_PLANES; i++)
		plane->dma[i].is_active = false;

	/* set OSD dimensions */
	if (plane->manager->osd) {
		xilinx_osd_disable_rue(plane->manager->osd);

		xilinx_osd_layer_set_dimension(plane->osd_layer, crtc_x, crtc_y,
					       src_w, src_h);

		xilinx_osd_enable_rue(plane->manager->osd);
	}

	if (plane->manager->dp_sub) {
		int ret;

		ret = xilinx_drm_dp_sub_layer_check_size(plane->manager->dp_sub,
							 plane->dp_layer,
							 src_w, src_h);
		if (ret)
			return ret;
	}

	return 0;
}
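/*
 * Worked example (illustrative only, not part of the driver): for an NV12
 * framebuffer with src_w = 1920, src_h = 1080 and pitches[] = {1920, 1920},
 * the loop above fills one interleaved template per sub-plane roughly as:
 *
 *   i = 0 (Y):    cpp = 1, numf = 1080, sgl[0].size = 1920, sgl[0].icg = 0
 *   i = 1 (CbCr): hsub = vsub = 2, cpp = 2, numf = 540,
 *                 sgl[0].size = 960 * 2 = 1920, sgl[0].icg = 0
 *
 * Each template thus describes numf lines of size bytes separated by icg
 * padding bytes, which dmaengine_prep_interleaved_dma() in the commit path
 * turns into one frame-sized transfer per sub-plane.
 */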
/* mode set a plane */
int xilinx_drm_plane_mode_set(struct drm_plane *base_plane,
			      struct drm_framebuffer *fb,
			      int crtc_x, int crtc_y,
			      unsigned int crtc_w, unsigned int crtc_h,
			      uint32_t src_x, uint32_t src_y,
			      uint32_t src_w, uint32_t src_h)
{
	struct xilinx_drm_plane *plane = to_xilinx_plane(base_plane);
	struct drm_gem_cma_object *obj;

	DRM_DEBUG_KMS("plane->id: %d\n", plane->id);

	if (fb->pixel_format != plane->format) {
		DRM_ERROR("unsupported pixel format %08x\n", fb->pixel_format);
		return -EINVAL;
	}

	/* configure cresample */
	if (plane->cresample)
		xilinx_cresample_configure(plane->cresample, crtc_w, crtc_h);

	/* configure rgb2yuv */
	if (plane->rgb2yuv)
		xilinx_rgb2yuv_configure(plane->rgb2yuv, crtc_w, crtc_h);

	obj = drm_fb_cma_get_gem_obj(fb, 0);
	if (!obj) {
		DRM_ERROR("failed to get a gem obj for fb\n");
		return -EINVAL;
	}

	plane->x = src_x;
	plane->y = src_y;
	plane->bpp = fb->bits_per_pixel / 8;
	plane->paddr = obj->paddr;

	DRM_DEBUG_KMS("h: %d(%d), v: %d(%d), paddr: %p\n",
		      src_w, crtc_x, src_h, crtc_y, (void *)obj->paddr);
	DRM_DEBUG_KMS("bpp: %d\n", plane->bpp);

	/* configure vdma desc */
	plane->vdma.dma_config.hsize = src_w * plane->bpp;
	plane->vdma.dma_config.vsize = src_h;
	plane->vdma.dma_config.stride = fb->pitches[0];
	plane->vdma.dma_config.park = 1;
	plane->vdma.dma_config.park_frm = 0;

	dmaengine_device_control(plane->vdma.chan, DMA_SLAVE_CONFIG,
				 (unsigned long)&plane->vdma.dma_config);

	/* set OSD dimensions */
	if (plane->manager->osd) {
		xilinx_osd_disable_rue(plane->manager->osd);

		/* if a plane is private, it's for crtc */
		if (plane->priv)
			xilinx_osd_set_dimension(plane->manager->osd,
						 crtc_w, crtc_h);

		xilinx_osd_layer_set_dimension(plane->osd_layer, crtc_x, crtc_y,
					       src_w, src_h);

		xilinx_osd_enable_rue(plane->manager->osd);
	}

	return 0;
}