/* Enable/disable deferral of mixer/VP register updates to vsync.
 *
 * @mdev: mixer device context
 * @en:   non-zero to latch register writes at vsync (shadow update),
 *        zero to apply them immediately
 *
 * NOTE(review): a second definition of this function (with the VP write
 * wrapped in CONFIG_ARCH_EXYNOS4) appears later in this chunk; two
 * definitions of the same external symbol cannot coexist in one
 * translation unit -- confirm which variant is intended to remain.
 */
void mxr_vsync_set_update(struct mxr_device *mdev, int en)
{
	/* block update on vsync */
	mxr_write_mask(mdev, MXR_STATUS, en ? MXR_STATUS_SYNC_ENABLE : 0,
		MXR_STATUS_SYNC_ENABLE);
	vp_write(mdev, VP_SHADOW_UPDATE, en ? VP_SHADOW_UPDATE_ENABLE : 0);
}
/* Enable/disable deferral of mixer/VP register updates to vsync.
 *
 * @mdev: mixer device context
 * @en:   non-zero to latch register writes at vsync (shadow update),
 *        zero to apply them immediately
 *
 * NOTE(review): the VP shadow-update write is compiled only for
 * CONFIG_ARCH_EXYNOS4 -- presumably the VP block (or this register) is
 * Exynos4-specific; confirm against the SoC documentation. Also note a
 * duplicate unguarded definition of this function exists earlier in
 * this chunk.
 */
void mxr_vsync_set_update(struct mxr_device *mdev, int en)
{
	/* block update on vsync */
	mxr_write_mask(mdev, MXR_STATUS, en ? MXR_STATUS_SYNC_ENABLE : 0,
		MXR_STATUS_SYNC_ENABLE);
#if defined(CONFIG_ARCH_EXYNOS4)
	vp_write(mdev, VP_SHADOW_UPDATE, en ? VP_SHADOW_UPDATE_ENABLE : 0);
#endif
}
/* Program the Video Processor frame-buffer addresses.
 *
 * @mdev:        mixer device context
 * @luma_addr:   DMA addresses of the luma plane, [0] = top, [1] = bottom
 * @chroma_addr: DMA addresses of the chroma plane, [0] = top, [1] = bottom
 *
 * A NULL (zero) top luma address disables the VP path entirely.  All
 * writes are performed with vsync updates blocked so the hardware sees
 * a consistent set of registers.
 */
void mxr_reg_vp_buffer(struct mxr_device *mdev,
	dma_addr_t luma_addr[2], dma_addr_t chroma_addr[2])
{
	unsigned long flags;
	u32 enable = luma_addr[0] ? ~0 : 0;

	spin_lock_irqsave(&mdev->reg_slock, flags);
	mxr_vsync_set_update(mdev, MXR_DISABLE);

	/* turn the VP path on/off depending on whether a buffer is set */
	mxr_write_mask(mdev, MXR_CFG, enable, MXR_CFG_VP_ENABLE);
	vp_write_mask(mdev, VP_ENABLE, enable, VP_ENABLE_ON);

	/* TODO: fix tiled mode */
	vp_write(mdev, VP_TOP_Y_PTR, luma_addr[0]);
	vp_write(mdev, VP_TOP_C_PTR, chroma_addr[0]);
	vp_write(mdev, VP_BOT_Y_PTR, luma_addr[1]);
	vp_write(mdev, VP_BOT_C_PTR, chroma_addr[1]);

	mxr_vsync_set_update(mdev, MXR_ENABLE);
	spin_unlock_irqrestore(&mdev->reg_slock, flags);
}
/* Upload a table of filter coefficients to consecutive VP registers.
 *
 * @mdev:   mixer device context
 * @reg_id: offset of the first destination register
 * @data:   coefficient bytes, packed big-endian four at a time
 * @size:   number of bytes in @data; must be a multiple of 4
 */
static inline void mxr_reg_vp_filter_set(struct mxr_device *mdev,
	int reg_id, const u8 *data, unsigned int size)
{
	unsigned int i;

	/* registers are 32-bit wide, so the table must be 4-byte aligned */
	BUG_ON(size & 3);
	for (i = 0; i < size; i += 4) {
		u32 val = (data[i] << 24) | (data[i + 1] << 16) |
			(data[i + 2] << 8) | data[i + 3];
		vp_write(mdev, reg_id + i, val);
	}
}
/* Soft-reset the Video Processor and wait for completion.
 *
 * @mdev: mixer device context
 *
 * Triggers VP_SRESET_PROCESSING and polls until the hardware clears it,
 * for at most 100 tries spaced 10 ms apart (~1 s total).  Emits a WARN
 * if the VP never comes out of reset.
 *
 * Fix: the original initialized @tries twice -- once at declaration and
 * again in the for-loop init -- leaving a dead store; the redundant
 * initializer is removed.
 */
static void __mxr_reg_vp_reset(struct mxr_device *mdev)
{
	int tries;

	vp_write(mdev, VP_SRESET, VP_SRESET_PROCESSING);
	for (tries = 100; tries; --tries) {
		/* waiting until VP_SRESET_PROCESSING is 0 */
		if (~vp_read(mdev, VP_SRESET) & VP_SRESET_PROCESSING)
			break;
		mdelay(10);
	}
	WARN(tries == 0, "failed to reset Video Processor\n");
}
/* Program the Video Processor pixel format and scaling geometry.
 *
 * @mdev: mixer device context
 * @fmt:  pixel format; fmt->cookie carries the raw VP_MODE format bits
 * @geo:  source/destination rectangles and precomputed scaling ratios
 *
 * The entire register sequence runs under reg_slock with vsync updates
 * blocked, so the hardware latches a consistent configuration.
 */
void mxr_reg_vp_format(struct mxr_device *mdev,
	const struct mxr_format *fmt, const struct mxr_geometry *geo)
{
	unsigned long flags;

	spin_lock_irqsave(&mdev->reg_slock, flags);
	mxr_vsync_set_update(mdev, MXR_DISABLE);

	vp_write_mask(mdev, VP_MODE, fmt->cookie, VP_MODE_FMT_MASK);

	/* setting size of input image */
	vp_write(mdev, VP_IMG_SIZE_Y, VP_IMG_HSIZE(geo->src.full_width) |
		VP_IMG_VSIZE(geo->src.full_height));
	/* chroma height has to be reduced by 2 to avoid chroma distortions */
	vp_write(mdev, VP_IMG_SIZE_C, VP_IMG_HSIZE(geo->src.full_width) |
		VP_IMG_VSIZE(geo->src.full_height / 2));

	/* source crop rectangle */
	vp_write(mdev, VP_SRC_WIDTH, geo->src.width);
	vp_write(mdev, VP_SRC_HEIGHT, geo->src.height);
	vp_write(mdev, VP_SRC_H_POSITION,
		VP_SRC_H_POSITION_VAL(geo->src.x_offset));
	vp_write(mdev, VP_SRC_V_POSITION, geo->src.y_offset);

	/* destination rectangle; interlaced output works per-field,
	 * so vertical height and offset are halved */
	vp_write(mdev, VP_DST_WIDTH, geo->dst.width);
	vp_write(mdev, VP_DST_H_POSITION, geo->dst.x_offset);
	if (geo->dst.field == V4L2_FIELD_INTERLACED) {
		vp_write(mdev, VP_DST_HEIGHT, geo->dst.height / 2);
		vp_write(mdev, VP_DST_V_POSITION, geo->dst.y_offset / 2);
	} else {
		vp_write(mdev, VP_DST_HEIGHT, geo->dst.height);
		vp_write(mdev, VP_DST_V_POSITION, geo->dst.y_offset);
	}

	/* scaling ratios precomputed by the caller in geo */
	vp_write(mdev, VP_H_RATIO, geo->x_ratio);
	vp_write(mdev, VP_V_RATIO, geo->y_ratio);

	vp_write(mdev, VP_ENDIAN_MODE, VP_ENDIAN_MODE_LITTLE);

	mxr_vsync_set_update(mdev, MXR_ENABLE);
	spin_unlock_irqrestore(&mdev->reg_slock, flags);
}