static int gsc_capture_subdev_s_stream(struct v4l2_subdev *sd, int enable)
{
	struct gsc_dev *gsc = v4l2_get_subdevdata(sd);
	struct gsc_capture_device *cap = &gsc->cap;
	struct gsc_ctx *ctx = cap->ctx;

	if (enable) {
		gsc_dbg("start");

		/* Unmask the frame done and overflow interrupts */
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_overflow_irq_mask(gsc, false);
		gsc_hw_set_one_frm_mode(gsc, false);
		gsc_hw_set_gsc_irq_enable(gsc, true);

		if (gsc->pipeline.disp) {
			gsc_hw_set_sysreg_writeback(gsc);
			gsc_hw_set_pixelasync_reset_wb(gsc);
		} else {
			gsc_hw_set_pxlasync_camif_lo_mask(gsc, true);
		}

		/* Configure the input/output paths, sizes and pixel formats */
		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);
		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);
		gsc_hw_set_global_alpha(ctx);

		/* Program the pre-scaler, main scaler and filter coefficients */
		gsc_capture_scaler_info(ctx);
		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_h_coef(ctx);
		gsc_hw_set_v_coef(ctx);

		set_bit(ST_CAPT_PEND, &gsc->state);

		gsc_hw_enable_control(gsc, true);
		set_bit(ST_CAPT_STREAM, &gsc->state);
	} else {
		gsc_dbg("stop");
	}

	return 0;
}
static void gsc_m2m_device_run(void *priv)
{
	struct gsc_ctx *ctx = priv;
	struct gsc_dev *gsc;
	unsigned long flags;
	int ret;
	bool is_set = false;

	if (WARN(!ctx, "null hardware context\n"))
		return;

	gsc = ctx->gsc_dev;
	spin_lock_irqsave(&gsc->slock, flags);

	set_bit(ST_M2M_PEND, &gsc->state);

	/* Reconfigure hardware if the context has changed. */
	if (gsc->m2m.ctx != ctx) {
		pr_debug("gsc->m2m.ctx = 0x%p, current_ctx = 0x%p",
			 gsc->m2m.ctx, ctx);
		ctx->state |= GSC_PARAMS;
		gsc->m2m.ctx = ctx;
	}

	is_set = ctx->state & GSC_CTX_STOP_REQ;
	if (is_set) {
		ctx->state &= ~GSC_CTX_STOP_REQ;
		ctx->state |= GSC_CTX_ABORT;
		wake_up(&gsc->irq_queue);
		goto put_device;
	}

	ret = gsc_get_bufs(ctx);
	if (ret) {
		pr_err("Wrong address");
		goto put_device;
	}

	gsc_set_prefbuf(gsc, &ctx->s_frame);
	gsc_hw_set_input_addr(gsc, &ctx->s_frame.addr, GSC_M2M_BUF_NUM);
	gsc_hw_set_output_addr(gsc, &ctx->d_frame.addr, GSC_M2M_BUF_NUM);

	if (ctx->state & GSC_PARAMS) {
		gsc_hw_set_input_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_output_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_gsc_irq_enable(gsc, true);

		if (gsc_set_scaler_info(ctx)) {
			pr_err("Scaler setup error");
			goto put_device;
		}

		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);
		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);
		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_rotation(ctx);
		gsc_hw_set_global_alpha(ctx);
	}

	/* update shadow registers */
	gsc_hw_set_sfr_update(ctx);

	ctx->state &= ~GSC_PARAMS;
	gsc_hw_enable_control(gsc, true);

	spin_unlock_irqrestore(&gsc->slock, flags);
	return;

put_device:
	ctx->state &= ~GSC_PARAMS;
	spin_unlock_irqrestore(&gsc->slock, flags);
}
static void gsc_m2m_device_run(void *priv)
{
	struct gsc_ctx *ctx = priv;
	struct gsc_dev *gsc;
	unsigned long flags;
	int ret;
	bool is_set = false;

	if (WARN(!ctx, "null hardware context\n"))
		return;

	gsc = ctx->gsc_dev;

	/* pm_runtime_get_sync() may sleep; use the asynchronous get in IRQ context */
	if (in_irq())
		ret = pm_runtime_get(&gsc->pdev->dev);
	else
		ret = pm_runtime_get_sync(&gsc->pdev->dev);
	if (ret < 0) {
		gsc_err("fail to pm_runtime_get");
		return;
	}

	spin_lock_irqsave(&ctx->slock, flags);

	/* Reconfigure hardware if the context has changed. */
	if (gsc->m2m.ctx != ctx) {
		gsc_dbg("gsc->m2m.ctx = 0x%p, current_ctx = 0x%p",
			gsc->m2m.ctx, ctx);
		ctx->state |= GSC_PARAMS;
		gsc->m2m.ctx = ctx;
	}

	is_set = (ctx->state & GSC_CTX_STOP_REQ) ? 1 : 0;
	ctx->state &= ~GSC_CTX_STOP_REQ;
	if (is_set) {
		wake_up(&gsc->irq_queue);
		goto put_device;
	}

	ret = gsc_fill_addr(ctx);
	if (ret) {
		gsc_err("Wrong address");
		goto put_device;
	}

	if (ctx->state & GSC_PARAMS) {
		/* Reset the GScaler before reprogramming the whole pipeline */
		gsc_hw_set_sw_reset(gsc);
		ret = gsc_wait_reset(gsc);
		if (ret < 0) {
			gsc_err("gscaler s/w reset timeout");
			goto put_device;
		}

		gsc_hw_set_input_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_output_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_deadlock_irq_mask(gsc, false);
		gsc_hw_set_read_slave_error_mask(gsc, false);
		gsc_hw_set_write_slave_error_mask(gsc, false);
		gsc_hw_set_gsc_irq_enable(gsc, true);
		gsc_hw_set_one_frm_mode(gsc, true);
		gsc_hw_set_freerun_clock_mode(gsc, false);

		if (gsc_set_scaler_info(ctx)) {
			gsc_err("Scaler setup error");
			goto put_device;
		}

		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);
		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);
		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_h_coef(ctx);
		gsc_hw_set_v_coef(ctx);

		if (ctx->scaler.is_scaled_down)
			gsc_hw_set_output_rotation(ctx);
		else
			gsc_hw_set_input_rotation(ctx);

		gsc_hw_set_global_alpha(ctx);

		if (is_rotation) {
			ret = gsc_check_rotation_size(ctx);
			if (ret < 0)
				goto put_device;
		}
	}

	gsc_hw_set_input_addr(gsc, &ctx->s_frame.addr, GSC_M2M_BUF_NUM);
	gsc_hw_set_output_addr(gsc, &ctx->d_frame.addr, GSC_M2M_BUF_NUM);

	ctx->state &= ~GSC_PARAMS;

	if (!test_and_set_bit(ST_M2M_RUN, &gsc->state)) {
		/* One frame mode sequence:
		 * GSCALER_ON on -> GSCALER_OP_STATUS is operating -> GSCALER_ON off
		 */
		gsc_hw_enable_control(gsc, true);
#ifdef GSC_PERF
		gsc->start_time = sched_clock();
#endif
		ret = gsc_wait_operating(gsc);
		if (ret < 0) {
			gsc_err("gscaler wait operating timeout");
			goto put_device;
		}
		/* Arm the operation watchdog timer (2 second timeout) */
		gsc->op_timer.expires = (jiffies + 2 * HZ);
		mod_timer(&gsc->op_timer, gsc->op_timer.expires);
	}

	spin_unlock_irqrestore(&ctx->slock, flags);

	iovmm_set_fault_handler(&gsc->pdev->dev,
				gsc_sysmmu_m2m_fault_handler, ctx);
	return;

put_device:
	ctx->state &= ~GSC_PARAMS;
	spin_unlock_irqrestore(&ctx->slock, flags);
	pm_runtime_put_sync(&gsc->pdev->dev);
}
static void gsc_m2m_device_run(void *priv)
{
	struct gsc_ctx *ctx = priv;
	struct gsc_dev *gsc;
	unsigned long flags;
	int ret;
	bool is_set = false;

	if (WARN(!ctx, "null hardware context\n"))
		return;

	gsc = ctx->gsc_dev;

	/* pm_runtime_get_sync() may sleep; use the asynchronous get in IRQ context */
	if (!in_irq()) {
		pm_runtime_get_sync(&gsc->pdev->dev);
		gsc->runtime_get_cnt++;
	} else {
		pm_runtime_get(&gsc->pdev->dev);
		gsc->runtime_get_cnt++;
		gsc_info("irq context");
	}

#ifdef CONFIG_SOC_EXYNOS5420
	/* Restore the GScaler clock parent if the mux setting was lost */
	if (!(readl(EXYNOS5_CLKSRC_TOP5) & (1 << 28))) {
		if (clk_set_parent(gsc->clock[CLK_CHILD],
				   gsc->clock[CLK_PARENT])) {
			u32 reg = readl(EXYNOS5_CLKSRC_TOP5);
			reg |= (1 << 28);
			writel(reg, EXYNOS5_CLKSRC_TOP5);
			gsc_err("Unable to set parent of gsc");
		}
		gsc_err("get_cnt : %d, put_cnt : %d",
			gsc->runtime_get_cnt, gsc->runtime_put_cnt);
		gsc_err("state : 0x%lx", gsc->state);
	}
#endif

	spin_lock_irqsave(&ctx->slock, flags);

	/* Reconfigure hardware if the context has changed. */
	if (gsc->m2m.ctx != ctx) {
		gsc_dbg("gsc->m2m.ctx = 0x%p, current_ctx = 0x%p",
			gsc->m2m.ctx, ctx);
		ctx->state |= GSC_PARAMS;
		gsc->m2m.ctx = ctx;
	}

	is_set = (ctx->state & GSC_CTX_STOP_REQ) ? 1 : 0;
	ctx->state &= ~GSC_CTX_STOP_REQ;
	if (is_set) {
		wake_up(&gsc->irq_queue);
		goto put_device;
	}

	ret = gsc_fill_addr(ctx);
	if (ret) {
		gsc_err("Wrong address");
		goto put_device;
	}

	/* Set up SysMMU prefetch buffers for the source frame (skipped for protected content) */
	if (!gsc->protected_content) {
		struct gsc_frame *frame = &ctx->s_frame;
		exynos_sysmmu_set_pbuf(&gsc->pdev->dev, frame->fmt->nr_comp,
				       ctx->prebuf);
	}

	if (ctx->state & GSC_PARAMS) {
		gsc_hw_set_sw_reset(gsc);
		ret = gsc_wait_reset(gsc);
		if (ret < 0) {
			gsc_err("gscaler s/w reset timeout");
			goto put_device;
		}

		gsc_hw_set_input_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_output_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_gsc_irq_enable(gsc, true);
		gsc_hw_set_one_frm_mode(gsc, true);
		gsc_hw_set_freerun_clock_mode(gsc, false);

		if (gsc_set_scaler_info(ctx)) {
			gsc_err("Scaler setup error");
			goto put_device;
		}

		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);
		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);
		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_h_coef(ctx);
		gsc_hw_set_v_coef(ctx);
		gsc_hw_set_rotation(ctx);
		gsc_hw_set_global_alpha(ctx);
	}

	gsc_hw_set_input_addr(gsc, &ctx->s_frame.addr, GSC_M2M_BUF_NUM);
	gsc_hw_set_output_addr(gsc, &ctx->d_frame.addr, GSC_M2M_BUF_NUM);

	ctx->state &= ~GSC_PARAMS;

	if (!test_and_set_bit(ST_M2M_RUN, &gsc->state)) {
		/* One frame mode sequence:
		 * GSCALER_ON on -> GSCALER_OP_STATUS is operating -> GSCALER_ON off
		 */
		gsc_hw_enable_control(gsc, true);
#ifdef GSC_PERF
		gsc->start_time = sched_clock();
#endif
		ret = gsc_wait_operating(gsc);
		if (ret < 0) {
			gsc_err("gscaler wait operating timeout");
			goto put_device;
		}
		gsc->op_timer.expires = (jiffies + 2 * HZ);
		add_timer(&gsc->op_timer);
	}

	spin_unlock_irqrestore(&ctx->slock, flags);
	return;

put_device:
	ctx->state &= ~GSC_PARAMS;
	spin_unlock_irqrestore(&ctx->slock, flags);
	pm_runtime_put_sync(&gsc->pdev->dev);
}
static int gsc_capture_subdev_s_stream(struct v4l2_subdev *sd, int enable)
{
	struct gsc_dev *gsc = v4l2_get_subdevdata(sd);
	struct gsc_capture_device *cap = &gsc->cap;
	struct gsc_ctx *ctx = cap->ctx;
	int ret;

	if (enable) {
		/* Select the local input path with system register write-back */
		gsc_hw_set_local_src(gsc, true);
		gsc_hw_set_sysreg_writeback(gsc, true);

		gsc_hw_set_sw_reset(gsc);
		ret = gsc_wait_reset(gsc);
		if (ret < 0) {
			gsc_err("gscaler s/w reset timeout");
			return ret;
		}

		ret = gsc_set_scaler_info(ctx);
		if (ret) {
			gsc_err("Scaler setup error");
			return ret;
		}

		gsc_hw_set_output_buf_fixed(gsc);
		gsc_hw_set_output_buf_masking(gsc, 0, false);

		/* Set the interrupt masks and enable the GScaler IRQ */
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_deadlock_irq_mask(gsc, false);
		gsc_hw_set_read_slave_error_mask(gsc, false);
		gsc_hw_set_write_slave_error_mask(gsc, false);
		gsc_hw_set_overflow_irq_mask(gsc, true);
		gsc_hw_set_gsc_irq_enable(gsc, true);

		gsc_hw_set_one_frm_mode(gsc, true);
		gsc_hw_set_freerun_clock_mode(gsc, false);

		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);
		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);
		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_h_coef(ctx);
		gsc_hw_set_v_coef(ctx);
		gsc_hw_set_output_rotation(ctx);
		gsc_hw_set_global_alpha(ctx);

		if (is_rotation) {
			ret = gsc_check_rotation_size(ctx);
			if (ret < 0) {
				gsc_err("Scaler setup error");
				return ret;
			}
		}

		/* Write-back, smart interface and bus/QoS configuration */
		gsc_hw_set_for_wb(gsc);
		gsc_hw_set_lookup_table(gsc);
		gsc_hw_set_smart_if_con(gsc, true);
		gsc_hw_set_buscon_realtime(gsc);
		gsc_hw_set_qos_enable(gsc);
	}

	return 0;
}
static void gsc_m2m_device_run(void *priv)
{
	struct gsc_ctx *ctx = priv;
	struct gsc_dev *gsc;
	unsigned long flags;
	int ret = 0;
	bool is_set = false;

	if (WARN(!ctx, "null hardware context\n"))
		return;

	gsc = ctx->gsc_dev;
	spin_lock_irqsave(&ctx->slock, flags);

	/* Reconfigure hardware if the context has changed. */
	if (gsc->m2m.ctx != ctx) {
		gsc_dbg("gsc->m2m.ctx = 0x%p, current_ctx = 0x%p",
			gsc->m2m.ctx, ctx);
		ctx->state |= GSC_PARAMS;
		gsc->m2m.ctx = ctx;
	}

	is_set = (ctx->state & GSC_CTX_STOP_REQ) ? 1 : 0;
	ctx->state &= ~GSC_CTX_STOP_REQ;
	if (is_set) {
		wake_up(&gsc->irq_queue);
		goto put_device;
	}

	ret = gsc_get_bufs(ctx);
	if (ret) {
		gsc_err("Wrong address");
		goto put_device;
	}

	gsc_set_prefbuf(gsc, ctx->s_frame);
	gsc_hw_set_input_addr(gsc, &ctx->s_frame.addr, GSC_M2M_BUF_NUM);
	gsc_hw_set_output_addr(gsc, &ctx->d_frame.addr, GSC_M2M_BUF_NUM);

	if (ctx->state & GSC_PARAMS) {
		gsc_hw_set_input_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_output_buf_masking(gsc, GSC_M2M_BUF_NUM, false);
		gsc_hw_set_frm_done_irq_mask(gsc, false);
		gsc_hw_set_gsc_irq_enable(gsc, true);

		if (gsc_set_scaler_info(ctx)) {
			gsc_err("Scaler setup error");
			goto put_device;
		}

		gsc_hw_set_input_path(ctx);
		gsc_hw_set_in_size(ctx);
		gsc_hw_set_in_image_format(ctx);
		gsc_hw_set_output_path(ctx);
		gsc_hw_set_out_size(ctx);
		gsc_hw_set_out_image_format(ctx);
		gsc_hw_set_prescaler(ctx);
		gsc_hw_set_mainscaler(ctx);
		gsc_hw_set_rotation(ctx);
		gsc_hw_set_global_alpha(ctx);
	}

	/*
	 * When you update SFRs in the middle of operating
	 * gsc_hw_set_sfr_update(ctx);
	 */

	ctx->state &= ~GSC_PARAMS;

	if (!test_and_set_bit(ST_M2M_RUN, &gsc->state)) {
		/* One frame mode sequence:
		 * GSCALER_ON on -> GSCALER_OP_STATUS is operating -> GSCALER_ON off
		 */
		gsc_hw_enable_control(gsc, true);
		ret = gsc_wait_operating(gsc);
		if (ret < 0) {
			gsc_err("gscaler wait operating timeout");
			goto put_device;
		}
		gsc_hw_enable_control(gsc, false);
	}

	spin_unlock_irqrestore(&ctx->slock, flags);
	return;

put_device:
	ctx->state &= ~GSC_PARAMS;
	spin_unlock_irqrestore(&ctx->slock, flags);
}