static int jz4780_clk_pll_recalc_freq(struct clknode *clk, uint64_t *freq) { struct jz4780_clk_pll_sc *sc; uint32_t reg, m, n, od; sc = clknode_get_softc(clk); reg = CLK_RD_4(sc, sc->clk_reg); /* Check for PLL enabled status */ if (REG_GET(reg, CGU_PLL_EN) == 0) { *freq = 0; return (0); } /* Return parent frequency if PLL is being bypassed */ if (REG_GET(reg, CGU_PLL_BP) != 0) return (0); m = REG_GET(reg, CGU_PLL_M) + 1; n = REG_GET(reg, CGU_PLL_N) + 1; od = REG_GET(reg, CGU_PLL_OD) + 1; /* Sanity check values */ if (m == 0 || n == 0 || od == 0) { *freq = 0; return (EINVAL); } *freq = ((*freq / n) * m) / od; return (0); }
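/* A minimal standalone sketch (not part of the jz4780 driver) of the same
 * M/N/OD arithmetic, assuming a hypothetical 48 MHz parent clock and
 * illustrative divider values: with M=25, N=2, OD=1 the PLL output is
 * (48 MHz / 2) * 25 / 1 = 600 MHz. */
#include <stdint.h>
#include <stdio.h>

static uint64_t pll_output_hz(uint64_t parent_hz, uint32_t m, uint32_t n, uint32_t od)
{
	/* Same ordering as the driver: divide by N first, then multiply by M,
	 * then divide by OD, which keeps the intermediate value small. */
	return ((parent_hz / n) * m) / od;
}

int main(void)
{
	printf("%llu Hz\n", (unsigned long long)pll_output_hz(48000000ULL, 25, 2, 1));
	return 0;
}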
static int dce_clocks_get_dp_ref_freq(struct display_clock *clk) { struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk); int dprefclk_wdivider; int dprefclk_src_sel; int dp_ref_clk_khz = 600000; int target_div = INVALID_DIVIDER; /* ASSERT DP Reference Clock source is from DFS */ REG_GET(DPREFCLK_CNTL, DPREFCLK_SRC_SEL, &dprefclk_src_sel); ASSERT(dprefclk_src_sel == 0); /* Read the mmDENTIST_DISPCLK_CNTL to get the currently * programmed DID DENTIST_DPREFCLK_WDIVIDER */ REG_GET(DENTIST_DISPCLK_CNTL, DENTIST_DPREFCLK_WDIVIDER, &dprefclk_wdivider); /* Convert DENTIST_DPREFCLK_WDIVIDER to the actual divider */ target_div = dce_divider_range_get_divider( clk_dce->divider_ranges, DIVIDER_RANGE_MAX, dprefclk_wdivider); if (target_div != INVALID_DIVIDER) { /* Calculate the current DFS clock, in kHz. */ dp_ref_clk_khz = (DIVIDER_RANGE_SCALE_FACTOR * clk_dce->dentist_vco_freq_khz) / target_div; } /* SW will adjust DP REF Clock average value for all purposes * (DP DTO / DP Audio DTO and DP GTC) if clock is spread for all cases: -if SS enabled on DP Ref clock and HW de-spreading enabled with SW calculations for DS_INCR/DS_MODULO (this is planned to be default case) -if SS enabled on DP Ref clock and HW de-spreading enabled with HW calculations (not planned to be used, but average clock should still be valid) -if SS enabled on DP Ref clock and HW de-spreading disabled (should not be case with CIK) then SW should program all rates generated according to average value (case as with previous ASICs) */ if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) { struct fixed32_32 ss_percentage = dal_fixed32_32_div_int( dal_fixed32_32_from_fraction( clk_dce->dprefclk_ss_percentage, clk_dce->dprefclk_ss_divider), 200); struct fixed32_32 adj_dp_ref_clk_khz; ss_percentage = dal_fixed32_32_sub(dal_fixed32_32_one, ss_percentage); adj_dp_ref_clk_khz = dal_fixed32_32_mul_int( ss_percentage, dp_ref_clk_khz); dp_ref_clk_khz = dal_fixed32_32_floor(adj_dp_ref_clk_khz); } return dp_ref_clk_khz; }
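/* A minimal sketch of the spread-spectrum adjustment above, done in double
 * precision instead of the dal fixed32_32 helpers and with made-up numbers
 * (ss_percentage = 35, ss_divider = 10, i.e. 3.5): the averaged DP reference
 * clock is scaled by (1 - 3.5/200) = 0.9825, so 600000 kHz becomes 589500 kHz.
 * The values are illustrative only, not taken from any ASIC. */
#include <stdio.h>

static int adjust_dp_ref_clk_khz(int dp_ref_clk_khz, int ss_percentage, int ss_divider)
{
	/* ss_percentage/ss_divider, divided by 200 as in the driver, gives the
	 * fraction by which the averaged clock is reduced */
	double frac = ((double)ss_percentage / ss_divider) / 200.0;

	return (int)((1.0 - frac) * dp_ref_clk_khz);
}

int main(void)
{
	printf("%d kHz\n", adjust_dp_ref_clk_khz(600000, 35, 10));
	return 0;
}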
/** * @brief Get the frequency of the various system bus clocks * @param clock: clock name * @arg kCoreClock : core clock * @arg kBusClock : bus clock * @arg kFlexBusClock : FlexBus clock * @arg kFlashClock : Flash clock * @retval the clock frequency */ uint32_t GetClock(Clock_t clock) { uint32_t val; /* calculate MCGOutClock; system_MKxxx.c must not be modified */ val = SystemCoreClock * (REG_GET(CLKTbl, kCoreClock) + 1); if (clock == kMCGOutClock) { return val; } val = val / (REG_GET(CLKTbl, clock) + 1); return val; }
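/* A minimal sketch of the same divider arithmetic with made-up register
 * values, assuming SystemCoreClock = 120 MHz, a core divider field of 0
 * (divide-by-1) and a bus divider field of 2 (divide-by-3): MCGOutClock is
 * reconstructed as 120 MHz * (0 + 1), and the bus clock is then
 * 120 MHz / (2 + 1) = 40 MHz. The field values are illustrative, not real
 * CLKTbl contents. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t system_core_clock = 120000000u; /* hypothetical SystemCoreClock */
	uint32_t core_div_field = 0;             /* hypothetical core-clock divider field */
	uint32_t bus_div_field = 2;              /* hypothetical bus-clock divider field */

	uint32_t mcgoutclk = system_core_clock * (core_div_field + 1);
	uint32_t bus_clock = mcgoutclk / (bus_div_field + 1);

	printf("MCGOutClock=%u Hz, bus clock=%u Hz\n", (unsigned)mcgoutclk, (unsigned)bus_clock);
	return 0;
}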
static bool is_hw_busy(struct dce_i2c_hw *dce_i2c_hw) { uint32_t i2c_sw_status = 0; REG_GET(DC_I2C_SW_STATUS, DC_I2C_SW_STATUS, &i2c_sw_status); if (i2c_sw_status == DC_I2C_STATUS__DC_I2C_STATUS_IDLE) return false; reset_hw_engine(dce_i2c_hw); REG_GET(DC_I2C_SW_STATUS, DC_I2C_SW_STATUS, &i2c_sw_status); return i2c_sw_status != DC_I2C_STATUS__DC_I2C_STATUS_IDLE; }
static bool is_hw_busy(struct engine *engine) { struct i2c_hw_engine_dce110 *hw_engine = FROM_ENGINE(engine); uint32_t i2c_sw_status = 0; REG_GET(DC_I2C_SW_STATUS, DC_I2C_SW_STATUS, &i2c_sw_status); if (i2c_sw_status == DC_I2C_STATUS__DC_I2C_STATUS_IDLE) return false; reset_hw_engine(engine); REG_GET(DC_I2C_SW_STATUS, DC_I2C_SW_STATUS, &i2c_sw_status); return i2c_sw_status != DC_I2C_STATUS__DC_I2C_STATUS_IDLE; }
bool bios_is_accelerated_mode( struct dc_bios *bios) { uint32_t acc_mode; REG_GET(BIOS_SCRATCH_6, S6_ACC_MODE, &acc_mode); return (acc_mode == 1); }
void dce100_i2c_hw_construct( struct dce_i2c_hw *dce_i2c_hw, struct dc_context *ctx, uint32_t engine_id, const struct dce_i2c_registers *regs, const struct dce_i2c_shift *shifts, const struct dce_i2c_mask *masks) { uint32_t xtal_ref_div = 0; dce_i2c_hw_construct(dce_i2c_hw, ctx, engine_id, regs, shifts, masks); dce_i2c_hw->buffer_size = I2C_HW_BUFFER_SIZE_DCE100; REG_GET(MICROSECOND_TIME_BASE_DIV, XTAL_REF_DIV, &xtal_ref_div); if (xtal_ref_div == 0) xtal_ref_div = 2; /* Calculate the reference clock by dividing the original frequency by * XTAL_REF_DIV. * At the upper level, uint32_t reference_frequency = * dal_dce_i2c_get_reference_clock(as) >> 1 * is already divided by 2, so we need x2 to get the original * reference clock from ppll_info */ dce_i2c_hw->reference_frequency = (dce_i2c_hw->reference_frequency * 2) / xtal_ref_div; }
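/* A minimal sketch (hypothetical numbers, not driver code) of the reference
 * clock correction above: the value handed down from the upper layer is
 * already halved, so it is doubled back and then divided by XTAL_REF_DIV.
 * With a halved reference of 24000 kHz and XTAL_REF_DIV reading back as 2,
 * the result is 24000 * 2 / 2 = 24000 kHz. */
#include <stdint.h>
#include <stdio.h>

static uint32_t i2c_reference_khz(uint32_t halved_ref_khz, uint32_t xtal_ref_div)
{
	if (xtal_ref_div == 0)	/* mirror the driver's fallback */
		xtal_ref_div = 2;

	/* x2 undoes the upper layer's >> 1, then the strap divider is applied */
	return (halved_ref_khz * 2) / xtal_ref_div;
}

int main(void)
{
	printf("%u kHz\n", (unsigned)i2c_reference_khz(24000u, 2u));
	return 0;
}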
/* Read by reference in a relative file, by cell number. IMPORTANT: note that if you want to fetch every record from the direct file, you have to ask the direct file for every possible key in a loop, also checking the errors for keys that may not exist. */ int direct_read(const char * filename, int student_id) { char name[20]; void * reg = malloc(REG_SIZEOF(schema_students)); int status; int fd = D_OPEN(filename, READ); if (fd == RES_ERROR) { free(reg); return -1; } /* move the id of the student we want to find into the exchange record */ REG_SET(reg, schema_students, "PADRON", student_id); status = D_READ(fd, reg); if (status == RES_NO_EXISTE || status == RES_ERROR) { free(reg); D_CLOSE(fd); return -2; } /* get the data from the exchange record */ REG_GET(reg, schema_students, "PADRON,NOMBRE", &student_id, &name); printf("%d %s\n", student_id, name); /* release resources */ free(reg); D_CLOSE(fd); return 0; }
static void release_engine( struct dce_i2c_hw *dce_i2c_hw) { bool safe_to_reset; /* Restore original HW engine speed */ set_speed(dce_i2c_hw, dce_i2c_hw->original_speed); /* Release I2C */ REG_UPDATE(DC_I2C_ARBITRATION, DC_I2C_SW_DONE_USING_I2C_REG, 1); /* Reset HW engine */ { uint32_t i2c_sw_status = 0; REG_GET(DC_I2C_SW_STATUS, DC_I2C_SW_STATUS, &i2c_sw_status); /* if used by SW, safe to reset */ safe_to_reset = (i2c_sw_status == 1); } if (safe_to_reset) REG_UPDATE_2(DC_I2C_CONTROL, DC_I2C_SOFT_RESET, 1, DC_I2C_SW_STATUS_RESET, 1); else REG_UPDATE(DC_I2C_CONTROL, DC_I2C_SW_STATUS_RESET, 1); /* HW I2c engine - clock gating feature */ if (!dce_i2c_hw->engine_keep_power_up_count) REG_UPDATE_N(SETUP, 1, FN(SETUP, DC_I2C_DDC1_ENABLE), 0); }
static void process_channel_reply( struct dce_i2c_hw *dce_i2c_hw, struct i2c_payload *reply) { uint32_t length = reply->length; uint8_t *buffer = reply->data; REG_SET_3(DC_I2C_DATA, 0, DC_I2C_INDEX, dce_i2c_hw->buffer_used_write, DC_I2C_DATA_RW, 1, DC_I2C_INDEX_WRITE, 1); while (length) { /* after reading the status, * if the I2C operation executed successfully * (i.e. DC_I2C_STATUS_DONE = 1) then the I2C controller * should read data bytes from I2C circular data buffer */ uint32_t i2c_data; REG_GET(DC_I2C_DATA, DC_I2C_DATA, &i2c_data); *buffer++ = i2c_data; --length; } }
static enum i2c_channel_operation_result get_channel_status( struct i2c_engine *i2c_engine, uint8_t *returned_bytes) { uint32_t i2c_sw_status = 0; struct i2c_hw_engine_dce110 *hw_engine = FROM_I2C_ENGINE(i2c_engine); uint32_t value = REG_GET(DC_I2C_SW_STATUS, DC_I2C_SW_STATUS, &i2c_sw_status); if (i2c_sw_status == DC_I2C_STATUS__DC_I2C_STATUS_USED_BY_SW) return I2C_CHANNEL_OPERATION_ENGINE_BUSY; else if (value & hw_engine->i2c_mask->DC_I2C_SW_STOPPED_ON_NACK) return I2C_CHANNEL_OPERATION_NO_RESPONSE; else if (value & hw_engine->i2c_mask->DC_I2C_SW_TIMEOUT) return I2C_CHANNEL_OPERATION_TIMEOUT; else if (value & hw_engine->i2c_mask->DC_I2C_SW_ABORTED) return I2C_CHANNEL_OPERATION_FAILED; else if (value & hw_engine->i2c_mask->DC_I2C_SW_DONE) return I2C_CHANNEL_OPERATION_SUCCEEDED; /* * this is the case when HW is used for communication; I2C_SW_STATUS * could be zero */ return I2C_CHANNEL_OPERATION_SUCCEEDED; }
static void process_channel_reply( struct i2c_engine *engine, struct i2c_reply_transaction_data *reply) { uint32_t length = reply->length; uint8_t *buffer = reply->data; struct i2c_hw_engine_dce110 *hw_engine = FROM_I2C_ENGINE(engine); REG_SET_3(DC_I2C_DATA, 0, DC_I2C_INDEX, hw_engine->buffer_used_write, DC_I2C_DATA_RW, 1, DC_I2C_INDEX_WRITE, 1); while (length) { /* after reading the status, * if the I2C operation executed successfully * (i.e. DC_I2C_STATUS_DONE = 1) then the I2C controller * should read data bytes from I2C circular data buffer */ uint32_t i2c_data; REG_GET(DC_I2C_DATA, DC_I2C_DATA, &i2c_data); *buffer++ = i2c_data; --length; } }
static ssize_t pt_mode_show(struct device *d, struct device_attribute *attr, char *buf) { REG_GET(); if (regulator_get_mode(reg) == REGULATOR_MODE_FAST) return snprintf(buf, 6, "fast\n"); else return snprintf(buf, 8, "normal\n"); }
bool hubbub1_is_allow_self_refresh_enabled(struct hubbub *hubbub) { uint32_t enable = 0; REG_GET(DCHUBBUB_ARB_DRAM_STATE_CNTL, DCHUBBUB_ARB_ALLOW_SELF_REFRESH_FORCE_ENABLE, &enable); return enable ? true : false; }
static ssize_t pt_mode_set(struct device *d, struct device_attribute *attr, const char *buf, size_t size) { REG_GET(); if (buf[0] == 'f') regulator_set_mode(reg, REGULATOR_MODE_FAST); else regulator_set_mode(reg, REGULATOR_MODE_NORMAL); return size; }
static void read_dce_straps( struct dc_context *ctx, struct resource_straps *straps) { REG_GET_2(CC_DC_HDMI_STRAPS, HDMI_DISABLE, &straps->hdmi_disable, AUDIO_STREAM_NUMBER, &straps->audio_stream_number); REG_GET(DC_PINSTRAPS, DC_PINSTRAPS_AUDIO, &straps->dc_pinstraps_audio); }
static unsigned int hubp1_get_underflow_status(struct hubp *hubp) { uint32_t hubp_underflow = 0; struct dcn10_hubp *hubp1 = TO_DCN10_HUBP(hubp); REG_GET(DCHUBP_CNTL, HUBP_UNDERFLOW_STATUS, &hubp_underflow); return hubp_underflow; }
static uint32_t get_speed( const struct dce_i2c_hw *dce_i2c_hw) { uint32_t pre_scale = 0; REG_GET(SPEED, DC_I2C_DDC1_PRESCALE, &pre_scale); /* [anaumov] it seems following is unnecessary */ /*ASSERT(value.bits.DC_I2C_DDC1_PRESCALE);*/ return pre_scale ? dce_i2c_hw->reference_frequency / pre_scale : dce_i2c_hw->default_speed; }
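/* A minimal sketch of the prescale-to-speed relation used above, with an
 * assumed 48000 kHz reference frequency: a prescale of 480 corresponds to a
 * 100 kHz I2C clock, and a prescale of 0 falls back to the default speed.
 * The numbers are illustrative only. */
#include <stdint.h>
#include <stdio.h>

static uint32_t i2c_speed_khz(uint32_t reference_khz, uint32_t pre_scale, uint32_t default_khz)
{
	return pre_scale ? reference_khz / pre_scale : default_khz;
}

int main(void)
{
	printf("%u kHz\n", (unsigned)i2c_speed_khz(48000u, 480u, 100u));
	return 0;
}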
bool hubp1_is_flip_pending(struct hubp *hubp) { uint32_t flip_pending = 0; struct dcn10_hubp *hubp1 = TO_DCN10_HUBP(hubp); struct dc_plane_address earliest_inuse_address; REG_GET(DCSURF_FLIP_CONTROL, SURFACE_FLIP_PENDING, &flip_pending); REG_GET(DCSURF_SURFACE_EARLIEST_INUSE, SURFACE_EARLIEST_INUSE_ADDRESS, &earliest_inuse_address.grph.addr.low_part); REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH, SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &earliest_inuse_address.grph.addr.high_part); if (flip_pending) return true; if (earliest_inuse_address.grph.addr.quad_part != hubp->request_address.grph.addr.quad_part) return true; return false; }
static uint32_t get_speed( const struct i2c_engine *i2c_engine) { const struct i2c_hw_engine_dce110 *hw_engine = FROM_I2C_ENGINE(i2c_engine); uint32_t pre_scale = 0; REG_GET(SPEED, DC_I2C_DDC1_PRESCALE, &pre_scale); /* [anaumov] it seems following is unnecessary */ /*ASSERT(value.bits.DC_I2C_DDC1_PRESCALE);*/ return pre_scale ? hw_engine->reference_frequency / pre_scale : hw_engine->base.default_speed; }
static int _omap_dss_wait_reset(void) { unsigned timeout = 1000; while (REG_GET(DSS_SYSSTATUS, 0, 0) == 0) { udelay(1); if (!--timeout) { DSSERR("soft reset failed\n"); return -ENODEV; } } return 0; }
static int _omap_dss_wait_reset(void) { int t = 0; while (REG_GET(DSS_SYSSTATUS, 0, 0) == 0) { if (++t > 1000) { DSSERR("soft reset failed\n"); return -ENODEV; } udelay(1); } return 0; }
static void dce110_stream_encoder_dp_blank( struct stream_encoder *enc) { struct dce110_stream_encoder *enc110 = DCE110STRENC_FROM_STRENC(enc); uint32_t retries = 0; uint32_t reg1 = 0; uint32_t max_retries = DP_BLANK_MAX_RETRY * 10; /* Note: For CZ, we are changing driver default to disable * stream deferred to next VBLANK. If results are positive, we * will make the same change to all DCE versions. There are a * handful of panels that cannot handle disable stream at * HBLANK and will result in a white line flash across the * screen on stream disable. */ REG_GET(DP_VID_STREAM_CNTL, DP_VID_STREAM_ENABLE, &reg1); if ((reg1 & 0x1) == 0) /* stream not enabled */ return; /* Specify the video stream disable point * (2 = start of the next vertical blank) */ REG_UPDATE(DP_VID_STREAM_CNTL, DP_VID_STREAM_DIS_DEFER, 2); /* Larger delay to wait until VBLANK - use max retry of * 10us*3000=30ms. This covers 16.6ms of typical 60 Hz mode + * a little more because we may not trust delay accuracy. */ max_retries = DP_BLANK_MAX_RETRY * 150; /* disable DP stream */ REG_UPDATE(DP_VID_STREAM_CNTL, DP_VID_STREAM_ENABLE, 0); /* the encoder stops sending the video stream * at the start of the vertical blanking. * Poll for DP_VID_STREAM_STATUS == 0 */ REG_WAIT(DP_VID_STREAM_CNTL, DP_VID_STREAM_STATUS, 0, 10, max_retries); ASSERT(retries <= max_retries); /* Tell the DP encoder to ignore timing from CRTC, must be done after * the polling. If we set DP_STEER_FIFO_RESET before DP stream blank is * complete, stream status will be stuck in video stream enabled state, * i.e. DP_VID_STREAM_STATUS stuck at 1. */ REG_UPDATE(DP_STEER_FIFO, DP_STEER_FIFO_RESET, true); }
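/* A small worked check of the polling budget described in the comment above,
 * assuming DP_BLANK_MAX_RETRY is 20 (an assumption; the macro is defined
 * elsewhere): 20 * 150 = 3000 polls at a 10 us interval gives a 30 ms budget,
 * longer than the ~16.7 ms frame time of a 60 Hz mode. */
#include <stdio.h>

#define DP_BLANK_MAX_RETRY 20	/* hypothetical value, for illustration only */

int main(void)
{
	unsigned int polls = DP_BLANK_MAX_RETRY * 150;	/* 3000 polls */
	unsigned int budget_us = polls * 10;		/* 10 us per poll */

	printf("%u polls, %u us total budget\n", polls, budget_us);
	return 0;
}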
void hubbub1_toggle_watermark_change_req(struct hubbub *hubbub) { uint32_t watermark_change_req; REG_GET(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL, DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, &watermark_change_req); if (watermark_change_req) watermark_change_req = 0; else watermark_change_req = 1; REG_UPDATE(DCHUBBUB_ARB_WATERMARK_CHANGE_CNTL, DCHUBBUB_ARB_WATERMARK_CHANGE_REQUEST, watermark_change_req); }
static ssize_t pt_timeout_set(struct device *d, struct device_attribute *attr, const char *buf, size_t size) { unsigned int i; int ret; REG_GET(); ret = sscanf(buf, "%u", &i); if (ret != 1) return -EINVAL; mutex_lock(&run_mutex); timer_delay = 1000 * i; mutex_unlock(&run_mutex); return size; }
static int hdmi_audio_start(struct device *dev) { struct omap_hdmi *hd = dev_get_drvdata(dev); WARN_ON(!hdmi_mode_has_audio(&hd->cfg)); WARN_ON(!hd->display_enabled); /* No-idle while playing audio, store the old value */ hd->wp_idlemode = REG_GET(hdmi.wp.base, HDMI_WP_SYSCONFIG, 3, 2); REG_FLD_MOD(hdmi.wp.base, HDMI_WP_SYSCONFIG, 1, 3, 2); hdmi_wp_audio_enable(&hd->wp, true); hdmi_wp_audio_core_req_enable(&hd->wp, true); return 0; }
/* PHY_PWR_CMD */ int hdmi_wp_set_phy_pwr(struct hdmi_wp_data *wp, enum hdmi_phy_pwr val) { /* Return if already in the requested state */ if (REG_GET(wp->base, HDMI_WP_PWR_CTRL, 5, 4) == val) return 0; /* Command for power control of HDMI PHY */ REG_FLD_MOD(wp->base, HDMI_WP_PWR_CTRL, val, 7, 6); /* Status of the power control of HDMI PHY */ if (hdmi_wait_for_bit_change(wp->base, HDMI_WP_PWR_CTRL, 5, 4, val) != val) { DSSERR("Failed to set PHY power mode to %d\n", val); return -ETIMEDOUT; } return 0; }
static void construct( struct i2c_hw_engine_dce110 *hw_engine, const struct i2c_hw_engine_dce110_create_arg *arg) { uint32_t xtal_ref_div = 0; dal_i2c_hw_engine_construct(&hw_engine->base, arg->ctx); hw_engine->base.base.base.funcs = &engine_funcs; hw_engine->base.base.funcs = &i2c_engine_funcs; hw_engine->base.funcs = &i2c_hw_engine_funcs; hw_engine->base.default_speed = arg->default_speed; hw_engine->regs = arg->regs; hw_engine->i2c_shift = arg->i2c_shift; hw_engine->i2c_mask = arg->i2c_mask; hw_engine->engine_id = arg->engine_id; hw_engine->buffer_used_bytes = 0; hw_engine->transaction_count = 0; hw_engine->engine_keep_power_up_count = 1; REG_GET(MICROSECOND_TIME_BASE_DIV, XTAL_REF_DIV, &xtal_ref_div); if (xtal_ref_div == 0) { DC_LOG_WARNING("Invalid base timer divider [%s]\n", __func__); xtal_ref_div = 2; } /* Calculate the reference clock by dividing the original frequency by * XTAL_REF_DIV. * At the upper level, uint32_t reference_frequency = * dal_i2caux_get_reference_clock(as) >> 1 * is already divided by 2, so we need x2 to get the original * reference clock from ppll_info */ hw_engine->reference_frequency = (arg->reference_frequency * 2) / xtal_ref_div; }
static int hdmi_core_ddc_init(struct hdmi_core_data *core) { void __iomem *base = core->base; /* Turn on CLK for DDC */ REG_FLD_MOD(base, HDMI_CORE_AV_DPD, 0x7, 2, 0); /* IN_PROG */ if (REG_GET(base, HDMI_CORE_DDC_STATUS, 4, 4) == 1) { /* Abort transaction */ REG_FLD_MOD(base, HDMI_CORE_DDC_CMD, 0xf, 3, 0); /* IN_PROG */ if (hdmi_wait_for_bit_change(base, HDMI_CORE_DDC_STATUS, 4, 4, 0) != 0) { DSSERR("Timeout aborting DDC transaction\n"); return -ETIMEDOUT; } } /* Clk SCL Devices */ REG_FLD_MOD(base, HDMI_CORE_DDC_CMD, 0xA, 3, 0); /* HDMI_CORE_DDC_STATUS_IN_PROG */ if (hdmi_wait_for_bit_change(base, HDMI_CORE_DDC_STATUS, 4, 4, 0) != 0) { DSSERR("Timeout starting SCL clock\n"); return -ETIMEDOUT; } /* Clear FIFO */ REG_FLD_MOD(base, HDMI_CORE_DDC_CMD, 0x9, 3, 0); /* HDMI_CORE_DDC_STATUS_IN_PROG */ if (hdmi_wait_for_bit_change(base, HDMI_CORE_DDC_STATUS, 4, 4, 0) != 0) { DSSERR("Timeout clearing DDC fifo\n"); return -ETIMEDOUT; } return 0; }
static ssize_t pt_val_set(struct device *d, struct device_attribute *attr, const char *buf, size_t size) { unsigned int i; int ret; REG_GET(); ret = sscanf(buf, "%u", &i); if (ret != 1) return -EINVAL; mutex_lock(&run_mutex); if (!regulator_set_current_limit(reg, i, i)) { mod_timer(&pt_timer, jiffies + msecs_to_jiffies(timer_delay)); return size; } else { mutex_unlock(&run_mutex); return -EPERM; } }