/**
 * Install an interrupt callback for the given I2C controller and program the
 * controller's interrupt mask register with the requested interrupt sources.
 *
 * \param id    I2C controller instance
 * \param cb    callback invoked from the I2C interrupt handler
 * \param mask  value written to I2C_INTR_MASK_REG (enabled interrupt sources)
 */
void hw_i2c_register_int(HW_I2C_ID id, hw_i2c_interrupt_cb cb, uint16_t mask)
{
        struct i2c *bus = get_i2c(id);

        bus->intr_cb = cb;
        IBA(id)->I2C_INTR_MASK_REG = mask;
}
/*
 * DMA-completion callback for writes when the caller did not ask for a STOP
 * notification: turn off I2C DMA and report success iff the whole requested
 * length was transferred.
 */
static void notify_on_dma_write_end_no_stop_cb(void *user_data, uint16 len)
{
        HW_I2C_ID id = (HW_I2C_ID) user_data;
        struct i2c *bus = get_i2c(id);
        bool full_transfer = (len == bus->tx_state.len);

        /* disable I2C DMA */
        IBA(id)->I2C_DMA_CR_REG = 0;

        dma_tx_reply(id, full_transfer);
}
/*
 * Interrupt handler used by hw_i2c_prepare_dma_ex() to handle STOP and ABORT
 * for DMA writes. ABORT is checked first; a STOP seen while DMA is still
 * enabled only clears the interrupt (the reply is deferred to the TX_ABORT
 * that follows a slave NACK).
 */
static void intr_write_buffer_dma_handler(HW_I2C_ID id, uint16_t mask)
{
        struct i2c *bus = get_i2c(id);
        struct tx_state *tx = &bus->tx_state;

        /* Must provide a valid (> 0) mask */
        ASSERT_WARNING(mask != 0);

        if (mask & HW_I2C_INT_TX_ABORT) {
                /* disable I2C DMA and report the failed transfer */
                IBA(id)->I2C_DMA_CR_REG = 0;
                dma_tx_reply(id, false);
                /* clear abort */
                hw_i2c_reset_int_tx_abort(id);
                return;
        }

        if (!(mask & HW_I2C_INT_STOP_DETECTED)) {
                return;
        }

        if (IBA(id)->I2C_DMA_CR_REG != 0) {
                hw_i2c_reset_int_stop_detected(id);
                /*
                 * A STOP while DMA is still enabled is caused by a NACK from
                 * the slave. While servicing the STOP_DETECTED interrupt we
                 * don't need to call the reply callback; that will be done
                 * when servicing the TX_ABORT interrupt that will follow.
                 */
                return;
        }

        dma_tx_reply(id, tx->num == tx->len);
        hw_i2c_reset_int_stop_detected(id);
}
/*
 * DMA-completion callback for reads: record the transferred length, turn off
 * I2C DMA and report success iff the whole requested length was received.
 */
static void notify_on_dma_read_end_cb(void *user_data, uint16 len)
{
        HW_I2C_ID id = (HW_I2C_ID) user_data;
        struct rx_state *rx = &get_i2c(id)->rx_state;

        rx->num = len;

        /* disable I2C DMA */
        IBA(id)->I2C_DMA_CR_REG = 0;

        dma_rx_reply(id, rx->num == rx->len);
}
/*
 * DMA-completion callback for writes when a STOP notification was requested:
 * only record the transferred length and turn off I2C DMA — the user callback
 * fires later, from the STOP/ABORT interrupt handler.
 */
static void notify_on_dma_write_end_cb(void *user_data, uint16 len)
{
        HW_I2C_ID id = (HW_I2C_ID) user_data;

        /* store len, to pass to user's cb when STOP/ABORT is detected */
        get_i2c(id)->tx_state.num = len;

        /* disable I2C DMA */
        IBA(id)->I2C_DMA_CR_REG = 0;
}
/*
 * Generic DMA-completion trampoline: invoke the user's one-shot completion
 * callback (if any), clear it, then turn off I2C DMA.
 */
static void hw_i2c_dma_cb(void *user_data, uint16_t len)
{
        HW_I2C_ID id = (HW_I2C_ID) user_data;
        struct i2c *bus = get_i2c(id);

        if (bus->dma_state.cb) {
                /* call first, then clear — the callback is one-shot */
                bus->dma_state.cb(id, bus->dma_state.cb_data, len, false);
                bus->dma_state.cb = NULL;
        }

        /* disable I2C DMA */
        IBA(id)->I2C_DMA_CR_REG = 0;
}
/*
 * Interrupt handler used by hw_i2c_prepare_dma_ex() to handle ABORT for DMA
 * writes when no STOP notification was requested.
 */
static void intr_write_buffer_dma_no_stop_handler(HW_I2C_ID id, uint16_t mask)
{
        /* Must provide a valid (> 0) mask */
        ASSERT_WARNING(mask != 0);

        if (!(mask & HW_I2C_INT_TX_ABORT)) {
                return;
        }

        /* disable I2C DMA */
        IBA(id)->I2C_DMA_CR_REG = 0;
        dma_tx_reply(id, false);
        /* clear abort */
        hw_i2c_reset_int_tx_abort(id);
}
/**
 * Configure an I2C controller.
 *
 * SCL timing is always programmed since it is essential for the controller to
 * work; when the caller passes no config (or zeroed counts), the recommended
 * datasheet defaults are used. When \p cfg is non-NULL, speed, mode and
 * master/slave addressing are applied as well.
 *
 * \param id   I2C controller instance
 * \param cfg  configuration, may be NULL (defaults only)
 */
void hw_i2c_configure(HW_I2C_ID id, const i2c_config *cfg)
{
        /* standard-speed SCL high/low counts (datasheet defaults if not given) */
        if (!cfg || (!cfg->clock_cfg.ss_hcnt && !cfg->clock_cfg.ss_lcnt)) {
                IBA(id)->I2C_SS_SCL_HCNT_REG = 0x48;
                IBA(id)->I2C_SS_SCL_LCNT_REG = 0x4F;
        } else {
                IBA(id)->I2C_SS_SCL_HCNT_REG = cfg->clock_cfg.ss_hcnt;
                IBA(id)->I2C_SS_SCL_LCNT_REG = cfg->clock_cfg.ss_lcnt;
        }

        /*
         * BUGFIX: this branch previously tested the standard-speed (ss_*)
         * fields, so caller-provided fast-speed counts could be ignored or
         * zero counts applied. Test the fs_* fields here.
         */
        if (!cfg || (!cfg->clock_cfg.fs_hcnt && !cfg->clock_cfg.fs_lcnt)) {
                IBA(id)->I2C_FS_SCL_HCNT_REG = 0x08;
                IBA(id)->I2C_FS_SCL_LCNT_REG = 0x17;
        } else {
                IBA(id)->I2C_FS_SCL_HCNT_REG = cfg->clock_cfg.fs_hcnt;
                IBA(id)->I2C_FS_SCL_LCNT_REG = cfg->clock_cfg.fs_lcnt;
        }

        if (!cfg) {
                return;
        }

        hw_i2c_set_speed(id, cfg->speed);
        hw_i2c_set_mode(id, cfg->mode);

        if (cfg->mode == HW_I2C_MODE_MASTER) {
                hw_i2c_setup_master(id, cfg->addr_mode, cfg->address);
        } else {
                hw_i2c_setup_slave(id, cfg->addr_mode, cfg->address, cfg->event_cb);
        }
}
/**
 * Initialize an I2C controller: reset driver state, enable the peripheral
 * clock, make sure the HW block is disabled, mask all interrupts, apply the
 * optional configuration and enable the controller's IRQ in the NVIC.
 *
 * \param id   controller instance; must be HW_I2C1 or HW_I2C2
 * \param cfg  configuration forwarded to hw_i2c_configure(), may be NULL
 */
void hw_i2c_init(HW_I2C_ID id, const i2c_config *cfg)
{
        IRQn_Type irq = I2C_IRQn;
        int wait_cnt = 0;

        if (id == HW_I2C2) {
                irq = I2C2_IRQn;
        } else if (id != HW_I2C1) {
                /* Requested ID must be one of HW_I2C1 or HW_I2C2 */
                ASSERT_ERROR(0);
        }

        struct i2c *bus = get_i2c(id);
        memset(bus, 0, sizeof(*bus));

        /* enable I2C clock with interrupts disabled (read-modify-write) */
        GLOBAL_INT_DISABLE();
        uint32_t clk_per = CRG_PER->CLK_PER_REG;
        REG_SET_FIELD(CRG_PER, CLK_PER_REG, I2C_CLK_SEL, clk_per, 0);
        REG_SET_FIELD(CRG_PER, CLK_PER_REG, I2C_ENABLE, clk_per, 1);
        CRG_PER->CLK_PER_REG = clk_per;
        GLOBAL_INT_RESTORE();

        hw_i2c_disable(id);
        /* wait until the HW block reports it is actually disabled */
        while (hw_i2c_get_enable_status(id) & I2C_I2C_ENABLE_STATUS_REG_IC_EN_Msk) {
                hw_cpm_delay_usec(500);
                wait_cnt++;
                /* we shouldn't get stuck here; the block should eventually go idle */
                ASSERT_ERROR(wait_cnt < I2C_ENABLE_LOOP_LIMIT);
        }

        IBA(id)->I2C_INTR_MASK_REG = 0x0000;

        hw_i2c_configure(id, cfg);

        NVIC_EnableIRQ(irq);
}
/*
 * Bring up the VR41xx PCI Control Unit: map its registers, select the PCI
 * clock from VTClock, program the master/target address-translation windows,
 * arbiter and latency settings from the board-provided setup structure, and
 * finally register the PCI controller with the MIPS PCI core.
 *
 * Returns 0 on success, -EBUSY if the register region or I/O window cannot
 * be mapped, -EINVAL if no valid PCI clock divider exists.
 */
static int __init vr41xx_pciu_init(void)
{
	struct pci_controller_unit_setup *setup;
	struct pci_master_address_conversion *master;
	struct pci_target_address_conversion *target;
	struct pci_mailbox_address *mailbox;
	struct pci_target_address_window *window;
	unsigned long vtclock, pci_clock_max;
	uint32_t val;

	setup = &vr41xx_pci_controller_unit_setup;

	if (request_mem_region(PCIU_BASE, PCIU_SIZE, "PCIU") == NULL)
		return -EBUSY;

	pciu_base = ioremap(PCIU_BASE, PCIU_SIZE);
	if (pciu_base == NULL) {
		release_mem_region(PCIU_BASE, PCIU_SIZE);
		return -EBUSY;
	}

	/* Disable PCI interrupt */
	vr41xx_disable_pciint();

	/* Supply VTClock to PCIU */
	vr41xx_supply_clock(PCIU_CLOCK);

	/* Dummy write, waiting for supply of VTClock. */
	vr41xx_disable_pciint();

	/*
	 * Select PCI clock: pick the largest divider of VTClock that stays
	 * below the allowed maximum. The /3 divider exists only from
	 * VR4131 rev 2.1 onward.
	 */
	if (setup->pci_clock_max != 0)
		pci_clock_max = setup->pci_clock_max;
	else
		pci_clock_max = PCI_CLOCK_MAX;
	vtclock = vr41xx_get_vtclock_frequency();
	if (vtclock < pci_clock_max)
		pciu_write(PCICLKSELREG, EQUAL_VTCLOCK);
	else if ((vtclock / 2) < pci_clock_max)
		pciu_write(PCICLKSELREG, HALF_VTCLOCK);
	else if (current_cpu_data.processor_id >= PRID_VR4131_REV2_1 &&
	         (vtclock / 3) < pci_clock_max)
		pciu_write(PCICLKSELREG, ONE_THIRD_VTCLOCK);
	else if ((vtclock / 4) < pci_clock_max)
		pciu_write(PCICLKSELREG, QUARTER_VTCLOCK);
	else {
		printk(KERN_ERR "PCI Clock is over 33MHz.\n");
		/*
		 * NOTE(review): this error path unmaps but never calls
		 * release_mem_region(PCIU_BASE, PCIU_SIZE) — looks like a
		 * resource leak; confirm against the request above.
		 */
		iounmap(pciu_base);
		return -EINVAL;
	}

	/* Supply PCI clock by PCI bus */
	vr41xx_supply_clock(PCI_CLOCK);

	/*
	 * Master address windows (CPU -> PCI). For each unused window,
	 * read-modify-write to clear its enable bit.
	 */
	if (setup->master_memory1 != NULL) {
		master = setup->master_memory1;
		val = IBA(master->bus_base_address) |
		      MASTER_MSK(master->address_mask) |
		      WINEN |
		      PCIA(master->pci_base_address);
		pciu_write(PCIMMAW1REG, val);
	} else {
		val = pciu_read(PCIMMAW1REG);
		val &= ~WINEN;
		pciu_write(PCIMMAW1REG, val);
	}

	if (setup->master_memory2 != NULL) {
		master = setup->master_memory2;
		val = IBA(master->bus_base_address) |
		      MASTER_MSK(master->address_mask) |
		      WINEN |
		      PCIA(master->pci_base_address);
		pciu_write(PCIMMAW2REG, val);
	} else {
		val = pciu_read(PCIMMAW2REG);
		val &= ~WINEN;
		pciu_write(PCIMMAW2REG, val);
	}

	/* Target address windows (PCI -> CPU memory). */
	if (setup->target_memory1 != NULL) {
		target = setup->target_memory1;
		val = TARGET_MSK(target->address_mask) |
		      WINEN |
		      ITA(target->bus_base_address);
		pciu_write(PCITAW1REG, val);
	} else {
		val = pciu_read(PCITAW1REG);
		val &= ~WINEN;
		pciu_write(PCITAW1REG, val);
	}

	if (setup->target_memory2 != NULL) {
		target = setup->target_memory2;
		val = TARGET_MSK(target->address_mask) |
		      WINEN |
		      ITA(target->bus_base_address);
		pciu_write(PCITAW2REG, val);
	} else {
		val = pciu_read(PCITAW2REG);
		val &= ~WINEN;
		pciu_write(PCITAW2REG, val);
	}

	/* Master I/O window (CPU -> PCI I/O space). */
	if (setup->master_io != NULL) {
		master = setup->master_io;
		val = IBA(master->bus_base_address) |
		      MASTER_MSK(master->address_mask) |
		      WINEN |
		      PCIIA(master->pci_base_address);
		pciu_write(PCIMIOAWREG, val);
	} else {
		val = pciu_read(PCIMIOAWREG);
		val &= ~WINEN;
		pciu_write(PCIMIOAWREG, val);
	}

	/* Exclusive-access (bus lock) policy. */
	if (setup->exclusive_access == CANNOT_LOCK_FROM_DEVICE)
		pciu_write(PCIEXACCREG, UNLOCK);
	else
		pciu_write(PCIEXACCREG, 0);

	/* TRDY timeout register only exists on the VR4122. */
	if (current_cpu_type() == CPU_VR4122)
		pciu_write(PCITRDYVREG, TRDYV(setup->wait_time_limit_from_irdy_to_trdy));

	pciu_write(LATTIMEREG, MLTIM(setup->master_latency_timer));

	/* Optional mailbox and target-window base address registers. */
	if (setup->mailbox != NULL) {
		mailbox = setup->mailbox;
		val = MBADD(mailbox->base_address) | TYPE_32BITSPACE |
		      MSI_MEMORY | PREF_APPROVAL;
		pciu_write(MAILBAREG, val);
	}

	if (setup->target_window1) {
		window = setup->target_window1;
		val = PMBA(window->base_address) | TYPE_32BITSPACE |
		      MSI_MEMORY | PREF_APPROVAL;
		pciu_write(PCIMBA1REG, val);
	}

	if (setup->target_window2) {
		window = setup->target_window2;
		val = PMBA(window->base_address) | TYPE_32BITSPACE |
		      MSI_MEMORY | PREF_APPROVAL;
		pciu_write(PCIMBA2REG, val);
	}

	/* Retry limit (read-modify-write, only the RTYVAL field changes). */
	val = pciu_read(RETVALREG);
	val &= ~RTYVAL_MASK;
	val |= RTYVAL(setup->retry_limit);
	pciu_write(RETVALREG, val);

	/* Arbiter priority control and take-away-GNT mode. */
	val = pciu_read(PCIAPCNTREG);
	val &= ~(TKYGNT | PAPC);
	switch (setup->arbiter_priority_control) {
	case PCI_ARBITRATION_MODE_ALTERNATE_0:
		val |= PAPC_ALTERNATE_0;
		break;
	case PCI_ARBITRATION_MODE_ALTERNATE_B:
		val |= PAPC_ALTERNATE_B;
		break;
	default:
		val |= PAPC_FAIR;
		break;
	}
	if (setup->take_away_gnt_mode == PCI_TAKE_AWAY_GNT_ENABLE)
		val |= TKYGNT_ENABLE;
	pciu_write(PCIAPCNTREG, val);

	/* Enable I/O, memory, bus-mastering, parity and SERR reporting. */
	pciu_write(COMMANDREG, PCI_COMMAND_IO | PCI_COMMAND_MEMORY |
	           PCI_COMMAND_MASTER | PCI_COMMAND_PARITY | PCI_COMMAND_SERR);

	/* Clear bus error */
	pciu_read(BUSERRADREG);

	pciu_write(PCIENREG, PCIU_CONFIG_DONE);

	/* Install board-specific resources, or fall back to the defaults. */
	if (setup->mem_resource != NULL)
		vr41xx_pci_controller.mem_resource = setup->mem_resource;

	if (setup->io_resource != NULL) {
		vr41xx_pci_controller.io_resource = setup->io_resource;
	} else {
		set_io_port_base(IO_PORT_BASE);
		ioport_resource.start = IO_PORT_RESOURCE_START;
		ioport_resource.end = IO_PORT_RESOURCE_END;
	}

	if (setup->master_io) {
		void __iomem *io_map_base;
		struct resource *res = vr41xx_pci_controller.io_resource;
		master = setup->master_io;
		io_map_base = ioremap(master->bus_base_address, resource_size(res));
		/*
		 * NOTE(review): this error path leaves pciu_base mapped and
		 * the PCIU mem region requested — confirm whether cleanup is
		 * intentionally skipped for __init failure.
		 */
		if (!io_map_base)
			return -EBUSY;
		vr41xx_pci_controller.io_map_base = (unsigned long)io_map_base;
	}

	register_pci_controller(&vr41xx_pci_controller);

	return 0;
}
/**
 * Enable both transmit and receive DMA request generation for the given I2C
 * controller by setting TDMAE and RDMAE in I2C_DMA_CR_REG.
 */
void hw_i2c_dma_start(HW_I2C_ID id)
{
        IBA(id)->I2C_DMA_CR_REG = (1 << I2C_I2C_DMA_CR_REG_RDMAE_Pos) |
                                  (1 << I2C_I2C_DMA_CR_REG_TDMAE_Pos);
}
/**
 * Prepare a DMA channel pair for an I2C transfer.
 *
 * Depending on \p type, the RX channel (\p channel) and/or the TX channel
 * (\p channel + 1) are configured and enabled. The I2C DMA control register
 * is cleared first so nothing triggers while channels are being set up; the
 * actual transfer starts later via hw_i2c_dma_start().
 *
 * \param id              I2C controller instance
 * \param channel         even DMA channel number of the RX/TX pair (an odd
 *                        value is rounded down to the pair's even channel)
 * \param data            buffer to send from / receive into
 * \param len             transfer length in elements
 * \param type            transfer direction (write / master read / slave read)
 * \param cb              user completion callback stored in dma_state
 * \param cb_data         opaque argument for \p cb
 * \param notify_on_stop  for writes: if true, the user callback fires on
 *                        STOP/ABORT (via interrupt handler); if false, on
 *                        ABORT or DMA completion only
 */
void hw_i2c_prepare_dma_ex(HW_I2C_ID id, uint8_t channel, uint16_t *data, uint16_t len,
                HW_I2C_DMA_TRANSFER type, hw_i2c_complete_cb cb, void *cb_data,
                bool notify_on_stop)
{
        static volatile uint16_t read_cmd = 0x100; /* must be in RAM for faster access */
        DMA_setup dma;
        struct i2c *i2c = get_i2c(id);

        /* for sanity so even if channel is set to odd number, we'll use proper pair */
        channel &= 0xfe;

        /* make sure I2C DMA is off so it's not unexpectedly triggered when channels are enabled */
        IBA(id)->I2C_DMA_CR_REG = 0;

        i2c->dma_state.cb = cb;
        i2c->dma_state.cb_data = cb_data;

        /* RX channel, not used only when writing data */
        if (type != HW_I2C_DMA_TRANSFER_WRITE) {
                dma.channel_number = channel;
                dma.bus_width = HW_DMA_BW_BYTE;
                dma.irq_enable = HW_DMA_IRQ_STATE_ENABLED;
                dma.irq_nr_of_trans = 0;
                dma.dreq_mode = HW_DMA_DREQ_TRIGGERED;
                /* fixed source (data register), incrementing destination buffer */
                dma.a_inc = HW_DMA_AINC_FALSE;
                dma.b_inc = HW_DMA_BINC_TRUE;
                dma.circular = HW_DMA_MODE_NORMAL;
                /*
                 * Set DMA priority to highest; see Tx channel setup below for explanation.
                 */
                dma.dma_prio = HW_DMA_PRIO_7;
                dma.dma_idle = HW_DMA_IDLE_INTERRUPTING_MODE; /* Not used by the HW in this case */
                dma.dma_init = HW_DMA_INIT_AX_BX_AY_BY;
                dma.dma_req_mux = id == HW_I2C2 ? HW_DMA_TRIG_I2C2_RXTX : HW_DMA_TRIG_I2C_RXTX;
                dma.src_address = (uint32) &IBA(id)->I2C_DATA_CMD_REG;
                dma.dest_address = (uint32_t) data;
                dma.length = len;
                dma.callback = notify_on_dma_read_end_cb;
                i2c->rx_state.num = 0;
                i2c->rx_state.len = len;
                dma.user_data = (void *) id;
                hw_dma_channel_initialization(&dma);
                hw_dma_channel_enable(channel, HW_DMA_STATE_ENABLED);
        }

        /*
         * TX channel
         * used also when reading as master since we need to trigger read by writing read command
         * to TX FIFO
         */
        if (type != HW_I2C_DMA_TRANSFER_SLAVE_READ) {
                bool is_rx = (type != HW_I2C_DMA_TRANSFER_WRITE);

                dma.channel_number = channel + 1;
                dma.bus_width = HW_DMA_BW_HALFWORD;
                dma.irq_enable = HW_DMA_IRQ_STATE_ENABLED;
                dma.irq_nr_of_trans = 0;
                dma.dreq_mode = HW_DMA_DREQ_TRIGGERED;
                /* for RX no need to increment Ax, we read single value only */
                dma.a_inc = is_rx ? HW_DMA_AINC_FALSE : HW_DMA_AINC_TRUE;
                dma.b_inc = HW_DMA_BINC_FALSE;
                dma.circular = HW_DMA_MODE_NORMAL;
                /*
                 * Set DMA priority to highest, to avoid case of bus starvation due to a
                 * higher-priority DMA transaction, which will drain the FIFO and
                 * introduce a STOP bit.
                 * If both I2C and I2C2 are transmitting via DMA, their relative priority
                 * will be defined by the DMA channels they are assigned.
                 * However, the I2C bus frequency is much lower than the frequency that the
                 * DMA controller runs at, so it is not expected that the DMA for I2C will
                 * cause bus starvation to the DMA for I2C2 (and vice versa).
                 */
                dma.dma_prio = HW_DMA_PRIO_7;
                dma.dma_idle = HW_DMA_IDLE_INTERRUPTING_MODE; /* Not used by the HW in this case */
                /*
                 * We don't use HW_DMA_INIT_AX_BX_BY because it will lock the bus until
                 * the DMA transaction is finished, which might cause bus starvation to
                 * other peripherals.
                 */
                dma.dma_init = HW_DMA_INIT_AX_BX_AY_BY;
                dma.dma_req_mux = id == HW_I2C2 ? HW_DMA_TRIG_I2C2_RXTX : HW_DMA_TRIG_I2C_RXTX;
                /* for RX we store read command separately */
                dma.src_address = (uint32_t) (is_rx ? &read_cmd : data);
                dma.dest_address = (uint32) &IBA(id)->I2C_DATA_CMD_REG;
                dma.length = len;
                dma.user_data = (void *) id;

                if (type == HW_I2C_DMA_TRANSFER_WRITE) {
                        uint16_t int_mask = HW_I2C_INT_TX_ABORT;
                        hw_i2c_reset_int_tx_abort(id);
                        i2c->tx_state.num = 0;
                        i2c->tx_state.len = len;
                        if (notify_on_stop) {
                                int_mask |= HW_I2C_INT_STOP_DETECTED;
                                hw_i2c_reset_int_stop_detected(id);
                                dma.callback = notify_on_dma_write_end_cb;
                                /*
                                 * install an interrupt handler to detect STOP or ABORT,
                                 * which will trigger user's cb
                                 */
                                hw_i2c_register_int(id, intr_write_buffer_dma_handler, int_mask);
                                /* we want TX_EMPTY as soon as FIFO is empty */
                                hw_i2c_set_tx_fifo_threshold(id, 0);
                        } else {
                                dma.callback = notify_on_dma_write_end_no_stop_cb;
                                /*
                                 * install an interrupt handler to detect ABORT,
                                 * which will disable I2C DMA, which will trigger user's cb
                                 */
                                hw_i2c_register_int(id, intr_write_buffer_dma_no_stop_handler, int_mask);
                        }
                } else {
                        /* Rx DMA has been taken care of already */
                        dma.callback = NULL;
                }

                hw_dma_channel_initialization(&dma);
                hw_dma_channel_enable(channel + 1, HW_DMA_STATE_ENABLED);
        }

        /* we can set both, does not matter that one of them won't be used */
        IBA(id)->I2C_DMA_TDLR_REG = 2;
        IBA(id)->I2C_DMA_RDLR_REG = 0;
}
/** Read back the current interrupt mask of the given I2C controller. */
uint16_t hw_i2c_get_int_mask(HW_I2C_ID id)
{
        return IBA(id)->I2C_INTR_MASK_REG;
}
/** Program the interrupt mask register of the given I2C controller. */
void hw_i2c_set_int_mask(HW_I2C_ID id, uint16_t mask)
{
        IBA(id)->I2C_INTR_MASK_REG = mask;
}