/**
 * queue_run - submit the queued DMA descriptor chain and wait for it.
 *
 * @priv: private data (a struct gpmi_perchip_data *).
 *
 * Returns 0 always; a no-op when no descriptors have been queued
 * (d_tail == 0).
 */
int queue_run(void *priv)
{
	struct gpmi_perchip_data *g = priv;

	/* Nothing queued -- nothing to run. */
	if (!g->d_tail)
		return 0;

	/* Put the channel in a known state before starting the chain. */
	stmp3xxx_dma_reset_channel(g->dma_ch);
	stmp3xxx_dma_clear_interrupt(g->dma_ch);
	stmp3xxx_dma_enable_interrupt(g->dma_ch);

	/*
	 * Patch the last descriptor so the chain terminates: drop the
	 * NANDLOCK/CHAIN bits, raise an IRQ on completion, and clear the
	 * chip-select lock in the PIO word.
	 * NOTE(review): assumes d[d_tail-1] is the tail descriptor of the
	 * chain built elsewhere -- confirm against the queueing code.
	 */
	g->d[g->d_tail - 1].command->cmd &=
		~(BM_APBH_CHn_CMD_NANDLOCK | BM_APBH_CHn_CMD_CHAIN);
	g->d[g->d_tail - 1].command->cmd |= BM_APBH_CHn_CMD_IRQONCMPLT;
	g->d[g->d_tail - 1].command->pio_words[0] &= ~BM_GPMI_CTRL0_LOCK_CS;

#ifdef DEBUG
	/*stmp37cc_dma_print_chain(&g->chain);*/
#endif

	/* Kick off the chain and block until the completion fires. */
	init_completion(&g->done);
	stmp3xxx_dma_go(g->dma_ch, g->d, 1);
	wait_for_completion(&g->done);

	/* Chain consumed; reset the queue for the next batch. */
	g->d_tail = 0;
	return 0;
}
/*
 * stmp_appuart_on - bring up the app UART's DMA channels.
 *
 * In PIO mode there is nothing to do. Otherwise route the shared DMA
 * channels to the UART and get them into a clean, interrupt-enabled state.
 */
static void stmp_appuart_on(struct platform_device *dev)
{
	struct stmp_appuart_port *port = platform_get_drvdata(dev);

	if (pio_mode)
		return;

	/*
	 * Both DMA channels are shared between the app UART and IrDA.
	 * Alternate target id 0 selects the UART, 1 selects IrDA.
	 */
	stmp3xxx_dma_set_alt_target(port->dma_rx, 0);
	stmp3xxx_dma_set_alt_target(port->dma_tx, 0);

	/* Reset both channels, then enable their interrupts. */
	stmp3xxx_dma_reset_channel(port->dma_rx);
	stmp3xxx_dma_reset_channel(port->dma_tx);

	stmp3xxx_dma_enable_interrupt(port->dma_rx);
	stmp3xxx_dma_enable_interrupt(port->dma_tx);
}
/*
 * stmp3xxx_mmc_dma_init - allocate (once) and (re)initialise the MMC DMA chain.
 *
 * @host:  driver state holding the channel id and descriptor.
 * @reset: non-zero to skip allocation and only reset/re-arm the channel
 *         (re-initialisation path); zero on first-time setup.
 *
 * Returns 0 on success or a negative errno. On failure every resource
 * acquired in this call is released via the goto cleanup chain below.
 */
static int stmp3xxx_mmc_dma_init(struct stmp3xxx_mmc_host *host, int reset)
{
	int ret;

	if (!reset) {
		/* Allocate DMA channel */
		ret = stmp3xxx_dma_request(host->dmach, host->dev,
					   "STMP37XX MMC/SD");
		if (ret) {
			dev_err(host->dev,
				"Unable to request DMA channel\n");
			return ret;
		}

		/* Coherent bounce buffer the descriptor will point at. */
		host->dma_buf = dma_alloc_coherent(host->dev, SSP_BUFFER_SIZE,
						   &host->dma_buf_phys,
						   GFP_DMA);
		if (host->dma_buf == NULL) {
			dev_err(host->dev, "Unable to allocate DMA memory\n");
			ret = -ENOMEM;
			goto out_mem;
		}

		ret = stmp3xxx_dma_allocate_command(host->dmach,
						    &host->dma_desc);
		if (ret) {
			dev_err(host->dev,
				"Unable to allocate DMA descriptor\n");
			goto out_cmd;
		}

		/*
		 * Single-descriptor chain: 'next' points back at its own
		 * handle, and the buffer pointer targets the coherent buffer
		 * allocated above.
		 */
		host->dma_desc.command->next = (u32) host->dma_desc.handle;
		host->dma_desc.command->buf_ptr = (u32) host->dma_buf_phys;
		host->dma_desc.virtual_buf_ptr = host->dma_buf;
	}

	/* Reset DMA channel */
	stmp3xxx_dma_reset_channel(host->dmach);

	/* Enable DMA interrupt */
	stmp3xxx_dma_clear_interrupt(host->dmach);
	stmp3xxx_dma_enable_interrupt(host->dmach);

	return 0;

	/* Unwind in reverse order of acquisition. */
out_cmd:
	dma_free_coherent(host->dev, SSP_BUFFER_SIZE, host->dma_buf,
			  host->dma_buf_phys);
out_mem:
	stmp3xxx_dma_release(host->dmach);

	return ret;
}
/*
 * stmp3xxx_lcdif_dma_init - set up DMA for the LCD interface.
 *
 * @dev:        owning device, used for the DMA request and error logging.
 * @phys:       physical address of the frame buffer.
 * @memsize:    frame buffer size handed to the dotclk chain builder.
 * @lcd_master: non-zero to run the LCDIF as bus master (no APBH DMA
 *              channel needed); zero to drive it via the APBH DMA channel.
 *
 * Returns 0 on success or a negative errno from the DMA request.
 */
int stmp3xxx_lcdif_dma_init(struct device *dev, dma_addr_t phys, int memsize,
			    int lcd_master)
{
	const int chan = STMP3XXX_DMA(LCD_DMA_CHANNEL, STMP3XXX_BUS_APBH);
	int err;

	stmp378x_lcd_master = lcd_master;

	if (lcd_master) {
		/* Master mode: LCDIF fetches the buffer itself. */
		stmp3xxx_setl(BM_LCDIF_CTRL_LCDIF_MASTER,
			      REGS_LCDIF_BASE + HW_LCDIF_CTRL);

		__raw_writel(phys, REGS_LCDIF_BASE + HW_LCDIF_CUR_BUF);
		__raw_writel(phys, REGS_LCDIF_BASE + HW_LCDIF_NEXT_BUF);
		return 0;
	}

	/* Slave mode: claim the APBH channel and build the dotclk chain. */
	err = stmp3xxx_dma_request(chan, dev, "lcdif");
	if (err) {
		dev_err(dev, "stmp3xxx_dma_request failed: error %d\n", err);
		return err;
	}

	stmp3xxx_dma_reset_channel(chan);
	stmp3xxx_dma_clear_interrupt(chan);
	stmp3xxx_dma_enable_interrupt(chan);

	dotclk_dma_chain_init(memsize, phys, video_dma_descriptor,
			      dma_chain_info, &dma_chain_info_pos);

	return 0;
}