int s3c2410_dma_ctrl(unsigned int channel, enum s3c2410_chan_op op) { struct s3c2410_dma_chan *chan = lookup_dma_channel(channel); if (chan == NULL) return -EINVAL; switch (op) { case S3C2410_DMAOP_START: return s3c2410_dma_start(chan); case S3C2410_DMAOP_STOP: return s3c2410_dma_dostop(chan); case S3C2410_DMAOP_PAUSE: case S3C2410_DMAOP_RESUME: return -ENOENT; case S3C2410_DMAOP_FLUSH: return s3c2410_dma_flush(chan); case S3C2410_DMAOP_STARTED: return s3c2410_dma_started(chan); case S3C2410_DMAOP_TIMEOUT: return 0; } return -ENOENT; /* unknown, don't bother */ }
/*
 * s3c2410_dma_ctrl
 *
 * Dispatch @op onto the low-level operation handler for @channel.
 * Returns -EINVAL for an unknown channel and -ENOENT for an
 * unimplemented or unknown operation.
 *
 * Fix: the "invalid operation" diagnostic used printk() without a log
 * level and carried a stray space before the newline; it now uses
 * KERN_ERR and reports which operation value was rejected.
 */
int s3c2410_dma_ctrl(dmach_t channel, enum s3c_chan_op op)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);

	if (chan == NULL)
		return -EINVAL;

	switch (op) {
	case S3C2410_DMAOP_START:
		return s3c_dma_start(chan);

	case S3C2410_DMAOP_STOP:
		return s3c_dma_dostop(chan);

	case S3C2410_DMAOP_PAUSE:
	case S3C2410_DMAOP_RESUME:
		/* pause/resume are not implemented for this controller */
		return -ENOENT;

	case S3C2410_DMAOP_FLUSH:
		return s3c_dma_flush(chan);

	case S3C2410_DMAOP_STARTED:
		return s3c_dma_started(chan);

	case S3C2410_DMAOP_TIMEOUT:
		return 0;
	}

	printk(KERN_ERR "%s: invalid operation %d\n", __func__, op);
	return -ENOENT;		/* unknown, don't bother */
}
/* s3c2410_dma_config
 *
 * xfersize:     size of unit in bytes (1, 2, 4 or 8 — the original
 *               comment said 1,2,4 but the code also accepts 8)
 * dcon:         base value of the DCONx register
 *
 * Fixes: deprecated GNU __FUNCTION__ replaced by standard C99 __func__;
 * the bad-transfer-size diagnostic now carries a printk log level.
 */
int s3c2410_dma_config(dmach_t channel, int xferunit, int dcon)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);

	pr_debug("%s: chan=%d, xfer_unit=%d, dcon=%08x\n",
		 __func__, channel, xferunit, dcon);

	if (chan == NULL)
		return -EINVAL;

	pr_debug("%s: Initial dcon is %08x\n", __func__, dcon);

	/* merge in the platform-selected control bits for this channel */
	dcon |= chan->dcon & dma_sel.dcon_mask;

	pr_debug("%s: New dcon is %08x\n", __func__, dcon);

	switch (xferunit) {
	case 1:
		dcon |= S3C_DMACONTROL_SRC_WIDTH_BYTE;
		dcon |= S3C_DMACONTROL_DEST_WIDTH_BYTE;
		break;

	case 2:
		dcon |= S3C_DMACONTROL_SRC_WIDTH_HWORD;
		dcon |= S3C_DMACONTROL_DEST_WIDTH_HWORD;
		break;

	case 4:
		dcon |= S3C_DMACONTROL_SRC_WIDTH_WORD;
		dcon |= S3C_DMACONTROL_DEST_WIDTH_WORD;
		break;

	case 8:
		dcon |= S3C_DMACONTROL_SRC_WIDTH_DWORD;
		dcon |= S3C_DMACONTROL_DEST_WIDTH_DWORD;
		break;

	default:
		printk(KERN_ERR "%s: Bad transfer size %d\n",
		       __func__, xferunit);
		return -EINVAL;
	}

	pr_debug("%s: DMA Channel control : %08x\n", __func__, dcon);

	dcon |= chan->control_flags;

	pr_debug("%s: dcon now %08x\n", __func__, dcon);

	/* For DMCCxControl 0 */
	chan->dcon = dcon;

	/* For DMACCxControl 1 : xferunit means transfer width. */
	chan->xfer_unit = xferunit;

	return 0;
}
/*
 * s3c2410_dma_devconfig
 *
 * Configure the source/destination hardware type and device address for
 * the given channel.  @source selects whether the device at @devaddr is
 * the transfer source (S3C2410_DMASRC_HW) or the destination
 * (S3C2410_DMASRC_MEM); only the low two bits of @hwcfg are written to
 * the DISRCC/DIDSTC register.
 *
 * Returns 0 on success, -EINVAL for a bad channel or source type.
 */
int s3c2410_dma_devconfig(int channel, enum s3c2410_dmasrc source, int hwcfg, unsigned long devaddr)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);

	if (chan == NULL)
		return -EINVAL;

	pr_debug("%s: source=%d, hwcfg=%08x, devaddr=%08lx\n",
		 __func__, (int)source, hwcfg, devaddr);

	/* remember the configuration on the channel for later users */
	chan->source = source;
	chan->dev_addr = devaddr;
	chan->hw_cfg = hwcfg;

	switch (source) {
	case S3C2410_DMASRC_HW:
		/* source is hardware */
		pr_debug("%s: hw source, devaddr=%08lx, hwcfg=%d\n",
			 __func__, devaddr, hwcfg);
		dma_wrreg(chan, S3C2410_DMA_DISRCC, hwcfg & 3);
		dma_wrreg(chan, S3C2410_DMA_DISRC, devaddr);
		/* (0<<1)|(0<<0): presumably AHB + incrementing address for
		 * the memory side — confirm against the S3C2410 DIDSTC spec */
		dma_wrreg(chan, S3C2410_DMA_DIDSTC, (0<<1) | (0<<0));

		/* current-position register for the memory side */
		chan->addr_reg = dma_regaddr(chan, S3C2410_DMA_DIDST);
		break;

	case S3C2410_DMASRC_MEM:
		/* source is memory */
		pr_debug("%s: mem source, devaddr=%08lx, hwcfg=%d\n",
			 __func__, devaddr, hwcfg);
		/* see note above on the (0<<1)|(0<<0) encoding */
		dma_wrreg(chan, S3C2410_DMA_DISRCC, (0<<1) | (0<<0));
		dma_wrreg(chan, S3C2410_DMA_DIDST, devaddr);
		dma_wrreg(chan, S3C2410_DMA_DIDSTC, hwcfg & 3);

		chan->addr_reg = dma_regaddr(chan, S3C2410_DMA_DISRC);
		break;

	default:
		printk(KERN_ERR "dma%d: invalid source type (%d)\n",
		       channel, source);
		return -EINVAL;
	}

	/* give the platform-specific selection glue a chance to react */
	if (dma_sel.direction != NULL)
		(dma_sel.direction)(chan, chan->map, source);

	return 0;
}
/*
 * s3c2410_dma_set_buffdone_fn
 *
 * Register @rtn as the buffer-done callback for @channel.
 */
int s3c2410_dma_set_buffdone_fn(unsigned int channel, s3c2410_dma_cbfn_t rtn)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);

	if (!chan)
		return -EINVAL;

	pr_debug("%s: chan=%p, callback rtn=%p\n", __func__, chan, rtn);

	chan->callback_fn = rtn;

	return 0;
}
/*
 * s3c2410_dma_set_opfn
 *
 * Register @rtn as the operation callback for @channel.
 */
int s3c2410_dma_set_opfn(unsigned int channel, s3c2410_dma_opfn_t rtn)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);

	if (!chan)
		return -EINVAL;

	pr_debug("%s: chan=%p, op rtn=%p\n", __func__, chan, rtn);

	chan->op_fn = rtn;

	return 0;
}
/*
 * s3c2410_dma_setflags
 *
 * Replace the flag word of @channel with @flags (e.g. to set
 * S3C2410_DMAF_AUTOSTART behaviour).
 */
int s3c2410_dma_setflags(unsigned int channel, unsigned int flags)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);

	if (!chan)
		return -EINVAL;

	pr_debug("%s: chan=%p, flags=%08x\n", __func__, chan, flags);

	chan->flags = flags;

	return 0;
}
/*
 * s3c2410_dma_getposition
 *
 * Read the hardware's current source/destination transfer positions
 * for @channel.  Either output pointer may be NULL if the caller is
 * not interested in that side.
 */
int s3c2410_dma_getposition(unsigned int channel, dma_addr_t *src, dma_addr_t *dst)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);

	if (!chan)
		return -EINVAL;

	if (src)
		*src = dma_rdreg(chan, S3C2410_DMA_DCSRC);

	if (dst)
		*dst = dma_rdreg(chan, S3C2410_DMA_DCDST);

	return 0;
}
/*
 * s3c2410_dma_getposition
 *
 * returns the current transfer points for the dma source and destination
 */
int s3c2410_dma_getposition(dmach_t channel, dma_addr_t *src, dma_addr_t *dst)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);

	if (!chan)
		return -EINVAL;

	if (src)
		*src = dma_rdreg(chan->dma_con, S3C_DMAC_SA(chan->number));

	if (dst)
		*dst = dma_rdreg(chan->dma_con, S3C_DMAC_DA(chan->number));

	return 0;
}
/*
 * s3c2410_dma_getposition
 * (header comment previously named this "s3c_dma_getposition";
 * corrected to match the actual function name)
 *
 * Returns the current transfer points for the DMA source and
 * destination of the given channel.  Either output pointer may be
 * NULL if the caller does not need that side.
 */
int s3c2410_dma_getposition(dmach_t channel, dma_addr_t *src, dma_addr_t *dst)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);

	if (chan == NULL)
		return -EINVAL;

	if (src != NULL)
		*src = dma_rdreg(chan, S3C_DMAC_CxSRCADDR);

	if (dst != NULL)
		*dst = dma_rdreg(chan, S3C_DMAC_CxDESTADDR);

	return 0;
}
int s3c2410_dma_config(unsigned int channel, int xferunit, int dcon) { struct s3c2410_dma_chan *chan = lookup_dma_channel(channel); pr_debug("%s: chan=%d, xfer_unit=%d, dcon=%08x\n", __func__, channel, xferunit, dcon); if (chan == NULL) return -EINVAL; pr_debug("%s: Initial dcon is %08x\n", __func__, dcon); dcon |= chan->dcon & dma_sel.dcon_mask; pr_debug("%s: New dcon is %08x\n", __func__, dcon); switch (xferunit) { case 1: dcon |= S3C2410_DCON_BYTE; break; case 2: dcon |= S3C2410_DCON_HALFWORD; break; case 4: dcon |= S3C2410_DCON_WORD; break; default: pr_debug("%s: bad transfer size %d\n", __func__, xferunit); return -EINVAL; } dcon |= S3C2410_DCON_HWTRIG; dcon |= S3C2410_DCON_INTREQ; pr_debug("%s: dcon now %08x\n", __func__, dcon); chan->dcon = dcon; chan->xfer_unit = xferunit; return 0; }
/* s3c2410_dma_free
 *
 * release the given channel back to the system, will stop and flush
 * any outstanding transfers, and ensure the channel is ready for the
 * next claimant.
 *
 * Note, although a warning is currently printed if the freeing client
 * info is not the same as the registrant's client info, the free is still
 * allowed to go through.
 *
 * Fixes: the entry pr_debug() dereferenced chan->number BEFORE the
 * NULL check, so freeing an invalid channel dereferenced a NULL
 * pointer instead of returning -EINVAL; deprecated __FUNCTION__
 * replaced with standard __func__.
 */
int s3c2410_dma_free(dmach_t channel, struct s3c2410_dma_client *client)
{
	unsigned long flags;
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);

	if (chan == NULL)
		return -EINVAL;

	pr_debug("%s: DMA channel %d will be stopped\n",
		 __func__, chan->number);

	local_irq_save(flags);

	if (chan->client != client) {
		printk(KERN_WARNING "DMA CH %d: possible free from different client (channel %p, passed %p)\n",
		       channel, chan->client, client);
	}

	/* sort out stopping and freeing the channel */

	if (chan->state != S3C_DMA_IDLE) {
		pr_debug("%s: need to stop dma channel %p\n",
			 __func__, chan);

		/* possibly flush the channel */
		s3c2410_dma_ctrl(channel, S3C2410_DMAOP_STOP);
	}

	chan->client = NULL;
	chan->in_use = 0;
	chan->dma_con->in_use--;

	if (chan->irq_claimed)
		free_irq(chan->irq, (void *)chan->dma_con);

	chan->irq_claimed = 0;

	if (!(channel & DMACH_LOW_LEVEL))
		dma_chan_map[channel] = NULL;

	local_irq_restore(flags);

	return 0;
}
/* s3c2410_dma_enqueue
 *
 * queue the given buffer for dma transfer on @channel.
 *
 * id:   the device driver's identifier for this buffer
 * data: physical address of the buffer data
 * size: size of the buffer in bytes
 *
 * If the channel is running, the buffer is loaded as soon as the
 * hardware can take it; if idle and S3C2410_DMAF_AUTOSTART is set,
 * the channel is started.
 *
 * Fixes: removed the vestigial "&& 1" from the load-state test,
 * deleted commented-out debug statements, and added the missing
 * space in the split "loadbuffer: timeout" message.
 */
int s3c2410_dma_enqueue(unsigned int channel, void *id, dma_addr_t data, int size)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);
	struct s3c2410_dma_buf *buf;
	unsigned long flags;

	if (chan == NULL)
		return -EINVAL;

	pr_debug("%s: id=%p, data=%08x, size=%d\n",
		 __func__, id, (unsigned int)data, size);

	buf = kmem_cache_alloc(dma_kmem, GFP_ATOMIC);
	if (buf == NULL) {
		pr_debug("%s: out of memory (%ld alloc)\n",
			 __func__, (long)sizeof(*buf));
		return -ENOMEM;
	}

	buf->next = NULL;
	buf->data = buf->ptr = data;
	buf->size = size;
	buf->id = id;
	buf->magic = BUF_MAGIC;

	local_irq_save(flags);

	if (chan->curr == NULL) {
		/* we've got nothing loaded... */

		pr_debug("%s: buffer %p queued onto empty channel\n",
			 __func__, buf);

		chan->curr = buf;
		chan->end = buf;
		chan->next = NULL;
	} else {
		pr_debug("dma%d: %s: buffer %p queued onto non-empty channel\n",
			 chan->number, __func__, buf);

		if (chan->end == NULL)
			pr_debug("dma%d: %s: %p not empty, and chan->end==NULL?\n",
				 chan->number, __func__, chan);

		chan->end->next = buf;
		chan->end = buf;
	}

	/* if necessary, update the next buffer field */
	if (chan->next == NULL)
		chan->next = buf;

	/* check to see if we can load a buffer */
	if (chan->state == S3C2410_DMA_RUNNING) {
		if (chan->load_state == S3C2410_DMALOAD_1LOADED) {
			if (s3c2410_dma_waitforload(chan, __LINE__) == 0) {
				printk(KERN_ERR "dma%d: loadbuffer: timeout loading buffer\n",
				       chan->number);
				dbg_showchan(chan);
				local_irq_restore(flags);
				return -EINVAL;
			}
		}

		while (s3c2410_dma_canload(chan) && chan->next != NULL)
			s3c2410_dma_loadbuffer(chan, chan->next);
	} else if (chan->state == S3C2410_DMA_IDLE) {
		if (chan->flags & S3C2410_DMAF_AUTOSTART) {
			s3c2410_dma_ctrl(chan->number | DMACH_LOW_LEVEL,
					 S3C2410_DMAOP_START);
		}
	}

	local_irq_restore(flags);

	return 0;
}
/* s3c2410_dma_enqueue
 *
 * queue an given buffer for dma transfer.
 *
 * id         the device driver's id information for this buffer
 * data       the physical address of the buffer data
 * size       the size of the buffer in bytes
 *
 * If the channel is not running, then the flag S3C2410_DMAF_AUTOSTART
 * is checked, and if set, the channel is started. If this flag isn't set,
 * then an error will be returned.
 *
 * It is possible to queue more than one DMA buffer onto a channel at
 * once, and the code will deal with the re-loading of the next buffer
 * when necessary.
 *
 * Fixes:
 *  - the channel pointer was used without a NULL check (every sibling
 *    entry point validates it and returns -EINVAL);
 *  - on dma_alloc_coherent() failure the function returned with local
 *    IRQs still disabled and leaked the dma_kmem buffer just allocated;
 *  - deprecated __FUNCTION__ replaced with standard __func__, and the
 *    split "loadbuffer:timeout" message regained its missing space.
 */
int s3c2410_dma_enqueue(unsigned int channel, void *id, dma_addr_t data, int size)
{
	struct s3c2410_dma_chan *chan = lookup_dma_channel(channel);
	struct s3c_dma_buf *buf;
	unsigned long flags;

	if (chan == NULL)
		return -EINVAL;

	pr_debug("%s: id=%p, data=%08x, size=%d\n",
		 __func__, id, (unsigned int)data, size);

	buf = kmem_cache_alloc(dma_kmem, GFP_ATOMIC);
	if (buf == NULL) {
		printk(KERN_ERR "dma <%d> no memory for buffer\n", channel);
		return -ENOMEM;
	}

	pr_debug("%s: new buffer %p\n", __func__, buf);

	buf->next = NULL;
	buf->data = buf->ptr = data;
	buf->size = size;
	buf->id = id;
	buf->magic = BUF_MAGIC;

	local_irq_save(flags);

	buf->mcptr_cpu = dma_alloc_coherent(NULL, SIZE_OF_MICRO_CODES,
					    &buf->mcptr, GFP_ATOMIC);
	if (buf->mcptr_cpu == NULL) {
		/* re-enable IRQs and release the queue entry before bailing */
		local_irq_restore(flags);
		kmem_cache_free(dma_kmem, buf);
		printk(KERN_ERR "%s: failed to allocate memory for micro codes\n",
		       __func__);
		return -ENOMEM;
	}

	if (chan->curr == NULL) {
		/* we've got nothing loaded... */

		pr_debug("%s: buffer %p queued onto empty channel\n",
			 __func__, buf);

		chan->curr = buf;
		chan->end = buf;
		chan->next = NULL;
	} else {
		pr_debug("dma CH %d: %s: buffer %p queued onto non-empty channel\n",
			 chan->number, __func__, buf);

		if (chan->end == NULL) /* In case of flushing */
			pr_debug("dma CH %d: %s: %p not empty, and chan->end==NULL?\n",
				 chan->number, __func__, chan);
		else {
			chan->end->next = buf;
			chan->end = buf;
		}
	}

	/* if necessary, update the next buffer field */
	if (chan->next == NULL)
		chan->next = buf;

	/* check to see if we can load a buffer */
	if (chan->state == S3C_DMA_RUNNING) {
		if (chan->load_state == S3C_DMALOAD_1LOADED) {
			if (s3c_dma_waitforload(chan, __LINE__) == 0) {
				printk(KERN_ERR "dma CH %d: loadbuffer: timeout loading buffer\n",
				       chan->number);
				dbg_showchan(chan);
				local_irq_restore(flags);
				return -EINVAL;
			}
		}
	} else if (chan->state == S3C_DMA_IDLE) {
		if (chan->flags & S3C2410_DMAF_AUTOSTART) {
			s3c2410_dma_ctrl(channel, S3C2410_DMAOP_START);
		} else {
			pr_debug("loading onto stopped channel\n");
		}
	}

	local_irq_restore(flags);
	return 0;
}
int s3c2410_dma_devconfig(int channel, enum s3c2410_dmasrc source, int hwcfg, unsigned long devaddr) { struct s3c2410_dma_chan *chan = lookup_dma_channel(channel); if (chan == NULL) return -EINVAL; pr_debug("%s: source=%d, hwcfg=%08x, devaddr=%08lx\n", __FUNCTION__, (int)source, hwcfg, devaddr); chan->source = source; chan->dev_addr = devaddr; switch (source) { case S3C2410_DMASRC_MEM: /* source is Memory : Mem-to-Peri ( Write into FIFO) */ chan->config_flags = chan->map->hw_addr.to; hwcfg = S3C_DMACONTROL_DBSIZE(1)|S3C_DMACONTROL_SBSIZE(1); chan->control_flags = S3C_DMACONTROL_DP_NON_SECURE|S3C_DMACONTROL_DEST_FIXED| S3C_DMACONTROL_SP_NON_SECURE|S3C_DMACONTROL_SRC_INC| hwcfg; //chan->control_flags = hwcfg; return 0; case S3C2410_DMASRC_HW: /* source is peripheral : Peri-to-Mem ( Read from FIFO) */ chan->config_flags = chan->map->hw_addr.from; hwcfg = S3C_DMACONTROL_DBSIZE(1)|S3C_DMACONTROL_SBSIZE(1); chan->control_flags = S3C_DMACONTROL_DP_NON_SECURE|S3C_DMACONTROL_DEST_INC| S3C_DMACONTROL_SP_NON_SECURE|S3C_DMACONTROL_SRC_FIXED| hwcfg; //chan->control_flags = hwcfg; return 0; case S3C_DMA_MEM2MEM: chan->config_flags = 0; hwcfg = S3C_DMACONTROL_DBSIZE(16)|S3C_DMACONTROL_SBSIZE(16); chan->control_flags = S3C_DMACONTROL_DP_NON_SECURE|S3C_DMACONTROL_DEST_INC| S3C_DMACONTROL_SP_NON_SECURE|S3C_DMACONTROL_SRC_INC| hwcfg; //chan->control_flags = hwcfg; return 0; case S3C_DMA_MEM2MEM_SET: chan->config_flags = 0; hwcfg = S3C_DMACONTROL_DBSIZE(16)|S3C_DMACONTROL_SBSIZE(16); chan->control_flags = S3C_DMACONTROL_DP_NON_SECURE|S3C_DMACONTROL_DEST_INC| S3C_DMACONTROL_SP_NON_SECURE|S3C_DMACONTROL_SRC_FIXED| hwcfg; //chan->control_flags = hwcfg; return 0; case S3C_DMA_PER2PER: printk("Peripheral-to-Peripheral DMA NOT YET implemented !! \n"); return -EINVAL; default: printk(KERN_ERR "DMA CH :%d - invalid source type ()\n", channel); printk("Unsupported DMA configuration from the device driver using DMA driver \n"); return -EINVAL; } }
int s3c2410_dma_devconfig(int channel, enum s3c2410_dmasrc source, int hwcfg, unsigned long devaddr) { unsigned long tmp; struct s3c2410_dma_chan *chan = lookup_dma_channel(channel); if (chan == NULL) return -EINVAL; pr_debug("%s: source=%d, hwcfg=%08x, devaddr=%08lx\n", __FUNCTION__, (int)source, hwcfg, devaddr); chan->source = source; chan->dev_addr = devaddr; switch (source) { case S3C2410_DMASRC_MEM: /* source is Memory : Mem-to-Peri ( Write into FIFO) */ tmp = S3C_DMACONFIG_TCMASK | S3C_DMACONFIG_FLOWCTRL_MEM2PER | (chan->map->hw_addr.to) << S3C_DEST_SHIFT | S3C_DMACONFIG_CHANNEL_ENABLE; chan->config_flags = tmp; /* TODO : Now, Scatter&Gather DMA NOT supported */ dma_wrreg(chan, S3C_DMAC_CxLLI, 0); /* devaddr : Periperal address (destination) */ dma_wrreg(chan, S3C_DMAC_CxDESTADDR, devaddr); /* source address : memory(buffer) address */ chan->addr_reg = dma_regaddr(chan, S3C_DMAC_CxSRCADDR); chan->control_flags = S3C_DMACONTROL_SRC_INC | S3C_DMACONTROL_DEST_AXI_PERI ; //chan->control_flags = hwcfg; return 0; case S3C2410_DMASRC_HW: /* source is peripheral : Peri-to-Mem ( Read from FIFO) */ tmp = S3C_DMACONFIG_TCMASK | S3C_DMACONFIG_FLOWCTRL_PER2MEM | (chan->map->hw_addr.from) << S3C_SRC_SHIFT | S3C_DMACONFIG_CHANNEL_ENABLE; chan->config_flags = tmp; /* TODO : Now, Scatter&Gather DMA NOT supported */ dma_wrreg(chan, S3C_DMAC_CxLLI, 0); /* devaddr : Periperal address (source) */ dma_wrreg(chan, S3C_DMAC_CxSRCADDR, devaddr); /* destination address : memory(buffer) address */ chan->addr_reg = dma_regaddr(chan, S3C_DMAC_CxDESTADDR); chan->control_flags = S3C_DMACONTROL_DEST_INC | S3C_DMACONTROL_SRC_AXI_PERI; //chan->control_flags = hwcfg; return 0; case S3C_DMA_MEM2MEM: /* this is temporary for G3D */ tmp = S3C_DMACONFIG_TCMASK | S3C_DMACONFIG_FLOWCTRL_MEM2MEM | S3C_DMACONFIG_CHANNEL_ENABLE; chan->config_flags = tmp; /* TODO : Now, Scatter&Gather DMA NOT YET supported */ dma_wrreg(chan, S3C_DMAC_CxLLI, 0); /* devaddr : memory/onenand address (source) */ 
dma_wrreg(chan, S3C_DMAC_CxSRCADDR, devaddr); /* destination address : memory(buffer) address */ chan->addr_reg = dma_regaddr(chan, S3C_DMAC_CxDESTADDR); chan->control_flags |= (S3C_DMACONTROL_SRC_INC | S3C_DMACONTROL_DEST_AXI_PERI | S3C_DMACONTROL_SBSIZE_4 | S3C_DMACONTROL_DBSIZE_4); //chan->control_flags = hwcfg; return 0; case S3C_DMA_MEM2MEM_P: /* source is memory : Memory-to-Mem ( Read/Write) */ tmp = S3C_DMACONFIG_TCMASK | S3C_DMACONFIG_FLOWCTRL_MEM2MEM | S3C_DMACONFIG_CHANNEL_ENABLE; //if(chan->map->hw_addr.from == S3C_DMA0_ONENAND_RX) { //tmp |= S3C_DMACONFIG_ONENANDMODESRC; //} chan->config_flags = tmp; /* TODO : Now, Scatter&Gather DMA NOT YET supported */ dma_wrreg(chan, S3C_DMAC_CxLLI, 0); /* devaddr : memory/onenand address (source) */ dma_wrreg(chan, S3C_DMAC_CxSRCADDR, devaddr); /* destination address : memory(buffer) address */ chan->addr_reg = dma_regaddr(chan, S3C_DMAC_CxDESTADDR); chan->control_flags |= (S3C_DMACONTROL_SRC_INC | S3C_DMACONTROL_DEST_INC | S3C_DMACONTROL_SBSIZE_4 | S3C_DMACONTROL_DBSIZE_4); //chan->control_flags = hwcfg; return 0; case S3C_DMA_PER2PER: printk("Peripheral-to-Peripheral DMA NOT YET implemented !! \n"); return -EINVAL; default: printk(KERN_ERR "DMA CH :%d - invalid source type ()\n", channel); printk("Unsupported DMA configuration from the device driver using DMA driver \n"); return -EINVAL; } }