/*
 * Arm the record (capture) path: program the codec's record sample
 * counters for half the capture buffer, kick off auto-init ISA DMA on
 * dma1, then enable recording.  Returns B_OK.
 */
status_t iw_start_record_dma(interwave_dev *iw)
{
	uint16 count;

	iwprintf("iw_start_record_dma");

	/* Quiesce the record path before reprogramming anything. */
	iw_enable_record(iw, false);

	/*
	 * Interrupt clearing is deliberately skipped here:
	 * iw_clear_codec_interrupts(iw) might conflict with a playback DMA
	 * that is already running (see FIXME in the original).
	 */

	/*
	 * Program the sample counters to half the record area.  As with
	 * playback, the count interrupt fires when the count is *exceeded*,
	 * so 1 must be subtracted.  FIXME: verify the sample size here.
	 */
	count = iw->pcm.rd_size/4 - 1;
	iwprintf("record sample count:%d", count);
	iw_poke(iw, CLRCTI, count & 0xff);  /* low byte — IWPG says write it first */
	iw_poke(iw, CURCTI, count >> 8);    /* high byte */

	/* Kick off auto-init ISA DMA (see the playback routine for details). */
	start_dma(iw->dma1,
	          (void *)(iw->low_phys + (iw->pcm.rd_1 - iw->low_mem)), /* virt -> phys */
	          iw->pcm.rd_size,
	          0x54,  /* download, auto-init mode */
	          0);    /* extended mode flags ? */

	/* All set — let the record path run. */
	iw_enable_record(iw, true);

	return B_OK;
}
/**
 * Main run time processing loop.
 *
 * Pushes the current color to pixel 0 (strobing PD5 around the DMA
 * transfer), then steps a red -> green -> blue fade state machine:
 * one channel ramps up by 5 until it hits 255, then ramps back down
 * to 0, at which point the next channel takes over.
 */
void loop(void)
{
	static int8_t step[3] = {5, 0, 0};
	static uint8_t r = 0, g = 0, b = 0;

	/* 5mm through-hole LEDs take RGB channel order, not GRB. */
	set_pixel_color(0, r, g, b);
	PORTD.OUTSET = PIN5_bm;
	start_dma();
	PORTD.OUTCLR = PIN5_bm;

	if (r == 255) {
		step[0] = -5;          /* red peaked: fade it out */
	} else if (g == 255) {
		step[1] = -5;          /* green peaked: fade it out */
	} else if (b == 255) {
		step[2] = -5;          /* blue peaked: fade it out */
	} else if (r == 0 && step[0] == -5) {
		step[0] = 0;           /* red done: hand off to green */
		step[1] = 5;
	} else if (g == 0 && step[1] == -5) {
		step[1] = 0;           /* green done: hand off to blue */
		step[2] = 5;
	} else if (b == 0 && step[2] == -5) {
		step[2] = 0;           /* blue done: wrap back to red */
		step[0] = 5;
	}

	r += step[0];
	g += step[1];
	b += step[2];

	_delay_ms(20);
}
/*
 * TIM1 capture/compare ISR.
 *
 * On each CC2 capture event: measures the interval since the previous
 * capture (extending the 16-bit counter with the overflow count kept in
 * the global n_overflow), feeds it to the motor speed controller, and
 * gates DMA on/off based on how stable the motor speed is.
 *
 * Shared state (globals): n_overflow, last_ccr, dma_enabled, motor_ok.
 * NOTE(review): statement ordering matters here — the overflow count is
 * snapshotted and reset before use, and the CC2 flag is cleared only
 * after the capture value has been read.
 */
void tim1_cc_isr(void)
{
	if(timer_get_flag(TIM1, TIM_SR_CC2IF)) {
		/* Latch the capture value and the overflow count atomically
		 * enough for our purposes: snapshot n_overflow, then reset it
		 * so the overflow ISR can start counting the next interval. */
		int ccr = TIM_CCR2(TIM1);
		int save_n_overflow = n_overflow;
		n_overflow = 0;

		/* Full interval since the previous capture, in timer ticks:
		 * 16-bit capture extended by the number of counter wraps. */
		int delta = (ccr + (save_n_overflow<<16)) - last_ccr;

		/* Kick the next DMA transfer only while the speed loop has
		 * declared the motor stable. */
		if(dma_enabled)
			start_dma();

		timer_clear_flag(TIM1, TIM_SR_CC2IF);

		/* Debug pulse hooks, currently disabled: */
		//gpio_set(DEBUG0_OUT_PORT, DEBUG0_OUT_PIN);
		//gpio_clear(DEBUG0_OUT_PORT, DEBUG0_OUT_PIN);

		last_ccr = ccr;

		/* Run one step of the motor controller; speed_delta is its
		 * error-ish measure (presumably ticks off target — TODO confirm
		 * against motor_ctrl_step). */
		int speed_delta = motor_ctrl_step(delta);

		// Wait for motor speed to get stable.
		/* Enable DMA once the speed error has stayed within +/-10 for
		 * motor_ok consecutive captures (motor_ok counts down to 0). */
		if(!dma_enabled && speed_delta > -10 && speed_delta < 10) {
			if(motor_ok == 0) {
				set_status(LED_GREEN, 1);
				set_status(LED_RED, 0);
				dma_enabled = 1;
			} else {
				motor_ok--;
			}
		}

		/* Large error while running: drop DMA, show red, and require
		 * 16 good captures before re-enabling. */
		if(dma_enabled && (speed_delta < -100 || speed_delta > 100)) {
			set_status(LED_GREEN, 0);
			set_status(LED_RED, 1);
			dma_enabled = 0;
			motor_ok = 16;
		}

		/* Very short interval: emit a brief marker pulse on PB0
		 * (nops stretch the pulse so it is visible on a scope). */
		if(delta < 100) {
			gpio_set(GPIOB, GPIO0);
			__asm("nop");
			__asm("nop");
			__asm("nop");
			__asm("nop");
			gpio_clear(GPIOB, GPIO0);
		}
	}
}
static void dma_advance(struct goku_udc *dev, struct goku_ep *ep) { struct goku_request *req; struct goku_udc_regs __iomem *regs = ep->dev->regs; u32 master; master = readl(®s->dma_master); if (unlikely(list_empty(&ep->queue))) { stop: if (ep->is_in) dev->int_enable &= ~INT_MSTRDEND; else dev->int_enable &= ~(INT_MSTWREND|INT_MSTWRTMOUT); writel(dev->int_enable, ®s->int_enable); return; } req = list_entry(ep->queue.next, struct goku_request, queue); /* normal hw dma completion (not abort) */ if (likely(ep->is_in)) { if (unlikely(master & MST_RD_ENA)) return; req->req.actual = readl(®s->in_dma_current); } else { if (unlikely(master & MST_WR_ENA)) return; /* hardware merges short packets, and also hides packet * overruns. a partial packet MAY be in the fifo here. */ req->req.actual = readl(®s->out_dma_current); } req->req.actual -= req->req.dma; req->req.actual++; #ifdef USB_TRACE VDBG(dev, "done %s %s dma, %u/%u bytes, req %p\n", ep->ep.name, ep->is_in ? "IN" : "OUT", req->req.actual, req->req.length, req); #endif done(ep, req, 0); if (list_empty(&ep->queue)) goto stop; req = list_entry(ep->queue.next, struct goku_request, queue); (void) start_dma(ep, req); }
static int tx_stream_start(struct stream *stream, struct device *dev) { const struct i2s_stm32_cfg *cfg = DEV_CFG(dev); struct i2s_stm32_data *const dev_data = DEV_DATA(dev); size_t mem_block_size; int ret; ret = queue_get(&stream->mem_block_queue, &stream->mem_block, &mem_block_size); if (ret < 0) { return ret; } k_sem_give(&stream->sem); /* Assure cache coherency before DMA read operation */ DCACHE_CLEAN(stream->mem_block, mem_block_size); if (stream->master) { LL_I2S_SetTransferMode(cfg->i2s, LL_I2S_MODE_MASTER_TX); } else { LL_I2S_SetTransferMode(cfg->i2s, LL_I2S_MODE_SLAVE_TX); } /* remember active TX DMA channel (used in callback) */ active_dma_tx_channel[stream->dma_channel] = dev; ret = start_dma(dev_data->dev_dma, stream->dma_channel, &stream->dma_cfg, stream->mem_block, (void *)LL_SPI_DMA_GetRegAddr(cfg->i2s), stream->cfg.block_size); if (ret < 0) { LOG_ERR("Failed to start TX DMA transfer: %d", ret); return ret; } LL_I2S_EnableDMAReq_TX(cfg->i2s); LL_I2S_EnableIT_ERR(cfg->i2s); LL_I2S_Enable(cfg->i2s); return 0; }
/*
 * Arm the playback path: program the codec's playback sample counters
 * for half the playback area, kick off auto-init ISA DMA on dma2, then
 * enable playback.  Returns B_OK.
 */
status_t iw_start_playback_dma(interwave_dev *iw)
{
	uint16 count;

	iwprintf("iw_start_playback_dma");

	/* Quiesce the playback path before reprogramming anything. */
	iw_enable_playback(iw, false);

	/* Drop any stale codec interrupts. */
	iw_clear_codec_interrupts(iw);

	/*
	 * Program the sample counters to half the playback area.
	 *
	 * The InterWave's sample count interrupt (our half-buffer marker)
	 * fires when the given count is *exceeded*, not *reached* — hence
	 * the VITAL -1.  At 16 bit stereo a sample is 4 bytes.
	 * FIXME: verify the sample size here.
	 */
	count = iw->pcm.wr_size/4 - 1;
	iwprintf("playback sample count:%d", count);
	iw_poke(iw, CLPCTI, count & 0xff);  /* low byte — IWPG says write it first */
	iw_poke(iw, CUPCTI, count >> 8);    /* high byte */

	/*
	 * Kick off auto-init ISA DMA.
	 *
	 * The buffer really is 2*pcm.wr_size bytes; converted to words
	 * (16-bit DMA) that comes back to... pcm.wr_size.  Be careful.
	 * Trial And Error(TM) showed start_isa_dma() subtracts the 1 the
	 * DMAC wants from this count itself.
	 */
	start_dma(iw->dma2,
	          (void *)(iw->low_phys + (iw->pcm.wr_1 - iw->low_mem)), /* virt -> phys */
	          (2*iw->pcm.wr_size)/2,
	          0x58,  /* upload, auto-init mode */
	          0);    /* uh ? */

	/* All set — let the playback path run. */
	iw_enable_playback(iw, true);

	return B_OK;
}
static int rx_stream_start(struct stream *stream, struct device *dev) { const struct i2s_stm32_cfg *cfg = DEV_CFG(dev); struct i2s_stm32_data *const dev_data = DEV_DATA(dev); int ret; ret = k_mem_slab_alloc(stream->cfg.mem_slab, &stream->mem_block, K_NO_WAIT); if (ret < 0) { return ret; } if (stream->master) { LL_I2S_SetTransferMode(cfg->i2s, LL_I2S_MODE_MASTER_RX); } else { LL_I2S_SetTransferMode(cfg->i2s, LL_I2S_MODE_SLAVE_RX); } /* remember active RX DMA channel (used in callback) */ active_dma_rx_channel[stream->dma_channel] = dev; ret = start_dma(dev_data->dev_dma, stream->dma_channel, &stream->dma_cfg, (void *)LL_SPI_DMA_GetRegAddr(cfg->i2s), stream->mem_block, stream->cfg.block_size); if (ret < 0) { LOG_ERR("Failed to start RX DMA transfer: %d", ret); return ret; } LL_I2S_EnableDMAReq_RX(cfg->i2s); LL_I2S_EnableIT_ERR(cfg->i2s); LL_I2S_Enable(cfg->i2s); return 0; }
/*
 * hd_read_udma - read sectors from an IDE disk using UDMA.
 *
 * @bdev:   block device; privdata holds the hd_t for the drive.
 * @buffer: destination buffer (count bytes).
 * @count:  byte count; assumed to be a multiple of SECTOR_SIZE.
 * @blkno:  starting sector number.
 *
 * Transfers are issued in chunks bounded by the ATA 256-sector limit
 * and by MAX_DMA_XFER_SIZE.  Returns count on success or a negative
 * error code.
 *
 * Fix: blkno was never advanced between chunks, so any read needing
 * more than one chunk re-read the SAME starting sectors into successive
 * buffer positions, silently returning wrong data.  blkno now moves
 * forward by nsects per iteration, in step with bufp/sectsleft.
 */
static int hd_read_udma(block_dev_t *bdev, char *buffer, size_t count, blkno_t blkno)
{
	hd_t *hd;
	hdc_t *hdc;
	int sectsleft;
	int nsects;
	int result = 0;
	char *bufp;

	if (count == 0) {
		return 0;
	}
	bufp = (char *) buffer;
	hd = (hd_t *) bdev->privdata;
	hdc = hd->hdc;
	sectsleft = count / SECTOR_SIZE;

	while (sectsleft > 0) {
		/* Select drive */
		ide_select_drive(hd);

		/* Wait for controller ready */
		result = ide_wait(hdc, HDCS_DRDY, HDTIMEOUT_DRDY);
		if (result != 0) {
			result = -EIO;
			break;
		}

		/* Calculate maximum number of sectors we can transfer:
		 * the ATA sector-count register caps a command at 256
		 * sectors, and the DMA buffer caps it further. */
		if (sectsleft > 256) {
			nsects = 256;
		} else {
			nsects = sectsleft;
		}
		if (nsects > MAX_DMA_XFER_SIZE / SECTOR_SIZE) {
			nsects = MAX_DMA_XFER_SIZE / SECTOR_SIZE;
		}

		/* Prepare transfer */
		result = 0;
		hdc->dir = HD_XFER_DMA;
		hdc->active = hd;

		hd_setup_transfer(hd, blkno, nsects);

		/* Setup DMA */
		setup_dma(hdc, bufp, nsects * SECTOR_SIZE, BM_CR_WRITE);

		/* Start read */
		outb(HDCMD_READDMA, hdc->iobase + HDC_COMMAND);
		start_dma(hdc);

		/* Stop DMA channel and check DMA status */
		result = stop_dma(hdc);
		if (result < 0) {
			break;
		}

		/* Check controller status */
		if (hdc->status & HDCS_ERR) {
			result = -EIO;
			break;
		}

		/* Advance to next chunk: sector count, buffer position,
		 * AND the on-disk block number must all move together. */
		sectsleft -= nsects;
		bufp += nsects * SECTOR_SIZE;
		blkno += nsects;
	}

	/* Cleanup */
	hdc->dir = HD_XFER_IDLE;
	hdc->active = NULL;

	return result == 0 ? count : result;
}