/*
 * Disable the SCU timers by clearing the enable bit (bit 0) of the
 * T1MD register, preserving all other bits.
 */
void scu_timer_all_disable(void)
{
        /* Read-modify-write: only bit 0 is cleared. */
        const uint32_t reg = MEMORY_READ(32, SCU(T1MD)) & ~0x00000001;

        MEMORY_WRITE(32, SCU(T1MD), reg);
}
/*
 * Return the current value of the SCU interrupt status register (IST).
 */
uint32_t scu_ic_status_get(void)
{
        return MEMORY_READ(32, SCU(IST));
}
/*
 * DMA end-of-transfer handler: record completion and stop counting.
 *
 * NOTE(review): the name says "level 2" but the register written is
 * D0EN (level 0 enable) — confirm which level this handler is wired to.
 */
static void scu_dma_level_2_end(void)
{
        state.st_status = ST_STATUS_END;
        g_counting = false;

        /* Disable the DMA level (clear enable register). */
        MEMORY_WRITE(32, SCU(D0EN), 0x0);
}
/*
 * DMA illegal-transfer handler: record the error condition and stop
 * counting, then disable DMA level 0.
 */
static void scu_dma_illegal(void)
{
        state.st_status = ST_STATUS_ILLEGAL;
        g_counting = false;

        /* Disable the DMA level (clear enable register). */
        MEMORY_WRITE(32, SCU(D0EN), 0x0);
}
static void scu_dma_level(int level __unused) { struct dma_level_cfg cfg; if(state.st_level[level].level_mode == DMA_MODE_DIRECT) { cfg.mode.direct.src = (void *)0x06040000; // High work RAM cfg.mode.direct.dst = (void *)VRAM_ADDR_4MBIT(0, 0x0); // VDP2 if(level == DMA_LEVEL_0) cfg.mode.direct.len = 0x1000-1; else cfg.mode.direct.len = 0x1000-1; } else { // in this case in the dma list desctiption buffer defines 3 elements of 3 longs (<24 bytes) so aligned on 32 bytes cfg.mode.indirect.nelems= 6; /*dma_tbl_type table[3] __attribute__((aligned(32))) = { { .len = 0x1000-1, .dst = (void *)VRAM_ADDR_4MBIT(0, 0x0), .src = (const void *)0x06040000 }, { .len = 0x1000-1, .dst = (void *)VRAM_ADDR_4MBIT(0, 0x1000), .src = (const void *)0x06041000 }, { .len = 0x1000-1, .dst = (void *)VRAM_ADDR_4MBIT(0, 0x2000), .src = (const void *)((1 << 31) | 0x06042000) } };*/ uint32_t tbl[] __attribute__((aligned(32))) = { 0x1000-1, (uint32_t)VRAM_ADDR_4MBIT(0, 0x0), (uint32_t) (0x06040000), 0x1000-1, (uint32_t)VRAM_ADDR_4MBIT(0, 0x1000), (uint32_t) ((1 << 31) | 0x06041000)}; cfg.mode.indirect.tbl = (void *) tbl; } // generic parameters cfg.starting_factor = state.st_level[level].level_sf; cfg.add = 3; // sattech, need to be 001 if update bits set cfg.update = 0; //DMA_MODE_UPDATE_RUP | DMA_MODE_UPDATE_WUP; // update Read and Write addr each time, no save g_dma_counter = 0; if(state.st_level[level].level_sf == DMA_MODE_START_FACTOR_ENABLE) { scu_dma_cpu_level_set(level, state.st_level[level].level_mode, &cfg); scu_dma_cpu_level_start(level); // only needed for this starting factor g_counting = true; } else { scu_dma_cpu_level_set(level, state.st_level[level].level_mode, &cfg); switch (level) { case DMA_LEVEL_0: MEMORY_WRITE(32, SCU(D0EN), 0x00000100); return; case DMA_LEVEL_1: MEMORY_WRITE(32, SCU(D1EN), 0x00000100); return; case DMA_LEVEL_2: MEMORY_WRITE(32, SCU(D2EN), 0x00000100); } g_counting = true; } }
/*
 * Load the SCU timer 1 set-value register (T1S) with `set`.
 */
void scu_timer_1_set(uint16_t set)
{
        /* The register is written as a 32-bit access; the 16-bit value
         * widens implicitly. */
        const uint32_t value = set;

        MEMORY_WRITE(32, SCU(T1S), value);
}
/* * Activate DMA from the Master CPU * * Keep in mind that * DMA transfers does not begin until it is explictily started * DMA transfer level 2 will block the CPU during operation of level 1 * * SCU DMA is for transfers between different buses: * Work RAM-H <-> A-bus * Work RAM-H <-> B-bus * A-bus <-> B-bus */ void scu_dma_cpu_level_set(enum dma_level lvl, enum dma_mode mode, struct dma_level_cfg *cfg) { uint32_t dst; uint32_t src; size_t len; uint32_t add; /* * Panic if either the source or destination is within the VDP2 * region. */ if ((scu_dma_cpu_level_sanitize(cfg, mode)) < 0) { return; } switch (mode) { case DMA_MODE_DIRECT: /* The absolute address must not be cached. */ dst = 0x20000000 | (uint32_t)cfg->mode.direct.dst; /* The absolute address must not be cached. */ src = 0x20000000 | (uint32_t)cfg->mode.direct.src; len = cfg->mode.direct.len; break; case DMA_MODE_INDIRECT: src = 0x00000000; /* The absolute address must not be cached. */ dst = 0x20000000 | (uint32_t)cfg->mode.indirect.tbl; len = 0x00000000; break; default: return; } add = 0x00000100 | (common_log2_down(cfg->add) & 0x7); switch (lvl) { case DMA_LEVEL_0: /* Highest priority */ /* Level 0 is able to transfer 1MiB */ assert(len < 0x100000); /* Cannot modify registers while in operation */ MEMORY_WRITE(32, SCU(D0R), src); MEMORY_WRITE(32, SCU(D0W), dst); /* Read of transfer byte count in DMA transfer register prohibited */ MEMORY_WRITE(32, SCU(D0C), len); MEMORY_WRITE(32, SCU(D0AD), add); /* Keep DMA level off (disable and keep off) */ MEMORY_WRITE(32, SCU(D0EN), 0); MEMORY_WRITE(32, SCU(D0MD), (mode << 24) | cfg->starting_factor | cfg->update); return; case DMA_LEVEL_1: /* Level 1 is able transfer 4KiB */ assert(len < 0x1000); /* Cannot modify registers while in operation */ MEMORY_WRITE(32, SCU(D1R), src); MEMORY_WRITE(32, SCU(D1W), dst); /* Read of transfer byte count in DMA transfer register prohibited */ MEMORY_WRITE(32, SCU(D1C), len); MEMORY_WRITE(32, SCU(D1AD), add); /* Keep DMA level 
off (disable and keep off) */ MEMORY_WRITE(32, SCU(D1EN), 0x00000000); MEMORY_WRITE(32, SCU(D1MD), (mode << 24) | cfg->starting_factor | cfg->update); return; case DMA_LEVEL_2: /* * KLUDGE * * An operation error may occur if DMA level 2 is * activated during DMA level 1 activation. * * To prevent such operation errors, do not activate DMA * level 2 during DMA level 1 operation. */ /* Level 1 is able transfer 4KiB */ assert(len < 0x1000); /* Spin until level 2 and level 1 are no longer * activated. * * Level 2 cannot modify registers while in operation */ MEMORY_WRITE(32, SCU(D2R), src); MEMORY_WRITE(32, SCU(D2W), dst); /* Read of transfer byte count in DMA transfer register prohibited */ MEMORY_WRITE(32, SCU(D2C), len); MEMORY_WRITE(32, SCU(D2AD), add); /* Keep DMA level off (disable and keep off) */ MEMORY_WRITE(32, SCU(D2EN), 0x00000000); MEMORY_WRITE(32, SCU(D2MD), (mode << 24) | cfg->starting_factor | cfg->update); default: return; } }