/*
 * Initialize the SMPC: put both I/O ports under SMPC control, install
 * the system manager and timer 0 interrupt handlers, and register the
 * peripheral parser on the VDP2 VBLANK-OUT interrupt mux.
 */
void smpc_init(void)
{
        irq_mux_t *vbo;
        uint32_t mask;

        /* Set both ports to "SMPC" control mode. */
        MEMORY_WRITE(8, SMPC(EXLE1), 0x00);
        MEMORY_WRITE(8, SMPC(IOSEL1), 0x00);
        MEMORY_WRITE(8, SMPC(DDR1), 0x00);
        MEMORY_WRITE(8, SMPC(PDR1), 0x00);

        /* Disable interrupts while the SCU vector table is modified */
        cpu_intc_disable();

        /* Mask the two interrupts we are about to install handlers for */
        mask = IC_MASK_SYSTEM_MANAGER | IC_MASK_TIMER_0;
        scu_ic_mask_chg(IC_MASK_ALL, mask);

        scu_ic_interrupt_set(IC_INTERRUPT_TIMER_0, &smpc_peripheral_data);
        scu_ic_interrupt_set(IC_INTERRUPT_SYSTEM_MANAGER, &smpc_peripheral_system_manager);

        /* Unmask only the interrupts installed above */
        scu_ic_mask_chg(IC_MASK_ALL & ~mask, IC_MASK_NONE);

        /* Program the SCU timers; timer 1 runs in per-line mode.
         * NOTE(review): timer 0 compare value 5 presumably targets an
         * early scanline -- confirm against the SCU timer docs. */
        scu_timer_0_set(5);
        scu_timer_1_set(0);
        scu_timer_1_mode_set(/* sp_line = */ true);

        /* Add to VDP2 VBLANK-OUT mux */
        vbo = vdp2_tvmd_vblank_out_irq_get();
        irq_mux_handle_add(vbo, smpc_peripheral_parse, NULL);

        /* Enable interrupts */
        cpu_intc_enable();
}
/*
 * Halt CPU DMAC activity on both channels.
 *
 * Clears the AE and NMIF flags in DMAOR (while either is set, DMA
 * cannot be enabled regardless of the per-channel DE bits), then
 * drops the DE bit in CHCR0 and CHCR1 to stop both channels at once.
 */
void cpu_dmac_channel_stop(void)
{
        uint32_t reg;

        /* Clear the error/NMI flags so DMA can be re-enabled later */
        reg = MEMORY_READ(32, CPU(DMAOR));
        MEMORY_WRITE(32, CPU(DMAOR), reg & ~0x0000000E);

        /* Immediately disable channel 0 (clear DE) */
        reg = MEMORY_READ(32, CPU(CHCR0));
        MEMORY_WRITE(32, CPU(CHCR0), reg & ~0x00000001);

        /* Immediately disable channel 1 (clear DE) */
        reg = MEMORY_READ(32, CPU(CHCR1));
        MEMORY_WRITE(32, CPU(CHCR1), reg & ~0x00000001);
}
void cpu_frt_oca_set(uint16_t count, void (*ihr)(void)) { /* Disable interrupt */ MEMORY_WRITE_AND(8, CPU(TIER), ~0x08); MEMORY_WRITE_AND(8, CPU(FTCSR), ~0x08); /* Select OCRA register and select output compare A match */ MEMORY_WRITE_AND(8, CPU(TOCR), ~0x12); MEMORY_WRITE(8, CPU(OCRAH), 0); MEMORY_WRITE(8, CPU(OCRAL), 0); _frt_oc_ihr_table[FRT_IHR_INDEX_OCAI] = _default_ihr; if ((count > 0) && (ihr != NULL)) { MEMORY_WRITE_AND(8, CPU(TOCR), ~0x10); MEMORY_WRITE(8, CPU(OCRAH), (uint8_t)(count >> 8)); MEMORY_WRITE(8, CPU(OCRAL), (uint8_t)(count & 0xFF)); /* Compare on match A */ MEMORY_WRITE_OR(8, CPU(TOCR), 0x02); MEMORY_WRITE_OR(8, CPU(TIER), 0x08); _frt_oc_ihr_table[FRT_IHR_INDEX_OCAI] = ihr; }
/*
 * SCU system manager interrupt handler for the SMPC "INTBACK" command.
 *
 * Runs inside a critical section, so it only copies the SMPC output
 * registers into a buffer and answers the SMPC with CONTINUE/BREAK;
 * parsing of the buffered data happens later, outside the handler.
 */
static void handler_system_manager(void)
{
        /* Running offset into the OREG buffer; persists across the
         * multiple interrupts a single INTBACK request generates */
        static uint32_t offset = 0;

        /* We don't have much time in the critical section. Just
         * buffer the registers */
        uint32_t oreg;
        for (oreg = 0; oreg < SMPC_OREGS; oreg++, offset++) {
                OREG_SET(offset, MEMORY_READ(8, OREG(oreg)));
        }

        uint8_t sr;
        sr = MEMORY_READ(8, SMPC(SR));

        /* NOTE(review): bit 7 of SR appears to indicate valid status
         * data; NPE clear means no more peripheral data remains --
         * confirm against the SMPC manual */
        if ((sr & 0x80) == 0x80) {
                if ((sr & NPE) == 0x00) {
                        /* Mark that SMPC status and peripheral data
                         * collection is complete */
                        _collection_complete = true;
                        /* Reset for the next INTBACK request */
                        offset = 0;

                        /* Issue a "BREAK" for the "INTBACK" command */
                        MEMORY_WRITE(8, IREG(0), BR);
                        return;
                }
        }

        /* More data pending: issue a "CONTINUE" for the "INTBACK" command */
        MEMORY_WRITE(8, IREG(0), CONT);
}
/*
 * Public Functions
 */

/*
 * One-time demo setup: configure VDP2 color RAM and color calculation
 * on NBG0/NBG2, DMA the cell data and palettes into VRAM, then
 * initialize the scroll screens and the global color-calc ratios.
 */
void init(void)
{
        /* We want to be in VBLANK-IN (retrace) */
        vdp2_tvmd_display_clear();

        /* Set Color mode to mode 0 (1KWord Color RAM), 2 banks */
        MEMORY_WRITE(16, VDP2(RAMCTL), 0x1300);

        /* Enable color calculation on NBG0 and NBG2 only */
        MEMORY_WRITE(16, VDP2(CCCTL), 0);
        MEMORY_WRITE(16, VDP2(CCCTL), 0x5 /* (1 << 2) | (1 << 0)*/);

        /* Initial color-calculation ratios for NBG0 (CCRNA) and
         * NBG2 (CCRNB); mirrored in g_cc_NBG0/g_cc_NBG2 below */
        MEMORY_WRITE(16, VDP2(CCRNA), 0x0);
        MEMORY_WRITE(16, VDP2(CCRNB), 0x1F);

        /* DMA indirect list: 4 entries of 3 words (48 bytes).
         * NOTE(review): the original comment claimed 6*4*3=72 bytes;
         * either way the list exceeds 24 bytes, hence the 64-byte
         * alignment requirement -- confirm against the SCU DMA docs. */
        uint32_t dma_tbl[] __attribute__((aligned(64))) = {
                (uint32_t)sizeof(PLANE0_CD), (uint32_t)_nbg0_cell_data,
                    (uint32_t)PLANE0_CD,
                (uint32_t)sizeof(PLANE0_CP), (uint32_t)_nbg0_color_palette,
                    (uint32_t)PLANE0_CP,
                (uint32_t)sizeof(PLANE2_CD), (uint32_t)_nbg2_cell_data,
                    (uint32_t)PLANE2_CD,
                (uint32_t)sizeof(PLANE2_CP), (uint32_t)_nbg2_color_palette,
                    (uint32_t)PLANE2_CP
        };

        scu_dma_listcpy(dma_tbl, 4*3);

        /* Busy-wait until the list transfer completes; dma_tbl is a
         * stack array, so it must not outlive this wait */
        while(scu_dma_get_status(SCU_DMA_ALL_CH) == SCU_DMA_STATUS_WAIT);

        /* Set all other stuff: scroll screens and VRAM access cycles */
        init_scrollscreen_nbg0();
        init_scrollscreen_nbg2();
        set_VRAM_access();

        g_cc_NBG0 = 0x0;
        g_cc_NBG2 = 0x1F;
}
/*
 * Initialize the VDP1 to a sane state: reset the control registers,
 * program the default erase window, terminate every command table so
 * nothing is drawn, then restart command processing.
 */
void vdp1_init(void)
{
        /* Check that the VRAM partitioning exactly covers all of VRAM */
        STATIC_ASSERT((VDP1_CMDT_MEMORY_SIZE +
            VDP1_GST_MEMORY_SIZE +
            VDP1_TEXURE_MEMORY_SIZE +
            VDP1_CLUT_MEMORY_SIZE) == VDP1_VRAM_SIZE);

        /* Initialize the processor to sane values */
        MEMORY_WRITE(16, VDP1(TVMR), 0x0000);
        MEMORY_WRITE(16, VDP1(ENDR), 0x0000);
        MEMORY_WRITE(16, VDP1(FBCR), 0x0000);
        MEMORY_WRITE(16, VDP1(PTMR), 0x0000);

        /* Erase color black; erase window lower-right at (320, 223)
         * with X stored in units of 8 pixels */
        MEMORY_WRITE(16, VDP1(EWDR), 0x0000);
        MEMORY_WRITE(16, VDP1(EWLR), 0x0000);
        MEMORY_WRITE(16, VDP1(EWRR), (uint16_t)(((320 / 8) << 9) | (223)));

        vdp2_tvmd_vblank_in_wait();

        /* Stop processing command tables: set the END bit (0x8000) in
         * every table's control word */
        uint32_t cmdt_idx;
        for (cmdt_idx = 0; cmdt_idx < VDP1_CMDT_COUNT_MAX; cmdt_idx++) {
                struct vdp1_cmdt *cmdt;

                cmdt = (struct vdp1_cmdt *)CMD_TABLE(cmdt_idx, 0);
                cmdt->cmd_ctrl = 0x8000;
        }

        vdp1_cmdt_list_init();

        /* PTM = 0b10: presumably "start drawing at frame change" --
         * confirm against the VDP1 manual */
        MEMORY_WRITE(16, VDP1(PTMR), 0x0002);
}
/*
 * Write one byte into the address space of the given CPU.
 *
 * Temporarily switches the active memory context to `cpu`, performs
 * the write, and restores the previous context if it differed.
 */
void computer_writemem_byte(int cpu, int addr, int value)
{
        const int prev_cpu = cpu_getactivecpu();

        memory_set_context(cpu);
        MEMORY_WRITE(cpu, addr, value);

        /* Restore only when we actually switched contexts */
        if (prev_cpu != cpu) {
                memory_set_context(prev_cpu);
        }
}
/*
 * Disable the mosaic effect on all scroll screens.
 *
 * Clears the per-screen mosaic enable bits (bits 0-4) in the buffered
 * MZCTL shadow, then commits the shadow to the hardware register.
 */
void vdp2_scrn_mosaic_clear(void)
{
        const uint16_t mzctl = vdp2_state.buffered_regs.mzctl & 0xFFE0;

        vdp2_state.buffered_regs.mzctl = mzctl;

        /* Commit to hardware */
        MEMORY_WRITE(16, VDP2(MZCTL), mzctl);
}
/*
 * Select the rotation parameter mode.
 *
 * mode: value whose low bit replaces bit 0 of the RPMD shadow; the
 *       updated shadow is then written to the hardware register.
 */
void vdp2_scrn_rp_mode_set(enum scrn_rp_mode_type mode)
{
        const uint16_t rpmd = (vdp2_regs.rpmd & 0xFFFE) | mode;

        vdp2_regs.rpmd = rpmd;

        /* Commit to hardware */
        MEMORY_WRITE(16, VDP2(RPMD), rpmd);
}
/*
 * Initialize SMPC peripheral handling: allocate per-port peripheral
 * state, register the VBLANK-IN (INTBACK request) and VBLANK-OUT
 * (parse) handlers, and install the system manager interrupt handler.
 */
void smpc_peripheral_init(void)
{
        memb_init(&peripherals);

        smpc_peripheral_port_1.peripheral = peripheral_alloc();
        TAILQ_INIT(&smpc_peripheral_port_1.peripherals);

        smpc_peripheral_port_2.peripheral = peripheral_alloc();
        TAILQ_INIT(&smpc_peripheral_port_2.peripherals);

        /* Disable interrupts */
        cpu_intc_disable();

        /* Set both ports to "SMPC" control mode */
        MEMORY_WRITE(8, SMPC(EXLE1), 0x00);
        MEMORY_WRITE(8, SMPC(IOSEL1), 0x00);
        MEMORY_WRITE(8, SMPC(DDR1), 0x00);
        MEMORY_WRITE(8, SMPC(PDR1), 0x00);

        /* Send INTBACK at start of VBLANK-IN */
        irq_mux_t *vblank_in;
        vblank_in = vdp2_tvmd_vblank_in_irq_get();
        irq_mux_handle_add(vblank_in, irq_mux_vblank_in, NULL);

        /* Parse at start of VBLANK-OUT */
        irq_mux_t *vblank_out;
        vblank_out = vdp2_tvmd_vblank_out_irq_get();
        irq_mux_handle_add(vblank_out, irq_mux_vblank_out, NULL);

        /* Mask the system manager interrupt, install its handler,
         * then unmask it */
        uint32_t mask;
        mask = IC_MASK_SYSTEM_MANAGER;
        scu_ic_mask_chg(IC_MASK_ALL, mask);
        scu_ic_interrupt_set(IC_INTERRUPT_SYSTEM_MANAGER, &handler_system_manager);
        scu_ic_mask_chg(IC_MASK_ALL & ~mask, IC_MASK_NONE);

        /* Enable interrupts */
        cpu_intc_enable();
}
/*
 * Set the VDP1 frame buffer bit depth.
 *
 * bpp: 8 selects an 8-bit frame buffer (TVM0 = 1); any other value
 *      selects 16-bit (TVM0 = 0).
 *
 * TVMR is write-only, so the current TVM bits are recovered from the
 * read-only MODR mirror.
 *
 * Fix: the previous mask (0x0007) kept the old TVM0 bit, so once 8 BPP
 * was selected it could never be switched back to 16 BPP. Mask out
 * TVM0 (keep only TVM1/TVM2) before setting the new depth bit.
 */
void vdp1_fbcr_bpp_set(uint8_t bpp)
{
        uint16_t tvmr;

        tvmr = MEMORY_READ(16, VDP1(MODR));

        /* Preserve TVM1 (rotation) and TVM2 (HDTV); set TVM0 for 8 BPP */
        MEMORY_WRITE(16, VDP1(TVMR), (tvmr & 0x0006) | ((bpp == 8) ? 1 : 0));
}
/*
 * Set the VDP1 frame buffer interlace mode.
 *
 * mode: FBCR interlace mode value, written verbatim to FBCR.
 *
 * Only the NTSC and PAL TV modes support interlace, so the write is
 * skipped unless MODR reports TVM1 = TVM2 = 0.
 */
void vdp1_fbcr_interlace_set(enum fbcr_interlace_type mode)
{
        const uint16_t modr = MEMORY_READ(16, VDP1(MODR));

        /* Bail out unless the TV mode is NTSC/PAL */
        if ((modr & 0x0006) != 0x0000) {
                return;
        }

        MEMORY_WRITE(16, VDP1(FBCR), mode);
}
/*
 * Enable frame buffer rotation (set TVM1 in TVMR).
 *
 * TVMR is write-only; the current TVM bits are recovered from the
 * read-only MODR mirror so the other mode bits are preserved.
 */
void vdp1_fbcr_rotate_set(void)
{
        const uint16_t cur = MEMORY_READ(16, VDP1(MODR));

        /* Keep the existing TVM bits and raise TVM1 (rotation) */
        MEMORY_WRITE(16, VDP1(TVMR), (cur & 0x0007) | 0x0002);
}
/*
 * Turn the VDP2 display on by raising the DISP bit (bit 15) in TVMD.
 */
void vdp2_tvmd_display_set(void)
{
        const uint16_t tvmd = MEMORY_READ(16, VDP2(TVMD)) | 0x8000;

        MEMORY_WRITE(16, VDP2(TVMD), tvmd);
}
/*
 * Turn the VDP2 display off by clearing the DISP bit (bit 15) in the
 * TVMD shadow register, then committing it to hardware.
 */
void vdp2_tvmd_display_clear(void)
{
        /* Drop DISP in the cached copy */
        _state_vdp2()->regs.tvmd &= 0x7FFF;

        /* The DISP bit may only be changed during VBLANK */
        vdp2_tvmd_vblank_in_wait();

        MEMORY_WRITE(16, VDP2(TVMD), _state_vdp2()->regs.tvmd);
}
/*
 * Disable the SCU timers by clearing the timer enable bit (bit 0)
 * in the T1MD register.
 */
void scu_timer_all_disable(void)
{
        const uint32_t t1md = MEMORY_READ(32, SCU(T1MD)) & ~0x00000001;

        /* Commit to hardware */
        MEMORY_WRITE(32, SCU(T1MD), t1md);
}
/*
 * Turn the VDP2 display off by clearing the DISP bit (bit 15) of TVMD.
 *
 * The new value is computed up front; the actual write is deferred
 * until VBLANK-IN, since DISP may only be changed during VBLANK.
 */
void vdp2_tvmd_display_clear(void)
{
        const uint16_t tvmd = MEMORY_READ(16, VDP2(TVMD)) & 0x7FFF;

        /* Change the DISP bit during VBLANK */
        vdp2_tvmd_vblank_in_wait();

        MEMORY_WRITE(16, VDP2(TVMD), tvmd);
}
void cpu_dmac_channel_start(uint8_t ch) { uint32_t chcr0; uint32_t chcr1; uint32_t dmaor; cpu_dmac_channel_stop(); /* Read after stopping all DMA channels */ dmaor = MEMORY_READ(32, CPU(DMAOR)); switch (ch) { case CPU_DMAC_CHANNEL(0): chcr0 = MEMORY_READ(32, CPU(CHCR0)); chcr0 |= 0x00000201; /* DMA transfers enabled on all channels */ dmaor |= 0x00000001; /* Write to memory */ MEMORY_WRITE(32, CPU(CHCR0), chcr0); break; case CPU_DMAC_CHANNEL(1): chcr1 = MEMORY_READ(32, CPU(CHCR1)); chcr1 |= 0x00000201; /* Write to memory */ MEMORY_WRITE(32, CPU(CHCR1), chcr1); break; default: assert((ch == CPU_DMAC_CHANNEL(0)) || (ch == CPU_DMAC_CHANNEL(1))); /* NOTREACHED */ } /* Write to memory */ MEMORY_WRITE(32, CPU(DMAOR), dmaor); }
/*
 * Hide a scroll screen.
 *
 * scrn:     scroll screen index; its display-enable bit in BGON is cleared.
 * no_trans: when true, also clear the matching transparency-control bit
 *           (held 8 bits above the enable bit).
 */
void vdp2_scrn_display_clear(uint8_t scrn, bool no_trans)
{
        uint16_t bgon = vdp2_regs.bgon & ~(1 << scrn);

        if (no_trans) {
                bgon &= ~(1 << (scrn + 8));
        }

        vdp2_regs.bgon = bgon;

        /* Commit to hardware */
        MEMORY_WRITE(16, VDP2(BGON), bgon);
}
/*
 * Initialize the CPU free-running timer (FRT).
 *
 * clock_div: internal clock divisor selector; only the low 2 bits are
 *            used.
 *
 * Disables and clears all FRT interrupt sources, programs the ICI/OCI
 * and OVI vector numbers, sets the interrupt priority, selects the
 * clock divisor, installs the OCI/OVI handlers and resets the counter.
 */
void cpu_frt_init(uint8_t clock_div)
{
        /* Disable the FRT interrupt enables and clear pending flags */
        MEMORY_WRITE_AND(8, CPU(TIER), ~0x8E);
        MEMORY_WRITE_AND(8, CPU(FTCSR), ~0x8F);

        /* Vector numbers: ICI/OCI in VCRC, OVI in VCRD */
        MEMORY_WRITE(16, CPU(VCRC), (INTC_INTERRUPT_FRT_ICI << 8) | INTC_INTERRUPT_FRT_OCI);
        MEMORY_WRITE(16, CPU(VCRD), INTC_INTERRUPT_FRT_OVI << 8);

        /* Highest priority for FRT interrupts */
        cpu_frt_interrupt_priority_set(15);

        /* Set internal clock (divisor) */
        MEMORY_WRITE_AND(8, CPU(TCR), ~0x83);
        MEMORY_WRITE_OR(8, CPU(TCR), clock_div & 0x03);

        /* Clear any previously configured compare/overflow state */
        cpu_frt_oca_clear();
        cpu_frt_ocb_clear();
        cpu_frt_ovi_clear();

        cpu_intc_ihr_set(INTC_INTERRUPT_FRT_OCI, _frt_oci_handler);
        cpu_intc_ihr_set(INTC_INTERRUPT_FRT_OVI, _frt_ovi_handler);

        /* Start counting from zero */
        cpu_frt_count_set(0);
}
/*
 * Set the vertical scroll value of a scroll screen.
 *
 * scrn:   SCRN_NBG0..SCRN_NBG3 or SCRN_RBG1 (RBG1 shares the NBG0
 *         registers); any other value is silently ignored.
 * scroll: fixed-point scroll amount; screens NBG0/NBG1 use the
 *         fixed-point (integer + fractional) registers, NBG2/NBG3 the
 *         integer-only registers.
 *
 * Fixes: the SCRN_NBG2 case updated the cached state of NBG3
 * (copy-paste bug), corrupting NBG3's shadow and leaving NBG2's
 * stale; the debug assert also listed SCRN_RBG1 twice.
 */
void vdp2_scrn_scroll_y_set(uint8_t scrn, fix16_t scroll)
{
#ifdef DEBUG
        /* Check if the background passed is valid */
        assert((scrn == SCRN_NBG0) ||
               (scrn == SCRN_NBG1) ||
               (scrn == SCRN_NBG2) ||
               (scrn == SCRN_NBG3) ||
               (scrn == SCRN_RBG1));
#endif /* DEBUG */

        /* All screen scroll values must be identified as positive
         * values */
        uint16_t in;
        uint16_t dn;

        switch (scrn) {
        case SCRN_RBG1:
        case SCRN_NBG0:
                /* RBG1 shares NBG0's scroll registers */
                _set_fixed_point_scroll(&vdp2_state.nbg0.scroll.y, scroll,
                    &in, &dn);

                MEMORY_WRITE(16, VDP2(SCYIN0), in);
                MEMORY_WRITE(16, VDP2(SCYDN0), dn);
                break;
        case SCRN_NBG1:
                _set_fixed_point_scroll(&vdp2_state.nbg1.scroll.y, scroll,
                    &in, &dn);

                MEMORY_WRITE(16, VDP2(SCYIN1), in);
                MEMORY_WRITE(16, VDP2(SCYDN1), dn);
                break;
        case SCRN_NBG2:
                /* Was &vdp2_state.nbg3.scroll.y (copy-paste bug) */
                _set_integer_scroll(&vdp2_state.nbg2.scroll.y, scroll, &in);

                MEMORY_WRITE(16, VDP2(SCYN2), in);
                break;
        case SCRN_NBG3:
                _set_integer_scroll(&vdp2_state.nbg3.scroll.y, scroll, &in);

                MEMORY_WRITE(16, VDP2(SCYN3), in);
                break;
        default:
                return;
        }
}
void vdp1_fbcr_erase_coordinates_set(uint16_t x1, uint16_t y1, uint16_t x3, uint16_t y3, uint16_t color) { uint16_t bpp; uint16_t modr; /* Obtain the bit depth of the frame buffer. */ modr = MEMORY_READ(16, VDP1(MODR)); bpp = ((modr & 0x0001) == 0x0001) ? 4 : 3; /* Upper-left coordinates. */ MEMORY_WRITE(16, VDP1(EWLR), ((x1 >> bpp) << 9) | (y1 - 1)); /* Lower-right coordinates. */ MEMORY_WRITE(16, VDP1(EWLR), ((x3 >> bpp) << 9) | (y3 - 1)); MEMORY_WRITE(16, VDP1(EWDR), color); }
/*
 * Poll the digital pad and adjust the VDP2 color-calculation demo.
 *
 * D-pad up/down adjusts the NBG2 ratio, left/right the NBG0 ratio.
 * A/B/L/R toggle CCCTL configurations, X/Y set/clear bit 9 of CCCTL,
 * and Start aborts the program.
 *
 * NOTE(review): the face buttons read g_digital.released while the
 * d-pad reads g_digital.pressed -- presumably edge- vs. level-
 * triggered behavior; confirm against the peripheral driver.
 */
void read_digital_pad(void)
{
        if (g_digital.connected == 1) {
                joyUp = g_digital.pressed.button.up;
                joyDown = g_digital.pressed.button.down;
                joyRight = g_digital.pressed.button.right;
                joyLeft = g_digital.pressed.button.left;
                joyA = g_digital.released.button.a;
                joyB = g_digital.released.button.b;
                joyL = g_digital.released.button.l;
                joyR = g_digital.released.button.r;
                joyX = g_digital.released.button.x;
                joyY = g_digital.released.button.y;

                if (joyDown) {
                        /* Raise NBG2 ratio, clamped to 0x1F */
                        if(g_cc_NBG2 < 0x1F) g_cc_NBG2++;
                } else if (joyUp) {
                        /* Lower NBG2 ratio, clamped to 0 */
                        if(g_cc_NBG2 > 0x0) g_cc_NBG2--;
                } else if (joyRight) {
                        /* Lower NBG0 ratio, clamped to 0 */
                        if(g_cc_NBG0 > 0x0) g_cc_NBG0--;
                } else if (joyLeft) {
                        /* Raise NBG0 ratio, clamped to 0x1F */
                        if(g_cc_NBG0 < 0x1F) g_cc_NBG0++;
                } else if (joyA) {
                        /* Bits 15/14/12 on top of the base NBG0|NBG2 enable */
                        MEMORY_WRITE(16, VDP2(CCCTL), 0x0);
                        MEMORY_WRITE(16, VDP2(CCCTL), (1 << 15) | (1 << 14) | (1 << 12) | 0x5);
                } else if (joyB) {
                        /* Back to the plain NBG0|NBG2 enable */
                        MEMORY_WRITE(16, VDP2(CCCTL), 0x0);
                        MEMORY_WRITE(16, VDP2(CCCTL), 0x5);
                } else if (joyL) {
                        /* Bit 10 plus base enable; cycle the SFCCMD value */
                        MEMORY_WRITE(16, VDP2(CCCTL), 0x0);
                        MEMORY_WRITE(16, VDP2(CCCTL), (1 << 10) | 0x5);
                        g_ecc_NBG0 = (g_ecc_NBG0 + 1) & 0x3;
                        MEMORY_WRITE(16, VDP2(SFCCMD), g_ecc_NBG0);
                } else if (joyR) {
                        MEMORY_WRITE(16, VDP2(CCCTL), 0x0);
                        MEMORY_WRITE(16, VDP2(CCCTL), 0x5);
                } else if (joyX) {
                        /* Set bit 9 of CCCTL */
                        uint16_t reg = MEMORY_READ(16, VDP2(CCCTL));
                        MEMORY_WRITE(16, VDP2(CCCTL), reg | (1 << 9));
                } else if (joyY) {
                        /* Clear bit 9 of CCCTL */
                        uint16_t reg = MEMORY_READ(16, VDP2(CCCTL));
                        MEMORY_WRITE(16, VDP2(CCCTL), reg & (0xFDFF));
                }

                /* Exit on Start */
                if(g_digital.pressed.button.start) abort();

                /* Commit the (possibly updated) ratios every frame */
                MEMORY_WRITE(16, VDP2(CCRNA), g_cc_NBG0 & 0x1F);
                MEMORY_WRITE(16, VDP2(CCRNB), g_cc_NBG2 & 0x1F);
        }
}
/*
 * Activate DMA from the Master CPU.
 *
 * Keep in mind that:
 *  - DMA transfers do not begin until they are explicitly started
 *  - DMA transfer level 2 will block the CPU during operation of level 1
 *
 * SCU DMA is for transfers between different buses:
 *   Work RAM-H <-> A-bus
 *   Work RAM-H <-> B-bus
 *   A-bus <-> B-bus
 */

/*
 * Program (but do not start) an SCU DMA level.
 *
 * lvl:  DMA_LEVEL_0 (1 MiB max), DMA_LEVEL_1 or DMA_LEVEL_2 (4 KiB max).
 * mode: DMA_MODE_DIRECT or DMA_MODE_INDIRECT.
 * cfg:  source/destination/length (direct) or transfer table (indirect),
 *       plus address-add value, starting factor and update bits.
 *
 * The enable registers are written with 0, so the transfer stays off
 * until explicitly started.
 */
void scu_dma_cpu_level_set(enum dma_level lvl, enum dma_mode mode,
    struct dma_level_cfg *cfg)
{
        uint32_t dst;
        uint32_t src;
        size_t len;
        uint32_t add;

        /*
         * Panic if either the source or destination is within the VDP2
         * region.
         */
        if ((scu_dma_cpu_level_sanitize(cfg, mode)) < 0) {
                return;
        }

        switch (mode) {
        case DMA_MODE_DIRECT:
                /* The absolute address must not be cached. */
                dst = 0x20000000 | (uint32_t)cfg->mode.direct.dst;
                /* The absolute address must not be cached. */
                src = 0x20000000 | (uint32_t)cfg->mode.direct.src;
                len = cfg->mode.direct.len;
                break;
        case DMA_MODE_INDIRECT:
                /* Source and length come from the in-memory table */
                src = 0x00000000;
                /* The absolute address must not be cached. */
                dst = 0x20000000 | (uint32_t)cfg->mode.indirect.tbl;
                len = 0x00000000;
                break;
        default:
                return;
        }

        /* Address-add value: bit 8 plus log2 of the requested stride */
        add = 0x00000100 | (common_log2_down(cfg->add) & 0x7);

        switch (lvl) {
        case DMA_LEVEL_0:
                /* Highest priority */

                /* Level 0 is able to transfer 1MiB */
                assert(len < 0x100000);

                /* Cannot modify registers while in operation */
                MEMORY_WRITE(32, SCU(D0R), src);
                MEMORY_WRITE(32, SCU(D0W), dst);
                /* Read of transfer byte count in DMA transfer register
                 * prohibited */
                MEMORY_WRITE(32, SCU(D0C), len);
                MEMORY_WRITE(32, SCU(D0AD), add);
                /* Keep DMA level off (disable and keep off) */
                MEMORY_WRITE(32, SCU(D0EN), 0);
                MEMORY_WRITE(32, SCU(D0MD), (mode << 24) | cfg->starting_factor | cfg->update);
                return;
        case DMA_LEVEL_1:
                /* Level 1 is able transfer 4KiB */
                assert(len < 0x1000);

                /* Cannot modify registers while in operation */
                MEMORY_WRITE(32, SCU(D1R), src);
                MEMORY_WRITE(32, SCU(D1W), dst);
                /* Read of transfer byte count in DMA transfer register
                 * prohibited */
                MEMORY_WRITE(32, SCU(D1C), len);
                MEMORY_WRITE(32, SCU(D1AD), add);
                /* Keep DMA level off (disable and keep off) */
                MEMORY_WRITE(32, SCU(D1EN), 0x00000000);
                MEMORY_WRITE(32, SCU(D1MD), (mode << 24) | cfg->starting_factor | cfg->update);
                return;
        case DMA_LEVEL_2:
                /*
                 * KLUDGE
                 *
                 * An operation error may occur if DMA level 2 is
                 * activated during DMA level 1 activation.
                 *
                 * To prevent such operation errors, do not activate DMA
                 * level 2 during DMA level 1 operation.
                 */

                /* Level 2 is also limited to 4KiB transfers */
                assert(len < 0x1000);

                /* Spin until level 2 and level 1 are no longer
                 * activated.
                 *
                 * Level 2 cannot modify registers while in operation */
                MEMORY_WRITE(32, SCU(D2R), src);
                MEMORY_WRITE(32, SCU(D2W), dst);
                /* Read of transfer byte count in DMA transfer register
                 * prohibited */
                MEMORY_WRITE(32, SCU(D2C), len);
                MEMORY_WRITE(32, SCU(D2AD), add);
                /* Keep DMA level off (disable and keep off) */
                MEMORY_WRITE(32, SCU(D2EN), 0x00000000);
                MEMORY_WRITE(32, SCU(D2MD), (mode << 24) | cfg->starting_factor | cfg->update);
                /* fallthrough */
        default:
                return;
        }
}
/*
 * DMA completion callback: stop the counter and record a clean finish.
 *
 * NOTE(review): this writes the level 0 enable register (D0EN) even
 * though the function is named for level 2 -- presumably deliberate in
 * this test harness; confirm.
 */
static void scu_dma_level_2_end(void)
{
        state.st_status = ST_STATUS_END;
        g_counting = false;

        MEMORY_WRITE(32, SCU(D0EN), 0x0);
}
/*
 * DMA illegal-transfer callback: stop the counter and record the error.
 *
 * NOTE(review): writes D0EN (level 0) like scu_dma_level_2_end --
 * presumably deliberate in this test harness; confirm.
 */
static void scu_dma_illegal(void)
{
        state.st_status = ST_STATUS_ILLEGAL;
        g_counting = false;

        MEMORY_WRITE(32, SCU(D0EN), 0x0);
}
/*
 * Configure and (depending on the starting factor) start an SCU DMA
 * transfer on the given level, in direct or indirect mode, according
 * to the per-level test state.
 *
 * Fixes:
 *  - the indirect transfer table was a stack-local array; the SCU
 *    reads it when the transfer actually runs, which can be after this
 *    function returns (e.g. a VBLANK starting factor) -- it now lives
 *    in static storage;
 *  - `1 << 31` (signed shift into the sign bit, undefined behavior)
 *    replaced with the equivalent unsigned constant;
 *  - removed the dead commented-out table.
 */
static void scu_dma_level(int level __unused)
{
        struct dma_level_cfg cfg;

        /* Indirect-mode table: two 3-word entries (len, dst, src);
         * bit 31 of the last entry's source marks the end of the list.
         * Must outlive this function -- see note above. */
        static uint32_t tbl[] __attribute__((aligned(32))) = {
                0x1000 - 1, (uint32_t)VRAM_ADDR_4MBIT(0, 0x0),
                    (uint32_t)0x06040000,
                0x1000 - 1, (uint32_t)VRAM_ADDR_4MBIT(0, 0x1000),
                    (uint32_t)(0x80000000 | 0x06041000)
        };

        if (state.st_level[level].level_mode == DMA_MODE_DIRECT) {
                cfg.mode.direct.src = (void *)0x06040000; /* High work RAM */
                cfg.mode.direct.dst = (void *)VRAM_ADDR_4MBIT(0, 0x0); /* VDP2 */
                cfg.mode.direct.len = 0x1000 - 1;
        } else {
                /* NOTE(review): nelems says 6 but the table holds two
                 * 3-word entries; confirm the expected unit (words vs.
                 * entries) against scu_dma_cpu_level_set(). */
                cfg.mode.indirect.nelems = 6;
                cfg.mode.indirect.tbl = (void *)tbl;
        }

        /* Generic parameters */
        cfg.starting_factor = state.st_level[level].level_sf;
        cfg.add = 3;    /* sattech: must be 001 if update bits are set */
        cfg.update = 0; /* no read/write address update */

        g_dma_counter = 0;

        scu_dma_cpu_level_set(level, state.st_level[level].level_mode, &cfg);

        if (state.st_level[level].level_sf == DMA_MODE_START_FACTOR_ENABLE) {
                /* Only this starting factor requires an explicit start */
                scu_dma_cpu_level_start(level);
                g_counting = true;
        } else {
                /* Arm the level; the hardware starting factor fires it.
                 * NOTE(review): levels 0/1 return before g_counting is
                 * set, level 2 falls through and sets it -- preserved
                 * from the original; confirm whether intentional. */
                switch (level) {
                case DMA_LEVEL_0:
                        MEMORY_WRITE(32, SCU(D0EN), 0x00000100);
                        return;
                case DMA_LEVEL_1:
                        MEMORY_WRITE(32, SCU(D1EN), 0x00000100);
                        return;
                case DMA_LEVEL_2:
                        MEMORY_WRITE(32, SCU(D2EN), 0x00000100);
                }
                g_counting = true;
        }
}
/*
 * Load the SCU timer 1 set-data register (T1S) with the given value.
 */
void scu_timer_1_set(uint16_t set)
{
        MEMORY_WRITE(32, SCU(T1S), set);
}