/* I/O write handler for the NAND control registers.
 *
 * opaque - per-controller nand_dev_state
 * offset - register offset being written
 * value  - 32-bit value written by the guest
 *
 * Unknown register offsets, or a device index past nand_dev_count,
 * abort emulation.
 */
static void nand_dev_write(void *opaque, target_phys_addr_t offset, uint32_t value)
{
    nand_dev_state *s = (nand_dev_state *)opaque;

    switch (offset) {
    case NAND_DEV:
        /* Select the active NAND device; reject out-of-range indices. */
        s->dev = value;
        if (s->dev >= nand_dev_count) {
            cpu_abort(cpu_single_env, "nand_dev_write: Bad dev %x\n", value);
        }
        break;
    case NAND_ADDR_HIGH:
        s->addr_high = value;
        break;
    case NAND_ADDR_LOW:
        s->addr_low = value;
        break;
    case NAND_TRANSFER_SIZE:
        s->transfer_size = value;
        break;
    case NAND_DATA:
        s->data = value;
        break;
    case NAND_COMMAND:
        /* Execute the command; the status is latched for a later
         * NAND_RESULT read. */
        s->result = nand_dev_do_cmd(s, value);
        break;
    default:
        cpu_abort(cpu_single_env, "nand_dev_write: Bad offset %x\n", offset);
        break;
    }
}
/* I/O write handler for the NAND controller registers.
 *
 * opaque - per-controller nand_dev_controller_state
 * offset - register offset being written
 * value  - 32-bit value written by the guest
 *
 * For batch commands (NAND_CMD_*_BATCH) the command result is also
 * copied back into the guest-supplied batch_data block whose physical
 * address is formed from NAND_BATCH_ADDR_HIGH/LOW.
 *
 * Unknown register offsets, or a device index past nand_dev_count,
 * abort emulation.
 */
static void nand_dev_write(void *opaque, target_phys_addr_t offset, uint32_t value)
{
    nand_dev_controller_state *s = (nand_dev_controller_state *)opaque;

    switch (offset) {
    case NAND_DEV:
        /* Select the active NAND device; reject out-of-range indices. */
        s->dev = value;
        if (s->dev >= nand_dev_count) {
            cpu_abort(cpu_single_env, "nand_dev_write: Bad dev %x\n", value);
        }
        break;
    case NAND_ADDR_HIGH:
        s->addr_high = value;
        break;
    case NAND_ADDR_LOW:
        s->addr_low = value;
        break;
    case NAND_BATCH_ADDR_LOW:
        s->batch_addr_low = value;
        break;
    case NAND_BATCH_ADDR_HIGH:
        s->batch_addr_high = value;
        break;
    case NAND_TRANSFER_SIZE:
        s->transfer_size = value;
        break;
    case NAND_DATA:
        s->data = value;
        break;
    case NAND_COMMAND:
        s->result = nand_dev_do_cmd(s, value);
        if (value == NAND_CMD_WRITE_BATCH || value == NAND_CMD_READ_BATCH ||
            value == NAND_CMD_ERASE_BATCH) {
            /* Zero-initialize: the whole struct is copied into guest
             * memory below, and only .result is assigned here, so
             * without this the remaining fields would leak
             * uninitialized host stack bytes to the guest. */
            struct batch_data bd = { 0 };
            uint64_t bd_addr = ((uint64_t)s->batch_addr_high << 32) | s->batch_addr_low;
            bd.result = s->result;
            cpu_physical_memory_write(bd_addr, (void*)&bd, sizeof(struct batch_data));
        }
        break;
    default:
        cpu_abort(cpu_single_env, "nand_dev_write: Bad offset %x\n", offset);
        break;
    }
}