/*
 * setup_restore_regs_v0 - emit the v0 restore template for a register table.
 *
 * Walks @nr_regs entries of @regs and, for each one, writes the matching
 * restore preamble into the command buffer at @ptr via the
 * nvhost_3dctx_restore_* helpers, then reserves @count words for the
 * register payload itself.  Entries with an unrecognized type get no
 * preamble but still have their payload space reserved.
 *
 * Returns the advanced buffer pointer (one past the last reserved word).
 */
static u32 *setup_restore_regs_v0(u32 *ptr,
		const struct hwctx_reginfo *regs,
		unsigned int nr_regs)
{
	unsigned int i;

	for (i = 0; i < nr_regs; i++) {
		const struct hwctx_reginfo *info = &regs[i];
		u32 off = info->offset;
		u32 cnt = info->count;

		switch (info->type) {
		case HWCTX_REGINFO_DIRECT:
			nvhost_3dctx_restore_direct(ptr, off, cnt);
			ptr += RESTORE_DIRECT_SIZE;
			break;
		case HWCTX_REGINFO_INDIRECT_4X:
			/* 4X entries use an indirect-offset register two
			 * past the base instead of one. */
			nvhost_3dctx_restore_indirect(ptr, off, 0,
					off + 2, cnt);
			ptr += RESTORE_INDIRECT_SIZE;
			break;
		case HWCTX_REGINFO_INDIRECT:
			nvhost_3dctx_restore_indirect(ptr, off, 0,
					off + 1, cnt);
			ptr += RESTORE_INDIRECT_SIZE;
			break;
		}
		/* payload words follow every entry, whatever its type */
		ptr += cnt;
	}
	return ptr;
}
/*
 * save_indirect_v1 - emit the v1 save sequence for an indirectly
 * addressed register range.
 *
 * Fills the command buffer at @ptr with a fixed-length sequence:
 * ptr[0..1] select the 3D class and open a non-incrementing write of
 * RESTORE_INDIRECT_SIZE words into AR3D_DW_MEMORY_OUTPUT_DATA, into
 * which nvhost_3dctx_restore_indirect() places the restore-side
 * template (at ptr + 2).  The stores that follow the pointer bump
 * program the read-back: set the indirect offset register, configure
 * the host indirect-access window for GR3D, and stream @count words
 * through NV_CLASS_HOST_INDDATA.
 *
 * NOTE(review): this file also contains a second, non-__init definition
 * of save_indirect_v1 using the host1x_uclass_*() accessors; the two
 * cannot be compiled into one translation unit — confirm which revision
 * is intended to remain.
 *
 * __init: presumably the template is built once at driver init and
 * copied thereafter — TODO confirm against the caller.
 */
static void __init save_indirect_v1(u32 *ptr, u32 offset_reg, u32 offset,
		u32 data_reg, u32 count)
{
	ptr[0] = nvhost_opcode_setclass(NV_GRAPHICS_3D_CLASS_ID, 0, 0);
	ptr[1] = nvhost_opcode_nonincr(AR3D_DW_MEMORY_OUTPUT_DATA,
			RESTORE_INDIRECT_SIZE);
	nvhost_3dctx_restore_indirect(ptr + 2, offset_reg, offset,
			data_reg, count);
	ptr += RESTORE_INDIRECT_SIZE;
	/* indices 2..5 below are relative to the advanced pointer */
	ptr[2] = nvhost_opcode_imm(offset_reg, offset);
	ptr[3] = nvhost_opcode_setclass(NV_HOST1X_CLASS_ID,
			NV_CLASS_HOST_INDOFF, 1);
	ptr[4] = nvhost_class_host_indoff_reg_read(NV_HOST_MODULE_GR3D,
			data_reg, false);
	ptr[5] = nvhost_opcode_nonincr(NV_CLASS_HOST_INDDATA, count);
}
/*
 * save_indirect_v1 - emit the v1 save sequence for an indirectly
 * addressed register range (host1x_uclass_*() accessor variant).
 *
 * First streams the restore-side template through the 3D class
 * (AR3D_DW_MEMORY_OUTPUT_DATA), then appends the read-back sequence
 * RESTORE_INDIRECT_SIZE words later: program the indirect offset
 * register, open the host indirect-access window for GR3D, and pull
 * @count words through the indirect-data register.
 */
static void save_indirect_v1(u32 *ptr, u32 offset_reg, u32 offset,
		u32 data_reg, u32 count)
{
	/* restore-side template, carried in a nonincr burst */
	ptr[0] = nvhost_opcode_setclass(NV_GRAPHICS_3D_CLASS_ID, 0, 0);
	ptr[1] = nvhost_opcode_nonincr(AR3D_DW_MEMORY_OUTPUT_DATA,
			RESTORE_INDIRECT_SIZE);
	nvhost_3dctx_restore_indirect(ptr + 2, offset_reg, offset,
			data_reg, count);

	/* read-back sequence; indices kept relative to the original
	 * base instead of bumping the pointer mid-function */
	ptr[RESTORE_INDIRECT_SIZE + 2] =
			nvhost_opcode_imm(offset_reg, offset);
	ptr[RESTORE_INDIRECT_SIZE + 3] = nvhost_opcode_setclass(
			NV_HOST1X_CLASS_ID, host1x_uclass_indoff_r(), 1);
	ptr[RESTORE_INDIRECT_SIZE + 4] = nvhost_class_host_indoff_reg_read(
			host1x_uclass_indoff_indmodid_gr3d_v(),
			data_reg, false);
	ptr[RESTORE_INDIRECT_SIZE + 5] = nvhost_opcode_nonincr(
			host1x_uclass_inddata_r(), count);
}