static void xgpu_ai_mailbox_send_ack(struct amdgpu_device *adev)
{
	u32 reg;
	int timeout = AI_MAILBOX_TIMEDOUT;
	u32 mask = REG_FIELD_MASK(BIF_BX_PF0_MAILBOX_CONTROL, RCV_MSG_VALID);

	reg = RREG32_NO_KIQ(SOC15_REG_OFFSET(NBIO, 0,
					     mmBIF_BX_PF0_MAILBOX_CONTROL));
	reg = REG_SET_FIELD(reg, BIF_BX_PF0_MAILBOX_CONTROL,
			    RCV_MSG_ACK, 1);
	WREG32_NO_KIQ(SOC15_REG_OFFSET(NBIO, 0,
				       mmBIF_BX_PF0_MAILBOX_CONTROL), reg);

	/* Wait for RCV_MSG_VALID to be 0 */
	reg = RREG32_NO_KIQ(SOC15_REG_OFFSET(NBIO, 0,
					     mmBIF_BX_PF0_MAILBOX_CONTROL));
	while (reg & mask) {
		if (timeout <= 0) {
			pr_err("RCV_MSG_VALID is not cleared\n");
			break;
		}
		mdelay(1);
		timeout -= 1;

		reg = RREG32_NO_KIQ(SOC15_REG_OFFSET(NBIO, 0,
						     mmBIF_BX_PF0_MAILBOX_CONTROL));
	}
}
static int xgpu_vi_poll_ack(struct amdgpu_device *adev)
{
	int r = 0, timeout = VI_MAILBOX_TIMEDOUT;
	u32 mask = REG_FIELD_MASK(MAILBOX_CONTROL, TRN_MSG_ACK);
	u32 reg;

	reg = RREG32_NO_KIQ(mmMAILBOX_CONTROL);
	while (!(reg & mask)) {
		if (timeout <= 0) {
			pr_err("Doesn't get ack from pf.\n");
			r = -ETIME;
			break;
		}
		mdelay(5);
		timeout -= 5;

		reg = RREG32_NO_KIQ(mmMAILBOX_CONTROL);
	}

	return r;
}
static int xgpu_vi_mailbox_rcv_msg(struct amdgpu_device *adev,
				   enum idh_event event)
{
	u32 reg;
	u32 mask = REG_FIELD_MASK(MAILBOX_CONTROL, RCV_MSG_VALID);

	/* workaround: host driver doesn't set VALID for CMPL now */
	if (event != IDH_FLR_NOTIFICATION_CMPL) {
		reg = RREG32_NO_KIQ(mmMAILBOX_CONTROL);
		if (!(reg & mask))
			return -ENOENT;
	}

	reg = RREG32_NO_KIQ(mmMAILBOX_MSGBUF_RCV_DW0);
	if (reg != event)
		return -ENOENT;

	/* send ack to PF */
	xgpu_vi_mailbox_send_ack(adev);

	return 0;
}
static int xgpu_ai_mailbox_rcv_msg(struct amdgpu_device *adev,
				   enum idh_event event)
{
	u32 reg;
	u32 mask = REG_FIELD_MASK(BIF_BX_PF0_MAILBOX_CONTROL, RCV_MSG_VALID);

	if (event != IDH_FLR_NOTIFICATION_CMPL) {
		reg = RREG32_NO_KIQ(SOC15_REG_OFFSET(NBIO, 0,
						     mmBIF_BX_PF0_MAILBOX_CONTROL));
		if (!(reg & mask))
			return -ENOENT;
	}

	reg = RREG32_NO_KIQ(SOC15_REG_OFFSET(NBIO, 0,
					     mmBIF_BX_PF0_MAILBOX_MSGBUF_RCV_DW0));
	if (reg != event)
		return -ENOENT;

	xgpu_ai_mailbox_send_ack(adev);

	return 0;
}
static int xgpu_ai_poll_ack(struct amdgpu_device *adev)
{
	int r = 0, timeout = AI_MAILBOX_TIMEDOUT;
	u32 mask = REG_FIELD_MASK(BIF_BX_PF0_MAILBOX_CONTROL, TRN_MSG_ACK);
	u32 reg;

	reg = RREG32_NO_KIQ(SOC15_REG_OFFSET(NBIO, 0,
					     mmBIF_BX_PF0_MAILBOX_CONTROL));
	while (!(reg & mask)) {
		if (timeout <= 0) {
			pr_err("Doesn't get ack from pf.\n");
			r = -ETIME;
			break;
		}
		msleep(1);
		timeout -= 1;

		reg = RREG32_NO_KIQ(SOC15_REG_OFFSET(NBIO, 0,
						     mmBIF_BX_PF0_MAILBOX_CONTROL));
	}

	return r;
}
static void uvd_v6_0_set_uvd_dynamic_clock_mode(struct amdgpu_device *adev,
						bool swmode)
{
	u32 data, data1 = 0, data2;

	/* Always un-gate UVD REGS bit */
	data = RREG32(mmUVD_CGC_GATE);
	data &= ~(UVD_CGC_GATE__REGS_MASK);
	WREG32(mmUVD_CGC_GATE, data);

	data = RREG32(mmUVD_CGC_CTRL);
	data &= ~(UVD_CGC_CTRL__CLK_OFF_DELAY_MASK |
		  UVD_CGC_CTRL__CLK_GATE_DLY_TIMER_MASK);
	data |= UVD_CGC_CTRL__DYN_CLOCK_MODE_MASK |
		1 << REG_FIELD_SHIFT(UVD_CGC_CTRL, CLK_GATE_DLY_TIMER) |
		4 << REG_FIELD_SHIFT(UVD_CGC_CTRL, CLK_OFF_DELAY);

	data2 = RREG32(mmUVD_SUVD_CGC_CTRL);

	if (swmode) {
		data &= ~(UVD_CGC_CTRL__UDEC_RE_MODE_MASK |
			  UVD_CGC_CTRL__UDEC_CM_MODE_MASK |
			  UVD_CGC_CTRL__UDEC_IT_MODE_MASK |
			  UVD_CGC_CTRL__UDEC_DB_MODE_MASK |
			  UVD_CGC_CTRL__UDEC_MP_MODE_MASK |
			  UVD_CGC_CTRL__SYS_MODE_MASK |
			  UVD_CGC_CTRL__UDEC_MODE_MASK |
			  UVD_CGC_CTRL__MPEG2_MODE_MASK |
			  UVD_CGC_CTRL__REGS_MODE_MASK |
			  UVD_CGC_CTRL__RBC_MODE_MASK |
			  UVD_CGC_CTRL__LMI_MC_MODE_MASK |
			  UVD_CGC_CTRL__LMI_UMC_MODE_MASK |
			  UVD_CGC_CTRL__IDCT_MODE_MASK |
			  UVD_CGC_CTRL__MPRD_MODE_MASK |
			  UVD_CGC_CTRL__MPC_MODE_MASK |
			  UVD_CGC_CTRL__LBSI_MODE_MASK |
			  UVD_CGC_CTRL__LRBBM_MODE_MASK |
			  UVD_CGC_CTRL__WCB_MODE_MASK |
			  UVD_CGC_CTRL__VCPU_MODE_MASK |
			  UVD_CGC_CTRL__JPEG_MODE_MASK |
			  UVD_CGC_CTRL__SCPU_MODE_MASK);
		data1 |= UVD_CGC_CTRL2__DYN_OCLK_RAMP_EN_MASK |
			 UVD_CGC_CTRL2__DYN_RCLK_RAMP_EN_MASK;
		data1 &= ~UVD_CGC_CTRL2__GATER_DIV_ID_MASK;
		data1 |= 7 << REG_FIELD_SHIFT(UVD_CGC_CTRL2, GATER_DIV_ID);
		data2 &= ~(UVD_SUVD_CGC_CTRL__SRE_MODE_MASK |
			   UVD_SUVD_CGC_CTRL__SIT_MODE_MASK |
			   UVD_SUVD_CGC_CTRL__SMP_MODE_MASK |
			   UVD_SUVD_CGC_CTRL__SCM_MODE_MASK |
			   UVD_SUVD_CGC_CTRL__SDB_MODE_MASK);
	} else {
		data |= UVD_CGC_CTRL__UDEC_RE_MODE_MASK |
			UVD_CGC_CTRL__UDEC_CM_MODE_MASK |
			UVD_CGC_CTRL__UDEC_IT_MODE_MASK |
			UVD_CGC_CTRL__UDEC_DB_MODE_MASK |
			UVD_CGC_CTRL__UDEC_MP_MODE_MASK |
			UVD_CGC_CTRL__SYS_MODE_MASK |
			UVD_CGC_CTRL__UDEC_MODE_MASK |
			UVD_CGC_CTRL__MPEG2_MODE_MASK |
			UVD_CGC_CTRL__REGS_MODE_MASK |
			UVD_CGC_CTRL__RBC_MODE_MASK |
			UVD_CGC_CTRL__LMI_MC_MODE_MASK |
			UVD_CGC_CTRL__LMI_UMC_MODE_MASK |
			UVD_CGC_CTRL__IDCT_MODE_MASK |
			UVD_CGC_CTRL__MPRD_MODE_MASK |
			UVD_CGC_CTRL__MPC_MODE_MASK |
			UVD_CGC_CTRL__LBSI_MODE_MASK |
			UVD_CGC_CTRL__LRBBM_MODE_MASK |
			UVD_CGC_CTRL__WCB_MODE_MASK |
			UVD_CGC_CTRL__VCPU_MODE_MASK |
			UVD_CGC_CTRL__SCPU_MODE_MASK;
		data2 |= UVD_SUVD_CGC_CTRL__SRE_MODE_MASK |
			 UVD_SUVD_CGC_CTRL__SIT_MODE_MASK |
			 UVD_SUVD_CGC_CTRL__SMP_MODE_MASK |
			 UVD_SUVD_CGC_CTRL__SCM_MODE_MASK |
			 UVD_SUVD_CGC_CTRL__SDB_MODE_MASK;
	}
	WREG32(mmUVD_CGC_CTRL, data);
	WREG32(mmUVD_SUVD_CGC_CTRL, data2);

	data = RREG32_UVD_CTX(ixUVD_CGC_CTRL2);
	data &= ~(REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_OCLK_RAMP_EN) |
		  REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_RCLK_RAMP_EN) |
		  REG_FIELD_MASK(UVD_CGC_CTRL2, GATER_DIV_ID));
	data1 &= (REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_OCLK_RAMP_EN) |
		  REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_RCLK_RAMP_EN) |
		  REG_FIELD_MASK(UVD_CGC_CTRL2, GATER_DIV_ID));
	data |= data1;
	WREG32_UVD_CTX(ixUVD_CGC_CTRL2, data);
}