/*
 * Run one MDIO transaction against the PCIe SERDES.
 *
 * @physmedia: SERDES device/block selector
 * @regaddr:   register address within that block
 * @write:     true to write *val, false to read into *val
 *
 * Returns 0 on success, 1 if the block select failed or the
 * transaction did not complete within the poll budget.
 */
static int pcie_mdioop(struct pcicore_info *pi, uint physmedia,
		       uint regaddr, bool write, uint *val)
{
	uint cmd;
	uint attempt;
	uint max_spins = 10;

	/* Enable mdio access to SERDES */
	bcma_write32(pi->core, PCIEREGOFFS(mdiocontrol),
		     MDIOCTL_PREAM_EN | MDIOCTL_DIVISOR_VAL);

	if (ai_get_buscorerev(pi->sih) >= 10) {
		/*
		 * New serdes is slower in rw, using two layers of
		 * reg address mapping; select the block first.
		 */
		if (!pcie_mdiosetblock(pi, physmedia))
			return 1;
		cmd = (MDIODATA_DEV_ADDR << MDIODATA_DEVADDR_SHF) |
		      (regaddr << MDIODATA_REGADDR_SHF);
		/* slower serdes gets a larger poll budget */
		max_spins *= 20;
	} else {
		cmd = (physmedia << MDIODATA_DEVADDR_SHF_OLD) |
		      (regaddr << MDIODATA_REGADDR_SHF_OLD);
	}

	if (write)
		cmd |= MDIODATA_START | MDIODATA_WRITE | MDIODATA_TA | *val;
	else
		cmd |= MDIODATA_START | MDIODATA_READ | MDIODATA_TA;

	bcma_write32(pi->core, PCIEREGOFFS(mdiodata), cmd);

	pr28829_delay();

	/* Poll until the transaction completes or the budget runs out */
	for (attempt = 0; attempt < max_spins; attempt++) {
		if (!(bcma_read32(pi->core, PCIEREGOFFS(mdiocontrol)) &
		      MDIOCTL_ACCESS_DONE)) {
			udelay(1000);
			continue;
		}

		if (!write) {
			pr28829_delay();
			*val = bcma_read32(pi->core,
					   PCIEREGOFFS(mdiodata)) &
			       MDIODATA_MASK;
		}
		/* Disable mdio access to SERDES */
		bcma_write32(pi->core, PCIEREGOFFS(mdiocontrol), 0);
		return 0;
	}

	/* Timed out. Disable mdio access to SERDES. */
	bcma_write32(pi->core, PCIEREGOFFS(mdiocontrol), 0);
	return 1;
}
/*
 * Perform a single read or write over the PCIe SERDES MDIO interface.
 *
 * @physmedia: SERDES device/block selector
 * @regaddr:   register address within the selected block
 * @write:     true writes *val to the register, false reads it into *val
 *
 * Returns 0 on success, 1 if block selection fails (buscorerev >= 10
 * path) or the transaction does not complete before the poll budget
 * is exhausted.
 *
 * NOTE(review): duplicate of the definition above in this chunk —
 * presumably an extraction artifact; confirm against the full file.
 */
static int pcie_mdioop(struct pcicore_info *pi, uint physmedia,
		       uint regaddr, bool write, uint *val)
{
	uint mdiodata;
	uint i = 0;
	uint pcie_serdes_spinwait = 10;

	/* enable mdio access to SERDES */
	bcma_write32(pi->core, PCIEREGOFFS(mdiocontrol),
		     MDIOCTL_PREAM_EN | MDIOCTL_DIVISOR_VAL);

	if (ai_get_buscorerev(pi->sih) >= 10) {
		/* newer serdes needs an explicit block select first
		 * (two-layer register address mapping) and is slower,
		 * so the poll budget is scaled up below
		 */
		if (!pcie_mdiosetblock(pi, physmedia))
			return 1;
		mdiodata = ((MDIODATA_DEV_ADDR << MDIODATA_DEVADDR_SHF) |
			    (regaddr << MDIODATA_REGADDR_SHF));
		pcie_serdes_spinwait *= 20;
	} else {
		/* legacy addressing: physmedia encoded directly */
		mdiodata = ((physmedia << MDIODATA_DEVADDR_SHF_OLD) |
			    (regaddr << MDIODATA_REGADDR_SHF_OLD));
	}

	/* build the command word; writes carry the data in-line */
	if (!write)
		mdiodata |= (MDIODATA_START | MDIODATA_READ | MDIODATA_TA);
	else
		mdiodata |= (MDIODATA_START | MDIODATA_WRITE | MDIODATA_TA |
			     *val);

	bcma_write32(pi->core, PCIEREGOFFS(mdiodata), mdiodata);

	pr28829_delay();

	/* retry till the transaction is complete */
	while (i < pcie_serdes_spinwait) {
		if (bcma_read32(pi->core, PCIEREGOFFS(mdiocontrol)) &
		    MDIOCTL_ACCESS_DONE) {
			if (!write) {
				pr28829_delay();
				/* fetch the read result */
				*val = (bcma_read32(pi->core,
						    PCIEREGOFFS(mdiodata)) &
					MDIODATA_MASK);
			}
			/* disable mdio access to SERDES */
			bcma_write32(pi->core, PCIEREGOFFS(mdiocontrol), 0);
			return 0;
		}
		udelay(1000);
		i++;
	}

	/* timed out; disable mdio access to SERDES */
	bcma_write32(pi->core, PCIEREGOFFS(mdiocontrol), 0);
	return 1;
}
/*
 * Extend (or restore) the ASPM L1 entry timer in the DLLP
 * power-management threshold register.  Only applies to PCIE
 * cores of revision 7 or later; a no-op otherwise.
 */
static void pcie_extendL1timer(struct pcicore_info *pi, bool extend)
{
	struct si_pub *sih = pi->sih;
	u32 thresh;

	if (ai_get_buscoretype(sih) != PCIE_CORE_ID ||
	    ai_get_buscorerev(sih) < 7)
		return;

	thresh = pcie_readreg(pi->core, PCIE_PCIEREGS, PCIE_DLLP_PMTHRESHREG);
	thresh = extend ? (thresh | PCIE_ASPMTIMER_EXTEND) :
			  (thresh & ~PCIE_ASPMTIMER_EXTEND);
	pcie_writereg(pi->core, PCIE_PCIEREGS, PCIE_DLLP_PMTHRESHREG, thresh);
	/* read back after the write; the result is intentionally unused */
	thresh = pcie_readreg(pi->core, PCIE_PCIEREGS, PCIE_DLLP_PMTHRESHREG);
}
/* centralized clkreq control policy */ static void pcie_clkreq_upd(struct pcicore_info *pi, uint state) { struct si_pub *sih = pi->sih; switch (state) { case SI_DOATTACH: if (PCIE_ASPM(sih)) pcie_clkreq(pi, 1, 0); break; case SI_PCIDOWN: /* turn on serdes PLL down */ if (ai_get_buscorerev(sih) == 6) { ai_cc_reg(sih, offsetof(struct chipcregs, chipcontrol_addr), ~0, 0); ai_cc_reg(sih, offsetof(struct chipcregs, chipcontrol_data), ~0x40, 0); } else if (pi->pcie_pr42767) {