/*
 * Handle the given receive status queue entry.
 *
 * Decodes one RSQE from the card's receive status queue and either
 * completes an idle/close event for the connection or assembles the
 * received cells into mbufs according to the VCC's AAL type.
 * NOTE(review): locking is not visible in this chunk -- presumably the
 * softc mutex is held by the interrupt handler; confirm against caller.
 */
void
patm_rx(struct patm_softc *sc, struct idt_rsqe *rsqe)
{
	struct mbuf *m;
	void *buf;			/* receive buffer behind the hardware handle */
	u_int stat, cid, w, cells, len, h;
	struct patm_vcc *vcc;
	struct atm_pseudohdr aph;	/* filled in when the PDU is passed up (below this view) */
	u_char *trail;			/* points into the AAL5 trailer of the last mbuf */

	/* RSQE fields are little-endian on the card */
	cid = le32toh(rsqe->cid);
	stat = le32toh(rsqe->stat);
	h = le32toh(rsqe->handle);
	cid = PATM_CID(sc, IDT_RSQE_VPI(cid), IDT_RSQE_VCI(cid));
	vcc = sc->vccs[cid];

	if (IDT_RSQE_TYPE(stat) == IDT_RSQE_IDLE) {
		/* connection has gone idle */
		if (stat & IDT_RSQE_BUF)
			/* a buffer is still attached to this entry -- recycle it */
			patm_rcv_free(sc, patm_rcv_handle(sc, h), h);
		w = rct_read(sc, cid, 0);
		if (w != 0 && !(w & IDT_RCT_OPEN))
			/* the hardware has closed the connection -- clear the RCT entry */
			rct_write(sc, cid, 0, 0);
		if (vcc != NULL && (vcc->vflags & PATM_VCC_RX_CLOSING)) {
			/* complete a pending receive-side close */
			patm_debug(sc, VCC, "%u.%u RX closed", vcc->vcc.vpi,
			    vcc->vcc.vci);
			vcc->vflags &= ~PATM_VCC_RX_CLOSING;
			if (vcc->vcc.flags & ATMIO_FLAG_ASYNC) {
				patm_rx_vcc_closed(sc, vcc);
				if (!(vcc->vflags & PATM_VCC_OPEN))
					/* TX side is gone too -- the VCC is fully closed */
					patm_vcc_closed(sc, vcc);
			} else
				/* wake a thread sleeping in a synchronous close */
				cv_signal(&sc->vcc_cv);
		}
		return;
	}

	/* map the hardware buffer handle back to our buffer pointer */
	buf = patm_rcv_handle(sc, h);

	if (vcc == NULL || (vcc->vflags & PATM_VCC_RX_OPEN) == 0) {
		/* no open receiver on this channel -- drop the buffer */
		patm_rcv_free(sc, buf, h);
		return;
	}

	cells = IDT_RSQE_CNT(stat);
	KASSERT(cells > 0, ("zero cell count"));

	if (vcc->vcc.aal == ATMIO_AAL_0) {
		/* deliver this packet as it is */
		if ((m = patm_rcv_mbuf(sc, buf, h, 1)) == NULL)
			return;
		/* raw cells: 48 payload bytes per cell */
		m->m_len = cells * 48;
		m->m_pkthdr.len = m->m_len;
		m->m_pkthdr.rcvif = sc->ifp;

	} else if (vcc->vcc.aal == ATMIO_AAL_34) {
		/* XXX AAL3/4 not implemented -- drop the buffer */
		patm_rcv_free(sc, buf, h);
		return;

	} else if (vcc->vcc.aal == ATMIO_AAL_5) {
		if (stat & IDT_RSQE_CRC) {
			/*
			 * CRC error: count it and discard any partially
			 * assembled PDU for this VCC.
			 */
			if_inc_counter(sc->ifp, IFCOUNTER_IERRORS, 1);
			if (vcc->chain != NULL) {
				m_freem(vcc->chain);
				vcc->chain = vcc->last = NULL;
			}
			return;
		}

		/* append to current chain */
		if (vcc->chain == NULL) {
			/* first buffer of a new PDU: becomes the pkthdr mbuf */
			if ((m = patm_rcv_mbuf(sc, buf, h, 1)) == NULL)
				return;
			m->m_len = cells * 48;
			m->m_pkthdr.len = m->m_len;
			m->m_pkthdr.rcvif = sc->ifp;
			vcc->chain = vcc->last = m;
		} else {
			/* continuation buffer: link it to the tail of the chain */
			if ((m = patm_rcv_mbuf(sc, buf, h, 0)) == NULL)
				return;
			m->m_len = cells * 48;
			vcc->last->m_next = m;
			vcc->last = m;
			vcc->chain->m_pkthdr.len += m->m_len;
		}

		if (!(stat & IDT_RSQE_EPDU))
			/* not the end of the PDU yet -- wait for more buffers */
			return;

		/*
		 * End of PDU.  The last 6 bytes of the final cell hold the
		 * 2-byte big-endian CPCS-PDU length followed by the 4-byte
		 * CRC (the full AAL5 trailer is 8 bytes).
		 */
		trail = mtod(m, u_char *) + m->m_len - 6;
		len = (trail[0] << 8) + trail[1];

		/* the received data must at least cover payload plus trailer */
		if ((u_int)vcc->chain->m_pkthdr.len < len + 8) {
			patm_printf(sc, "%s: bad aal5 lengths %u %u\n",
			    __func__, (u_int)m->m_pkthdr.len, len);
			m_freem(vcc->chain);
			vcc->chain = vcc->last = NULL;
			return;
		}
		/* trim the padding and trailer off the last mbuf */
		m->m_len -= vcc->chain->m_pkthdr.len - len;
		KASSERT(m->m_len >= 0, ("bad last mbuf"));

		/* take over the completed chain and reset reassembly state */
		m = vcc->chain;
		vcc->chain = vcc->last = NULL;
		m->m_pkthdr.len = len;

	} else
/*
 * Start the card.  This assumes the mutex to be held.
 *
 * Brings the SAR up from a cold state: clears SRAM, programs the
 * configuration register, initializes the receive and transmit status
 * queues, the timer schedule table, receive FIFO, raw-cell handle,
 * ABR schedule table, rate tables and the four free buffer queues,
 * creates the SCD for the UBR0 channel, then enables interrupts and
 * the Tx/Rx paths and re-loads all VCCs that are already configured.
 * On SCD allocation failure the card is reset and the function returns
 * without marking the interface as running.
 */
void
patm_initialize(struct patm_softc *sc)
{
	uint32_t cfg;
	u_int i;

	patm_debug(sc, ATTACH, "configuring...");

	/* clear SRAM (word-quad writes, hence the stride of 4) */
	for (i = 0; i < sc->mmap->sram * 1024; i += 4)
		patm_sram_write4(sc, i, 0, 0, 0, 0);
	patm_scd_init(sc);

	/* configuration register. Setting NOIDLE makes the timing wrong! */
	cfg = IDT_CFG_TXFIFO9 | IDT_CFG_RXQ512 | PATM_CFG_VPI |
	    /* IDT_CFG_NOIDLE | */ sc->mmap->rxtab;
	if (!(sc->flags & PATM_UNASS))
		cfg |= IDT_CFG_IDLECLP;
	patm_nor_write(sc, IDT_NOR_CFG, cfg);

	/*
	 * clean all the status queues and the Raw handle
	 * (NOTE(review): presumably sc->tsq is the base of one contiguous
	 * allocation of sq_size bytes covering TSQ, RSQ and raw handle --
	 * confirm against the allocation code)
	 */
	memset(sc->tsq, 0, sc->sq_size);

	/* initialize RSQ: base == tail, head 0, remember last entry index */
	patm_debug(sc, ATTACH, "RSQ %llx", (unsigned long long)sc->rsq_phy);
	patm_nor_write(sc, IDT_NOR_RSQB, sc->rsq_phy);
	patm_nor_write(sc, IDT_NOR_RSQT, sc->rsq_phy);
	patm_nor_write(sc, IDT_NOR_RSQH, 0);
	sc->rsq_last = PATM_RSQ_SIZE - 1;

	/* initialize TSTB (transmit schedule table base, word address) */
	patm_nor_write(sc, IDT_NOR_TSTB, sc->mmap->tst1base << 2);
	patm_tst_init(sc);

	/* initialize TSQ: mark every entry empty before enabling it */
	for (i = 0; i < IDT_TSQ_SIZE; i++)
		sc->tsq[i].stamp = htole32(IDT_TSQE_EMPTY);
	patm_nor_write(sc, IDT_NOR_TSQB, sc->tsq_phy);
	patm_nor_write(sc, IDT_NOR_TSQH, 0);
	patm_nor_write(sc, IDT_NOR_TSQT, 0);
	sc->tsq_next = sc->tsq;

	/* GP (big-endian mode deliberately disabled -- note the "&& 0") */
#if BYTE_ORDER == BIG_ENDIAN && 0
	patm_nor_write(sc, IDT_NOR_GP, IDT_GP_BIGE);
#else
	patm_nor_write(sc, IDT_NOR_GP, 0);
#endif

	/* VPM */
	patm_nor_write(sc, IDT_NOR_VPM, 0);

	/* RxFIFO */
	patm_nor_write(sc, IDT_NOR_RXFD,
	    IDT_RXFD(sc->mmap->rxfifo_addr, sc->mmap->rxfifo_code));
	patm_nor_write(sc, IDT_NOR_RXFT, 0);
	patm_nor_write(sc, IDT_NOR_RXFH, 0);

	/* RAWHND (raw cell handle DMA address) */
	patm_debug(sc, ATTACH, "RWH %llx", (unsigned long long)sc->rawhnd_phy);
	patm_nor_write(sc, IDT_NOR_RAWHND, sc->rawhnd_phy);

	/* ABRSTD: program and zero the ABR schedule table in SRAM */
	patm_nor_write(sc, IDT_NOR_ABRSTD,
	    IDT_ABRSTD(sc->mmap->abrstd_addr, sc->mmap->abrstd_code));
	for (i = 0; i < sc->mmap->abrstd_size; i++)
		patm_sram_write(sc, sc->mmap->abrstd_addr + i, 0);
	patm_nor_write(sc, IDT_NOR_ABRRQ, 0);
	patm_nor_write(sc, IDT_NOR_VBRRQ, 0);

	/* rate tables: the table contents depend on the PHY speed */
	if (sc->flags & PATM_25M) {
		for (i = 0; i < patm_rtables_size; i++)
			patm_sram_write(sc, sc->mmap->rtables + i,
			    patm_rtables25[i]);
	} else {
		for (i = 0; i < patm_rtables_size; i++)
			patm_sram_write(sc, sc->mmap->rtables + i,
			    patm_rtables155[i]);
	}
	patm_nor_write(sc, IDT_NOR_RTBL, sc->mmap->rtables << 2);

	/* Maximum deficit */
	patm_nor_write(sc, IDT_NOR_MXDFCT, 32 | IDT_MDFCT_LCI | IDT_MDFCT_LNI);

	/* Free buffer queues: reset all pointers, then program sizes/thresholds */
	patm_nor_write(sc, IDT_NOR_FBQP0, 0);
	patm_nor_write(sc, IDT_NOR_FBQP1, 0);
	patm_nor_write(sc, IDT_NOR_FBQP2, 0);
	patm_nor_write(sc, IDT_NOR_FBQP3, 0);
	patm_nor_write(sc, IDT_NOR_FBQWP0, 0);
	patm_nor_write(sc, IDT_NOR_FBQWP1, 0);
	patm_nor_write(sc, IDT_NOR_FBQWP2, 0);
	patm_nor_write(sc, IDT_NOR_FBQWP3, 0);
	patm_nor_write(sc, IDT_NOR_FBQS0,
	    (SMBUF_THRESHOLD << 28) | (SMBUF_NI_THRESH << 24) |
	    (SMBUF_CI_THRESH << 20) | SMBUF_CELLS);
	patm_nor_write(sc, IDT_NOR_FBQS1,
	    (LMBUF_THRESHOLD << 28) | (LMBUF_NI_THRESH << 24) |
	    (LMBUF_CI_THRESH << 20) | LMBUF_CELLS);
	patm_nor_write(sc, IDT_NOR_FBQS2,
	    (VMBUF_THRESHOLD << 28) | VMBUF_CELLS);
	patm_nor_write(sc, IDT_NOR_FBQS3, 0);

	/* make SCD0 for UBR0 */
	if ((sc->scd0 = patm_scd_alloc(sc)) == NULL) {
		patm_printf(sc, "cannot create UBR0 SCD\n");
		patm_reset(sc);
		return;
	}
	sc->scd0->q.ifq_maxlen = PATM_DLFT_MAXQ;
	patm_scd_setup(sc, sc->scd0);
	patm_tct_setup(sc, sc->scd0, NULL);

	patm_debug(sc, ATTACH, "go...");

	sc->utopia.flags &= ~UTP_FL_POLL_CARRIER;
	sc->ifp->if_drv_flags |= IFF_DRV_RUNNING;

	/* enable interrupts, Tx and Rx paths */
	cfg |= IDT_CFG_RXPTH | IDT_CFG_RXIIMM | IDT_CFG_RAWIE | IDT_CFG_RQFIE |
	    IDT_CFG_TIMOIE | IDT_CFG_FBIE | IDT_CFG_TXENB | IDT_CFG_TXINT |
	    IDT_CFG_TXUIE | IDT_CFG_TXSFI | IDT_CFG_PHYIE;
	patm_nor_write(sc, IDT_NOR_CFG, cfg);

	/* re-load every VCC that is already configured in software */
	for (i = 0; i < sc->mmap->max_conn; i++)
		if (sc->vccs[i] != NULL)
			patm_load_vc(sc, sc->vccs[i], 1);

	ATMEV_SEND_IFSTATE_CHANGED(IFP2IFATM(sc->ifp),
	    sc->utopia.carrier == UTP_CARR_OK);
}