/*
 * ath_cont_data - enter/leave TX99 continuous-data transmit mode.
 *
 * @sc:            softc for the device under test
 * @val:           mode selector: non-zero enters continuous TX (first call
 *                 builds the test frame); 0 leaves the mode and resets the
 *                 chip; 7 additionally pokes a HAL diag op on the last desc
 * @ath_draintxq:  callback to drain the hardware TX queues before starting
 * @ath_stoprecv:  callback to stop the receive side once TX99 is running
 *
 * Builds a single PN9-payload data frame, chains 20 TX descriptors that all
 * point at that one frame, lets the PA settle by transmitting them once,
 * then links the last descriptor back to the first so the hardware loops
 * forever. Locals are static so the exit path (val==0) can reuse ah/ic and
 * return the buffers captured in tmp_q on a later call.
 *
 * NOTE(review): diag codes passed to ath_hal_getdiagstate (19 with args
 * 10/9/0/7) are HAL-private TX99 opcodes — semantics not visible here.
 */
void ath_cont_data(struct ath_softc *sc ,int val,ath_callback ath_draintxq, ath_callback ath_stoprecv)
{
    /* static: state must survive between the enter (val!=0) and exit (val==0) calls */
    static struct sk_buff *skb = NULL;          /* the single looping test frame; NULL => not in TX99 mode */
    struct ieee80211_frame *hdr;
    static struct ieee80211com *ic;
    static struct ath_buf *bf,*prev,*first;
    static struct ath_desc *ds;
    static struct ath_hal *ah;
    static STAILQ_HEAD(tpc_buf,ath_buf) tmp_q;  /* buffers borrowed from sc_txbuf, returned on exit */
    static int is_inited=0;
    struct ath_txq *txq;
    const HAL_RATE_TABLE *rt;
    u_int8_t *p;
    u_int32_t flags, txrate, r,i;
    u_int16_t hdrlen, framelen, dmalen,delay=0;
/* NOTE(review): MIN is defined (and #undef'd below) but never used in this function */
#define MIN(a,b) ((a) < (b) ? (a) : (b))

    /* probe the HAL first: diag op 19/10 only exists with AH_PRIVATE_DIAG builds */
    if(ath_hal_getdiagstate(sc->sc_ah, 19,0,10,NULL,NULL) == AH_FALSE) {
        printk("HAL does not support TX99 mode \n");
        printk("compile HAL with AH_PRIVATE_DIAG turned on \n");
        return;
    }
    if(is_inited == 0) {
        STAILQ_INIT(&tmp_q);
        is_inited=1;
    }

    /* enter CONT_DATA mode (only if not already in it: skb==NULL) */
    if (val && skb==NULL) {
        skb = ath_alloc_skb(4096, 32);
        if (skb == NULL)
            goto out;

        /* build output packet: data frame to/from test_addr with PN9 payload */
        hdr = (struct ieee80211_frame *)skb_put(skb, sizeof(*hdr));
        IEEE80211_ADDR_COPY(&hdr->i_addr1, test_addr);
        IEEE80211_ADDR_COPY(&hdr->i_addr2, test_addr);
        IEEE80211_ADDR_COPY(&hdr->i_addr3, test_addr);
        hdr->i_dur[0] = 0x0;
        hdr->i_dur[1] = 0x0;
        hdr->i_seq[0] = 0x5a;       /* fixed marker sequence number */
        hdr->i_seq[1] = 0x5a;
        hdr->i_fc[0] = IEEE80211_FC0_TYPE_DATA;
        hdr->i_fc[1] = 0;
        hdrlen = sizeof(*hdr);

        /* append PN9 pseudo-noise data until at least 2000 payload bytes */
        for(r=0; r<2000; ) {
            p = skb_put(skb, sizeof(PN9Data));
            memcpy(p, PN9Data, sizeof(PN9Data));
            r += sizeof(PN9Data);
        }
        framelen = hdrlen + r + IEEE80211_CRC_LEN;

        ic = &sc->sc_ic;
        ah = sc->sc_ah;
        rt = sc->sc_currates;
        if (rt==NULL) {
            printk("no rate table\n");
            goto out;
        }
        txrate = rt->info[rt->rateCount-1].rateCode; /* default: send at highest rate */
        {
            /* override with the configured rate if it matches a table entry */
            int rix;
            if (sc->sc_txrx99.txrate==0)
                sc->sc_txrx99.txrate = 6000;   /* default 6 Mbps */
            for(rix=0; rix<rt->rateCount; rix++) {
                if (rt->info[rix].rateKbps==sc->sc_txrx99.txrate) {
                    txrate = rt->info[rix].rateCode;
                    printk("txrate set to %dKbps\n", sc->sc_txrx99.txrate);
                    break;
                }
            }
        }
        ath_draintxq(sc);
        prev=first=NULL;
        printk("txpower set to %d\n", sc->sc_txrx99.txpower);

        /* send 20 frames for the Power Amp to settle down.
         * All 20 descriptors reference the SAME skb, chained via ds_link. */
        for(i=0;i<20;++i) {
            ATH_TXBUF_LOCK_BH(sc);
            bf = STAILQ_FIRST(&sc->sc_txbuf);
            if (bf != NULL) {
                STAILQ_REMOVE_HEAD(&sc->sc_txbuf,bf_list);
            }
            ATH_TXBUF_UNLOCK_BH(sc);
            if (bf==NULL) {
                printk("no tx buf\n");
                goto out;
            }
            if(!i)
                first=bf;   /* remember head of the chain */
            framelen = skb->len + IEEE80211_CRC_LEN;
            dmalen = skb->len;
            txq = sc->sc_ac2q[WME_AC_VO];   /* use the voice (highest-priority) queue */
            /* NOTE(review): the same skb is DMA-mapped once per iteration with
             * no matching unmap anywhere in this function — confirm the bus
             * layer tolerates this (possible mapping leak on some platforms). */
            bf->bf_skbaddr = bus_map_single(sc->sc_bdev, skb->data, framelen, BUS_DMA_TODEVICE);
            bf->bf_skb = skb;
            bf->bf_node = 0;
            flags = HAL_TXDESC_CLRDMASK;
            ds = bf->bf_desc;
            if(prev)
                prev->bf_desc->ds_link = bf->bf_daddr;  /* link from prev desc */
            ds->ds_data = bf->bf_skbaddr;
            r = ath_hal_setuptxdesc(ah, ds, framelen, hdrlen,
                    HAL_PKT_TYPE_NORMAL,
                    sc->sc_txrx99.txpower,
                    txrate,                 /* tx rate */
                    1,                      /* max retries */
                    HAL_TXKEYIX_INVALID,    /* no WEP */
                    1,                      /* select Omni Antenna 0 */
                    flags,
                    0,                      /* rts/cts rate */
                    0                       /* rts/cts duration */
                    );
            if (r==AH_FALSE) {
                printk("fail setuptxdesc r(%d)\n", r);
                goto out;
            }
            r = ath_hal_filltxdesc(ah, ds, skb->len, AH_TRUE, AH_TRUE,ds);
            if (r==AH_FALSE) {
                printk("fail fill tx desc r(%d)\n", r);
                goto out;
            }
            /* fallback rate series all pinned to the same rate */
            ath_hal_setupxtxdesc(ah, ds
                    , txrate, 15    /* series 1 */
                    , txrate, 15    /* series 2 */
                    , txrate, 15    /* series 3 */
                    );
            /* insert the buffers in to tmp_q so they can be returned on exit
             * (head insertion => tmp_q ends up in reverse chain order) */
            STAILQ_INSERT_HEAD(&tmp_q,bf,bf_list);
            prev=bf;
        }

        ath_hal_intrset(ah, 0); /* disable interrupts */
        //sc->sc_imask = HAL_INT_RX | HAL_INT_TX
        //    | HAL_INT_RXEOL | HAL_INT_RXORN
        //    | HAL_INT_FATAL | HAL_INT_GLOBAL;
        sc->sc_imask = 0;
        //ath_hal_intrset(ah, sc->sc_imask);

        /* terminate the chain and transmit the 20 settle frames once */
        bf->bf_desc->ds_link = 0;
        r = ath_hal_puttxbuf(ah, txq->axq_qnum, first->bf_daddr);
        ath_hal_txstart(ah, txq->axq_qnum);

        /* busy-wait until the last descriptor completes (2 ms per poll) */
        while(ath_hal_txprocdesc(ah,bf->bf_desc) == HAL_EINPROGRESS) {
            udelay(2000);
            ++delay;
        }
        /* sleep for 20ms */
        udelay(20000);
        printk("took %d msec to transmit the 20 frames \n",2*delay);

        /* start TX99 mode */
        ath_stoprecv(sc);                       /* stop recv side */
        bf->bf_desc->ds_link = first->bf_daddr; /* link to self: hardware loops forever */
        ath_hal_getdiagstate(ah, 19,(void *) sc->sc_txrx99.prefetch,9,NULL,NULL);
        ath_hal_getdiagstate(ah, 19, (void *)txq->axq_qnum, val,NULL,NULL);
        r = ath_hal_puttxbuf(ah, txq->axq_qnum, first->bf_daddr);
        ath_hal_txstart(ah, txq->axq_qnum);
    }

    /* leave CONT_DATA mode, reset the chip */
    if (val==0 && skb) {
        int j=0;
        ath_hal_getdiagstate(ah, 19, 0, 0,NULL,NULL);   /* tell HAL to stop TX99 */
        /* insert the buffers back into txbuf list */
        ATH_TXBUF_LOCK_BH(sc);
        bf = STAILQ_FIRST(&tmp_q);
        while(bf) {
            bf->bf_skb=NULL;
            STAILQ_REMOVE_HEAD(&tmp_q,bf_list);
            STAILQ_INSERT_HEAD(&sc->sc_txbuf,bf,bf_list);
            bf = STAILQ_FIRST(&tmp_q);
            ++j;
        }
        ATH_TXBUF_UNLOCK_BH(sc);
        printk("inserted back %d buffers \n",j);
        ic->ic_reset(ic->ic_dev);
        /* NOTE(review): skb is dropped without an explicit free here —
         * presumably reclaimed via the reset path; verify, else this leaks
         * one 4KB skb per enter/exit cycle. */
        skb = NULL;
        bf = NULL;
    }

    /* val==7: extra HAL diag poke on the last descriptor while still in mode */
    if (val==7 && skb) {
        ath_hal_getdiagstate(ah, 19, ds, 7,NULL,NULL);
    }
    sc->sc_txrx99.tx99mode=val;
out:
    return;
#undef MIN
}
/*
 * pause traffic of a vap from a specified queue.
 * if vap is null all the traffic will be paused.
 *
 * Drains txq one MPDU at a time under ATH_TXQ_LOCK and redistributes each
 * buffer depending on the queue and owner:
 *   - UAPSD queue: frames are staged per-node; affected nodes are chained
 *     through an_temp_next and their staged frames prepended afterwards.
 *   - CAB (multicast) queue: frames are staged per-vap in
 *     vap_mcast_stage_q[] and prepended back to the vap mcast queue at the end.
 *   - other queues: AMPDUs go through ath_tx_mark_aggr_rifs_done; plain
 *     frames are pushed back onto the node's per-TID queue, or completed
 *     with an error if the node is temporary/being cleaned.
 * Frames not owned by the paused vap are accumulated on bf_stage.
 *
 * NOTE: this excerpt is truncated — the tail of the function (including the
 * code that re-queues bf_stage) lies beyond the visible source.
 */
static void
ath_tx_vap_pause_txq(struct ath_softc *sc, struct ath_txq *txq, struct ath_vap *avp)
{
    struct ath_buf *bf, *lastbf;
    ath_bufhead bf_head, bf_stage;              /* bf_head: current MPDU; bf_stage: frames NOT being paused */
    struct ath_node *an,*an_uapsd_head;
    ath_bufhead vap_mcast_stage_q[ATH_VAPSIZE]; /* temporary per vap staging queue for cabq traffic */
    struct ath_vap *mcast_vap_q[ATH_VAPSIZE];   /* vap owning each staging slot; NULL = slot free */
    u_int32_t i;
    struct ieee80211_frame *wh;

    if (txq == sc->sc_cabq) {
        for (i=0;i<ATH_VAPSIZE;++i) {
            TAILQ_INIT(&vap_mcast_stage_q[i]);
            mcast_vap_q[i] = NULL;
        }
    }
    an_uapsd_head=NULL;
    TAILQ_INIT(&bf_stage);
    /*
     * NB: this assumes output has been stopped and
     * we do not need to block ath_tx_tasklet
     */
    for (;;) {
        ATH_TXQ_LOCK(txq);
        if (sc->sc_enhanceddmasupport) {
            /* enhanced DMA: hardware queue is a FIFO ring indexed by head/tail */
            bf = TAILQ_FIRST(&txq->axq_fifo[txq->axq_tailindex]);
            if (bf == NULL) {
                if (txq->axq_headindex != txq->axq_tailindex)
                    printk("ath_tx_draintxq: ERR head %d tail %d\n",
                           txq->axq_headindex, txq->axq_tailindex);
                txq->axq_headindex = 0;
                txq->axq_tailindex = 0;
                ATH_TXQ_UNLOCK(txq);
                break;  /* ring empty: done */
            }
        } else {
            bf = TAILQ_FIRST(&txq->axq_q);
            if (bf == NULL) {
                txq->axq_link = NULL;
                txq->axq_linkbuf = NULL;
                ATH_TXQ_UNLOCK(txq);
                break;  /* queue empty: done */
            }
            if (bf->bf_status & ATH_BUFSTATUS_STALE) {
                /* already-completed holding buffer: return it to the free pool */
                ATH_TXQ_REMOVE_STALE_HEAD(txq, bf, bf_list);
                ATH_TXQ_UNLOCK(txq);
#ifdef ATH_SUPPORT_UAPSD
                if (bf->bf_qosnulleosp) {
                    /* QoS-NULL EOSP buffers have their own completion path */
                    ath_tx_uapsdqnulbf_complete(sc, bf, false);
                } else
#endif
                {
                    ATH_TXBUF_LOCK(sc);
                    sc->sc_txbuf_free++;
#if ATH_TX_BUF_FLOW_CNTL
                    if(bf) {
                        txq->axq_num_buf_used--;
                    }
#endif
                    TAILQ_INSERT_TAIL(&sc->sc_txbuf, bf, bf_list);
#if TRACE_TX_LEAK
                    TAILQ_REMOVE(&sc->sc_tx_trace_head,bf,bf_tx_trace_list);
#endif //TRACE_TX_LEAK
                    ATH_TXBUF_UNLOCK(sc);
#if ATH_SUPPORT_FLOWMAC_MODULE
                    if (sc->sc_osnetif_flowcntrl) {
                        ath_netif_wake_queue(sc);
                    }
#endif
                }
                continue;
            }
        }
        lastbf = bf->bf_lastbf;
        TAILQ_INIT(&bf_head);
        /* remove ath_buf's of the same mpdu from txq */
        if (sc->sc_enhanceddmasupport) {
            if (txq == sc->sc_cabq || txq == sc->sc_uapsdq) {
                ATH_EDMA_MCASTQ_MOVE_HEAD_UNTIL(txq, &bf_head, lastbf, bf_list);
            } else {
                ATH_EDMA_TXQ_MOVE_HEAD_UNTIL(txq, &bf_head, lastbf, bf_list);
            }
        } else {
            ATH_TXQ_MOVE_HEAD_UNTIL(txq, &bf_head, lastbf, bf_list);
        }
        txq->axq_totalqueued --;
        if (bf->bf_isaggr) {
            txq->axq_aggr_depth--;
        }
#if ATH_SUPPORT_CFEND
        if (txq == sc->sc_cfendq) {
            /* process CF End packet */
            if (bf->bf_state.bfs_iscfend) {
                ath_tx_cfend_complete (sc, bf, &bf_head);
                ATH_TXQ_UNLOCK(txq);
                continue;   /* process rest of the buffers */
            }
        }
#endif
        ATH_TXQ_UNLOCK(txq);
#ifdef AR_DEBUG
        if (!sc->sc_enhanceddmasupport && CHK_SC_DEBUG(sc, ATH_DEBUG_RESET))
            /* Legacy only as the enhanced DMA txprocdesc()
             * will move the tx status ring tail pointer. */
            ath_printtxbuf(bf, ath_hal_txprocdesc(sc->sc_ah, bf->bf_desc) == HAL_OK);
#endif /* AR_DEBUG */
        an = bf->bf_node;
        /*
         * if the node belongs to the vap being paused (or) if the request
         * is to pause all vaps (avp is NULL)
         * then put it back in to the nodes queue.
         */
        if (!avp || (avp && an->an_avp == avp) ) {
#ifdef ATH_SUPPORT_UAPSD
            if (txq == sc->sc_uapsdq) {
                /*
                 * if the node is not on the UAPSD node list then put it on the list.
                 * always put it on the head of the list.
                 */
                if (!an->an_temp_next && (an != an_uapsd_head)) {
                    if(an_uapsd_head){
                        an->an_temp_next = an_uapsd_head;
                    }
                    an_uapsd_head = an;
                }
                if (TAILQ_FIRST(&bf_head) == NULL ) {
                    DPRINTF(sc, ATH_DEBUG_ANY,"#####%s : %d bf_head is empty \n",__func__, __LINE__);
                } else {
                    ath_tx_stage_queue_uapsd(sc,an, &bf_head);
                }
                continue;
            }
#endif
            if (txq == sc->sc_cabq) {
                ath_bufhead *mcast_stage_q = NULL;
                /*
                 * get the mcast staging queue for this vap and
                 * add the frame to the mcast staging queue.
                 *
                 * NOTE(review): when avp is NULL (pause-all), the first slot
                 * matches (mcast_vap_q[0] == NULL == avp) and remains NULL;
                 * the prepend phase below dereferences mcast_vap_q[i] for
                 * non-empty slots — confirm callers never pause all vaps
                 * while cabq holds traffic (possible NULL dereference).
                 */
                for (i=0;i<ATH_VAPSIZE;++i) {
                    if (mcast_vap_q[i] == avp) {
                        mcast_stage_q = &vap_mcast_stage_q[i];
                    } else if (mcast_vap_q[i] == NULL) {
                        /* claim a free slot for this vap */
                        mcast_stage_q = &vap_mcast_stage_q[i];
                        mcast_vap_q[i] = avp;
                    }
                    if (mcast_stage_q ) {
                        break;
                    }
                }
                if (mcast_stage_q == NULL) {
                    DPRINTF(sc, ATH_DEBUG_ANY, "%s: mcat_stage_q is NULL \n", __func__);
                    continue;
                }
                TAILQ_CONCAT(mcast_stage_q, &bf_head, bf_list);
                continue;
            }
            if (bf->bf_isampdu) {
                if (!bf->bf_isaggr) {
                    __11nstats(sc,tx_unaggr_comperror);
                }
                ath_tx_mark_aggr_rifs_done(sc, txq, bf, &bf_head,
                        &((struct ath_desc *)(lastbf->bf_desc))->ds_txstat, 0);
            } else {
#ifdef ATH_SWRETRY
                if (sc->sc_enhanceddmasupport) {
                    /*
                     * Decrement of swr_num_eligible_frms for AMPDU is done
                     * above in ath_tx-complete_aggr_rifs.
                     */
                    if (!bf->bf_isampdu && bf->bf_isdata) {
                        struct ath_node *an = bf->bf_node;
                        if (an) {
                            struct ath_swretry_info *pInfo = &an->an_swretry_info[txq->axq_qnum];
                            ATH_NODE_SWRETRY_TXBUF_LOCK(an);
                            ASSERT(pInfo->swr_num_eligible_frms);
                            pInfo->swr_num_eligible_frms --;
                            ATH_NODE_SWRETRY_TXBUF_UNLOCK(an);
                        }
                    }
                }
#endif
                if (bf->bf_isbar) {
                    /* BAR frames are simply completed, not re-queued */
                    DPRINTF(sc, ATH_DEBUG_RESET, "*****%s: BAR frame \n", __func__);
#ifdef ATH_SUPPORT_TxBF
                    ath_tx_complete_buf(sc, bf, &bf_head, 0, 0, 0);
#else
                    ath_tx_complete_buf(sc, bf, &bf_head, 0);
#endif
                } else {
                    /*
                     * Non Aggregates, put them at the head of the tid queue (if node is still avail,)
                     */
                    atomic_inc(&an->an_active_tx_cnt);  /* hold node across the re-queue */
                    /* Make sure that Node is still alive and not temporary node */
                    if ((an->an_flags & (ATH_NODE_TEMP | ATH_NODE_CLEAN)) == 0) {
                        struct ath_atx_tid *tid = ATH_AN_2_TID(an, bf->bf_tidno);
                        TAILQ_INSERTQ_HEAD(&tid->buf_q, &bf_head, bf_list);
                        atomic_dec(&an->an_active_tx_cnt);
                    } else {
                        if ((an->an_flags & ATH_NODE_TEMP) != 0) {
                            DPRINTF(sc, ATH_DEBUG_ANY, "%s: an=0x%p is Temp-node.\n", __func__, an);
                        }
                        if ((an->an_flags & ATH_NODE_CLEAN) != 0) {
                            DPRINTF(sc, ATH_DEBUG_ANY, "%s: an=0x%p is already CLEAN.\n", __func__, an);
                        }
                        atomic_dec(&an->an_active_tx_cnt);
                        // Free these buffers.
#ifdef ATH_SUPPORT_TxBF
                        ath_tx_complete_buf(sc, bf, &bf_head, 0, 0, 0);
#else
                        ath_tx_complete_buf(sc, bf, &bf_head, 0);
#endif
                    }
                }
            }
        } else {
            /*
             * if the frame does not need to be paused
             * then put it on to a staging queue.
             * (bf_stage is drained after this excerpt — not visible here.)
             */
            TAILQ_CONCAT(&bf_stage, &bf_head, bf_list);
        }
    }
#ifdef ATH_SUPPORT_UAPSD
    /* walk the chained UAPSD nodes and prepend their staged frames */
    while(an_uapsd_head) {
        an=an_uapsd_head;
        an_uapsd_head = an->an_temp_next;
        an->an_temp_next=NULL;
        ath_tx_prepend_uapsd_stage_queue(sc,an);
    }
#endif
    /* prepend the staging queue back to vap mcast queue */
    if (txq == sc->sc_cabq) {
        ath_bufhead *mcast_stage_q = NULL;
        for (i=0;i<ATH_VAPSIZE;++i) {
            mcast_stage_q = &vap_mcast_stage_q[i];
            /*
             * prepend only if the mcast staging queue is not empty
             */
            if (TAILQ_FIRST(mcast_stage_q)) {
                /*
                 * need to prepend the frames from staging queue to the vap mcast queue.
                 * do it in 2 steps.
                 * move the frames from the vap mcast queue to the
                 * end of the staging queue and move all the frames from staging queue
                 * to the vaps mcast queue.
                 */
                TAILQ_CONCAT(mcast_stage_q, &mcast_vap_q[i]->av_mcastq.axq_q, bf_list);
                mcast_vap_q[i]->av_mcastq.axq_depth=0;
                mcast_vap_q[i]->av_mcastq.axq_totalqueued = 0;
                mcast_vap_q[i]->av_mcastq.axq_linkbuf = 0;
                mcast_vap_q[i]->av_mcastq.axq_link = NULL;
                bf = TAILQ_FIRST(mcast_stage_q);
                while (bf) {
                    /*
                     * Remove a single ath_buf from the staging queue and add it to
                     * the mcast queue.
                     */
                    lastbf = bf->bf_lastbf;
                    wh = (struct ieee80211_frame *)wbuf_header(bf->bf_mpdu);
                    DPRINTF(sc, ATH_DEBUG_ANY, "%s: queue mcast frame back seq # %d \n",
                            __func__, le16toh(*(u_int16_t *)wh->i_seq) >> IEEE80211_SEQ_SEQ_SHIFT);
                    TAILQ_REMOVE_HEAD_UNTIL(mcast_stage_q, &bf_head, lastbf, bf_list);
                    if (ath_tx_mcastqaddbuf(sc, mcast_vap_q[i], &bf_head) != EOK) {
                        /* failed to queue the buf, complete it with an error */
#ifdef ATH_SUPPORT_TxBF
                        ath_tx_complete_buf(sc,bf,&bf_head,0,0, 0);
#else
                        ath_tx_complete_buf(sc,bf,&bf_head,0);
#endif
                    }
                    bf = TAILQ_FIRST(mcast_stage_q);
                }
            }
        }
    }
    /* NOTE: function body continues past this excerpt (closing brace and
     * bf_stage handling not shown in the visible source) */