
Lines Matching defs:rxr

1924 struct ixl_rx_ring *rxr = qp->qp_rxr;
1926 ixl_wr(sc, I40E_PFINT_DYN_CTLN(rxr->rxr_qid),
1936 struct ixl_rx_ring *rxr = qp->qp_rxr;
1938 ixl_wr(sc, I40E_PFINT_DYN_CTLN(rxr->rxr_qid),
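
The two writes above enable and disable the per-queue interrupt through I40E_PFINT_DYN_CTLN. A minimal sketch of what those writes typically carry on this hardware family is below; the INTENA/CLEARPBA/ITR_INDX masks are the stock i40e register definitions, while IXL_NOITR stands in for whichever ITR index the driver actually programs (an assumption here).

    /* Hedged sketch: re-arm the MSI-X vector bound to this queue pair.
     * IXL_NOITR is a placeholder for the driver's chosen ITR index. */
    ixl_wr(sc, I40E_PFINT_DYN_CTLN(rxr->rxr_qid),
        I40E_PFINT_DYN_CTLN_INTENA_MASK |
        I40E_PFINT_DYN_CTLN_CLEARPBA_MASK |
        (IXL_NOITR << I40E_PFINT_DYN_CTLN_ITR_INDX_SHIFT));

    /* ...and the matching disable, which clears INTENA but leaves the ITR
     * index selection in place. */
    ixl_wr(sc, I40E_PFINT_DYN_CTLN(rxr->rxr_qid),
        (IXL_NOITR << I40E_PFINT_DYN_CTLN_ITR_INDX_SHIFT));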
1966 struct ixl_rx_ring *rxr;
1981 rxr = sc->sc_qps[i].qp_rxr;
1984 ixl_rxr_config(sc, rxr);
1992 rxr = sc->sc_qps[i].qp_rxr;
1999 ixl_wr(sc, rxr->rxr_tail, rxr->rxr_prod);
2002 mutex_enter(&rxr->rxr_lock);
2003 ixl_rxfill(sc, rxr);
2004 mutex_exit(&rxr->rxr_lock);
2009 if (ixl_rxr_enabled(sc, rxr) != 0)
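
Reassembled from the matches above, the RX half of the bring-up path looks roughly like the sketch below. The queue-pair count field and the error label are assumptions; everything else appears in the matched lines.

    /* Hedged sketch of RX bring-up per queue pair: program the ring
     * context, publish the initial producer index, pre-fill the ring with
     * mbufs, then wait for the hardware to report the queue enabled. */
    unsigned int i;
    struct ixl_rx_ring *rxr;

    for (i = 0; i < sc->sc_nqueue_pairs; i++) {    /* count field assumed */
        rxr = sc->sc_qps[i].qp_rxr;

        ixl_rxr_config(sc, rxr);
        ixl_wr(sc, rxr->rxr_tail, rxr->rxr_prod);

        mutex_enter(&rxr->rxr_lock);
        ixl_rxfill(sc, rxr);
        mutex_exit(&rxr->rxr_lock);

        if (ixl_rxr_enabled(sc, rxr) != 0)
            goto down;                             /* error label assumed */
    }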
2179 struct ixl_rx_ring *rxr;
2191 rxr = sc->sc_qps[i].qp_rxr;
2206 rxr = sc->sc_qps[i].qp_rxr;
2214 mutex_enter(&rxr->rxr_lock);
2218 mutex_exit(&rxr->rxr_lock);
2227 rxr = sc->sc_qps[i].qp_rxr;
2236 mutex_enter(&rxr->rxr_lock);
2237 if (ixl_rxr_disabled(sc, rxr) != 0) {
2238 mutex_exit(&rxr->rxr_lock);
2241 mutex_exit(&rxr->rxr_lock);
2252 rxr = sc->sc_qps[i].qp_rxr;
2258 mutex_enter(&rxr->rxr_lock);
2259 ixl_rxr_unconfig(sc, rxr);
2260 mutex_exit(&rxr->rxr_lock);
2263 ixl_rxr_clean(sc, rxr);
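
The stop path runs the mirror image: each queue is polled until the hardware confirms it is disabled, then the ring context is torn down and its mbufs are freed. A condensed sketch, again with the count field and error label assumed:

    /* Hedged sketch of RX teardown per queue pair. */
    unsigned int i;
    struct ixl_rx_ring *rxr;

    for (i = 0; i < sc->sc_nqueue_pairs; i++) {    /* count field assumed */
        rxr = sc->sc_qps[i].qp_rxr;

        mutex_enter(&rxr->rxr_lock);
        if (ixl_rxr_disabled(sc, rxr) != 0) {
            mutex_exit(&rxr->rxr_lock);
            goto die;                              /* error label assumed */
        }
        mutex_exit(&rxr->rxr_lock);
    }

    for (i = 0; i < sc->sc_nqueue_pairs; i++) {
        rxr = sc->sc_qps[i].qp_rxr;

        mutex_enter(&rxr->rxr_lock);
        ixl_rxr_unconfig(sc, rxr);
        mutex_exit(&rxr->rxr_lock);

        ixl_rxr_clean(sc, rxr);
    }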
2939 struct ixl_rx_ring *rxr = NULL;
2943 rxr = kmem_zalloc(sizeof(*rxr), KM_SLEEP);
2947 if (ixl_dmamem_alloc(sc, &rxr->rxr_mem,
2963 rxr->rxr_cons = rxr->rxr_prod = 0;
2964 rxr->rxr_m_head = NULL;
2965 rxr->rxr_m_tail = &rxr->rxr_m_head;
2966 rxr->rxr_maps = maps;
2968 rxr->rxr_tail = I40E_QRX_TAIL(qid);
2969 rxr->rxr_qid = qid;
2970 mutex_init(&rxr->rxr_lock, MUTEX_DEFAULT, IPL_NET);
2972 return rxr;
2984 ixl_dmamem_free(sc, &rxr->rxr_mem);
2987 kmem_free(rxr, sizeof(*rxr));
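
Every rxr_* member the file touches is visible in the matches, so the ring structure can be reconstructed in outline. The member names below come from the listing; the types and their ordering are guesses.

    /* Hedged reconstruction of struct ixl_rx_ring. */
    struct ixl_rx_ring {
        kmutex_t             rxr_lock;      /* serializes rxfill/rxeof */

        struct ixl_rx_map   *rxr_maps;      /* per-slot mbuf + DMA map (type assumed) */
        struct ixl_dmamem    rxr_mem;       /* descriptor ring memory */

        struct mbuf         *rxr_m_head;    /* partially assembled packet */
        struct mbuf        **rxr_m_tail;

        unsigned int         rxr_prod;      /* next slot to refill */
        unsigned int         rxr_cons;      /* next slot to harvest */

        bus_size_t           rxr_tail;      /* I40E_QRX_TAIL(qid) register offset */
        unsigned int         rxr_qid;

        struct evcnt         rxr_mgethdr_failed;
        struct evcnt         rxr_mgetcl_failed;
        struct evcnt         rxr_mbuf_load_failed;
        struct evcnt         rxr_intr;
        struct evcnt         rxr_defer;
    };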
2993 ixl_rxr_clean(struct ixl_softc *sc, struct ixl_rx_ring *rxr)
2999 maps = rxr->rxr_maps;
3015 m_freem(rxr->rxr_m_head);
3016 rxr->rxr_m_head = NULL;
3017 rxr->rxr_m_tail = &rxr->rxr_m_head;
3019 rxr->rxr_prod = rxr->rxr_cons = 0;
3023 ixl_rxr_enabled(struct ixl_softc *sc, struct ixl_rx_ring *rxr)
3025 bus_size_t ena = I40E_QRX_ENA(rxr->rxr_qid);
3041 ixl_rxr_disabled(struct ixl_softc *sc, struct ixl_rx_ring *rxr)
3043 bus_size_t ena = I40E_QRX_ENA(rxr->rxr_qid);
3047 KASSERT(mutex_owned(&rxr->rxr_lock));
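
ixl_rxr_enabled() and ixl_rxr_disabled() both read I40E_QRX_ENA(qid) back and wait for the status bit to follow the request. A minimal sketch of the enabled check, assuming an ixl_rd() read helper paired with the ixl_wr() seen above and an arbitrary retry budget:

    /* Hedged sketch: after QENA_REQ has been set elsewhere, poll until the
     * hardware reflects it in QENA_STAT. */
    static int
    ixl_rxr_enabled(struct ixl_softc *sc, struct ixl_rx_ring *rxr)
    {
        bus_size_t ena = I40E_QRX_ENA(rxr->rxr_qid);
        uint32_t reg;
        int i;

        for (i = 0; i < 10; i++) {          /* retry budget assumed */
            reg = ixl_rd(sc, ena);          /* read helper assumed */
            if (ISSET(reg, I40E_QRX_ENA_QENA_STAT_MASK))
                return 0;
            DELAY(10000);                   /* 10 ms between polls, assumed */
        }

        return ETIMEDOUT;
    }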
3061 ixl_rxr_config(struct ixl_softc *sc, struct ixl_rx_ring *rxr)
3073 rxq.head = htole16(rxr->rxr_cons);
3074 rxq.base = htole64(IXL_DMA_DVA(&rxr->rxr_mem) / IXL_HMC_RXQ_BASE_UNIT);
3091 hmc = ixl_hmc_kva(sc, IXL_HMC_LAN_RX, rxr->rxr_qid);
3098 ixl_rxr_unconfig(struct ixl_softc *sc, struct ixl_rx_ring *rxr)
3102 hmc = ixl_hmc_kva(sc, IXL_HMC_LAN_RX, rxr->rxr_qid);
3104 rxr->rxr_cons = rxr->rxr_prod = 0;
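
ixl_rxr_config() builds an HMC LAN RX queue context and installs it for this queue id; ixl_rxr_unconfig() later resets the soft indices (last match above). Only the two context assignments and the kva lookup are visible in the matches, so the sketch below stops there; the context type name and the remaining fields are assumptions.

    struct ixl_hmc_rxq rxq;                 /* context type name assumed */
    void *hmc;

    memset(&rxq, 0, sizeof(rxq));
    rxq.head = htole16(rxr->rxr_cons);
    rxq.base = htole64(IXL_DMA_DVA(&rxr->rxr_mem) / IXL_HMC_RXQ_BASE_UNIT);
    /* remaining context fields (queue length, buffer sizes, CRC stripping)
     * would be filled in here; omitted from this sketch */

    hmc = ixl_hmc_kva(sc, IXL_HMC_LAN_RX, rxr->rxr_qid);
    /* the context is then bit-packed into the HMC backing area at 'hmc';
     * the packing helper is driver specific and not shown */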
3108 ixl_rxr_free(struct ixl_softc *sc, struct ixl_rx_ring *rxr)
3113 maps = rxr->rxr_maps;
3120 ixl_dmamem_free(sc, &rxr->rxr_mem);
3121 mutex_destroy(&rxr->rxr_lock);
3123 kmem_free(rxr, sizeof(*rxr));
3174 ixl_rxeof(struct ixl_softc *sc, struct ixl_rx_ring *rxr, u_int rxlimit)
3187 KASSERT(mutex_owned(&rxr->rxr_lock));
3192 prod = rxr->rxr_prod;
3193 cons = rxr->rxr_cons;
3198 bus_dmamap_sync(sc->sc_dmat, IXL_DMA_MAP(&rxr->rxr_mem),
3199 0, IXL_DMA_LEN(&rxr->rxr_mem),
3202 ring = IXL_DMA_KVA(&rxr->rxr_mem);
3220 rxm = &rxr->rxr_maps[cons];
3237 *rxr->rxr_m_tail = m;
3238 rxr->rxr_m_tail = &m->m_next;
3240 m = rxr->rxr_m_head;
3267 rxr->rxr_m_head = NULL;
3268 rxr->rxr_m_tail = &rxr->rxr_m_head;
3278 rxr->rxr_cons = cons;
3279 if (ixl_rxfill(sc, rxr) == -1)
3285 bus_dmamap_sync(sc->sc_dmat, IXL_DMA_MAP(&rxr->rxr_mem),
3286 0, IXL_DMA_LEN(&rxr->rxr_mem),
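
ixl_rxeof() walks write-back descriptors from rxr_cons, chaining fragments onto rxr_m_head/rxr_m_tail until an end-of-packet descriptor completes a packet, then refills and resyncs the ring. A condensed sketch of one loop iteration is below; the descriptor-word macros and the rxm_* member names are assumptions, while the chain handling follows the matched lines.

    /* Hedged sketch of one iteration of the rxeof harvest loop. */
    word = le64toh(ring[cons].qword1);      /* descriptor layout assumed */
    if (!ISSET(word, IXL_RX_DESC_DD))       /* "descriptor done" bit, name assumed */
        break;                              /* hardware has not written this slot yet */

    rxm = &rxr->rxr_maps[cons];

    bus_dmamap_sync(sc->sc_dmat, rxm->rxm_map, 0,
        rxm->rxm_map->dm_mapsize, BUS_DMASYNC_POSTREAD);
    bus_dmamap_unload(sc->sc_dmat, rxm->rxm_map);   /* rxm_map member assumed */

    m = rxm->rxm_m;                         /* rxm_m member assumed */
    rxm->rxm_m = NULL;
    m->m_len = IXL_RX_DESC_PLEN(word);      /* length extractor, name assumed */

    /* append the fragment to the pending chain */
    *rxr->rxr_m_tail = m;
    rxr->rxr_m_tail = &m->m_next;

    if (ISSET(word, IXL_RX_DESC_EOP)) {     /* end-of-packet bit, name assumed */
        m = rxr->rxr_m_head;
        /* fix up m_pkthdr and hand the packet to the network stack here */
        rxr->rxr_m_head = NULL;
        rxr->rxr_m_tail = &rxr->rxr_m_head;
    }

    if (++cons == ndescs)                   /* ring size variable assumed */
        cons = 0;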
3293 ixl_rxfill(struct ixl_softc *sc, struct ixl_rx_ring *rxr)
3304 KASSERT(mutex_owned(&rxr->rxr_lock));
3306 prod = rxr->rxr_prod;
3307 slots = ixl_rxr_unrefreshed(rxr->rxr_prod, rxr->rxr_cons,
3310 ring = IXL_DMA_KVA(&rxr->rxr_mem);
3317 rxm = &rxr->rxr_maps[prod];
3321 rxr->rxr_mgethdr_failed.ev_count++;
3328 rxr->rxr_mgetcl_failed.ev_count++;
3341 rxr->rxr_mbuf_load_failed.ev_count++;
3365 rxr->rxr_prod = prod;
3366 ixl_wr(sc, rxr->rxr_tail, prod);
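
ixl_rxfill() tops the ring back up from rxr_prod until it would catch the consumer, then publishes the new producer index through the tail register (the last match above). The free-slot arithmetic behind ixl_rxr_unrefreshed() can be sketched as below; the parameter names and the keep-one-slot-empty convention are assumptions consistent with how producer/consumer rings are usually run.

    /* Hedged sketch: how many descriptors the producer may still refill
     * before colliding with the consumer, for a ring of 'ndescs' slots.
     * One slot is always left unused so the completely-full and
     * completely-empty states stay distinguishable. */
    static unsigned int
    ixl_rxr_unrefreshed(unsigned int prod, unsigned int cons,
        unsigned int ndescs)
    {
        unsigned int num;

        if (prod < cons)
            num = cons - prod;
        else
            num = (ndescs - prod) + cons;

        return num - 1;
    }

Each refilled slot goes through the usual MGETHDR/MCLGET/bus_dmamap_load_mbuf sequence, and the three failure counters matched above (rxr_mgethdr_failed, rxr_mgetcl_failed, rxr_mbuf_load_failed) count the respective failure points.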
3378 struct ixl_rx_ring *rxr = qp->qp_rxr;
3387 mutex_enter(&rxr->rxr_lock);
3389 rxmore = ixl_rxeof(sc, rxr, rxlimit);
3390 mutex_exit(&rxr->rxr_lock);
3412 struct ixl_rx_ring *rxr;
3441 rxr = sc->sc_qps[i].qp_rxr;
3445 IXL_TXRX_PROCESS_UNLIMIT, &rxr->rxr_intr);
3458 struct ixl_rx_ring *rxr = qp->qp_rxr;
3468 txlimit, &txr->txr_intr, rxlimit, &rxr->rxr_intr);
3499 struct ixl_rx_ring *rxr = qp->qp_rxr;
3507 txlimit, &txr->txr_defer, rxlimit, &rxr->rxr_defer);
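
The last two clusters of matches are the same per-queue-pair work routine invoked from two contexts, hard interrupt and deferred processing, differing only in the limits and in which evcnt (rxr_intr vs. rxr_defer) gets charged. A hedged sketch of the shared shape, with the function name and the TX-side names (qp_txr, txr_lock, ixl_txeof) assumed:

    /* Hedged sketch of the shared per-queue-pair work helper. */
    static int
    ixl_handle_queue_common(struct ixl_softc *sc, struct ixl_queue_pair *qp,
        u_int txlimit, struct evcnt *txevcnt,
        u_int rxlimit, struct evcnt *rxevcnt)
    {
        struct ixl_tx_ring *txr = qp->qp_txr;
        struct ixl_rx_ring *rxr = qp->qp_rxr;
        int txmore, rxmore;

        mutex_enter(&txr->txr_lock);            /* txr_lock assumed */
        txevcnt->ev_count++;
        txmore = ixl_txeof(sc, txr, txlimit);   /* TX-side helper assumed */
        mutex_exit(&txr->txr_lock);

        mutex_enter(&rxr->rxr_lock);
        rxevcnt->ev_count++;
        rxmore = ixl_rxeof(sc, rxr, rxlimit);
        mutex_exit(&rxr->rxr_lock);

        /* nonzero tells the caller there is still work to defer */
        return txmore || rxmore;
    }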
6044 struct ixl_rx_ring *rxr;
6051 rxr = qp->qp_rxr;
6066 evcnt_attach_dynamic(&rxr->rxr_mgethdr_failed, EVCNT_TYPE_MISC,
6068 evcnt_attach_dynamic(&rxr->rxr_mgetcl_failed, EVCNT_TYPE_MISC,
6070 evcnt_attach_dynamic(&rxr->rxr_mbuf_load_failed,
6073 evcnt_attach_dynamic(&rxr->rxr_intr, EVCNT_TYPE_INTR,
6075 evcnt_attach_dynamic(&rxr->rxr_defer, EVCNT_TYPE_MISC,
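
The attach matches are only the first lines of multi-line evcnt_attach_dynamic() calls. For reference, a complete call against NetBSD's evcnt_attach_dynamic(ev, type, parent, group, name) signature looks like the sketch below; the group and name strings, and the sc_dev member, are placeholders since they are not visible in the matches.

    evcnt_attach_dynamic(&rxr->rxr_mgethdr_failed, EVCNT_TYPE_MISC,
        NULL, device_xname(sc->sc_dev), "rx MGETHDR failed");   /* strings assumed */
    evcnt_attach_dynamic(&rxr->rxr_intr, EVCNT_TYPE_INTR,
        NULL, device_xname(sc->sc_dev), "rx interrupts");       /* strings assumed */

The detach matches below undo these one-for-one with evcnt_detach().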
6203 struct ixl_rx_ring *rxr;
6209 rxr = sc->sc_qps[i].qp_rxr;
6218 evcnt_detach(&rxr->rxr_mgethdr_failed);
6219 evcnt_detach(&rxr->rxr_mgetcl_failed);
6220 evcnt_detach(&rxr->rxr_mbuf_load_failed);
6221 evcnt_detach(&rxr->rxr_intr);
6222 evcnt_detach(&rxr->rxr_defer);