Lines Matching refs:sc_tx

Every hit below references the eqos driver's TX ring state (sc->sc_tx); the leading numbers are source line numbers. Read in order, the hits walk the whole transmit path: descriptor setup, buffer mapping, the OWN-bit handoff, ring initialization, interrupt-time completion, the start routine, attach-time DMA allocation, and the debug dumps.
288 --sc->sc_tx.queued;
292 ++sc->sc_tx.queued;
298 sc->sc_tx.desc_ring[index].tdes0 = htole32((uint32_t)paddr);
299 sc->sc_tx.desc_ring[index].tdes1 =
301 sc->sc_tx.desc_ring[index].tdes2 = htole32(tdes2 | len);
302 sc->sc_tx.desc_ring[index].tdes3 = htole32(tdes3 | total_len);
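
The hits at 288-302 are the TX descriptor setup: the queued counter is adjusted and the four 32-bit descriptor words (tdes0..tdes3) are written in bus little-endian order, with the 64-bit buffer address split across tdes0/tdes1. A minimal userland sketch of the same pattern; struct tx_desc and the flag arguments are illustrative stand-ins, not the driver's types:

    #include <endian.h>     /* htole32(); <sys/endian.h> on the BSDs */
    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative layout: four little-endian 32-bit descriptor words. */
    struct tx_desc {
        uint32_t tdes0;     /* buffer address, low 32 bits */
        uint32_t tdes1;     /* buffer address, high 32 bits */
        uint32_t tdes2;     /* flags | segment length */
        uint32_t tdes3;     /* flags | total frame length */
    };

    static void
    setup_txdesc(struct tx_desc *d, uint64_t paddr, uint32_t tdes2_flags,
        uint32_t len, uint32_t tdes3_flags, uint32_t total_len)
    {
        /* The device reads little-endian words whatever the host order is. */
        d->tdes0 = htole32((uint32_t)paddr);
        d->tdes1 = htole32((uint32_t)(paddr >> 32));
        d->tdes2 = htole32(tdes2_flags | len);
        d->tdes3 = htole32(tdes3_flags | total_len);
    }

    int
    main(void)
    {
        struct tx_desc d;

        setup_txdesc(&d, 0x1fff80000ULL, 0, 64, 0, 64);
        printf("tdes0=%08x tdes1=%08x\n", d.tdes0, d.tdes1);
        return 0;
    }
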
316 if (sc->sc_tx.queued >= TX_DESC_COUNT - 1)
320 sc->sc_tx.buf_map[index].map, m, BUS_DMA_WRITE | BUS_DMA_NOWAIT);
332 segs = sc->sc_tx.buf_map[index].map->dm_segs;
333 nsegs = sc->sc_tx.buf_map[index].map->dm_nsegs;
335 nospace = sc->sc_tx.queued >= TX_DESC_COUNT - nsegs;
338 sc->sc_tx.buf_map[index].map);
343 bus_dmamap_sync(sc->sc_dmat, sc->sc_tx.buf_map[index].map,
344 0, sc->sc_tx.buf_map[index].map->dm_mapsize, BUS_DMASYNC_PREWRITE);
347 sc->sc_tx.buf_map[index].mbuf = m;
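
Lines 316-347 are the buffer-mapping path: the mbuf chain is loaded into a per-slot DMA map, and the frame is rejected when its dm_nsegs would not fit in the descriptors still free (the nospace test on line 335). A small userland model of that accounting; TX_DESC_COUNT and struct tx_ring here are illustrative stand-ins:

    #include <stdbool.h>
    #include <stdio.h>

    #define TX_DESC_COUNT 128       /* illustrative ring size */

    struct tx_ring {
        unsigned queued;            /* descriptors currently in flight */
    };

    /*
     * A chain of nsegs DMA segments fits only while queued stays below
     * TX_DESC_COUNT - nsegs; the >= test in the driver also rejects
     * filling the ring completely, the usual way to keep "full" and
     * "empty" distinguishable.
     */
    static bool
    tx_ring_has_room(const struct tx_ring *tx, unsigned nsegs)
    {
        return tx->queued < TX_DESC_COUNT - nsegs;
    }

    int
    main(void)
    {
        struct tx_ring tx = { .queued = 126 };

        printf("3-segment frame fits: %s\n",
            tx_ring_has_room(&tx, 3) ? "yes" : "no");   /* no */
        return 0;
    }
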
369 bus_dmamap_sync(sc->sc_dmat, sc->sc_tx.desc_map,
374 index, sc->sc_tx.cur, sc->sc_tx.next, sc->sc_tx.queued);
375 sc->sc_tx.desc_ring[index].tdes3 |= htole32(EQOS_TDES3_TX_OWN);
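
Line 375 is the handoff to hardware: the descriptor words are written and synced (line 369) before TDES3's OWN bit is ORed in, so the DMA engine can never fetch a half-built descriptor. A sketch of that ordering using a C11 release fence in place of bus_dmamap_sync(..., BUS_DMASYNC_PREWRITE); the bit position and the fence are illustrative only, since a real driver must use the bus_dma(9) sync, which also handles cache writeback:

    #include <stdatomic.h>
    #include <stdint.h>

    #define TDES3_OWN (1U << 31)    /* illustrative ownership bit */

    struct tx_desc { uint32_t tdes0, tdes1, tdes2, tdes3; };

    static void
    publish_txdesc(volatile struct tx_desc *d, uint32_t addr_lo,
        uint32_t addr_hi, uint32_t tdes2, uint32_t tdes3)
    {
        d->tdes0 = addr_lo;
        d->tdes1 = addr_hi;
        d->tdes2 = tdes2;
        d->tdes3 = tdes3 & ~TDES3_OWN;    /* everything but ownership */

        /* Make the fields visible before the ownership flip. */
        atomic_thread_fence(memory_order_release);

        d->tdes3 |= TDES3_OWN;            /* now the device may consume it */
    }
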
572 sc->sc_tx.cur = sc->sc_tx.next = sc->sc_tx.queued = 0;
580 (uint32_t)((uint64_t)sc->sc_tx.desc_ring_paddr >> 32));
582 (uint32_t)sc->sc_tx.desc_ring_paddr);
585 sc->sc_tx.desc_ring_paddr, TX_DESC_COUNT);
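
Lines 572-585 are ring (re)initialization: the software indices cur/next/queued are zeroed and the descriptor ring's 64-bit physical base is split across a high/low register pair. The split is plain shift-and-truncate; the register names below are hypothetical, and a real driver writes them with bus_space_write_4():

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical register write standing in for bus_space_write_4(). */
    static void
    wr32(const char *name, uint32_t val)
    {
        printf("%-19s <- 0x%08x\n", name, val);
    }

    int
    main(void)
    {
        uint64_t ring_paddr = 0x1fff80000ULL;   /* illustrative base */

        wr32("TX_DESC_LIST_HADDR", (uint32_t)(ring_paddr >> 32));
        wr32("TX_DESC_LIST_ADDR", (uint32_t)ring_paddr);
        return 0;
    }
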
956 for (i = sc->sc_tx.next; sc->sc_tx.queued > 0; i = TX_NEXT(i)) {
957 KASSERT(sc->sc_tx.queued > 0);
958 KASSERT(sc->sc_tx.queued <= TX_DESC_COUNT);
959 eqos_dma_sync(sc, sc->sc_tx.desc_map,
962 desc = &sc->sc_tx.desc_ring[i];
967 bmap = &sc->sc_tx.buf_map[i];
979 eqos_dma_sync(sc, sc->sc_tx.desc_map,
1002 sc->sc_tx.next = i;
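
Lines 956-1002 are the completion side: starting at sc_tx.next, the loop syncs and inspects one descriptor at a time, stops at the first one the hardware still owns, tears down the finished buffer, and records where to resume. A userland model of that consumer loop; RING_SIZE, OWN, and the static ring storage are illustrative:

    #include <stdint.h>
    #include <stdio.h>

    #define RING_SIZE 8
    #define OWN (1U << 31)
    #define NEXT(i) (((i) + 1) % RING_SIZE)

    struct desc { uint32_t tdes3; };

    static struct desc ring[RING_SIZE];
    static unsigned next_idx, queued;

    static void
    reclaim(void)
    {
        unsigned i;

        for (i = next_idx; queued > 0; i = NEXT(i)) {
            /* A real driver POSTREAD-syncs the descriptor first. */
            if (ring[i].tdes3 & OWN)
                break;              /* hardware is not done with it yet */
            /* ...unload the DMA map, free the mbuf, count the packet... */
            queued--;
        }
        next_idx = i;               /* resume here on the next interrupt */
    }

    int
    main(void)
    {
        queued = 3;
        ring[1].tdes3 = OWN;        /* slot 0 done, slot 1 still owned by HW */
        reclaim();
        printf("next=%u queued=%u\n", next_idx, queued);    /* next=1 queued=2 */
        return 0;
    }
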
1021 for (cnt = 0, start = sc->sc_tx.cur; ; cnt++) {
1022 if (sc->sc_tx.queued >= TX_DESC_COUNT - TX_MAX_SEGS) {
1023 DPRINTF(EDEB_TXRING, "%u sc_tx.queued, ring full\n",
1024 sc->sc_tx.queued);
1032 nsegs = eqos_setup_txbuf(sc, sc->sc_tx.cur, m);
1047 sc->sc_tx.cur = TX_SKIP(sc->sc_tx.cur, nsegs);
1051 "queued: %u\n", cnt, sc->sc_tx.cur, sc->sc_tx.next,
1052 sc->sc_tx.queued);
1055 eqos_dma_sync(sc, sc->sc_tx.desc_map,
1056 start, sc->sc_tx.cur, TX_DESC_COUNT,
1062 (uint32_t)sc->sc_tx.desc_ring_paddr + DESC_OFF(start),
1063 sc->sc_tx.cur,
1064 (uint32_t)sc->sc_tx.desc_ring_paddr +
1065 DESC_OFF(sc->sc_tx.cur),
1069 (uint32_t)sc->sc_tx.desc_ring_paddr +
1070 DESC_OFF(sc->sc_tx.cur));
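
Lines 1021-1070 are the producer side (the start routine): frames keep getting mapped while a worst-case TX_MAX_SEGS chain still fits, cur advances by however many descriptors each frame consumed, and the tail doorbell written at the end is ring base + DESC_OFF(cur), i.e. the physical address just past the last valid descriptor. A sketch of the index and tail-pointer arithmetic, assuming a power-of-two ring so that the wrap is a mask:

    #include <stdint.h>
    #include <stdio.h>

    #define TX_DESC_COUNT 128                   /* power of two assumed here */
    #define TX_SKIP(i, n) (((i) + (n)) & (TX_DESC_COUNT - 1))
    #define DESC_SIZE 16                        /* four 32-bit words */
    #define DESC_OFF(i) ((i) * DESC_SIZE)

    int
    main(void)
    {
        uint64_t ring_paddr = 0x80000000ULL;    /* illustrative base */
        unsigned cur = 126;

        cur = TX_SKIP(cur, 3);                  /* a 3-segment frame wraps */
        printf("cur=%u tail=0x%08x\n",
            cur, (uint32_t)(ring_paddr + DESC_OFF(cur)));   /* cur=1 */
        return 0;
    }
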
1370 sc->sc_tx.sc = sc;
1375 DESC_BOUNDARY, BUS_DMA_WAITOK, &sc->sc_tx.desc_map);
1380 DESC_BOUNDARY, &sc->sc_tx.desc_dmaseg, 1, &nsegs, BUS_DMA_WAITOK);
1384 error = bus_dmamem_map(sc->sc_dmat, &sc->sc_tx.desc_dmaseg, nsegs,
1385 TX_DESC_SIZE, (void *)&sc->sc_tx.desc_ring, BUS_DMA_WAITOK);
1389 error = bus_dmamap_load(sc->sc_dmat, sc->sc_tx.desc_map,
1390 sc->sc_tx.desc_ring, TX_DESC_SIZE, NULL, BUS_DMA_WAITOK);
1394 sc->sc_tx.desc_ring_paddr = sc->sc_tx.desc_map->dm_segs[0].ds_addr;
1396 memset(sc->sc_tx.desc_ring, 0, TX_DESC_SIZE);
1397 bus_dmamap_sync(sc->sc_dmat, sc->sc_tx.desc_map, 0, TX_DESC_SIZE,
1400 sc->sc_tx.queued = TX_DESC_COUNT;
1404 &sc->sc_tx.buf_map[i].map);
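
Lines 1370-1404 are attach-time allocation and follow the canonical NetBSD bus_dma(9) sequence: create a map, allocate DMA-safe memory, map it into kernel VA, load the map to learn the bus address, then zero and sync the ring (line 1404 additionally creates one map per slot for the packet buffers). A condensed kernel-context sketch, not a standalone program; dmat, DESC_ALIGN, DESC_BOUNDARY, and the sync flags are assumed from context, and all error handling and unwinding are omitted:

    bus_dma_segment_t seg;
    bus_dmamap_t map;
    void *ring;                     /* CPU's view of the descriptor ring */
    bus_addr_t paddr;               /* device's view (bus address) */
    int nsegs;

    bus_dmamap_create(dmat, TX_DESC_SIZE, 1, TX_DESC_SIZE,
        DESC_BOUNDARY, BUS_DMA_WAITOK, &map);
    bus_dmamem_alloc(dmat, TX_DESC_SIZE, DESC_ALIGN, DESC_BOUNDARY,
        &seg, 1, &nsegs, BUS_DMA_WAITOK);
    bus_dmamem_map(dmat, &seg, nsegs, TX_DESC_SIZE, &ring, BUS_DMA_WAITOK);
    bus_dmamap_load(dmat, map, ring, TX_DESC_SIZE, NULL, BUS_DMA_WAITOK);
    paddr = map->dm_segs[0].ds_addr;        /* what line 1394 records */

    memset(ring, 0, TX_DESC_SIZE);          /* no descriptor owned by HW yet */
    bus_dmamap_sync(dmat, map, 0, TX_DESC_SIZE,
        BUS_DMASYNC_PREREAD | BUS_DMASYNC_PREWRITE);
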
1466 sc->sc_tx.desc_ring_paddr, sc->sc_rx.desc_ring_paddr);
1683 struct eqos_ring *txr = &sc->sc_tx;
1785 printf("head = %08x\n", (uint32_t)sc->sc_tx.desc_ring_paddr);
1788 (reg - (uint32_t)sc->sc_tx.desc_ring_paddr) /
1794 index = (reg - (uint32_t)sc->sc_tx.desc_ring_paddr) /
1813 index = (reg - (uint32_t)sc->sc_tx.desc_ring_paddr) /
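
The debug dumps at 1785-1813 invert DESC_OFF(): a ring index is recovered from the hardware's current-descriptor register by subtracting the low 32 bits of the ring's physical base and dividing by the descriptor size. The arithmetic, with illustrative values:

    #include <stdint.h>
    #include <stdio.h>

    #define DESC_SIZE 16            /* bytes per descriptor, as above */

    int
    main(void)
    {
        uint32_t ring_paddr = 0x80000000u;      /* low 32 bits of ring base */
        uint32_t cur_desc = 0x80000130u;        /* read from the DMA channel */

        unsigned index = (cur_desc - ring_paddr) / DESC_SIZE;
        printf("hardware is at descriptor %u\n", index);    /* 19 */
        return 0;
    }
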