/src/sys/dev/ic/

w83l518d.c
    47 wb_idx_read(struct wb_softc *wb, uint8_t reg)
    49 bus_space_write_1(wb->wb_iot, wb->wb_ioh, WB_SD_INDEX, reg);
    50 return bus_space_read_1(wb->wb_iot, wb->wb_ioh, WB_SD_DATA);
    54 wb_idx_write(struct wb_softc *wb, uint8_t reg, uint8_t val)
    56 bus_space_write_1(wb->wb_iot, wb->wb_ioh, WB_SD_INDEX, reg);
    57 bus_space_write_1(wb->wb_iot, wb->wb_ioh, WB_SD_DATA, val)
   127 struct wb_softc *wb = opaque;    local in function: wb_intr
      [all...]

w83l518d_sdmmc.c
    97 wb_sdmmc_read_data(struct wb_softc *wb, uint8_t *data, int len)
    99 bus_space_read_multi_1(wb->wb_iot, wb->wb_ioh, WB_SD_FIFO, data, len);
   103 wb_sdmmc_write_data(struct wb_softc *wb, uint8_t *data, int len)
   105 bus_space_write_multi_1(wb->wb_iot, wb->wb_ioh, WB_SD_FIFO, data, len);
   111 struct wb_softc *wb = opaque;    local in function: wb_sdmmc_discover
   113 REPORT(wb, "TRACE: discover(wb)\n");
   115 sdmmc_needs_discover(wb->wb_sdmmc_dev)
   237 struct wb_softc *wb = sch;    local in function: wb_sdmmc_card_detect
   252 struct wb_softc *wb = sch;    local in function: wb_sdmmc_write_protect
   275 struct wb_softc *wb = sch;    local in function: wb_sdmmc_bus_clock
   300 struct wb_softc *wb = sch;    local in function: wb_sdmmc_bus_width
   439 struct wb_softc *wb = sch;    local in function: wb_sdmmc_exec_command
      [all...]

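The W83L518D hits above show the classic index/data register-pair idiom: the driver writes a register number to WB_SD_INDEX and then reads or writes the selected register through WB_SD_DATA via bus_space_{read,write}_1(). The following is a minimal userland sketch of that idiom, assuming a fake in-memory device in place of bus_space; all identifiers (fake_regs, idx_read, ...) are invented for illustration and are not the driver's API.

/*
 * Index/data register-pair access, modelled on wb_idx_read()/wb_idx_write().
 * A 256-entry array emulates the indexed register file so the sketch runs
 * anywhere without real hardware.
 */
#include <stdint.h>
#include <stdio.h>

#define FAKE_INDEX 0			/* offset of the index register */
#define FAKE_DATA  1			/* offset of the data register  */

static uint8_t fake_regs[256];		/* the indexed register file */
static uint8_t fake_index;		/* currently selected index  */

static void
fake_io_write(int off, uint8_t v)
{
	if (off == FAKE_INDEX)
		fake_index = v;
	else
		fake_regs[fake_index] = v;
}

static uint8_t
fake_io_read(int off)
{
	return (off == FAKE_INDEX) ? fake_index : fake_regs[fake_index];
}

/* Same shape as wb_idx_read()/wb_idx_write(): select, then access. */
static uint8_t
idx_read(uint8_t reg)
{
	fake_io_write(FAKE_INDEX, reg);
	return fake_io_read(FAKE_DATA);
}

static void
idx_write(uint8_t reg, uint8_t val)
{
	fake_io_write(FAKE_INDEX, reg);
	fake_io_write(FAKE_DATA, val);
}

int
main(void)
{
	idx_write(0x10, 0xa5);
	printf("reg 0x10 = 0x%02x\n", idx_read(0x10));
	return 0;
}
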
/src/sbin/routed/

output.c
   265 clr_ws_buf(struct ws_buf *wb,
   270 wb->lim = wb->base + NETS_LEN;
   271 wb->n = wb->base;
   272 memset(wb->n, 0, NETS_LEN*sizeof(*wb->n));
   279 na = (struct netauth*)wb->n;
   284 wb->n++;
   292 wb->n++
   359 struct ws_buf *wb;    local in function: supply_out
      [all...]

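routed's struct ws_buf keeps three cursors: base (start of the entry array), n (next free entry) and lim (one past the end); clr_ws_buf() resets n to base and zeroes the array, and the supply path bumps n until it reaches lim, at which point the buffer is flushed. A self-contained sketch of that bounded-cursor pattern follows; entry_t, BUF_LEN, buf_add() and buf_flush() are invented names, not routed's real interface.

#include <stddef.h>
#include <string.h>
#include <stdio.h>

#define BUF_LEN 25			/* like NETS_LEN: entries per packet */

typedef struct { int dst, metric; } entry_t;

struct ws_buf {
	entry_t	base[BUF_LEN];		/* start of the buffer       */
	entry_t	*n;			/* next free entry           */
	entry_t	*lim;			/* one past the last entry   */
};

static void
buf_clear(struct ws_buf *wb)
{
	wb->lim = wb->base + BUF_LEN;
	wb->n = wb->base;
	memset(wb->n, 0, BUF_LEN * sizeof(*wb->n));
}

static void
buf_flush(struct ws_buf *wb)
{
	printf("flushing %td entries\n", wb->n - wb->base);
	buf_clear(wb);
}

static void
buf_add(struct ws_buf *wb, int dst, int metric)
{
	wb->n->dst = dst;
	wb->n->metric = metric;
	if (++wb->n >= wb->lim)		/* buffer full: send it out */
		buf_flush(wb);
}

int
main(void)
{
	struct ws_buf wb;

	buf_clear(&wb);
	for (int i = 0; i < 60; i++)
		buf_add(&wb, i, 1);
	if (wb.n != wb.base)		/* flush the partial buffer */
		buf_flush(&wb);
	return 0;
}
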
/src/sys/external/bsd/drm2/dist/drm/radeon/

radeon_r600_dma.c
    61 if (rdev->wb.enabled)
    62 rptr = rdev->wb.wb[ring->rptr_offs/4];
   147 /* set the wb address whether it's enabled or not */
   149 upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);
   151 ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC));
   153 if (rdev->wb.enabled)
   248 gpu_addr = rdev->wb.gpu_addr + index;
   251 rdev->wb.wb[index/4] = cpu_to_le32(tmp)
      [all...]

radeon_ni_dma.c
    63 if (rdev->wb.enabled) {
    64 rptr = rdev->wb.wb[ring->rptr_offs/4];
   133 if (rdev->wb.enabled) {
   226 /* set the wb address whether it's enabled or not */
   228 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);
   230 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));
   232 if (rdev->wb.enabled)

radeon_device.c
   449 rdev->wb.enabled = false;
   463 if (rdev->wb.wb_obj) {
   464 if (!radeon_bo_reserve(rdev->wb.wb_obj, false)) {
   465 radeon_bo_kunmap(rdev->wb.wb_obj);
   466 radeon_bo_unpin(rdev->wb.wb_obj);
   467 radeon_bo_unreserve(rdev->wb.wb_obj);
   469 radeon_bo_unref(&rdev->wb.wb_obj);
   470 rdev->wb.wb = NULL;
   471 rdev->wb.wb_obj = NULL
      [all...]

radeon_cik_sdma.c
    73 if (rdev->wb.enabled) {
    74 rptr = rdev->wb.wb[ring->rptr_offs/4];
   144 if (rdev->wb.enabled) {
   404 /* set the wb address whether it's enabled or not */
   406 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);
   408 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));
   410 if (rdev->wb.enabled)
   664 gpu_addr = rdev->wb.gpu_addr + index;
   667 rdev->wb.wb[index/4] = cpu_to_le32(tmp)
      [all...]

radeon_ring.c
   309 else if (rdev->wb.enabled)
   378 * @rptr_offs: offset of the rptr writeback location in the WB buffer
   421 if (rdev->wb.enabled) {
   423 ring->next_rptr_gpu_addr = rdev->wb.gpu_addr + index;
   424 ring->next_rptr_cpu_addr = &rdev->wb.wb[index/4];

radeon_fence.c
    78 if (likely(rdev->wb.enabled || !drv->scratch_reg)) {
   101 if (likely(rdev->wb.enabled || !drv->scratch_reg)) {
   891 if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) {
   895 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];
   896 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr +
   915 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];
   916 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index;

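The radeon hits share one pattern: the GPU DMAs each ring's read pointer into a CPU-visible "writeback" (wb) buffer, so when rdev->wb.enabled is set the driver reads rptr from host memory (wb.wb[rptr_offs/4], little-endian) instead of doing an MMIO register read, and the ring-setup paths program the wb GPU address into the hardware whether or not it is enabled. A hedged userland sketch of the rptr read side follows; struct fake_dev, struct fake_ring and mmio_read_rptr() are stand-ins, not the driver's types.

#include <stdint.h>
#include <stdio.h>
#include <endian.h>		/* le32toh()/htole32(); <sys/endian.h> on NetBSD */

struct fake_dev {
	int		wb_enabled;
	uint32_t	wb[256];	/* CPU mapping of the writeback page */
};

struct fake_ring {
	unsigned	rptr_offs;	/* byte offset of this ring's rptr slot */
};

static uint32_t
mmio_read_rptr(void)
{
	return 0;			/* placeholder for a register read */
}

/* Prefer the writeback slot, fall back to MMIO, as the radeon code does. */
static uint32_t
ring_get_rptr(struct fake_dev *dev, struct fake_ring *ring)
{
	if (dev->wb_enabled)
		return le32toh(dev->wb[ring->rptr_offs / 4]);
	return mmio_read_rptr();
}

int
main(void)
{
	struct fake_dev dev = { .wb_enabled = 1 };
	struct fake_ring ring = { .rptr_offs = 64 };

	dev.wb[ring.rptr_offs / 4] = htole32(0x20);	/* GPU would DMA this */
	printf("rptr = 0x%x\n", ring_get_rptr(&dev, &ring));
	return 0;
}
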
/src/sys/dev/pci/igc/

igc_base.h
    34 } wb;    member in union: igc_adv_tx_desc
    60 #define IGC_ADVTXD_STAT_SN_CRC 0x00000002 /* NXTSEQ/SEED prsnt in WB */
   121 } wb; /* writeback */    member in union: igc_adv_rx_desc

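The igc hits point at the descriptor-union idiom: the same descriptor slot has a "read" layout that software fills in before handing it to the NIC and a "wb" (writeback) layout that the hardware overwrites in place on completion. The sketch below shows the shape of such a union; the field names and widths are simplified placeholders, not the real igc_adv_rx_desc layout.

#include <stdint.h>
#include <stdio.h>

union toy_rx_desc {
	struct {			/* software -> hardware */
		uint64_t pkt_addr;	/* buffer DMA address   */
		uint64_t hdr_addr;
	} read;
	struct {			/* hardware -> software */
		uint32_t rss_hash;
		uint32_t status_error;	/* e.g. descriptor-done bit    */
		uint16_t length;	/* bytes written to the buffer */
		uint16_t vlan;
		uint32_t reserved;
	} wb;				/* writeback */
};

#define TOY_RXD_STAT_DD 0x01		/* "descriptor done" */

int
main(void)
{
	union toy_rx_desc d = { .read = { .pkt_addr = 0x1000, .hdr_addr = 0 } };

	/* Pretend the NIC completed the descriptor in place. */
	d.wb.status_error = TOY_RXD_STAT_DD;
	d.wb.length = 64;

	if (d.wb.status_error & TOY_RXD_STAT_DD)
		printf("received %u bytes\n", d.wb.length);
	return 0;
}
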
/src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/

amdgpu_ih.c
   136 ih->wptr_addr = adev->wb.gpu_addr + wptr_offs * 4;
   137 ih->wptr_cpu = &adev->wb.wb[wptr_offs];
   138 ih->rptr_addr = adev->wb.gpu_addr + rptr_offs * 4;
   139 ih->rptr_cpu = &adev->wb.wb[rptr_offs];

amdgpu_sdma_v3_0.c
   359 return ring->adev->wb.wb[ring->rptr_offs] >> 2;
   376 wptr = ring->adev->wb.wb[ring->wptr_offs] >> 2;
   396 volatile u32 *wb = (volatile u32 *)&adev->wb.wb[ring->wptr_offs];    local in function: sdma_v3_0_ring_set_wptr
   398 WRITE_ONCE(*wb, (lower_32_bits(ring->wptr) << 2));
   401 volatile u32 *wb = (volatile u32 *)&adev->wb.wb[ring->wptr_offs]    local in function: sdma_v3_0_ring_set_wptr
      [all...]

amdgpu_sdma_v2_4.c
   203 return ring->adev->wb.wb[ring->rptr_offs] >> 2;
   460 /* set the wb address whether it's enabled or not */
   462 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);
   464 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);
   566 gpu_addr = adev->wb.gpu_addr + (index * 4);
   568 adev->wb.wb[index] = cpu_to_le32(tmp);
   583 tmp = le32_to_cpu(adev->wb.wb[index])
      [all...]

amdgpu_ring.c
   276 dev_err(adev->dev, "(%d) ring rptr_offs wb alloc failed\n", r);
   282 dev_err(adev->dev, "(%d) ring wptr_offs wb alloc failed\n", r);
   288 dev_err(adev->dev, "(%d) ring fence_offs wb alloc failed\n", r);
   295 "(%d) ring trail_fence_offs wb alloc failed\n", r);
   299 adev->wb.gpu_addr + (ring->trail_fence_offs * 4);
   300 ring->trail_fence_cpu_addr = &adev->wb.wb[ring->trail_fence_offs];
   304 dev_err(adev->dev, "(%d) ring cond_exec_polling wb alloc failed\n", r);
   307 ring->cond_exe_gpu_addr = adev->wb.gpu_addr + (ring->cond_exe_offs * 4);
   308 ring->cond_exe_cpu_addr = &adev->wb.wb[ring->cond_exe_offs]
      [all...]

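The amdgpu_ring.c and amdgpu_ih.c hits show the bookkeeping around the shared writeback buffer: it is carved into 32-bit slots, each consumer (rptr, wptr, fence, cond_exec, ...) allocates a slot and then derives both a GPU address (gpu_addr + offs * 4) and a CPU pointer (&wb[offs]) for it. The sketch below is a simplified stand-in for that allocation, not the real amdgpu allocator; fake_wb, wb_get() and wb_free() are invented names.

#include <stdint.h>
#include <stdio.h>

#define WB_SLOTS 256

struct fake_wb {
	uint64_t	gpu_addr;		/* DMA address of the buffer */
	uint32_t	wb[WB_SLOTS];		/* CPU mapping               */
	uint8_t		used[WB_SLOTS];		/* 1 = slot handed out       */
};

/* Returns a free slot index, or -1 if the buffer is exhausted. */
static int
wb_get(struct fake_wb *w)
{
	for (int i = 0; i < WB_SLOTS; i++) {
		if (!w->used[i]) {
			w->used[i] = 1;
			return i;
		}
	}
	return -1;
}

static void
wb_free(struct fake_wb *w, int offs)
{
	w->used[offs] = 0;
}

int
main(void)
{
	struct fake_wb w = { .gpu_addr = 0x100000 };
	int rptr_offs = wb_get(&w);

	if (rptr_offs < 0) {
		fprintf(stderr, "ring rptr_offs wb alloc failed\n");
		return 1;
	}

	/* Both views of the same slot, as in the listing above. */
	uint64_t rptr_gpu_addr = w.gpu_addr + (uint64_t)rptr_offs * 4;
	volatile uint32_t *rptr_cpu = &w.wb[rptr_offs];

	*rptr_cpu = 0;			/* hardware would DMA updates here */
	printf("slot %d: gpu 0x%llx cpu %p\n", rptr_offs,
	    (unsigned long long)rptr_gpu_addr, (void *)(uintptr_t)rptr_cpu);

	wb_free(&w, rptr_offs);
	return 0;
}
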
amdgpu_si_dma.c
    48 return ring->adev->wb.wb[ring->rptr_offs>>2];
   161 rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4);
   220 gpu_addr = adev->wb.gpu_addr + (index * 4);
   222 adev->wb.wb[index] = cpu_to_le32(tmp);
   235 tmp = le32_to_cpu(adev->wb.wb[index]);
   271 gpu_addr = adev->wb.gpu_addr + (index * 4);
   273 adev->wb.wb[index] = cpu_to_le32(tmp)
      [all...]

amdgpu_sdma_v5_0.c
   280 rptr = ((volatile u64 *)&ring->adev->wb.wb[ring->rptr_offs]);
   301 wptr = ((volatile u64 *)&adev->wb.wb[ring->wptr_offs]);
   343 adev->wb.wb[ring->wptr_offs] = lower_32_bits(ring->wptr << 2);
   344 adev->wb.wb[ring->wptr_offs + 1] = upper_32_bits(ring->wptr << 2);
   653 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);
   666 /* set the wb address whether it's enabled or not *
      [all...]

amdgpu_sdma_v4_0.c
   653 rptr = ((volatile u64 *)&ring->adev->wb.wb[ring->rptr_offs]);
   673 wptr = READ_ONCE(*((volatile u64 *)&adev->wb.wb[ring->wptr_offs]));
   699 volatile u64 *wb = (volatile u64 *)&adev->wb.wb[ring->wptr_offs];    local in function: sdma_v4_0_ring_set_wptr
   709 WRITE_ONCE(*wb, (ring->wptr << 2));
   742 wptr = READ_ONCE(*((volatile u64 *)&adev->wb.wb[ring->wptr_offs]))
   764 volatile u64 *wb = (volatile u64 *)&adev->wb.wb[ring->wptr_offs];    local in function: sdma_v4_0_page_ring_set_wptr
      [all...]

amdgpu_cik_sdma.c
   172 rptr = ring->adev->wb.wb[ring->rptr_offs];
   481 /* set the wb address whether it's enabled or not */
   483 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);
   485 ((adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));
   631 gpu_addr = adev->wb.gpu_addr + (index * 4);
   633 adev->wb.wb[index] = cpu_to_le32(tmp);
   647 tmp = le32_to_cpu(adev->wb.wb[index])
      [all...]

amdgpu_jpeg_v2_5.c
   413 return adev->wb.wb[ring->wptr_offs];
   430 adev->wb.wb[ring->wptr_offs] = lower_32_bits(ring->wptr);

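In the sdma_v4_0/v5_0 hits the ring pointer is 64-bit: it is counted in dwords, shifted left by 2 to get a byte offset, and published into two adjacent 32-bit writeback slots (or accessed through a volatile u64 pointer), with READ_ONCE/WRITE_ONCE preventing the compiler from caching or tearing the access. The userland sketch below approximates that with plain volatile accesses (READ_ONCE/WRITE_ONCE are Linux kernel macros), mirrors the driver's u32[]-to-u64 cast, and therefore assumes a little-endian host; all names are invented.

#include <stdint.h>
#include <stdio.h>

static _Alignas(uint64_t) uint32_t wb[8];	/* stand-in for adev->wb.wb[] */

static inline uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static inline uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

/* Publish the ring's wptr (in dwords) as a byte offset in the wb buffer. */
static void
ring_set_wptr(unsigned wptr_offs, uint64_t wptr)
{
	volatile uint32_t *slot = &wb[wptr_offs];

	slot[0] = lower_32_bits(wptr << 2);	/* bytes, low half  */
	slot[1] = upper_32_bits(wptr << 2);	/* bytes, high half */
}

/* Read it back through a 64-bit view and convert bytes to dwords again. */
static uint64_t
ring_get_wptr(unsigned wptr_offs)
{
	volatile uint64_t *slot = (volatile uint64_t *)&wb[wptr_offs];

	return *slot >> 2;
}

int
main(void)
{
	ring_set_wptr(2, 0x123456789ULL);
	printf("wptr = 0x%llx dwords\n",
	    (unsigned long long)ring_get_wptr(2));
	return 0;
}
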
/src/sys/external/gpl2/dts/dist/arch/mips/boot/dts/qca/

ar9132.dtsi
    28 qca,ddr-wb-channel-interrupts = <2>, <3>, <4>, <5>;
    29 qca,ddr-wb-channels = <&ddr_ctrl 3>, <&ddr_ctrl 2>,
    56 #qca,ddr-wb-channel-cells = <1>;

ar9331.dtsi
    28 qca,ddr-wb-channel-interrupts = <2>, <3>;
    29 qca,ddr-wb-channels = <&ddr_ctrl 3>, <&ddr_ctrl 2>;
    59 #qca,ddr-wb-channel-cells = <1>;

/src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dml/

amdgpu_display_mode_vba.c
   474 dout->wb.wb_src_height;
   476 dout->wb.wb_src_width;
   478 dout->wb.wb_dst_width;
   480 dout->wb.wb_dst_height;
   482 dout->wb.wb_hratio;
   484 dout->wb.wb_vratio;
   486 (enum source_format_class) (dout->wb.wb_pixel_format);
   488 dout->wb.wb_htaps_luma;
   490 dout->wb.wb_vtaps_luma;
   492 dout->wb.wb_htaps_luma
      [all...]

/src/sys/kern/

vfs_wapbl.c
  2808 struct wapbl_blk *wb;    local in function: wapbl_blkhash_get
  2811 LIST_FOREACH(wb, wbh, wb_hash) {
  2812 if (blk == wb->wb_blk)
  2813 return wb;
  2822 struct wapbl_blk *wb;    local in function: wapbl_blkhash_ins
  2824 wb = wapbl_blkhash_get(wr, blk);
  2825 if (wb) {
  2826 KASSERT(wb->wb_blk == blk);
  2827 wb->wb_off = off;
  2829 wb = wapbl_alloc(sizeof(*wb))
  2841 struct wapbl_blk *wb = wapbl_blkhash_get(wr, blk);    local in function: wapbl_blkhash_rem
  2857 struct wapbl_blk *wb;    local in function: wapbl_blkhash_clear
  3391 struct wapbl_blk *wb;    local in function: wapbl_replay_write
  3428 struct wapbl_blk *wb = wapbl_blkhash_get(wr, blk);    local in function: wapbl_replay_can_read
  3446 struct wapbl_blk *wb = wapbl_blkhash_get(wr, blk);    local in function: wapbl_replay_read
      [all...]

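The vfs_wapbl.c hits come from WAPBL's replay block hash: entries keyed by disk block number live in per-bucket LISTs, wapbl_blkhash_get() walks a bucket comparing wb_blk, and wapbl_blkhash_ins() either updates the log offset of an existing entry (a later copy in the log wins) or allocates and links a new one. The sketch below reproduces that lookup/insert shape in userland with the same <sys/queue.h> macros; blk_entry, HASH_BUCKETS and the helper names are invented.

#include <sys/queue.h>
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#define HASH_BUCKETS 64

struct blk_entry {
	LIST_ENTRY(blk_entry)	be_hash;
	int64_t			be_blk;	/* disk block number               */
	int64_t			be_off;	/* offset of the data in the log   */
};

static LIST_HEAD(, blk_entry) buckets[HASH_BUCKETS];

static struct blk_entry *
blkhash_get(int64_t blk)
{
	struct blk_entry *be;

	LIST_FOREACH(be, &buckets[blk % HASH_BUCKETS], be_hash) {
		if (be->be_blk == blk)
			return be;
	}
	return NULL;
}

static void
blkhash_ins(int64_t blk, int64_t off)
{
	struct blk_entry *be = blkhash_get(blk);

	if (be != NULL) {
		be->be_off = off;	/* later log copy wins */
		return;
	}
	be = malloc(sizeof(*be));
	be->be_blk = blk;
	be->be_off = off;
	LIST_INSERT_HEAD(&buckets[blk % HASH_BUCKETS], be, be_hash);
}

int
main(void)
{
	blkhash_ins(1234, 8192);
	blkhash_ins(1234, 16384);	/* overwrites the offset */
	printf("blk 1234 -> off %lld\n", (long long)blkhash_get(1234)->be_off);
	return 0;
}
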
/src/sys/arch/amiga/amiga/

trap.c
   726 _write_back (u_int wb, u_int wb_sts, u_int wb_data, u_int wb_addr, struct vm_map *wb_map)
   727 /* wb: writeback type: 1, 2, or 3 */
   738 printf("wb%d valid: %x %x %x\n",wb,wb_sts,wb_addr,wb_data);
   756 if (wb == 3) {
   793 printf("wb%d: probeva %x %x = %x\n",
   794 wb, wb_addr + wb_extra_page,
   801 printf("wb%d: page boundary crossed."
   802 " Bringing in extra page.\n",wb);
   817 printf("wb%d: extra page brought in okay.\n", wb)
      [all...]

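The amiga trap.c hits are the MC68040 writeback completion path: after a fault the handler must finish a pending store, and if that store straddles a page boundary it first faults in the page containing the last byte ("Bringing in extra page"). A small sketch of just the boundary test follows; PAGE_SIZE, trunc_page() and need_extra_page() are illustrative stand-ins, not the kernel's definitions.

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE	4096u
#define trunc_page(va)	((va) & ~(PAGE_SIZE - 1))

/* Returns 1 when a write of 'len' bytes at 'addr' touches a second page. */
static int
need_extra_page(uint32_t addr, uint32_t len)
{
	return trunc_page(addr) != trunc_page(addr + len - 1);
}

int
main(void)
{
	/* A 4-byte writeback whose last two bytes land in the next page. */
	uint32_t addr = 0x1ffe, len = 4;

	if (need_extra_page(addr, len))
		printf("wb: page boundary crossed, bringing in extra page\n");
	return 0;
}
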
/src/sys/dev/pci/ixgbe/

ixgbe_netmap.c
   427 uint32_t staterr = le32toh(curr->wb.upper.status_error);
   431 ring->slot[nm_i].len = le16toh(curr->wb.upper.length) - crclen;
   477 curr->wb.upper.status_error = 0;

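The ixgbe_netmap.c hits show the receive-completion side of a writeback descriptor: the status and length words arrive little-endian, so the driver converts them with le32toh()/le16toh(), subtracts the CRC bytes the NIC leaves in the length count, and zeroes status_error before the slot is reused. The sketch below follows that flow under simplified assumptions; the struct layout, RXD_STAT_DD and CRC_LEN are placeholders, not the real ixgbe definitions.

#include <stdint.h>
#include <stdio.h>
#include <endian.h>		/* le32toh()/le16toh(); <sys/endian.h> on NetBSD */

#define RXD_STAT_DD	0x01	/* descriptor done                         */
#define CRC_LEN		4	/* Ethernet FCS included in the length count */

struct toy_rx_wb {
	uint32_t status_error;	/* little-endian, as written by the NIC */
	uint16_t length;	/* little-endian, includes CRC          */
};

/* Returns the payload length, or -1 if the descriptor is not done yet. */
static int
rx_complete(struct toy_rx_wb *curr, int strip_crc)
{
	uint32_t staterr = le32toh(curr->status_error);

	if ((staterr & RXD_STAT_DD) == 0)
		return -1;

	int len = le16toh(curr->length) - (strip_crc ? CRC_LEN : 0);

	curr->status_error = 0;	/* make the slot reusable */
	return len;
}

int
main(void)
{
	struct toy_rx_wb d = {
		.status_error = htole32(RXD_STAT_DD),
		.length = htole16(64),
	};

	printf("payload length = %d\n", rx_complete(&d, 1));
	return 0;
}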