/src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/
amdgpu_fence.c
    110  if (drv->cpu_addr)
    111      *drv->cpu_addr = cpu_to_le32(seq);
    127  if (drv->cpu_addr)
    128      seq = le32_to_cpu(*drv->cpu_addr);
    409  ring->fence_drv.cpu_addr = &adev->wb.wb[ring->fence_offs];
    414  ring->fence_drv.cpu_addr = adev->uvd.inst[ring->me].cpu_addr + index;
    426  ring->fence_drv.gpu_addr, ring->fence_drv.cpu_addr);
    454  ring->fence_drv.cpu_addr = NULL;
    734  le32_to_cpu(*(ring->fence_drv.cpu_addr + 2)))
    [all...]
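The fence driver treats cpu_addr as a CPU-visible view of a little-endian 32-bit seqno slot that the GPU also writes, and guards every access with a NULL check because the mapping is torn down (line 454). A minimal userspace sketch of that pattern; fence_write/fence_read and struct fence_driver are illustrative names, and a little-endian host is assumed so the byte-order stand-ins can be identity functions:

/*
 * Sketch of the seqno read/write pattern above.  Assumes a
 * little-endian host; names are illustrative, not the driver's own.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t cpu_to_le32(uint32_t v) { return v; }  /* LE host assumed */
static uint32_t le32_to_cpu(uint32_t v) { return v; }

struct fence_driver {
    volatile uint32_t *cpu_addr;   /* CPU view of the GPU-written slot */
    uint32_t last_seq;             /* fallback when the slot is unmapped */
};

static void fence_write(struct fence_driver *drv, uint32_t seq)
{
    if (drv->cpu_addr)             /* mapping may be absent, cf. line 454 */
        *drv->cpu_addr = cpu_to_le32(seq);
}

static uint32_t fence_read(struct fence_driver *drv)
{
    if (drv->cpu_addr)
        return le32_to_cpu(*drv->cpu_addr);
    return drv->last_seq;
}

int main(void)
{
    uint32_t slot = 0;
    struct fence_driver drv = { .cpu_addr = &slot, .last_seq = 0 };

    fence_write(&drv, 42);
    printf("seqno = %u\n", (unsigned)fence_read(&drv));
    return 0;
}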
amdgpu_object.h
    243  u64 *gpu_addr, void **cpu_addr);
    247  u64 *gpu_addr, void **cpu_addr);
    250  struct amdgpu_bo **bo_ptr, void **cpu_addr);
    252  void **cpu_addr);
amdgpu_uvd.h
    44  void *cpu_addr;    (member in struct amdgpu_uvd_inst)
amdgpu_vce.h
    40  void *cpu_addr;    (member in struct amdgpu_vce)
amdgpu_virt.c
    193  (void *)&adev->virt.mm_table.cpu_addr);
    199  memset((void *)adev->virt.mm_table.cpu_addr, 0, PAGE_SIZE);
    202  adev->virt.mm_table.cpu_addr);
    218  (void *)&adev->virt.mm_table.cpu_addr);
amdgpu_object.c
    228  * @cpu_addr: optional CPU address mapping
    241  u64 *gpu_addr, void **cpu_addr)
    256  bp.flags = cpu_addr ? AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED
    293  if (cpu_addr) {
    294      r = amdgpu_bo_kmap(*bo_ptr, cpu_addr);
    324  * @cpu_addr: optional CPU address mapping
    336  u64 *gpu_addr, void **cpu_addr)
    341  gpu_addr, cpu_addr);
    360  * @cpu_addr: optional CPU address mapping
    369  struct amdgpu_bo **bo_ptr, void **cpu_addr)
    [all...]
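Here cpu_addr is an optional out-parameter: passing NULL skips the kernel mapping, while a non-NULL pointer both requests CPU-accessible placement (line 256) and receives the kmap result (line 294). A self-contained userspace sketch of that convention; buf_create()/buf_free() are hypothetical stand-ins, with malloc playing the role of the buffer object:

/*
 * Sketch of the optional-cpu_addr out-parameter convention.
 * buf_create()/buf_free() are illustrative, not the driver API.
 */
#include <stdint.h>
#include <stdlib.h>

static int buf_create(size_t size, uint64_t *gpu_addr, void **cpu_addr)
{
    void *bo = malloc(size);                  /* stands in for the BO */
    if (bo == NULL)
        return -1;
    *gpu_addr = (uint64_t)(uintptr_t)bo;      /* fake GPU address */
    if (cpu_addr != NULL)                     /* map only on request */
        *cpu_addr = bo;
    return 0;
}

static void buf_free(uint64_t *gpu_addr, void **cpu_addr)
{
    free((void *)(uintptr_t)*gpu_addr);
    *gpu_addr = 0;
    if (cpu_addr != NULL)                     /* clear the caller's view */
        *cpu_addr = NULL;
}

int main(void)
{
    uint64_t gpu;
    void *cpu;

    if (buf_create(4096, &gpu, &cpu) == 0)    /* with a CPU mapping */
        buf_free(&gpu, &cpu);
    if (buf_create(4096, &gpu, NULL) == 0)    /* GPU-only, no mapping */
        buf_free(&gpu, NULL);
    return 0;
}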
amdgpu_uvd.c
    268  &adev->uvd.inst[j].gpu_addr, &adev->uvd.inst[j].cpu_addr);
    318  (void **)&adev->uvd.inst[j].cpu_addr);
    380  ptr = adev->uvd.inst[j].cpu_addr;
    412  ptr = adev->uvd.inst[i].cpu_addr;
    425  memcpy_toio(adev->uvd.inst[i].cpu_addr, adev->uvd.fw->data + offset,
amdgpu_vce.c
    195  &adev->vce.gpu_addr, &adev->vce.cpu_addr);
    230  (void **)&adev->vce.cpu_addr);
    299  void *cpu_addr;    (local in function amdgpu_vce_resume)
    313  r = amdgpu_bo_kmap(adev->vce.vcpu_bo, &cpu_addr);
    322  memcpy_toio(cpu_addr, adev->vce.fw->data + offset,
amdgpu_vcn.c
    171  &adev->vcn.inst[i].gpu_addr, &adev->vcn.inst[i].cpu_addr);
    209  (void **)&adev->vcn.inst[j].cpu_addr);
    237  ptr = adev->vcn.inst[i].cpu_addr;
    261  ptr = adev->vcn.inst[i].cpu_addr;
    274  memcpy_toio(adev->vcn.inst[i].cpu_addr, adev->vcn.fw->data + offset,
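The UVD, VCE, and VCN entries above share one upload idiom: kmap the VCPU buffer object, skip the ucode container header, and memcpy_toio() only the firmware payload into the mapping. A userspace sketch of the offset arithmetic; struct fw_header and its fields are illustrative (the real drivers read the offset from the ucode header in fw->data), and plain memcpy stands in for memcpy_toio:

/*
 * Sketch of the firmware-upload idiom: copy the payload, not the
 * container header.  struct fw_header is illustrative.
 */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

struct fw_header {
    uint32_t payload_offset;   /* bytes from file start to payload */
    uint32_t payload_size;
};

static void upload_fw(void *cpu_addr, const uint8_t *fw_data)
{
    const struct fw_header *hdr = (const struct fw_header *)fw_data;

    /* in-kernel this is memcpy_toio(), since cpu_addr may map
     * write-combined VRAM rather than ordinary RAM */
    memcpy(cpu_addr, fw_data + hdr->payload_offset, hdr->payload_size);
}

int main(void)
{
    uint8_t file[64] = { 0 };
    struct fw_header hdr = { .payload_offset = 16, .payload_size = 4 };

    memcpy(file, &hdr, sizeof(hdr));          /* fake container header */
    memcpy(file + 16, "\xde\xad\xbe\xef", 4); /* fake payload */

    uint8_t vcpu_bo[4] = { 0 };               /* stands in for the kmap */
    upload_fw(vcpu_bo, file);
    printf("%02x%02x%02x%02x\n", vcpu_bo[0], vcpu_bo[1], vcpu_bo[2], vcpu_bo[3]);
    return 0;
}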
amdgpu_vce_v4_0.c
    164  struct mmsch_v1_0_init_header *header = (struct mmsch_v1_0_init_header *)table->cpu_addr;
    220  uint32_t *init_table = adev->virt.mm_table.cpu_addr;
    575  void *ptr = adev->vce.cpu_addr;
    597  void *ptr = adev->vce.cpu_addr;
amdgpu_vcn.h
    176  void *cpu_addr;    (member in struct amdgpu_vcn_inst)
amdgpu_virt.h
    37  uint32_t *cpu_addr;    (member in struct amdgpu_mm_table)
amdgpu_ring.h
    77  volatile uint32_t *cpu_addr;    (member in struct amdgpu_fence_driver)
amdgpu_uvd_v7_0.c
    720  struct mmsch_v1_0_init_header *header = (struct mmsch_v1_0_init_header *)table->cpu_addr;
    780  uint32_t *init_table = adev->virt.mm_table.cpu_addr;
amdgpu_vcn_v2_5.c
    1081  header = (struct mmsch_v1_1_init_header *)table->cpu_addr;
    1139  uint32_t *init_table = adev->virt.mm_table.cpu_addr;
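Under SR-IOV, the VCE/UVD/VCN code above builds an MMSCH init table in the shared mm_table buffer: a header struct is overlaid at the start of cpu_addr and register/value pairs are appended as 32-bit words behind it. A compilable sketch of that overlay-plus-append layout; struct init_header, its fields, and mmsch_append() are illustrative, not the driver's actual MMSCH format:

/*
 * Sketch of the header-overlay-plus-append layout.  Field names and
 * sizing are illustrative; no bounds checking for brevity.
 */
#include <stdint.h>
#include <stdio.h>

struct init_header {
    uint32_t version;
    uint32_t total_size;        /* in 32-bit words, header included */
};

#define TABLE_WORDS 1024

static uint32_t table_buf[TABLE_WORDS];  /* stands in for mm_table.cpu_addr */

static void mmsch_append(uint32_t reg, uint32_t value)
{
    struct init_header *header = (struct init_header *)table_buf;
    uint32_t *init_table = table_buf;    /* same buffer, word view */

    init_table[header->total_size++] = reg;
    init_table[header->total_size++] = value;
}

int main(void)
{
    struct init_header *header = (struct init_header *)table_buf;

    header->version = 1;
    header->total_size = sizeof(*header) / sizeof(uint32_t);

    mmsch_append(0x100, 0xdeadbeef);     /* hypothetical reg/value pair */
    printf("table is %u words\n", (unsigned)header->total_size);
    return 0;
}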
/src/sys/external/bsd/drm2/dist/drm/amd/display/dmub/inc/
dmub_srv.h
    134  * @cpu_addr: cpu virtual address for the region, NULL if invalid
    139  void *cpu_addr;    (member in struct dmub_fb)
    182  * @cpu_addr: base cpu address for the framebuffer
    187  void *cpu_addr;    (member in struct dmub_srv_fb_params)
/src/sys/external/bsd/drm2/dist/drm/radeon/
radeon_fence.c
    79   if (drv->cpu_addr) {
    80       *drv->cpu_addr = cpu_to_le32(seq);
    102  if (drv->cpu_addr) {
    103      seq = le32_to_cpu(*drv->cpu_addr);
    895  rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];
    902  rdev->fence_drv[ring].cpu_addr = (uint32_t *)((uint8_t *)rdev->uvd.cpu_addr + index);
    915  rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];
    921  ring, rdev->fence_drv[ring].gpu_addr, rdev->fence_drv[ring].cpu_addr);
    940  rdev->fence_drv[ring].cpu_addr = NULL
    [all...]
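Lines 895 and 902 show two addressing idioms for the same thing: index is a byte offset, so indexing the u32 writeback array needs index/4, while byte-pointer arithmetic adds the offset directly and casts back. A tiny sketch demonstrating that both forms name the same slot (variable names are illustrative):

/* Sketch: byte-offset vs. word-index addressing land on one slot. */
#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint32_t wb[16] = { 0 };           /* stands in for rdev->wb.wb */
    unsigned index = 8;                /* byte offset into the page */

    /* word-array indexing: convert the byte offset to a u32 slot */
    uint32_t *a = &wb[index / 4];

    /* byte-pointer arithmetic: add the offset, then cast back */
    uint32_t *b = (uint32_t *)((uint8_t *)wb + index);

    assert(a == b);                    /* both name wb[2] */
    return 0;
}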
radeon_uvd.c
    218  r = radeon_bo_kmap(rdev->uvd.vcpu_bo, &rdev->uvd.cpu_addr);
    296  memcpy(rdev->uvd.cpu_addr, rdev->uvd_fw->data, rdev->uvd_fw->size);
    301  ptr = rdev->uvd.cpu_addr;
    791  uint32_t *msg = rdev->uvd.cpu_addr + offs;
    827  uint32_t *msg = rdev->uvd.cpu_addr + offs;
radeon_vce.c
    280  void *cpu_addr;    (local in function radeon_vce_resume)
    292  r = radeon_bo_kmap(rdev->vce.vcpu_bo, &cpu_addr);
    299  memset(cpu_addr, 0, radeon_bo_size(rdev->vce.vcpu_bo));
    301  r = vce_v1_0_load_fw(rdev, cpu_addr);
    303  memcpy(cpu_addr, rdev->vce_fw->data, rdev->vce_fw->size);
radeon_uvd_v1_0.c
    150  WREG32(UVD_FW_START, *((uint32_t*)rdev->uvd.cpu_addr));
/src/sys/external/bsd/drm2/dist/drm/amd/display/dmub/src/
amdgpu_dmub_srv.c
    77   const uint8_t *base = (const uint8_t *)fb->cpu_addr;
    282  cpu_base = (uint8_t *)params->cpu_addr;
    289  out->fb[i].cpu_addr = cpu_base + reg->base;
    399  dmub->fw_state = fw_state_fb->cpu_addr;
    412  rb_params.base_address = mail_fb->cpu_addr;
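dmub carves one large framebuffer allocation into per-window regions: each window's cpu_addr is just the base CPU pointer plus that window's byte offset (line 289). A sketch of the carve under assumed names (struct region {base, top}, WINDOWS, carve()), analogous to dmub's window regions but not its actual layout:

/* Sketch: carving one allocation into sub-regions by pointer offset. */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#define WINDOWS 3

struct region { uint32_t base; uint32_t top; };   /* byte offsets */
struct fb     { void *cpu_addr; uint32_t size; };

static void carve(void *cpu_base_v, const struct region *reg, struct fb *out)
{
    uint8_t *cpu_base = (uint8_t *)cpu_base_v;

    for (int i = 0; i < WINDOWS; i++) {
        out[i].cpu_addr = cpu_base + reg[i].base; /* offset into the pool */
        out[i].size     = reg[i].top - reg[i].base;
    }
}

int main(void)
{
    struct region regs[WINDOWS] = { {0, 256}, {256, 1024}, {1024, 4096} };
    struct fb     fbs[WINDOWS];
    void *pool = malloc(4096);                    /* one big allocation */

    if (pool == NULL)
        return 1;
    carve(pool, regs, fbs);
    for (int i = 0; i < WINDOWS; i++)
        printf("window %d: %p (+%u bytes)\n", i, fbs[i].cpu_addr,
               (unsigned)fbs[i].size);
    free(pool);
    return 0;
}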
/src/sys/external/bsd/drm2/dist/drm/amd/display/amdgpu_dm/
amdgpu_dm.h
    84  * @cpu_addr: MMIO cpu addr
    89  void *cpu_addr;    (member in struct dm_comressor_info)
amdgpu_dm.c
    609   &compressor->gpu_addr, &compressor->cpu_addr);
    819   memcpy(fb_info->fb[DMUB_WINDOW_0_INST_CONST].cpu_addr, fw_inst_const,
    821   memcpy(fb_info->fb[DMUB_WINDOW_2_BSS_DATA].cpu_addr, fw_bss_data,
    823   memcpy(fb_info->fb[DMUB_WINDOW_3_VBIOS].cpu_addr, adev->bios,
    827   memset(fb_info->fb[DMUB_WINDOW_4_MAILBOX].cpu_addr, 0,
    830   memset(fb_info->fb[DMUB_WINDOW_5_TRACEBUFF].cpu_addr, 0,
    833   memset(fb_info->fb[DMUB_WINDOW_6_FW_STATE].cpu_addr, 0,
    1281  fb_params.cpu_addr = adev->dm.dmub_bo_cpu_addr;
/src/sys/external/bsd/drm2/dist/drm/amd/powerplay/
amdgpu_smu.c
    541   memcpy(table->cpu_addr, table_data, table_size);
    558   memcpy(table_data, table->cpu_addr, table_size);
    1003  &tables[SMU_TABLE_PMSTATUSLOG].cpu_addr);
    1032  &driver_table->cpu_addr);
    1038  &tables[SMU_TABLE_PMSTATUSLOG].cpu_addr);
    1056  &tables[SMU_TABLE_PMSTATUSLOG].cpu_addr);
    1060  &driver_table->cpu_addr);
    1269  &memory_pool->cpu_addr);
    1288  &memory_pool->cpu_addr);
amdgpu_smu_v11_0.c
    672  if (memory_pool->size == 0 || memory_pool->cpu_addr == NULL)
    675  address = (uintptr_t)memory_pool->cpu_addr;
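Line 675 converts the pool's CPU pointer to an integer so it can be split into 32-bit halves, each sent in its own SMU message argument. A sketch of that split; the hi32()/lo32() helpers here are local stand-ins for the kernel's upper_32_bits()/lower_32_bits():

/* Sketch: pointer-to-integer conversion and 32-bit hi/lo split. */
#include <stdint.h>
#include <stdio.h>

static uint32_t hi32(uint64_t v) { return (uint32_t)(v >> 32); }
static uint32_t lo32(uint64_t v) { return (uint32_t)v; }

int main(void)
{
    uint32_t pool[4];                          /* stands in for cpu_addr */
    uint64_t address = (uintptr_t)pool;        /* pointer -> integer */

    /* each half would go into its own 32-bit SMU message register */
    printf("hi=%#x lo=%#x\n", hi32(address), lo32(address));
    return 0;
}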