/src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/ |
amdgpu_test.c |
     92  void **gart_start, **gart_end;          local in function:amdgpu_do_test_moves
    124  for (gart_start = gtt_map, gart_end = gtt_map + size;
    125       gart_start < gart_end;
    126       gart_start++)
    127      *gart_start = gart_start;
    154  for (gart_start = gtt_map, gart_end = gtt_map + size,
    157       gart_start++, vram_start++) {
    158      if (*vram_start != gart_start) {
    162          i, *vram_start, gart_start,
    [all...] |
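The amdgpu_test.c hits come from the GTT<->VRAM move test: every pointer-sized slot of the GTT mapping is filled with its own GART address, the buffer is copied to VRAM, and each VRAM slot is then checked against the address it should have come from. Below is a minimal userland sketch of that self-describing pattern; gtt_map, vram_map, copy_gtt_to_vram() and TEST_SIZE are placeholders, not the driver's buffers or copy path.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define TEST_SIZE 1024                  /* bytes per test buffer (illustrative) */

/* stand-in for the GPU blit the real test submits */
static void copy_gtt_to_vram(void *dst, const void *src, size_t size)
{
        memcpy(dst, src, size);
}

int main(void)
{
        void **gtt_map = malloc(TEST_SIZE);
        void **vram_map = malloc(TEST_SIZE);
        void **gart_start, **gart_end, **vram_start;

        if (gtt_map == NULL || vram_map == NULL)
                return 1;

        /* fill GTT with a self-describing pattern: every slot holds its own address */
        for (gart_start = gtt_map, gart_end = gtt_map + TEST_SIZE / sizeof(void *);
             gart_start < gart_end;
             gart_start++)
                *gart_start = gart_start;

        copy_gtt_to_vram(vram_map, gtt_map, TEST_SIZE);

        /* verify: each VRAM slot must still name the GART slot it was copied from */
        for (gart_start = gtt_map, vram_start = vram_map;
             gart_start < gart_end;
             gart_start++, vram_start++) {
                if (*vram_start != (void *)gart_start) {
                        fprintf(stderr, "mismatch at %p: got %p\n",
                            (void *)gart_start, *vram_start);
                        return 1;
                }
        }

        free(gtt_map);
        free(vram_map);
        return 0;
}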
amdgpu_gmc.c |
    222  mc->gart_start = 0;
    224  mc->gart_start = max_mc_address - mc->gart_size + 1;
    226  mc->gart_start &= ~(four_gb - 1);
    227  mc->gart_end = mc->gart_start + mc->gart_size - 1;
    229  mc->gart_size >> 20, mc->gart_start, mc->gart_end);
    257  if (mc->fb_start > mc->gart_start) {
    263  size_af = (mc->gart_start & sixteen_gb_mask) -
|
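The amdgpu_gmc.c hits are the GART placement arithmetic: gart_start is either 0 or just below the top of the addressable MC range, it is aligned down to a 4 GiB boundary, and gart_end is the last byte of the aperture. The sketch below reproduces that arithmetic with illustrative types and a simplified placement decision; the driver's actual heuristic, and the later hits at lines 257/263, involve more cases than shown here.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* illustrative stand-in for the driver's MC/GMC bookkeeping */
struct mc_layout {
        uint64_t fb_start, fb_end;      /* VRAM as seen by the memory controller */
        uint64_t gart_size;             /* requested aperture size in bytes */
        uint64_t gart_start, gart_end;  /* filled in by place_gart() */
};

static void place_gart(struct mc_layout *mc, uint64_t max_mc_address)
{
        const uint64_t four_gb = 0x100000000ULL;
        uint64_t size_bf = mc->fb_start;                           /* gap below VRAM */
        uint64_t size_af = max_mc_address + 1 - (mc->fb_end + 1);  /* gap above VRAM */

        /* simplified decision: put the GART in the larger gap */
        if (size_bf >= mc->gart_size && size_bf >= size_af)
                mc->gart_start = 0;
        else
                mc->gart_start = max_mc_address - mc->gart_size + 1;

        /* keep the aperture 4 GiB aligned, then derive the last byte */
        mc->gart_start &= ~(four_gb - 1);
        mc->gart_end = mc->gart_start + mc->gart_size - 1;

        printf("GART: %" PRIu64 "M 0x%016" PRIX64 " - 0x%016" PRIX64 "\n",
            mc->gart_size >> 20, mc->gart_start, mc->gart_end);
}

int main(void)
{
        struct mc_layout mc = {
                .fb_start = 0,
                .fb_end = (8ULL << 30) - 1,     /* 8 GiB of VRAM at MC address 0 */
                .gart_size = 512ULL << 20,      /* 512 MiB GART */
        };

        place_gart(&mc, (1ULL << 44) - 1);      /* 44-bit MC address space */
        return 0;
}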
amdgpu_gfxhub_v1_0.c |
     64  (u32)(adev->gmc.gart_start >> 12));
     66  (u32)(adev->gmc.gart_start >> 44));
|
amdgpu_gfxhub_v2_0.c |
     75  (u32)(adev->gmc.gart_start >> 12));
     77  (u32)(adev->gmc.gart_start >> 44));
|
amdgpu_mmhub_v2_0.c |
     60  (u32)(adev->gmc.gart_start >> 12));
     62  (u32)(adev->gmc.gart_start >> 44));
|
amdgpu_gmc.h | 172 u64 gart_start; member in struct:amdgpu_gmc
|
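For context, the amdgpu_gmc.h hit is the member all of the other hits refer to: struct amdgpu_gmc describes the GART aperture with a size/start/end triple. The excerpt below is illustrative only (surrounding members omitted, fixed-width types spelled with <stdint.h> names).

#include <stdint.h>

/* illustrative excerpt of the GART-related fields, not the full struct */
struct amdgpu_gmc_excerpt {
        uint64_t gart_size;     /* aperture size in bytes */
        uint64_t gart_start;    /* first MC address of the aperture */
        uint64_t gart_end;      /* last MC address of the aperture */
};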
amdgpu_mmhub_v1_0.c |
     80  (u32)(adev->gmc.gart_start >> 12));
     82  (u32)(adev->gmc.gart_start >> 44));
|
amdgpu_mmhub_v9_4.c |
     93  (u32)(adev->gmc.gart_start >> 12));
     97  (u32)(adev->gmc.gart_start >> 44));
|
amdgpu_gmc_v6_0.c | 530 WREG32(mmVM_CONTEXT0_PAGE_TABLE_START_ADDR, adev->gmc.gart_start >> 12);
|
amdgpu_ttm.c |
    112  man->gpu_offset = adev->gmc.gart_start;
   2192  *addr = adev->gmc.gart_start;
|
amdgpu_gmc_v7_0.c | 667 WREG32(mmVM_CONTEXT0_PAGE_TABLE_START_ADDR, adev->gmc.gart_start >> 12);
|
amdgpu_gmc_v8_0.c | 904 WREG32(mmVM_CONTEXT0_PAGE_TABLE_START_ADDR, adev->gmc.gart_start >> 12);
|
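The register hits share one pattern: the page-table start address handed to the hardware is gart_start expressed as a 4 KiB page number. The newer gfxhub/mmhub files split it across a 32-bit register pair (>> 12 for the low half, >> 44 for the high half), while gmc_v6/v7/v8 write a single 32-bit register with only the >> 12 shift. A standalone sketch of both variants follows; write_reg32() stands in for the driver's WREG32-style accessors, and the _LO32/_HI32 register names are illustrative.

#include <stdint.h>
#include <stdio.h>

/* placeholder for the driver's register write macros */
static void write_reg32(const char *name, uint32_t val)
{
        printf("%-42s <- 0x%08x\n", name, val);
}

/* newer hubs: the start address is split across a LO32/HI32 pair */
static void program_page_table_start_split(uint64_t gart_start)
{
        write_reg32("VM_CONTEXT0_PAGE_TABLE_START_ADDR_LO32",
            (uint32_t)(gart_start >> 12));      /* bits 12..43 of the address */
        write_reg32("VM_CONTEXT0_PAGE_TABLE_START_ADDR_HI32",
            (uint32_t)(gart_start >> 44));      /* bits 44 and up */
}

/* gmc v6/v7/v8: the whole start fits one 32-bit register after the shift */
static void program_page_table_start_legacy(uint64_t gart_start)
{
        write_reg32("VM_CONTEXT0_PAGE_TABLE_START_ADDR",
            (uint32_t)(gart_start >> 12));
}

int main(void)
{
        uint64_t gart_start = 0xFFFF00000000ULL;        /* example aperture start */

        program_page_table_start_split(gart_start);
        program_page_table_start_legacy(gart_start);
        return 0;
}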
/src/sys/external/bsd/drm/dist/shared-core/ |
radeon_drv.h |
    435  u64 gart_start = dev_priv->gart_vm_start;    local in function:radeon_check_offset
    436  u64 gart_end = gart_start + dev_priv->gart_size - 1;
    439  (off >= gart_start && off <= gart_end));
|
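The last hits sit in radeon_check_offset() in the old shared-core radeon driver: gart_start/gart_end are derived locally from dev_priv, and an offset is accepted only if it falls inside the framebuffer aperture or inside the GART aperture. A self-contained sketch of that range check, with illustrative struct and field names standing in for the driver's private state.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* illustrative stand-in for the state radeon_check_offset() reads from dev_priv */
struct drv_apertures {
        uint64_t fb_location, fb_size;          /* framebuffer aperture */
        uint64_t gart_vm_start, gart_size;      /* GART aperture */
};

static bool check_offset(const struct drv_apertures *p, uint64_t off)
{
        uint64_t fb_start = p->fb_location;
        uint64_t fb_end = fb_start + p->fb_size - 1;
        uint64_t gart_start = p->gart_vm_start;
        uint64_t gart_end = gart_start + p->gart_size - 1;

        /* valid only inside one of the two apertures */
        return (off >= fb_start && off <= fb_end) ||
               (off >= gart_start && off <= gart_end);
}

int main(void)
{
        struct drv_apertures ap = {
                .fb_location = 0xD0000000, .fb_size = 256 << 20,
                .gart_vm_start = 0xE0000000, .gart_size = 64 << 20,
        };

        printf("0xD0001000 ok? %d\n", check_offset(&ap, 0xD0001000));
        printf("0xF0000000 ok? %d\n", check_offset(&ap, 0xF0000000));
        return 0;
}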