/src/sys/external/bsd/drm2/dist/drm/i915/
i915_gem_fence_reg.h
    43  struct i915_ggtt *ggtt;    member in struct:i915_fence_reg
    59  struct i915_fence_reg *i915_reserve_fence(struct i915_ggtt *ggtt);
    62  void i915_gem_restore_fences(struct i915_ggtt *ggtt);
    69  void i915_ggtt_init_fences(struct i915_ggtt *ggtt);
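Taken together, the declarations above describe the fence-register API the GGTT exposes. A minimal usage sketch, assuming only the prototypes listed here plus i915_unreserve_fence() (whose definition shows up in i915_gem_fence_reg.c further down); error handling and the caller's context are simplified:

    /* Hedged sketch: reserve a fence register from the GGTT and release it. */
    static int example_hold_fence(struct i915_ggtt *ggtt)
    {
            struct i915_fence_reg *fence;

            fence = i915_reserve_fence(ggtt);   /* ERR_PTR() on failure (assumed) */
            if (IS_ERR(fence))
                    return PTR_ERR(fence);

            /* ... use the fence for tiled CPU access through the aperture ... */

            i915_unreserve_fence(fence);        /* counterpart, assumed from the .c file */
            return 0;
    }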
intel_region_lmem.c
    20  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:init_fake_lmem_bar
    30  ret = drm_mm_reserve_node(&ggtt->vm.mm, &mem->fake_mappable);
    45  ggtt->vm.insert_page(&ggtt->vm,
   118  GEM_BUG_ON(i915_ggtt_has_aperture(&i915->ggtt));
i915_gem_gtt.c
    87  struct i915_ggtt *ggtt = &dev_priv->ggtt;    local in function:i915_gem_gtt_finish_pages
    89  if (unlikely(ggtt->do_idle_maps)) {
    91  if (intel_gt_retire_requests_timeout(ggtt->vm.gt,
   138  GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm);
   236  GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm);
   283  * mmap and GPU (i.e. ggtt/aliasing_ppgtt but not full-ppgtt):
i915_gem_fence_reg.c
    70  return fence->ggtt->vm.i915;
    75  return fence->ggtt->vm.gt->uncore;
   236  struct i915_ggtt *ggtt = fence->ggtt;    local in function:fence_update
   279  list_move(&fence->link, &ggtt->fence_list);
   303  list_move_tail(&fence->link, &ggtt->fence_list);
   335  static struct i915_fence_reg *fence_find(struct i915_ggtt *ggtt)
   339  list_for_each_entry(fence, &ggtt->fence_list, link) {
   349  if (intel_has_pending_fb_unpin(ggtt->vm.i915))
   357  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm)    local in function:__i915_vma_pin_fence
   488  struct i915_ggtt *ggtt = fence->ggtt;    local in function:i915_unreserve_fence
   [all...]
i915_gem.c
    73  insert_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node, u32 size)
    77  err = mutex_lock_interruptible(&ggtt->vm.mutex);
    82  err = drm_mm_insert_node_in_range(&ggtt->vm.mm, node,
    84  0, ggtt->mappable_end,
    87  mutex_unlock(&ggtt->vm.mutex);
    93  remove_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node)
    95  mutex_lock(&ggtt->vm.mutex);
    97  mutex_unlock(&ggtt->vm.mutex);
   104  struct i915_ggtt *ggtt = &to_i915(dev)->ggtt;    local in function:i915_gem_get_aperture_ioctl
   424  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:i915_gem_gtt_pread
   611  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:i915_gem_gtt_pwrite_fast
   979  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:i915_gem_object_ggtt_pin
   [all...]
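The insert_mappable_node()/remove_mappable_node() hits show the pread/pwrite slow paths carving a temporary node out of the mappable half of the GGTT under ggtt->vm.mutex. A rough reconstruction of that pattern; the alignment, colour and insertion-mode arguments are assumptions, not taken from the listing:

    /* Hedged sketch of the insert_mappable_node() pattern above. */
    static int example_insert_mappable(struct i915_ggtt *ggtt,
                                       struct drm_mm_node *node, u32 size)
    {
            int err;

            err = mutex_lock_interruptible(&ggtt->vm.mutex);
            if (err)
                    return err;

            err = drm_mm_insert_node_in_range(&ggtt->vm.mm, node,
                                              size, 0,                  /* alignment: assumed */
                                              I915_COLOR_UNEVICTABLE,   /* colour: assumed */
                                              0, ggtt->mappable_end,
                                              DRM_MM_INSERT_LOW);       /* mode: assumed */

            mutex_unlock(&ggtt->vm.mutex);
            return err;
    }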
i915_gpu_error.c
   981  struct i915_ggtt *ggtt = gt->ggtt;    local in function:i915_vma_coredump_create
   982  const u64 slot = ggtt->error_capture.start;
  1015  if (drm_mm_node_allocated(&ggtt->error_capture)) {
  1020  ggtt->vm.insert_page(&ggtt->vm, dma, slot,
  1024  s = io_mapping_map_wc(&ggtt->iomap, slot, PAGE_SIZE);
  1091  struct i915_ggtt *ggtt = gt->_gt->ggtt;    local in function:gt_record_fences
  1096  for (i = 0; i < ggtt->num_fences; i++
  1535  struct i915_ggtt *ggtt = gt->_gt->ggtt;    local in function:gt_capture_prepare
  1542  struct i915_ggtt *ggtt = gt->_gt->ggtt;    local in function:gt_capture_finish
  [all...]
i915_drv.h
  1083  struct i915_ggtt ggtt; /* VM representing the global address space */    member in struct:drm_i915_private
  1665  * The Gen7 cmdparser copies the scanned buffer to the ggtt for execution
  1988  return i915->ggtt.bit_6_swizzle_x == I915_BIT_6_SWIZZLE_9_10_17 &&
/src/sys/external/bsd/drm2/dist/drm/i915/gem/
i915_gem_tiling.c
   191  struct i915_ggtt *ggtt = &to_i915(obj->base.dev)->ggtt;    local in function:i915_gem_object_fence_prepare
   198  mutex_lock(&ggtt->vm.mutex);
   207  mutex_unlock(&ggtt->vm.mutex);
   332  if (!dev_priv->ggtt.num_fences)
   358  args->swizzle_mode = to_i915(dev)->ggtt.bit_6_swizzle_x;
   360  args->swizzle_mode = to_i915(dev)->ggtt.bit_6_swizzle_y;
   415  if (!dev_priv->ggtt.num_fences)
   431  args->swizzle_mode = dev_priv->ggtt.bit_6_swizzle_x;
   434  args->swizzle_mode = dev_priv->ggtt.bit_6_swizzle_y
   [all...]
i915_gem_stolen.c
    77  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:i915_adjust_stolen
    78  struct intel_uncore *uncore = ggtt->vm.gt->uncore;
   105  ggtt_total_entries(ggtt) * 4);
   770  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:i915_gem_object_create_stolen_for_preallocated
   819  vma = i915_vma_instance(obj, &ggtt->vm, NULL);
   830  mutex_lock(&ggtt->vm.mutex);
   831  ret = i915_gem_gtt_reserve(&ggtt->vm, &vma->node,
   836  mutex_unlock(&ggtt->vm.mutex)
   [all...]
i915_gem_mman.c
   419  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:vm_fault_gtt
   455  ret = intel_gt_reset_trylock(ggtt->vm.gt, &srcu);
   486  /* The entire mappable GGTT is pinned? Unexpected! */
   515  paddr = ggtt->gmadr.start + vma->node.start
   528  (ggtt->gmadr.start + vma->node.start) >> PAGE_SHIFT,
   530  &ggtt->iomap);
   538  mutex_lock(&i915->ggtt.vm.mutex);
   540  list_add(&obj->userfault_link, &i915->ggtt.userfault_list);
   541  mutex_unlock(&i915->ggtt.vm.mutex)
   [all...]
i915_gem_execbuffer.c
   924  return &i915->ggtt;
   959  struct i915_ggtt *ggtt = cache_to_ggtt(cache);    local in function:reloc_cache_reset
   961  intel_gt_flush_ggtt_writes(ggtt->vm.gt);
   963  io_mapping_unmap_atomic(&ggtt->iomap, vaddr);
   969  ggtt->vm.clear_range(&ggtt->vm,
   972  mutex_lock(&ggtt->vm.mutex);
   974  mutex_unlock(&ggtt->vm.mutex);
  1020  struct i915_ggtt *ggtt = cache_to_ggtt(cache);    local in function:reloc_iomap
  1025  intel_gt_flush_ggtt_writes(ggtt->vm.gt)
  [all...]
/src/sys/external/bsd/drm2/dist/drm/i915/selftests/
i915_gem.c
    48  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:trash_stolen
    49  const u64 slot = ggtt->error_capture.start;
    55  if (!i915_ggtt_has_aperture(ggtt))
    63  ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0);
    65  s = io_mapping_map_atomic_wc(&ggtt->iomap, slot);
    73  ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE);
   133  i915_gem_restore_fences(&i915->ggtt);
   [all...]
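trash_stolen() illustrates the standard idiom for touching memory through the mappable aperture: point one GGTT PTE at a page, map that slot through the io_mapping, then scrub the PTE again. A condensed sketch of the same steps; the dma address and the memset are placeholders for the selftest's own loop:

    /* Hedged sketch of the insert_page / io_mapping / clear_range idiom above. */
    static void example_poke_aperture(struct i915_ggtt *ggtt, dma_addr_t dma)
    {
            const u64 slot = ggtt->error_capture.start;  /* borrowed scratch PTE */
            void __iomem *s;

            ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0);

            s = io_mapping_map_atomic_wc(&ggtt->iomap, slot);
            memset_io(s, 0, PAGE_SIZE);                  /* placeholder write */
            io_mapping_unmap_atomic(s);

            ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE);
    }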
i915_gem_evict.c
    51  static int populate_ggtt(struct i915_ggtt *ggtt, struct list_head *objects)
    60  obj = i915_gem_object_create_internal(ggtt->vm.i915,
    77  pr_debug("Filled GGTT with %lu pages [%llu total]\n",
    78  count, ggtt->vm.total / PAGE_SIZE);
   104  if (list_empty(&ggtt->vm.bound_list)) {
   105  pr_err("No objects on the GGTT inactive list!\n");
   112  static void unpin_ggtt(struct i915_ggtt *ggtt)
   116  list_for_each_entry(vma, &ggtt->vm.bound_list, vm_link)
   121  static void cleanup_objects(struct i915_ggtt *ggtt, struct list_head *list)
   131  i915_gem_drain_freed_objects(ggtt->vm.i915)
   137  struct i915_ggtt *ggtt = gt->ggtt;    local in function:igt_evict_something
   183  struct i915_ggtt *ggtt = gt->ggtt;    local in function:igt_overcommit
   220  struct i915_ggtt *ggtt = gt->ggtt;    local in function:igt_evict_for_vma
   271  struct i915_ggtt *ggtt = gt->ggtt;    local in function:igt_evict_for_cache_color
   362  struct i915_ggtt *ggtt = gt->ggtt;    local in function:igt_evict_vm
   402  struct i915_ggtt *ggtt = gt->ggtt;    local in function:igt_evict_contexts
   [all...]
i915_vma.c
    87  pr_err("VMA ggtt status [%d] does not match parent [%d]\n",
   155  struct i915_ggtt *ggtt = arg;    local in function:igt_vma_create
   156  struct drm_i915_private *i915 = ggtt->vm.i915;
   262  struct i915_ggtt *ggtt = arg;    local in function:igt_vma_pin1
   273  VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
   274  VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
   275  VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
   277  VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
   278  INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | ggtt->mappable_end),
   279  VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096))
   483  struct i915_ggtt *ggtt = arg;    local in function:igt_vma_rotate_remap
   705  struct i915_ggtt *ggtt = arg;    local in function:igt_vma_partial
   828  struct i915_ggtt *ggtt;    local in function:i915_vma_mock_selftests
   [all...]
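The VALID()/INVALID() rows in igt_vma_pin1 encode the rule that a biased or fixed GGTT offset must still leave room for the object below mappable_end (or below vm.total when PIN_MAPPABLE is not requested). One VALID row translated into an ordinary call site, as a sketch; i915_vma_pin() and the 4096-byte object size are assumptions here:

    /* Hedged sketch: pin a one-page VMA at the last offset that still fits
     * under the mappable aperture, mirroring a VALID() row above. */
    static int example_pin_fixed(struct i915_ggtt *ggtt, struct i915_vma *vma)
    {
            const u64 offset = ggtt->mappable_end - 4096;

            return i915_vma_pin(vma, 0, 0,
                                PIN_GLOBAL | PIN_MAPPABLE |
                                PIN_OFFSET_FIXED | offset);
    }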
i915_gem_gtt.c
  1086  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:exercise_ggtt
  1093  list_sort(NULL, &ggtt->vm.mm.hole_stack, sort_holes);
  1094  drm_mm_for_each_hole(node, &ggtt->vm.mm, hole_start, hole_end) {
  1098  if (ggtt->vm.mm.color_adjust)
  1099  ggtt->vm.mm.color_adjust(node, 0,
  1104  err = func(&ggtt->vm, hole_start, hole_end, end_time);
  1146  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:igt_ggtt_page
  1153  if (!i915_ggtt_has_aperture(ggtt))
  1275  struct i915_ggtt *ggtt = arg;    local in function:igt_mock_fill
  1282  struct i915_ggtt *ggtt = arg;    local in function:igt_mock_walk
  1289  struct i915_ggtt *ggtt = arg;    local in function:igt_mock_pot
  1296  struct i915_ggtt *ggtt = arg;    local in function:igt_mock_drunk
  1303  struct i915_ggtt *ggtt = arg;    local in function:igt_gtt_reserve
  1476  struct i915_ggtt *ggtt = arg;    local in function:igt_gtt_insert
  1702  struct i915_ggtt *ggtt;    local in function:i915_gem_gtt_mock_selftests
  [all...]
/src/sys/external/bsd/drm2/i915drm/
intelfb.c
   178  struct i915_ggtt *const ggtt = &i915->ggtt;    local in function:intelfb_drmfb_mmapfb
   184  return bus_space_mmap(dev->bst, ggtt->gmadr.start,
/src/sys/external/bsd/drm2/dist/drm/i915/gt/
intel_gt_types.h
    36  struct i915_ggtt *ggtt;    member in struct:intel_gt
    97  * Default address space (either GGTT or ppGTT depending on arch).
gen6_ppgtt.c
   210  gen6_ggtt_invalidate(ppgtt->base.vm.gt->ggtt);
   343  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm);    local in function:pd_vma_bind
   356  KASSERTMSG(gtt_nbytes <= ggtt->gsmsz - ggtt_offset_bytes,
   363  (uint64_t)ggtt->gsmsz,
   364  (uint64_t)(ggtt->gsmsz - ggtt_offset_bytes));
   365  ret = -bus_space_subregion(ggtt->gsmt, ggtt->gsmh, ggtt_offset_bytes,
   368  DRM_ERROR("Unable to subregion the GGTT: %d\n", ret);
   371  ppgtt->pd_bst = ggtt->gsmt;
   374  ppgtt->pd_addr = (gen6_pte_t __iomem *)ggtt->gsm + ggtt_offset
   414  struct i915_ggtt *ggtt = ppgtt->base.vm.gt->ggtt;    local in function:pd_vma_create
   499  struct i915_ggtt * const ggtt = gt->ggtt;    local in function:gen6_ppgtt_create
   [all...]
intel_ggtt.c
    51  static int ggtt_init_hw(struct i915_ggtt *ggtt)
    53  struct drm_i915_private *i915 = ggtt->vm.i915;
    55  i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT);
    57  ggtt->vm.is_ggtt = true;
    59  /* Only VLV supports read-only GGTT mappings */
    60  ggtt->vm.has_read_only = IS_VALLEYVIEW(i915);
    63  ggtt->vm.mm.color_adjust = i915_ggtt_color_adjust;
    65  if (ggtt->mappable_end) {
    67  if (!drm_io_mapping_init_wc(&i915->drm, &ggtt->iomap,
    68  ggtt->gmadr.start, ggtt->mappable_end))
   223  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);    local in function:gen8_ggtt_insert_page
   244  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);    local in function:gen8_ggtt_insert_entries
   296  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);    local in function:gen6_ggtt_insert_page
   326  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);    local in function:gen6_ggtt_insert_entries
   377  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);    local in function:gen8_ggtt_clear_range
   498  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);    local in function:gen6_ggtt_clear_range
   992  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);    local in function:gen6_gmch_remove
  1430  struct i915_ggtt *ggtt = &i915->ggtt;    local in function:i915_gem_restore_gtt_mappings
  [all...]
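The gen6_*/gen8_* entries here are the per-generation implementations behind the ggtt->vm function pointers used throughout the rest of this listing (insert_page, insert_entries, clear_range). How they get wired up is not shown in these hits; a hedged sketch of the presumed hookup on the gen8 probe path:

    /* Hedged sketch: the probe code is assumed to install the per-generation
     * hooks that the rest of the driver calls through ggtt->vm. */
    static void example_install_gen8_hooks(struct i915_ggtt *ggtt)
    {
            ggtt->vm.insert_page    = gen8_ggtt_insert_page;
            ggtt->vm.insert_entries = gen8_ggtt_insert_entries;
            ggtt->vm.clear_range    = gen8_ggtt_clear_range;
    }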
/src/sys/external/bsd/drm2/dist/drm/i915/display/
intel_fbdev.c
   188  struct i915_ggtt *ggtt = &dev_priv->ggtt;    local in function:intelfb_create
   227  /* Pin the GGTT vma for our access via info->screen_base.
   245  __USE(ggtt);
   291  info->apertures->ranges[0].base = ggtt->gmadr.start;
   292  info->apertures->ranges[0].size = ggtt->mappable_end;
   296  (unsigned long)(ggtt->gmadr.start + vma->node.start);
/src/sys/external/bsd/drm2/dist/drm/i915/gt/uc/
intel_uc_fw.c
   399  struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt;    local in function:uc_fw_ggtt_offset
   400  struct drm_mm_node *node = &ggtt->uc_fw;
   412  struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt;    local in function:uc_fw_bind_ggtt
   417  .vm = &ggtt->vm,
   421  GEM_BUG_ON(dummy.node.size > ggtt->uc_fw.size);
   426  ggtt->vm.insert_entries(&ggtt->vm, &dummy, I915_CACHE_NONE, 0);
   432  struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt    local in function:uc_fw_unbind_ggtt
   [all...]
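uc_fw_bind_ggtt() appears to bind the firmware object without a real VMA: it fills a stack "dummy" i915_vma over the reserved ggtt->uc_fw node and hands it to vm.insert_entries directly. A sketch of that pattern; every initialiser except .vm (which is listed at line 417) is an assumption:

    /* Hedged sketch of the dummy-vma binding pattern in uc_fw_bind_ggtt(). */
    static void example_bind_fw(struct intel_uc_fw *uc_fw)
    {
            struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt;
            struct drm_i915_gem_object *obj = uc_fw->obj;   /* assumed member */
            struct i915_vma dummy = {
                    .node.start = ggtt->uc_fw.start,        /* reserved node */
                    .node.size  = obj->base.size,           /* assumed */
                    .pages      = obj->mm.pages,            /* assumed */
                    .vm         = &ggtt->vm,                /* as listed */
            };

            GEM_BUG_ON(dummy.node.size > ggtt->uc_fw.size);
            ggtt->vm.insert_entries(&ggtt->vm, &dummy, I915_CACHE_NONE, 0);
    }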