/src/sys/external/bsd/drm2/dist/drm/i915/selftests/

mock_gtt.c
    108 void mock_init_ggtt(struct drm_i915_private *i915, struct i915_ggtt *ggtt)
    110 memset(ggtt, 0, sizeof(*ggtt));
    112 ggtt->vm.gt = &i915->gt;
    113 ggtt->vm.i915 = i915;
    114 ggtt->vm.is_ggtt = true;
    116 ggtt->gmadr = (struct resource) DEFINE_RES_MEM(0, 2048 * PAGE_SIZE);
    117 ggtt->mappable_end = resource_size(&ggtt->gmadr);
    118 ggtt->vm.total = 4096 * PAGE_SIZE
    [all...]
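Taken together, the mock_gtt.c matches describe a 4096-page mock GGTT with a 2048-page fake mappable aperture, used by the selftests below. A minimal sketch of mock_init_ggtt() assembled from those lines; anything past the "[all...]" truncation (the insert/clear callbacks and remaining setup) is assumed, not quoted:

    void mock_init_ggtt(struct drm_i915_private *i915, struct i915_ggtt *ggtt)
    {
        memset(ggtt, 0, sizeof(*ggtt));

        ggtt->vm.gt = &i915->gt;
        ggtt->vm.i915 = i915;
        ggtt->vm.is_ggtt = true;

        /* 2048-page fake aperture carved out of a 4096-page GGTT */
        ggtt->gmadr = (struct resource) DEFINE_RES_MEM(0, 2048 * PAGE_SIZE);
        ggtt->mappable_end = resource_size(&ggtt->gmadr);
        ggtt->vm.total = 4096 * PAGE_SIZE;

        /* vm callbacks and the rest are hidden behind "[all...]" */
    }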
mock_gtt.h
    33 void mock_init_ggtt(struct drm_i915_private *i915, struct i915_ggtt *ggtt);
    34 void mock_fini_ggtt(struct i915_ggtt *ggtt);
i915_gem_evict.c
    51 static int populate_ggtt(struct i915_ggtt *ggtt, struct list_head *objects)
    60 obj = i915_gem_object_create_internal(ggtt->vm.i915,
    77 pr_debug("Filled GGTT with %lu pages [%llu total]\n",
    78 count, ggtt->vm.total / PAGE_SIZE);
    104 if (list_empty(&ggtt->vm.bound_list)) {
    105 pr_err("No objects on the GGTT inactive list!\n");
    112 static void unpin_ggtt(struct i915_ggtt *ggtt)
    116 list_for_each_entry(vma, &ggtt->vm.bound_list, vm_link)
    121 static void cleanup_objects(struct i915_ggtt *ggtt, struct list_head *list)
    131 i915_gem_drain_freed_objects(ggtt->vm.i915)
    137 struct i915_ggtt *ggtt = gt->ggtt; local in function:igt_evict_something
    183 struct i915_ggtt *ggtt = gt->ggtt; local in function:igt_overcommit
    220 struct i915_ggtt *ggtt = gt->ggtt; local in function:igt_evict_for_vma
    271 struct i915_ggtt *ggtt = gt->ggtt; local in function:igt_evict_for_cache_color
    362 struct i915_ggtt *ggtt = gt->ggtt; local in function:igt_evict_vm
    402 struct i915_ggtt *ggtt = gt->ggtt; local in function:igt_evict_contexts
    [all...]
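The eviction selftests fill the whole GGTT with internal objects (populate_ggtt()) and later walk the bound list to drop the pins again. A minimal sketch of unpin_ggtt() as implied by lines 112-116; only the list walk is visible above, so the unpin call in the body is an assumption:

    static void unpin_ggtt(struct i915_ggtt *ggtt)
    {
        struct i915_vma *vma;

        /* drop the pin taken on each vma by populate_ggtt() */
        list_for_each_entry(vma, &ggtt->vm.bound_list, vm_link)
            i915_vma_unpin(vma);
    }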
i915_gem.c
    48 struct i915_ggtt *ggtt = &i915->ggtt; local in function:trash_stolen
    49 const u64 slot = ggtt->error_capture.start;
    55 if (!i915_ggtt_has_aperture(ggtt))
    63 ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0);
    65 s = io_mapping_map_atomic_wc(&ggtt->iomap, slot);
    73 ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE);
    133 i915_gem_restore_fences(&i915->ggtt);
    [all...]
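trash_stolen() shows the usual trick for poking stolen memory through the aperture: bind one page at the reserved error-capture slot, scribble through the WC io-mapping, then unbind. A sketch of that sequence, wrapped in a hypothetical helper (the names dma and slot come from the listing, the wrapper and the scribbling step do not):

    /* hypothetical helper illustrating the insert_page/clear_range pattern */
    static void trash_one_page(struct i915_ggtt *ggtt, dma_addr_t dma)
    {
        const u64 slot = ggtt->error_capture.start;
        void __iomem *s;

        if (!i915_ggtt_has_aperture(ggtt))
            return;

        ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0);

        s = io_mapping_map_atomic_wc(&ggtt->iomap, slot);
        /* ... overwrite the page through the WC aperture mapping ... */
        io_mapping_unmap_atomic(s);

        ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE);
    }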
i915_gem_gtt.c
    1086 struct i915_ggtt *ggtt = &i915->ggtt; local in function:exercise_ggtt
    1093 list_sort(NULL, &ggtt->vm.mm.hole_stack, sort_holes);
    1094 drm_mm_for_each_hole(node, &ggtt->vm.mm, hole_start, hole_end) {
    1098 if (ggtt->vm.mm.color_adjust)
    1099 ggtt->vm.mm.color_adjust(node, 0,
    1104 err = func(&ggtt->vm, hole_start, hole_end, end_time);
    1146 struct i915_ggtt *ggtt = &i915->ggtt; local in function:igt_ggtt_page
    1153 if (!i915_ggtt_has_aperture(ggtt))
    1275 struct i915_ggtt *ggtt = arg; local in function:igt_mock_fill
    1282 struct i915_ggtt *ggtt = arg; local in function:igt_mock_walk
    1289 struct i915_ggtt *ggtt = arg; local in function:igt_mock_pot
    1296 struct i915_ggtt *ggtt = arg; local in function:igt_mock_drunk
    1303 struct i915_ggtt *ggtt = arg; local in function:igt_gtt_reserve
    1476 struct i915_ggtt *ggtt = arg; local in function:igt_gtt_insert
    1702 struct i915_ggtt *ggtt; local in function:i915_gem_gtt_mock_selftests
    [all...]
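exercise_ggtt() runs each sub-test over every free hole left in the GGTT drm_mm, honouring the colouring constraint. A sketch of that loop as suggested by lines 1093-1104; the surrounding declarations, the func callback and the loop-exit handling live in the enclosing function and are assumed here:

    /* fragment from exercise_ggtt(): probe each remaining hole */
    list_sort(NULL, &ggtt->vm.mm.hole_stack, sort_holes);
    drm_mm_for_each_hole(node, &ggtt->vm.mm, hole_start, hole_end) {
        if (ggtt->vm.mm.color_adjust)
            ggtt->vm.mm.color_adjust(node, 0,
                                     &hole_start, &hole_end);
        if (hole_start >= hole_end)
            continue;

        err = func(&ggtt->vm, hole_start, hole_end, end_time);
        if (err)
            break;
    }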
i915_vma.c
    87 pr_err("VMA ggtt status [%d] does not match parent [%d]\n",
    155 struct i915_ggtt *ggtt = arg; local in function:igt_vma_create
    156 struct drm_i915_private *i915 = ggtt->vm.i915;
    262 struct i915_ggtt *ggtt = arg; local in function:igt_vma_pin1
    273 VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
    274 VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
    275 VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
    277 VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
    278 INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | ggtt->mappable_end),
    279 VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096))
    483 struct i915_ggtt *ggtt = arg; local in function:igt_vma_rotate_remap
    705 struct i915_ggtt *ggtt = arg; local in function:igt_vma_partial
    828 struct i915_ggtt *ggtt; local in function:i915_vma_mock_selftests
    [all...]
mock_gem_device.c
    71 mock_fini_ggtt(&i915->ggtt);
    182 mock_init_ggtt(i915, &i915->ggtt);
    183 i915->gt.vm = i915_vm_get(&i915->ggtt.vm);
/src/sys/external/bsd/drm2/dist/drm/i915/gt/

intel_ggtt.c
    51 static int ggtt_init_hw(struct i915_ggtt *ggtt)
    53 struct drm_i915_private *i915 = ggtt->vm.i915;
    55 i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT);
    57 ggtt->vm.is_ggtt = true;
    59 /* Only VLV supports read-only GGTT mappings */
    60 ggtt->vm.has_read_only = IS_VALLEYVIEW(i915);
    63 ggtt->vm.mm.color_adjust = i915_ggtt_color_adjust;
    65 if (ggtt->mappable_end) {
    67 if (!drm_io_mapping_init_wc(&i915->drm, &ggtt->iomap,
    68 ggtt->gmadr.start, ggtt->mappable_end))
    223 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); local in function:gen8_ggtt_insert_page
    244 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); local in function:gen8_ggtt_insert_entries
    296 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); local in function:gen6_ggtt_insert_page
    326 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); local in function:gen6_ggtt_insert_entries
    377 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); local in function:gen8_ggtt_clear_range
    498 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); local in function:gen6_ggtt_clear_range
    992 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); local in function:gen6_gmch_remove
    1430 struct i915_ggtt *ggtt = &i915->ggtt; local in function:i915_gem_restore_gtt_mappings
    [all...]
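ggtt_init_hw() is where the real GGTT address space gets its policy bits; the gen6/gen8 insert_page, insert_entries and clear_range locals listed above are the per-generation PTE writers installed elsewhere in the file. A sketch of the visible part of ggtt_init_hw(); the guard around color_adjust, the error path and everything behind "[all...]" are assumptions:

    static int ggtt_init_hw(struct i915_ggtt *ggtt)
    {
        struct drm_i915_private *i915 = ggtt->vm.i915;

        i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT);

        ggtt->vm.is_ggtt = true;

        /* Only VLV supports read-only GGTT mappings */
        ggtt->vm.has_read_only = IS_VALLEYVIEW(i915);

        if (!HAS_LLC(i915) && !HAS_PPGTT(i915))    /* assumed guard */
            ggtt->vm.mm.color_adjust = i915_ggtt_color_adjust;

        if (ggtt->mappable_end) {
            if (!drm_io_mapping_init_wc(&i915->drm, &ggtt->iomap,
                                        ggtt->gmadr.start,
                                        ggtt->mappable_end))
                return -EIO;    /* assumed error path */
        }

        /* fence-register setup etc. behind "[all...]" */
        return 0;
    }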
gen6_ppgtt.c
    210 gen6_ggtt_invalidate(ppgtt->base.vm.gt->ggtt);
    343 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm); local in function:pd_vma_bind
    356 KASSERTMSG(gtt_nbytes <= ggtt->gsmsz - ggtt_offset_bytes,
    363 (uint64_t)ggtt->gsmsz,
    364 (uint64_t)(ggtt->gsmsz - ggtt_offset_bytes));
    365 ret = -bus_space_subregion(ggtt->gsmt, ggtt->gsmh, ggtt_offset_bytes,
    368 DRM_ERROR("Unable to subregion the GGTT: %d\n", ret);
    371 ppgtt->pd_bst = ggtt->gsmt;
    374 ppgtt->pd_addr = (gen6_pte_t __iomem *)ggtt->gsm + ggtt_offset
    414 struct i915_ggtt *ggtt = ppgtt->base.vm.gt->ggtt; local in function:pd_vma_create
    499 struct i915_ggtt * const ggtt = gt->ggtt; local in function:gen6_ppgtt_create
    [all...]
intel_gt.h
    39 void intel_gt_init_hw_early(struct intel_gt *gt, struct i915_ggtt *ggtt);
intel_gt_types.h
    36 struct i915_ggtt *ggtt; member in struct:intel_gt
    97 * Default address space (either GGTT or ppGTT depending on arch).
/src/sys/external/bsd/drm2/dist/drm/i915/

i915_gem_fence_reg.h
    43 struct i915_ggtt *ggtt; member in struct:i915_fence_reg
    59 struct i915_fence_reg *i915_reserve_fence(struct i915_ggtt *ggtt);
    62 void i915_gem_restore_fences(struct i915_ggtt *ggtt);
    69 void i915_ggtt_init_fences(struct i915_ggtt *ggtt);
i915_vgpu.h
    48 int intel_vgt_balloon(struct i915_ggtt *ggtt);
    49 void intel_vgt_deballoon(struct i915_ggtt *ggtt);
i915_vgpu.c
    167 static void vgt_deballoon_space(struct i915_ggtt *ggtt,
    178 ggtt->vm.reserved -= node->size;
    184 * @ggtt: the global GGTT from which we reserved earlier
    189 void intel_vgt_deballoon(struct i915_ggtt *ggtt)
    193 if (!intel_vgpu_active(ggtt->vm.i915))
    199 vgt_deballoon_space(ggtt, &bl_info.space[i]);
    202 static int vgt_balloon_space(struct i915_ggtt *ggtt,
    214 ret = i915_gem_gtt_reserve(&ggtt->vm, node,
    218 ggtt->vm.reserved += size
    [all...]
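Ballooning reserves the GGTT ranges owned by other vGPUs directly in the host drm_mm and tracks them in vm.reserved so they are never treated as usable space; deballooning is the inverse. A sketch of vgt_balloon_space() built around lines 202-218; the parameter list past ggtt and the start >= end guard are assumptions:

    static int vgt_balloon_space(struct i915_ggtt *ggtt,
                                 struct drm_mm_node *node,
                                 unsigned long start, unsigned long end)
    {
        unsigned long size = end - start;
        int ret;

        if (start >= end)
            return -EINVAL;

        /* pin the ballooned range so nothing else lands in it */
        ret = i915_gem_gtt_reserve(&ggtt->vm, node,
                                   size, start, I915_COLOR_UNEVICTABLE,
                                   0);
        if (!ret)
            ggtt->vm.reserved += size;

        return ret;
    }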
i915_gem_fence_reg.c
    70 return fence->ggtt->vm.i915;
    75 return fence->ggtt->vm.gt->uncore;
    236 struct i915_ggtt *ggtt = fence->ggtt; local in function:fence_update
    279 list_move(&fence->link, &ggtt->fence_list);
    303 list_move_tail(&fence->link, &ggtt->fence_list);
    335 static struct i915_fence_reg *fence_find(struct i915_ggtt *ggtt)
    339 list_for_each_entry(fence, &ggtt->fence_list, link) {
    349 if (intel_has_pending_fb_unpin(ggtt->vm.i915))
    357 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm) local in function:__i915_vma_pin_fence
    488 struct i915_ggtt *ggtt = fence->ggtt; local in function:i915_unreserve_fence
    [all...]
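fence_find() hands out the least-recently-used fence register that is not pinned; fence_update() keeps the LRU order by moving registers within ggtt->fence_list. A sketch of fence_find() from lines 335-349; the pin-count test and the exact error codes are assumptions beyond what the listing shows:

    static struct i915_fence_reg *fence_find(struct i915_ggtt *ggtt)
    {
        struct i915_fence_reg *fence;

        list_for_each_entry(fence, &ggtt->fence_list, link) {
            if (atomic_read(&fence->pin_count))    /* assumed field */
                continue;

            return fence;
        }

        /* wait for pending flips that still consume fences */
        if (intel_has_pending_fb_unpin(ggtt->vm.i915))
            return ERR_PTR(-EAGAIN);

        return ERR_PTR(-EDEADLK);
    }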
intel_region_lmem.c
    20 struct i915_ggtt *ggtt = &i915->ggtt; local in function:init_fake_lmem_bar
    30 ret = drm_mm_reserve_node(&ggtt->vm.mm, &mem->fake_mappable);
    45 ggtt->vm.insert_page(&ggtt->vm,
    118 GEM_BUG_ON(i915_ggtt_has_aperture(&i915->ggtt));
i915_gem.c
    73 insert_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node, u32 size)
    77 err = mutex_lock_interruptible(&ggtt->vm.mutex);
    82 err = drm_mm_insert_node_in_range(&ggtt->vm.mm, node,
    84 0, ggtt->mappable_end,
    87 mutex_unlock(&ggtt->vm.mutex);
    93 remove_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node)
    95 mutex_lock(&ggtt->vm.mutex);
    97 mutex_unlock(&ggtt->vm.mutex);
    104 struct i915_ggtt *ggtt = &to_i915(dev)->ggtt; local in function:i915_gem_get_aperture_ioctl
    424 struct i915_ggtt *ggtt = &i915->ggtt; local in function:i915_gem_gtt_pread
    611 struct i915_ggtt *ggtt = &i915->ggtt; local in function:i915_gem_gtt_pwrite_fast
    979 struct i915_ggtt *ggtt = &i915->ggtt; local in function:i915_gem_object_ggtt_pin
    [all...]
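The GTT pread/pwrite slow paths temporarily claim a node in the mappable half of the GGTT, always under vm.mutex. A sketch of the two helpers from lines 73-97; the alignment, colour and insertion-mode arguments hidden by the listing and the drm_mm_remove_node() call are assumptions:

    static int insert_mappable_node(struct i915_ggtt *ggtt,
                                    struct drm_mm_node *node, u32 size)
    {
        int err;

        err = mutex_lock_interruptible(&ggtt->vm.mutex);
        if (err)
            return err;

        memset(node, 0, sizeof(*node));
        err = drm_mm_insert_node_in_range(&ggtt->vm.mm, node,
                                          size, 0, I915_COLOR_UNEVICTABLE,
                                          0, ggtt->mappable_end,
                                          DRM_MM_INSERT_LOW);

        mutex_unlock(&ggtt->vm.mutex);
        return err;
    }

    static void remove_mappable_node(struct i915_ggtt *ggtt,
                                     struct drm_mm_node *node)
    {
        mutex_lock(&ggtt->vm.mutex);
        drm_mm_remove_node(node);
        mutex_unlock(&ggtt->vm.mutex);
    }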
i915_gem_gtt.c
    87 struct i915_ggtt *ggtt = &dev_priv->ggtt; local in function:i915_gem_gtt_finish_pages
    89 if (unlikely(ggtt->do_idle_maps)) {
    91 if (intel_gt_retire_requests_timeout(ggtt->vm.gt,
    138 GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm);
    236 GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm);
    283 * mmap and GPU (i.e. ggtt/aliasing_ppgtt but not full-ppgtt):
/src/sys/external/bsd/drm2/dist/drm/i915/gem/

i915_gem_tiling.c
    191 struct i915_ggtt *ggtt = &to_i915(obj->base.dev)->ggtt; local in function:i915_gem_object_fence_prepare
    198 mutex_lock(&ggtt->vm.mutex);
    207 mutex_unlock(&ggtt->vm.mutex);
    332 if (!dev_priv->ggtt.num_fences)
    358 args->swizzle_mode = to_i915(dev)->ggtt.bit_6_swizzle_x;
    360 args->swizzle_mode = to_i915(dev)->ggtt.bit_6_swizzle_y;
    415 if (!dev_priv->ggtt.num_fences)
    431 args->swizzle_mode = dev_priv->ggtt.bit_6_swizzle_x;
    434 args->swizzle_mode = dev_priv->ggtt.bit_6_swizzle_y
    [all...]
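Both tiling ioctls bail out early when the GGTT has no fence registers, and otherwise report the bit-6 swizzle the GGTT detected at init. A sketch of the get_tiling switch implied by lines 358-360; the surrounding ioctl boilerplate and the default case are assumptions:

    switch (args->tiling_mode) {
    case I915_TILING_X:
        args->swizzle_mode = to_i915(dev)->ggtt.bit_6_swizzle_x;
        break;
    case I915_TILING_Y:
        args->swizzle_mode = to_i915(dev)->ggtt.bit_6_swizzle_y;
        break;
    default:
        args->swizzle_mode = I915_BIT_6_SWIZZLE_NONE;
        break;
    }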
i915_gem_stolen.c
    77 struct i915_ggtt *ggtt = &i915->ggtt; local in function:i915_adjust_stolen
    78 struct intel_uncore *uncore = ggtt->vm.gt->uncore;
    105 ggtt_total_entries(ggtt) * 4);
    770 struct i915_ggtt *ggtt = &i915->ggtt; local in function:i915_gem_object_create_stolen_for_preallocated
    819 vma = i915_vma_instance(obj, &ggtt->vm, NULL);
    830 mutex_lock(&ggtt->vm.mutex);
    831 ret = i915_gem_gtt_reserve(&ggtt->vm, &vma->node,
    836 mutex_unlock(&ggtt->vm.mutex)
    [all...]
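Pre-allocated stolen objects (for example the firmware framebuffer) must land at a fixed GGTT offset, so the driver reserves the exact node under vm.mutex instead of letting the allocator pick one. A sketch of that step from lines 819-836; gtt_offset, the size/colour arguments and the error handling come from the enclosing function and are assumed here:

    /* fragment: pin a preallocated stolen object at its BIOS offset */
    vma = i915_vma_instance(obj, &ggtt->vm, NULL);
    if (IS_ERR(vma))
        return PTR_ERR(vma);

    mutex_lock(&ggtt->vm.mutex);
    ret = i915_gem_gtt_reserve(&ggtt->vm, &vma->node,
                               obj->base.size, gtt_offset,
                               obj->cache_level, 0);
    mutex_unlock(&ggtt->vm.mutex);
    if (ret)
        return ret;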
i915_gem_pm.c
    25 intel_wakeref_auto(&i915->ggtt.userfault_wakeref, 0);
/src/sys/external/bsd/drm2/dist/drm/i915/gvt/

aperture_gm.c
    69 mutex_lock(&dev_priv->ggtt.vm.mutex);
    71 ret = i915_gem_gtt_insert(&dev_priv->ggtt.vm, node,
    76 mutex_unlock(&dev_priv->ggtt.vm.mutex);
    106 mutex_lock(&dev_priv->ggtt.vm.mutex);
    108 mutex_unlock(&dev_priv->ggtt.vm.mutex);
    116 mutex_lock(&dev_priv->ggtt.vm.mutex);
    119 mutex_unlock(&dev_priv->ggtt.vm.mutex);
    180 mutex_lock(&dev_priv->ggtt.vm.mutex);
    187 mutex_unlock(&dev_priv->ggtt.vm.mutex);
    203 mutex_lock(&dev_priv->ggtt.vm.mutex)
    [all...]
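GVT carves each vGPU's aperture and hidden graphics-memory ranges straight out of the host GGTT drm_mm, and every touch of that allocator is bracketed by ggtt.vm.mutex. A sketch of the allocation step around lines 69-76; the size, start/end range, flags and the high_gm naming are assumptions taken from the vGPU type rather than from the listing:

    /* fragment: allocate one GM range for a vGPU from the host GGTT */
    mutex_lock(&dev_priv->ggtt.vm.mutex);
    ret = i915_gem_gtt_insert(&dev_priv->ggtt.vm, node,
                              size, I915_GTT_PAGE_SIZE,
                              I915_COLOR_UNEVICTABLE,
                              start, end, flags);
    mutex_unlock(&dev_priv->ggtt.vm.mutex);
    if (ret)
        gvt_err("fail to alloc %s gm space from host\n",
                high_gm ? "high" : "low");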
/src/sys/external/bsd/drm2/dist/drm/i915/gt/uc/

intel_uc_fw.c
    399 struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt; local in function:uc_fw_ggtt_offset
    400 struct drm_mm_node *node = &ggtt->uc_fw;
    412 struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt; local in function:uc_fw_bind_ggtt
    417 .vm = &ggtt->vm,
    421 GEM_BUG_ON(dummy.node.size > ggtt->uc_fw.size);
    426 ggtt->vm.insert_entries(&ggtt->vm, &dummy, I915_CACHE_NONE, 0);
    432 struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt local in function:uc_fw_unbind_ggtt
    [all...]
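The GuC/HuC firmware image is not bound through a real vma; a throwaway dummy vma is pointed at the drm_mm node reserved for firmware (ggtt->uc_fw) and fed straight to insert_entries. A sketch of uc_fw_bind_ggtt() around lines 412-426; the dummy's other initialisers and the cache flush are assumptions:

    static void uc_fw_bind_ggtt(struct intel_uc_fw *uc_fw)
    {
        struct drm_i915_gem_object *obj = uc_fw->obj;
        struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt;
        struct i915_vma dummy = {
            .node.start = uc_fw_ggtt_offset(uc_fw),
            .node.size = obj->base.size,
            .pages = obj->mm.pages,
            .vm = &ggtt->vm,
        };

        GEM_BUG_ON(dummy.node.size > ggtt->uc_fw.size);

        /* firmware pages were not tracked across suspend; flush first */
        drm_clflush_sg(dummy.pages);

        ggtt->vm.insert_entries(&ggtt->vm, &dummy, I915_CACHE_NONE, 0);
    }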
/src/sys/external/bsd/drm2/i915drm/

intelfb.c
    178 struct i915_ggtt *const ggtt = &i915->ggtt; local in function:intelfb_drmfb_mmapfb
    184 return bus_space_mmap(dev->bst, ggtt->gmadr.start,
/src/sys/external/bsd/drm2/dist/drm/i915/gem/selftests/

i915_gem_object.c
    51 i915->ggtt.vm.total + PAGE_SIZE);