/src/sys/external/bsd/drm2/dist/drm/nouveau/ |
nouveau_mem.h | 22 struct nvif_vma vma[2]; member in struct:nouveau_mem
|
nouveau_vmm.c |
    34 nouveau_vma_unmap(struct nouveau_vma *vma)
    36 if (vma->mem) {
    37 nvif_vmm_unmap(&vma->vmm->vmm, vma->addr);
    38 vma->mem = NULL;
    43 nouveau_vma_map(struct nouveau_vma *vma, struct nouveau_mem *mem)
    45 struct nvif_vma tmp = { .addr = vma->addr };
    46 int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);
    49 vma->mem = mem;
    56 struct nouveau_vma *vma; local in function:nouveau_vma_find
    69 struct nouveau_vma *vma = *pvma; local in function:nouveau_vma_del
    86 struct nouveau_vma *vma; local in function:nouveau_vma_new
    [all...]
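
The nouveau_vmm.c hits show the pair of helpers that attach and detach backing memory for a nouveau VMA. A minimal sketch of how the quoted lines fit together; only the statements shown in the hits are verbatim, the return types and the early return after nouveau_mem_map() are assumptions:

void
nouveau_vma_unmap(struct nouveau_vma *vma)
{
	/* Only tear down the mapping if backing memory is attached. */
	if (vma->mem) {
		nvif_vmm_unmap(&vma->vmm->vmm, vma->addr);
		vma->mem = NULL;
	}
}

int
nouveau_vma_map(struct nouveau_vma *vma, struct nouveau_mem *mem)
{
	/* Map the memory at the VMA's pre-allocated address. */
	struct nvif_vma tmp = { .addr = vma->addr };
	int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);
	if (ret)		/* assumption: bail out before recording mem */
		return ret;
	vma->mem = mem;
	return 0;
}
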
nouveau_display.h | 16 struct nouveau_vma *vma; member in struct:nouveau_framebuffer
|
/src/sys/external/bsd/drm2/dist/drm/i915/ |
i915_gem_fence_reg.h | 44 struct i915_vma *vma; member in struct:i915_fence_reg
|
i915_gem_evict.c |
    51 * Not everything in the GGTT is tracked via vma (otherwise we
    62 struct i915_vma *vma,
    66 if (i915_vma_is_pinned(vma))
    69 list_add(&vma->evict_link, unwind);
    70 return drm_mm_scan_add_block(scan, &vma->node);
    87 * This function is used by the object/vma binding code.
    105 struct i915_vma *vma, *next; local in function:i915_gem_evict_something
    139 list_for_each_entry_safe(vma, next, &vm->bound_list, vm_link) {
    155 if (i915_vma_is_active(vma)) {
    156 if (vma == active)
    265 struct i915_vma *vma, *next; local in function:i915_gem_evict_for_node
    386 struct i915_vma *vma, *vn; local in function:i915_gem_evict_vm
    [all...]
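
The hits at lines 62-70 are the small helper that feeds candidate VMAs into a drm_mm eviction scan: a pinned VMA is skipped, everything else is chained onto an unwind list and offered to the scanner. A sketch of that shape; the name mark_free and the exact parameter list are assumptions, only the quoted statements are verbatim:

static bool
mark_free(struct drm_mm_scan *scan, struct i915_vma *vma,
    struct list_head *unwind)
{
	/* A pinned VMA cannot be evicted. */
	if (i915_vma_is_pinned(vma))
		return false;

	/* Remember it so a failed scan can be unwound later. */
	list_add(&vma->evict_link, unwind);
	return drm_mm_scan_add_block(scan, &vma->node);
}
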
/src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/engine/fifo/ |
changf100.h | 19 struct nvkm_vma *vma; member in struct:gf100_fifo_chan::__anonfd9c78110108
|
changk104.h | 23 struct nvkm_vma *vma; member in struct:gk104_fifo_chan::__anon0946543a0108
|
/src/sys/external/bsd/drm2/dist/drm/i915/display/ |
intel_dsb.h | 30 struct i915_vma *vma; member in struct:intel_dsb
|
intel_frontbuffer.c |
    230 struct i915_vma *vma; local in function:frontbuffer_release
    232 spin_lock(&obj->vma.lock);
    233 for_each_ggtt_vma(vma, obj)
    234 vma->display_alignment = I915_GTT_MIN_ALIGNMENT;
    235 spin_unlock(&obj->vma.lock);
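
The frontbuffer_release() hit shows the usual pattern for walking an object's GGTT VMAs under obj->vma.lock, here to drop the display alignment back to the minimum once the object stops being a frontbuffer. Reassembled from the quoted lines only; everything outside the loop is omitted:

	struct i915_vma *vma;

	/* Walk every GGTT VMA of the object while holding its vma lock. */
	spin_lock(&obj->vma.lock);
	for_each_ggtt_vma(vma, obj)
		vma->display_alignment = I915_GTT_MIN_ALIGNMENT;
	spin_unlock(&obj->vma.lock);
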
|
intel_dsb.c |
    111 struct i915_vma *vma; local in function:intel_dsb_get
    129 vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, 0);
    130 if (IS_ERR(vma)) {
    131 DRM_ERROR("Vma creation failed\n");
    136 buf = i915_gem_object_pin_map(vma->obj, I915_MAP_WC);
    143 dsb->vma = vma;
    164 * unpinning and releasing the VMA object associated with it.
    179 i915_vma_unpin_and_release(&dsb->vma, I915_VMA_RELEASE_MAP);
    323 I915_WRITE(DSB_HEAD(pipe, dsb->id), i915_ggtt_offset(dsb->vma));
    [all...]
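
The intel_dsb.c hits outline the DSB buffer lifecycle: pin the object into the GGTT, map it write-combined, remember the VMA, point the hardware head register at its GGTT offset, and eventually drop mapping and pin in one call. A condensed sketch from the quoted lines; the error unwinding between them is not shown in the hits and is elided here:

	vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, 0);
	if (IS_ERR(vma)) {
		DRM_ERROR("Vma creation failed\n");
		/* error unwinding elided -- not quoted in the hits */
	} else {
		buf = i915_gem_object_pin_map(vma->obj, I915_MAP_WC);
		dsb->vma = vma;
	}

	/* Programming the pipe later points the HW at the GGTT offset: */
	I915_WRITE(DSB_HEAD(pipe, dsb->id), i915_ggtt_offset(dsb->vma));

	/* Teardown (intel_dsb_put) drops the map and the pin together: */
	i915_vma_unpin_and_release(&dsb->vma, I915_VMA_RELEASE_MAP);
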
/src/sys/external/bsd/drm2/dist/drm/i915/gt/ |
intel_renderstate.h | 55 struct i915_vma *vma; member in struct:intel_renderstate
|
intel_ring_types.h | 29 struct i915_vma *vma; member in struct:intel_ring
|
gen6_ppgtt.h | 17 struct i915_vma *vma; member in struct:gen6_ppgtt
|
intel_ring.c |
    31 struct i915_vma *vma = ring->vma; local in function:intel_ring_pin
    42 flags |= PIN_OFFSET_BIAS | i915_ggtt_pin_bias(vma);
    44 if (vma->obj->stolen)
    49 ret = i915_vma_pin(vma, 0, 0, flags);
    53 if (i915_vma_is_map_and_fenceable(vma))
    54 addr = (void __force *)i915_vma_pin_iomap(vma);
    56 addr = i915_gem_object_pin_map(vma->obj,
    57 i915_coherent_map_type(vma->vm->i915));
    63 i915_vma_make_unshrinkable(vma);
    89 struct i915_vma *vma = ring->vma; local in function:intel_ring_unpin
    109 struct i915_vma *vma; local in function:create_ring_vma
    142 struct i915_vma *vma; local in function:intel_engine_create_ring
    [all...]
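
intel_ring_pin() is mostly visible in the hits: the ring's VMA is pinned into the GGTT with an offset bias, then mapped either through the aperture or via a regular CPU mapping, and finally made unshrinkable. A sketch of that flow; the base pin flags and the error handling are assumptions, only the quoted statements are verbatim:

	struct i915_vma *vma = ring->vma;
	unsigned int flags = PIN_GLOBAL;	/* assumption: base pin flags */
	void *addr;
	int ret;

	flags |= PIN_OFFSET_BIAS | i915_ggtt_pin_bias(vma);
	/* line 44: vma->obj->stolen further adjusts the flags (not quoted). */

	ret = i915_vma_pin(vma, 0, 0, flags);
	if (ret)
		return ret;

	if (i915_vma_is_map_and_fenceable(vma))
		addr = (void __force *)i915_vma_pin_iomap(vma);
	else
		addr = i915_gem_object_pin_map(vma->obj,
		    i915_coherent_map_type(vma->vm->i915));

	i915_vma_make_unshrinkable(vma);
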
/src/sys/external/bsd/drm2/dist/drm/i915/gt/uc/ |
intel_guc_ct.h | 47 struct i915_vma *vma; member in struct:intel_guc_ct
|
intel_huc.c |
    66 struct i915_vma *vma; local in function:intel_huc_rsa_data_create
    80 * vma within the accessible range that only contains the rsa
    86 vma = intel_guc_allocate_vma(guc, PAGE_SIZE);
    87 if (IS_ERR(vma))
    88 return PTR_ERR(vma);
    90 vaddr = i915_gem_object_pin_map(vma->obj, I915_MAP_WB);
    92 i915_vma_unpin_and_release(&vma, 0);
    96 copied = intel_uc_fw_copy_rsa(&huc->fw, vaddr, vma->size);
    99 i915_gem_object_unpin_map(vma->obj);
    101 huc->rsa_data = vma;
    [all...]
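
intel_huc_rsa_data_create() is nearly complete in the hits: a PAGE_SIZE VMA is allocated in the GuC-accessible range, its object is mapped write-back, the firmware's RSA signature is copied in, the mapping is dropped, and the VMA is kept in huc->rsa_data. A sketch under that reading; variable declarations and the error check after pin_map are assumed, only the quoted calls are verbatim:

	/* vma, vaddr and copied are declared earlier (line 66 ff.). */
	vma = intel_guc_allocate_vma(guc, PAGE_SIZE);
	if (IS_ERR(vma))
		return PTR_ERR(vma);

	vaddr = i915_gem_object_pin_map(vma->obj, I915_MAP_WB);
	if (IS_ERR(vaddr)) {
		/* On failure the VMA is released again (line 92). */
		i915_vma_unpin_and_release(&vma, 0);
		return PTR_ERR(vaddr);
	}

	/* Copy the firmware's RSA signature into the pinned page. */
	copied = intel_uc_fw_copy_rsa(&huc->fw, vaddr, vma->size);

	i915_gem_object_unpin_map(vma->obj);
	huc->rsa_data = vma;
	return 0;
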
intel_guc_log.h | 49 struct i915_vma *vma; member in struct:intel_guc_log
|
/src/sys/external/bsd/drm2/dist/drm/i915/gem/ |
i915_gem_tiling.c |
    166 static bool i915_vma_fence_prepare(struct i915_vma *vma,
    169 struct drm_i915_private *i915 = vma->vm->i915;
    172 if (!i915_vma_is_map_and_fenceable(vma))
    175 size = i915_gem_fence_size(i915, vma->size, tiling_mode, stride);
    176 if (vma->node.size < size)
    179 alignment = i915_gem_fence_alignment(i915, vma->size, tiling_mode, stride);
    180 if (!IS_ALIGNED(vma->node.start, alignment))
    192 struct i915_vma *vma; local in function:i915_gem_object_fence_prepare
    199 for_each_ggtt_vma(vma, obj) {
    200 if (i915_vma_fence_prepare(vma, tiling_mode, stride)
    217 struct i915_vma *vma; local in function:i915_gem_object_set_tiling
    [all...]
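
i915_vma_fence_prepare() is shown almost in full: a VMA that is not map-and-fenceable trivially passes, otherwise its drm_mm node must be large enough and suitably aligned for a fence with the requested tiling mode and stride. A sketch of the whole predicate; the return statements and local declarations that the hits elide are filled in as assumptions:

static bool i915_vma_fence_prepare(struct i915_vma *vma,
				   int tiling_mode, unsigned int stride)
{
	struct drm_i915_private *i915 = vma->vm->i915;
	u32 size, alignment;

	/* Only aperture (map-and-fenceable) VMAs can back a fence. */
	if (!i915_vma_is_map_and_fenceable(vma))
		return true;

	size = i915_gem_fence_size(i915, vma->size, tiling_mode, stride);
	if (vma->node.size < size)
		return false;

	alignment = i915_gem_fence_alignment(i915, vma->size, tiling_mode, stride);
	if (!IS_ALIGNED(vma->node.start, alignment))
		return false;

	return true;
}
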
i915_gem_domain.c |
    158 struct i915_vma *vma; local in function:i915_gem_object_set_to_gtt_domain
    164 spin_lock(&obj->vma.lock);
    165 for_each_ggtt_vma(vma, obj)
    166 if (i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND))
    167 i915_vma_set_ggtt_write(vma);
    168 spin_unlock(&obj->vma.lock);
    176 * Changes the cache-level of an object across all VMA.
    217 /* The cache-level will be applied when each vma is rebound. */
    321 struct i915_vma *vma; local in function:i915_gem_object_pin_to_display_plane
    352 vma = ERR_PTR(-ENOSPC)
    374 struct i915_vma *vma; local in function:i915_gem_object_bump_inactive_ggtt
    [all...]
i915_gem_object_blt.c |
    21 struct i915_vma *vma,
    38 count = div_u64(round_up(vma->size, block_size), block_size);
    53 rem = vma->size;
    54 offset = vma->node.start;
    110 int intel_emit_vma_mark_active(struct i915_vma *vma, struct i915_request *rq)
    114 i915_vma_lock(vma);
    115 err = i915_request_await_object(rq, vma->obj, false);
    117 err = i915_vma_move_to_active(vma, rq, 0);
    118 i915_vma_unlock(vma);
    122 return intel_engine_pool_mark_active(vma->private, rq)
    138 struct i915_vma *vma; local in function:i915_gem_object_fill_blt
    325 struct i915_vma *vma[2], *batch; local in function:i915_gem_object_copy_blt
    [all...]
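
intel_emit_vma_mark_active() is quoted almost entirely: under the VMA lock the request waits on the object and the VMA is moved to the active list, then the batch-pool buffer behind vma->private is kept busy for the request as well. Sketch, with the error propagation between the quoted lines assumed:

int intel_emit_vma_mark_active(struct i915_vma *vma, struct i915_request *rq)
{
	int err;

	i915_vma_lock(vma);
	err = i915_request_await_object(rq, vma->obj, false);
	if (err == 0)
		err = i915_vma_move_to_active(vma, rq, 0);
	i915_vma_unlock(vma);
	if (err)
		return err;

	/* Keep the backing pool node busy until the request retires. */
	return intel_engine_pool_mark_active(vma->private, rq);
}
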
i915_gem_client_blt.c |
    19 struct i915_vma *vma; member in struct:i915_sleeve
    25 static int vma_set_pages(struct i915_vma *vma)
    27 struct i915_sleeve *sleeve = vma->private;
    29 vma->pages = sleeve->pages;
    30 vma->page_sizes = sleeve->page_sizes;
    35 static void vma_clear_pages(struct i915_vma *vma)
    37 GEM_BUG_ON(!vma->pages);
    38 vma->pages = NULL;
    41 static int vma_bind(struct i915_vma *vma,
    45 return vma->vm->vma_ops.bind_vma(vma, cache_level, flags)
    66 struct i915_vma *vma; local in function:create_sleeve
    164 struct i915_vma *vma = w->sleeve->vma; local in function:clear_pages_worker
    [all...]
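
The client-blt hits show the per-VMA callbacks used by an i915_sleeve: set_pages/clear_pages borrow the page tables stashed in the sleeve instead of the object's own, and bind simply forwards to the address space's normal bind_vma hook. A sketch of the three callbacks as they appear in the hits; return values and the truncated parameter list of vma_bind are assumptions:

static int vma_set_pages(struct i915_vma *vma)
{
	struct i915_sleeve *sleeve = vma->private;

	/* Borrow the sleeve's pages rather than the object's. */
	vma->pages = sleeve->pages;
	vma->page_sizes = sleeve->page_sizes;
	return 0;
}

static void vma_clear_pages(struct i915_vma *vma)
{
	GEM_BUG_ON(!vma->pages);
	vma->pages = NULL;
}

static int vma_bind(struct i915_vma *vma,
		    enum i915_cache_level cache_level, u32 flags)
{
	/* Delegate to the address space's regular bind hook. */
	return vma->vm->vma_ops.bind_vma(vma, cache_level, flags);
}
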
i915_gem_object.c |
    66 spin_lock_init(&obj->vma.lock);
    67 INIT_LIST_HEAD(&obj->vma.list);
    150 struct i915_vma *vma; local in function:i915_gem_close_object
    154 * vma, in the same fd namespace, by virtue of flink/open.
    158 vma = radix_tree_delete(&ctx->handles_vma, lut->handle);
    159 if (vma) {
    160 GEM_BUG_ON(vma->obj != obj);
    161 GEM_BUG_ON(!atomic_read(&vma->open_count));
    162 if (atomic_dec_and_test(&vma->open_count) &&
    163 !i915_vma_is_ggtt(vma))
    206 struct i915_vma *vma; local in function:__i915_gem_free_objects
    339 struct i915_vma *vma; local in function:i915_gem_object_flush_write_domain
    [all...]
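
The i915_gem_close_object() hit shows how a handle's lookup entry is torn down: the VMA is removed from the context's handles_vma radix tree, and once its open count drops to zero a non-GGTT VMA is closed. A sketch of that fragment; the call made when the count reaches zero is not quoted above and is an assumption here:

	vma = radix_tree_delete(&ctx->handles_vma, lut->handle);
	if (vma) {
		GEM_BUG_ON(vma->obj != obj);
		GEM_BUG_ON(!atomic_read(&vma->open_count));
		if (atomic_dec_and_test(&vma->open_count) &&
		    !i915_vma_is_ggtt(vma))
			i915_vma_close(vma);	/* assumption: not quoted */
	}
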
/src/sys/external/bsd/drm2/dist/drm/i915/selftests/ |
igt_spinner.c |
    77 static int move_to_active(struct i915_vma *vma,
    83 i915_vma_lock(vma);
    84 err = i915_request_await_object(rq, vma->obj,
    87 err = i915_vma_move_to_active(vma, rq, flags);
    88 i915_vma_unlock(vma);
    100 struct i915_vma *hws, *vma; local in function:igt_spinner_create_request
    110 vma = i915_vma_instance(spin->obj, ce->vm, NULL);
    111 if (IS_ERR(vma))
    112 return ERR_CAST(vma);
    118 err = i915_vma_pin(vma, 0, 0, PIN_USER)
    [all...]
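
The spinner selftest's move_to_active() is quoted nearly whole and follows the same lock/await/activate pattern as the blt helper above. Sketch, with the truncated parameters and the write flag on the await call filled in as assumptions:

static int move_to_active(struct i915_vma *vma,
			  struct i915_request *rq, unsigned int flags)
{
	int err;

	i915_vma_lock(vma);
	/* assumption: the write flag argument is cut off at line 84 */
	err = i915_request_await_object(rq, vma->obj,
					flags & EXEC_OBJECT_WRITE);
	if (err == 0)
		err = i915_vma_move_to_active(vma, rq, flags);
	i915_vma_unlock(vma);

	return err;
}
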
/src/games/sail/ |
pl_5.c |
    60 int vma, dir; local in function:acceptmove
    76 vma = ma;
    94 vma = min(ma, maxmove(ms, dir, 0));
    95 if ((ta < 0 && moved) || (vma < 0 && moved))
    100 vma--;
    102 if ((ta < 0 && moved) || (vma < 0 && moved))
    121 vma -= *p - '0';
    122 if ((ta < 0 && moved) || (vma < 0 && moved))
    131 if ((ta < 0 && moved) || (vma < 0 && moved)
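
In sail(6) the identifier has nothing to do with memory: vma is the movement allowance left while acceptmove() parses a move string, capped by maxmove() and spent as the string is walked, with the repeated check rejecting a move once either the turning budget (ta) or vma goes negative. A rough sketch of that bookkeeping; the parsing loop itself is not quoted and is an assumption:

	/* Allowance for this leg: the lesser of the ship's rate and what
	 * wind and rig permit in the chosen direction (line 94). */
	vma = min(ma, maxmove(ms, dir, 0));

	/* While parsing the move string (assumed loop): a turn costs a
	 * point of turning ability, an advance of N squares costs N. */
	ta--;			/* on a turn character (assumed) */
	vma -= *p - '0';	/* on a digit (line 121) */

	/* The move is rejected once either budget goes negative after
	 * the ship has already started to move (lines 95/102/122/131). */
	if ((ta < 0 && moved) || (vma < 0 && moved))
		goto reject;	/* assumption: rejection path not quoted */
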
|
/src/sys/external/bsd/drm2/i915drm/ |
intelfb.c |
    179 struct i915_vma *const vma = fbdev->vma; local in function:intelfb_drmfb_mmapfb
    182 KASSERT(offset < vma->node.size);
    185 vma->node.start + offset, prot, BUS_SPACE_MAP_PREFETCHABLE);
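
The NetBSD-specific intelfb_drmfb_mmapfb() hit shows how a framebuffer mmap fault is serviced: the offset is bounds-checked against the pinned VMA's node size and then mapped at the VMA's start plus the offset with a prefetchable bus-space mapping. A sketch assuming the final quoted line is the tail of a bus_space_mmap() call; the tag and base-address arguments are hypothetical names, only the quoted pieces are verbatim:

	struct i915_vma *const vma = fbdev->vma;

	KASSERT(offset < vma->node.size);

	/*
	 * aperture_bst/aperture_base are illustrative stand-ins; only the
	 * offset arithmetic, the KASSERT and the flag appear in the hits.
	 */
	return bus_space_mmap(aperture_bst, aperture_base,
	    vma->node.start + offset, prot, BUS_SPACE_MAP_PREFETCHABLE);
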
|