Lines matching defs:tbo — definitions and uses of tbo, the struct ttm_buffer_object embedded in struct radeon_bo:
64 u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT;
82 static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)
86 bo = container_of(tbo, struct radeon_bo, tbo);
88 radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1);
95 if (bo->tbo.base.import_attach)
96 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);
97 drm_gem_object_release(&bo->tbo.base);
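The destroy path above (lines 82-97) never receives a radeon_bo directly: TTM calls back with the embedded ttm_buffer_object, and the driver steps back out to its wrapper with container_of. A minimal userspace sketch of that embedding pattern follows; the struct definitions are trimmed stand-ins for the real kernel types, not their actual layouts.

#include <stddef.h>
#include <stdio.h>

/* Userspace copy of the kernel's container_of(): recover a pointer to
 * the enclosing struct from a pointer to one of its members. */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

/* Stand-ins for the real structs, reduced to what the pattern needs. */
struct ttm_buffer_object {
        unsigned long num_pages;
};

struct radeon_bo {
        struct ttm_buffer_object tbo;   /* embedded base object */
        int surface_reg;
};

/* Mirrors the shape of radeon_ttm_bo_destroy(): the callback gets only
 * the embedded tbo and recovers the containing radeon_bo from it. */
static void destroy_cb(struct ttm_buffer_object *tbo)
{
        struct radeon_bo *bo = container_of(tbo, struct radeon_bo, tbo);

        printf("destroying bo %p (%lu pages)\n",
               (void *)bo, bo->tbo.num_pages);
}

int main(void)
{
        struct radeon_bo bo = { .tbo = { .num_pages = 16 }, .surface_reg = -1 };

        destroy_cb(&bo.tbo);
        return 0;
}

Because tbo happens to be the first member here, container_of degenerates to a cast, but the macro stays correct wherever the member sits in the struct.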
219 drm_gem_private_object_init(rdev->ddev, &bo->tbo.base, size);
269 r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type,
294 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);
320 ttm_bo_get(&bo->tbo);
326 struct ttm_buffer_object *tbo;
330 tbo = &((*bo)->tbo);
331 ttm_bo_put(tbo);
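Lines 320-331 are the ref/unref wrappers: radeon takes and drops references by forwarding to ttm_bo_get()/ttm_bo_put() on the embedded object, and the unref side also clears the caller's pointer. A hedged userspace sketch of that get/put discipline; all names are illustrative, and the plain int counter stands in for the atomic kref the kernel actually uses, so this sketch is single-threaded only.

#include <stdio.h>
#include <stdlib.h>

struct object {
        int refcount;   /* the kernel uses an atomic kref instead */
};

static void obj_destroy(struct object *obj)
{
        printf("last reference dropped, freeing %p\n", (void *)obj);
        free(obj);
}

/* Analogue of ttm_bo_get(): taking a reference bumps the count. */
static void obj_get(struct object *obj)
{
        obj->refcount++;
}

/* Analogue of ttm_bo_put(): dropping the last reference destroys. */
static void obj_put(struct object *obj)
{
        if (--obj->refcount == 0)
                obj_destroy(obj);
}

/* Mirrors the shape of radeon_bo_unref(): tolerate NULL, drop the
 * reference, and clear the caller's pointer so it cannot be reused. */
static void obj_unref(struct object **objp)
{
        if (*objp == NULL)
                return;
        obj_put(*objp);
        *objp = NULL;
}

int main(void)
{
        struct object *obj = malloc(sizeof(*obj));

        obj->refcount = 1;      /* creation reference */
        obj_get(obj);           /* second user */
        obj_put(obj);           /* second user done */
        obj_unref(&obj);        /* creation reference gone, obj is NULL */
        return 0;
}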
341 if (radeon_ttm_tt_has_userptr(bo->tbo.ttm))
381 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
417 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
419 if (bo->tbo.mem.mem_type == TTM_PL_VRAM)
452 &bo->tbo.base, bo, (unsigned long)bo->tbo.base.size,
453 *((unsigned long *)&bo->tbo.base.refcount));
458 drm_gem_object_put_unlocked(&bo->tbo.base);
571 radeon_mem_type_to_domain(bo->tbo.mem.mem_type);
594 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
628 dma_resv_assert_held(bo->tbo.base.resv);
660 ttm_bo_unmap_virtual(&old_object->tbo);
670 bo->tbo.mem.start << PAGE_SHIFT,
671 bo->tbo.num_pages << PAGE_SHIFT);
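Lines 64, 670 and 671 all perform the same unit conversion: a page count or page index becomes a byte size or byte offset via a left shift by PAGE_SHIFT. A tiny sketch, assuming 4 KiB pages (PAGE_SHIFT = 12, as on x86; the kernel defines it per architecture):

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12   /* 4 KiB pages, assumed for this sketch */

int main(void)
{
        uint64_t num_pages = 16;
        uint64_t start_page = 0x40;

        /* as in "size = (u64)bo->tbo.num_pages << PAGE_SHIFT" */
        uint64_t size_bytes = num_pages << PAGE_SHIFT;

        /* as in "bo->tbo.mem.start << PAGE_SHIFT" */
        uint64_t offset_bytes = start_page << PAGE_SHIFT;

        printf("%llu pages = %llu bytes, page 0x%llx = byte offset 0x%llx\n",
               (unsigned long long)num_pages, (unsigned long long)size_bytes,
               (unsigned long long)start_page, (unsigned long long)offset_bytes);
        return 0;
}

The (u64) cast on line 64 matters: it widens num_pages before the shift so a large page count cannot overflow a 32-bit intermediate.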
754 dma_resv_assert_held(bo->tbo.base.resv);
766 dma_resv_assert_held(bo->tbo.base.resv);
776 if (bo->tbo.mem.mem_type != TTM_PL_VRAM) {
800 rbo = container_of(bo, struct radeon_bo, tbo);
822 rbo = container_of(bo, struct radeon_bo, tbo);
866 r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL);
870 *mem_type = bo->tbo.mem.mem_type;
872 r = ttm_bo_wait(&bo->tbo, true, no_wait);
873 ttm_bo_unreserve(&bo->tbo);
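Lines 866-873 form the reserve/inspect/wait/unreserve bracket of radeon_bo_wait(): lock the object (optionally without blocking), snapshot its placement while the lock is held, wait for outstanding work, then unlock. A loose userspace analogy using pthreads; every name is illustrative, and the fence wait is reduced to a flag check.

#include <errno.h>
#include <pthread.h>
#include <stdbool.h>

struct object {
        pthread_mutex_t lock;
        int mem_type;           /* stands in for tbo.mem.mem_type */
        bool busy;              /* stands in for unsignaled fences */
};

/* Shape of radeon_bo_wait(): reserve, read state under the lock,
 * wait (here: test a flag), unreserve. */
static int obj_wait(struct object *obj, int *mem_type, bool no_wait)
{
        int r;

        /* like ttm_bo_reserve(..., no_wait, ...): with no_wait set,
         * fail with -EBUSY instead of blocking on the lock */
        r = no_wait ? pthread_mutex_trylock(&obj->lock)
                    : pthread_mutex_lock(&obj->lock);
        if (r != 0)
                return -EBUSY;

        if (mem_type)
                *mem_type = obj->mem_type;      /* valid only under lock */

        /* stand-in for ttm_bo_wait(): the kernel blocks on the buffer's
         * fences; this sketch just reports whether work is pending */
        r = obj->busy ? -EBUSY : 0;

        pthread_mutex_unlock(&obj->lock);
        return r;
}

int main(void)
{
        struct object obj = { .lock = PTHREAD_MUTEX_INITIALIZER,
                              .mem_type = 2, .busy = false };
        int mem_type;

        return obj_wait(&obj, &mem_type, false) ? 1 : 0;
}

Reading mem_type inside the bracket is the point of the pattern: the placement is only stable while the object is reserved, so the snapshot is taken before the lock is released.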
888 struct dma_resv *resv = bo->tbo.base.resv;