Lines Matching defs:rbo
146 struct radeon_bo *rbo;
155 rbo = container_of(bo, struct radeon_bo, tbo);
158 if (rbo->rdev->ring[radeon_copy_ring_index(rbo->rdev)].ready == false)
159 radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_CPU);
160 else if (rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size &&
161 bo->mem.start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) {
162 unsigned fpfn = rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
170 radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_VRAM |
172 rbo->placement.num_busy_placement = 0;
173 for (i = 0; i < rbo->placement.num_placement; i++) {
174 if (rbo->placements[i].flags & TTM_PL_FLAG_VRAM) {
175 if (rbo->placements[i].fpfn < fpfn)
176 rbo->placements[i].fpfn = fpfn;
178 rbo->placement.busy_placement =
179 &rbo->placements[i];
180 rbo->placement.num_busy_placement = 1;
184 radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_GTT);
188 radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_CPU);
190 *placement = rbo->placement;
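
The matches from line 146 through 190 all fall inside the VRAM eviction-flags callback. As context, the surrounding function plausibly looks like the sketch below (reconstructed around the matched lines; the function name radeon_evict_flags, its signature, the switch on bo->mem.mem_type, and the comments are taken from the mainline driver in drivers/gpu/drm/radeon/radeon_ttm.c and are not part of the listing itself):

/* Sketch only; context is drivers/gpu/drm/radeon/radeon_ttm.c (needs radeon.h
 * and the TTM headers). Everything outside the matched lines is an assumption
 * based on the mainline radeon driver. */
static void radeon_evict_flags(struct ttm_buffer_object *bo,
                               struct ttm_placement *placement)
{
        struct radeon_bo *rbo;

        /* (The mainline function first handles non-radeon BOs with a static
         * system-memory placement; that part is elided here.) */
        rbo = container_of(bo, struct radeon_bo, tbo);
        switch (bo->mem.mem_type) {
        case TTM_PL_VRAM:
                if (rbo->rdev->ring[radeon_copy_ring_index(rbo->rdev)].ready == false)
                        /* No copy ring to do the move, so evict to CPU-accessible
                         * system memory. */
                        radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_CPU);
                else if (rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size &&
                         bo->mem.start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) {
                        unsigned fpfn = rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
                        int i;

                        /* Try the CPU-invisible part of VRAM first, but keep GTT
                         * as the only busy placement so this BO falls back to GTT
                         * instead of pushing other BOs out of VRAM. */
                        radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_VRAM |
                                                         RADEON_GEM_DOMAIN_GTT);
                        rbo->placement.num_busy_placement = 0;
                        for (i = 0; i < rbo->placement.num_placement; i++) {
                                if (rbo->placements[i].flags & TTM_PL_FLAG_VRAM) {
                                        /* Restrict VRAM placements to pages above
                                         * the CPU-visible boundary. */
                                        if (rbo->placements[i].fpfn < fpfn)
                                                rbo->placements[i].fpfn = fpfn;
                                } else {
                                        rbo->placement.busy_placement =
                                                &rbo->placements[i];
                                        rbo->placement.num_busy_placement = 1;
                                }
                        }
                } else
                        radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_GTT);
                break;
        case TTM_PL_TT:
        default:
                radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_CPU);
        }
        *placement = rbo->placement;
}
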
195 struct radeon_bo *rbo = container_of(bo, struct radeon_bo, tbo);
201 return drm_vma_node_verify_access(&rbo->tbo.base.vma_node, drm_file);
203 return drm_vma_node_verify_access(&rbo->tbo.base.vma_node,
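
Lines 195-203 sit in the verify-access callback; the match at line 203 appears cut off because the call's second argument continues on the following source line, which does not contain rbo and so is not listed. A minimal sketch of that callback, assuming the mainline radeon_verify_access() shape (the filp parameter and the filp->private_data argument are assumptions, not shown in the listing):

/* Sketch only; assumes the mainline radeon_verify_access(). */
static int radeon_verify_access(struct ttm_buffer_object *bo, struct file *filp)
{
        struct radeon_bo *rbo = container_of(bo, struct radeon_bo, tbo);

        /* The mainline driver rejects userptr BOs here with -EPERM; that check
         * is elided because its helper's signature varies across kernel
         * versions. filp->private_data is the struct drm_file for a DRM fd. */
        return drm_vma_node_verify_access(&rbo->tbo.base.vma_node,
                                          filp->private_data);
}
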
361 struct radeon_bo *rbo;
370 rbo = container_of(bo, struct radeon_bo, tbo);
371 if (WARN_ON_ONCE(rbo->pin_count > 0))
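
The final group (lines 361-371) is the pinned-BO guard at the top of the buffer-move path. A short sketch of that guard, assuming the enclosing function is the mainline radeon_bo_move() (its name, signature, and the -EINVAL return are assumptions from roughly the 5.10-era driver, not shown in the listing):

/* Sketch only; signature assumed from the ~5.10 mainline radeon_bo_move(). */
static int radeon_bo_move(struct ttm_buffer_object *bo, bool evict,
                          struct ttm_operation_ctx *ctx,
                          struct ttm_resource *new_mem)
{
        struct radeon_bo *rbo;

        rbo = container_of(bo, struct radeon_bo, tbo);
        /* Pinned BOs must never be moved; warn once and refuse the move. */
        if (WARN_ON_ONCE(rbo->pin_count > 0))
                return -EINVAL;

        /* ... actual GTT/VRAM move handling elided ... */
        return 0;
}
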