Lines Matching refs:rdev (radeon_gart.c)

69  * @rdev: radeon_device pointer
76 int radeon_gart_table_ram_alloc(struct radeon_device *rdev)
82 error = bus_dmamem_alloc(rdev->ddev->dmat, rdev->gart.table_size,
83 PAGE_SIZE, 0, &rdev->gart.rg_table_seg, 1, &rsegs, BUS_DMA_WAITOK);
87 error = bus_dmamap_create(rdev->ddev->dmat, rdev->gart.table_size, 1,
88 rdev->gart.table_size, 0, BUS_DMA_WAITOK,
89 &rdev->gart.rg_table_map);
92 error = bus_dmamem_map(rdev->ddev->dmat, &rdev->gart.rg_table_seg, 1,
93 rdev->gart.table_size, &rdev->gart.ptr,
97 error = bus_dmamap_load(rdev->ddev->dmat, rdev->gart.rg_table_map,
98 rdev->gart.ptr, rdev->gart.table_size, NULL, BUS_DMA_WAITOK);
102 memset(rdev->gart.ptr, 0, rdev->gart.table_size);
103 bus_dmamap_sync(rdev->ddev->dmat, rdev->gart.rg_table_map, 0,
104 rdev->gart.table_size, BUS_DMASYNC_PREWRITE);
107 rdev->gart.table_addr = rdev->gart.rg_table_map->dm_segs[0].ds_addr;
111 bus_dmamap_unload(rdev->ddev->dmat, rdev->gart.rg_table_map);
112 fail3: bus_dmamem_unmap(rdev->ddev->dmat, rdev->gart.ptr,
113 rdev->gart.table_size);
114 fail2: bus_dmamap_destroy(rdev->ddev->dmat, rdev->gart.rg_table_map);
115 fail1: bus_dmamem_free(rdev->ddev->dmat, &rdev->gart.rg_table_seg, 1);
122 ptr = pci_alloc_consistent(rdev->pdev, rdev->gart.table_size,
123 &rdev->gart.table_addr);
128 if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480 ||
129 rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) {
131 rdev->gart.table_size >> PAGE_SHIFT);
134 rdev->gart.ptr = ptr;
135 memset((void *)rdev->gart.ptr, 0, rdev->gart.table_size);
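The matches at 69-135 come from radeon_gart_table_ram_alloc(): the NetBSD bus_dma(9) branch (allocate a segment, create and load a single-segment map, map it into kernel VA, zero and sync it, unwinding through the fail labels on error) and the Linux branch that uses pci_alloc_consistent() plus an uncached mapping on RS400/RS480/RS690/RS740. Below is a minimal, self-contained sketch of that bus_dma sequence; alloc_dma_table and struct dma_table are illustrative names, not the driver's.

/*
 * Sketch of the bus_dma(9) sequence used for a RAM-resident GART table:
 * allocate one page-aligned segment, create a single-segment map, map it
 * into kernel VA, load it, zero it, and push the zeroes out to the device.
 * Illustrative only.
 */
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/bus.h>

struct dma_table {
	bus_dma_segment_t	 seg;
	bus_dmamap_t		 map;
	void			*kva;
	bus_addr_t		 busaddr;
};

static int
alloc_dma_table(bus_dma_tag_t dmat, bus_size_t size, struct dma_table *t)
{
	int error, rsegs;

	error = bus_dmamem_alloc(dmat, size, PAGE_SIZE, 0, &t->seg, 1,
	    &rsegs, BUS_DMA_WAITOK);
	if (error)
		return error;
	error = bus_dmamap_create(dmat, size, 1, size, 0, BUS_DMA_WAITOK,
	    &t->map);
	if (error)
		goto fail1;
	error = bus_dmamem_map(dmat, &t->seg, 1, size, &t->kva,
	    BUS_DMA_WAITOK);
	if (error)
		goto fail2;
	error = bus_dmamap_load(dmat, t->map, t->kva, size, NULL,
	    BUS_DMA_WAITOK);
	if (error)
		goto fail3;

	/* Zero the table and make it visible before the device reads it. */
	memset(t->kva, 0, size);
	bus_dmamap_sync(dmat, t->map, 0, size, BUS_DMASYNC_PREWRITE);
	t->busaddr = t->map->dm_segs[0].ds_addr;
	return 0;

fail3:	bus_dmamem_unmap(dmat, t->kva, size);
fail2:	bus_dmamap_destroy(dmat, t->map);
fail1:	bus_dmamem_free(dmat, &t->seg, 1);
	return error;
}

radeon_gart_table_ram_free() (149-172 below) runs the same unload/unmap/destroy/free teardown unconditionally after an initial rdev->gart.ptr == NULL guard.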
143 * @rdev: radeon_device pointer
149 void radeon_gart_table_ram_free(struct radeon_device *rdev)
151 if (rdev->gart.ptr == NULL) {
155 bus_dmamap_unload(rdev->ddev->dmat, rdev->gart.rg_table_map);
156 bus_dmamem_unmap(rdev->ddev->dmat, rdev->gart.ptr,
157 rdev->gart.table_size);
158 bus_dmamap_destroy(rdev->ddev->dmat, rdev->gart.rg_table_map);
159 bus_dmamem_free(rdev->ddev->dmat, &rdev->gart.rg_table_seg, 1);
162 if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480 ||
163 rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) {
164 set_memory_wb((unsigned long)rdev->gart.ptr,
165 rdev->gart.table_size >> PAGE_SHIFT);
168 pci_free_consistent(rdev->pdev, rdev->gart.table_size,
169 (void *)rdev->gart.ptr,
170 rdev->gart.table_addr);
171 rdev->gart.ptr = NULL;
172 rdev->gart.table_addr = 0;
179 * @rdev: radeon_device pointer
186 int radeon_gart_table_vram_alloc(struct radeon_device *rdev)
190 if (rdev->gart.robj == NULL) {
191 r = radeon_bo_create(rdev, rdev->gart.table_size,
193 0, NULL, NULL, &rdev->gart.robj);
204 * @rdev: radeon_device pointer
211 int radeon_gart_table_vram_pin(struct radeon_device *rdev)
216 r = radeon_bo_reserve(rdev->gart.robj, false);
219 r = radeon_bo_pin(rdev->gart.robj,
222 radeon_bo_unreserve(rdev->gart.robj);
225 r = radeon_bo_kmap(rdev->gart.robj, &rdev->gart.ptr);
227 radeon_bo_unpin(rdev->gart.robj);
228 radeon_bo_unreserve(rdev->gart.robj);
229 rdev->gart.table_addr = gpu_addr;
237 for (i = 0; i < rdev->gart.num_gpu_pages; i++)
238 radeon_gart_set_page(rdev, i, rdev->gart.pages_entry[i]);
240 radeon_gart_tlb_flush(rdev);
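radeon_gart_table_vram_alloc() (186-193) creates the table as a VRAM buffer object, and radeon_gart_table_vram_pin() (211-240) then walks the usual reserve, pin, kmap, unreserve sequence before replaying every cached entry into the freshly mapped table and flushing the GART TLB. A hedged reconstruction of that pin path, condensed from the matches above (not a verbatim copy of the file; radeon_bo_* argument order as in the mainline driver):

/* Assumes the driver's internal "radeon.h" for struct radeon_device etc. */
static int
gart_table_vram_pin_sketch(struct radeon_device *rdev)
{
	uint64_t gpu_addr;
	unsigned i;
	int r;

	r = radeon_bo_reserve(rdev->gart.robj, false);
	if (r != 0)
		return r;
	r = radeon_bo_pin(rdev->gart.robj, RADEON_GEM_DOMAIN_VRAM, &gpu_addr);
	if (r) {
		radeon_bo_unreserve(rdev->gart.robj);
		return r;
	}
	r = radeon_bo_kmap(rdev->gart.robj, &rdev->gart.ptr);
	if (r)
		radeon_bo_unpin(rdev->gart.robj);
	radeon_bo_unreserve(rdev->gart.robj);
	rdev->gart.table_addr = gpu_addr;

	if (!r) {
		/* Replay the CPU-side entry cache into the mapped table,
		 * then invalidate the GPU's GART TLB. */
		for (i = 0; i < rdev->gart.num_gpu_pages; i++)
			radeon_gart_set_page(rdev, i,
			    rdev->gart.pages_entry[i]);
		radeon_gart_tlb_flush(rdev);
	}
	return r;
}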
249 * @rdev: radeon_device pointer
254 void radeon_gart_table_vram_unpin(struct radeon_device *rdev)
258 if (rdev->gart.robj == NULL) {
261 r = radeon_bo_reserve(rdev->gart.robj, false);
263 radeon_bo_kunmap(rdev->gart.robj);
264 radeon_bo_unpin(rdev->gart.robj);
265 radeon_bo_unreserve(rdev->gart.robj);
266 rdev->gart.ptr = NULL;
273 * @rdev: radeon_device pointer
279 void radeon_gart_table_vram_free(struct radeon_device *rdev)
281 if (rdev->gart.robj == NULL) {
284 radeon_bo_unref(&rdev->gart.robj);
289 radeon_gart_pre_update(struct radeon_device *rdev, unsigned gpu_pgstart,
293 if (rdev->gart.rg_table_map != NULL) {
295 rdev->gart.table_size / rdev->gart.num_gpu_pages;
297 bus_dmamap_sync(rdev->ddev->dmat, rdev->gart.rg_table_map,
304 radeon_gart_post_update(struct radeon_device *rdev, unsigned gpu_pgstart,
308 if (rdev->gart.rg_table_map != NULL) {
310 rdev->gart.table_size / rdev->gart.num_gpu_pages;
312 bus_dmamap_sync(rdev->ddev->dmat, rdev->gart.rg_table_map,
316 if (rdev->gart.ptr != NULL) {
318 radeon_gart_tlb_flush(rdev);
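radeon_gart_pre_update() and radeon_gart_post_update() (289-318) exist only on the bus_dma path: they derive the per-entry size from table_size / num_gpu_pages, bus_dmamap_sync() exactly the byte range covered by the touched GPU pages, and the post hook flushes the GART TLB once rdev->gart.ptr is known to be mapped (316-318). A small sketch of that range arithmetic; the specific BUS_DMASYNC_* ops are an assumption, since the matches show only the sync calls themselves:

/* Sketch: sync only the GPU-page entries being rewritten.  The ops
 * argument would conventionally be BUS_DMASYNC_POSTWRITE before the CPU
 * touches the entries and BUS_DMASYNC_PREWRITE afterwards (assumed). */
static void
gart_sync_entries(struct radeon_device *rdev, unsigned gpu_pgstart,
    unsigned gpu_npages, int ops)
{
	if (rdev->gart.rg_table_map != NULL) {
		const unsigned entsize =
		    rdev->gart.table_size / rdev->gart.num_gpu_pages;

		bus_dmamap_sync(rdev->ddev->dmat, rdev->gart.rg_table_map,
		    (bus_addr_t)gpu_pgstart * entsize,
		    (bus_size_t)gpu_npages * entsize, ops);
	}
}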
328 radeon_gart_unbind(struct radeon_device *rdev, unsigned gpu_start,
338 KASSERT(npages <= rdev->gart.num_cpu_pages);
339 KASSERT(gpu_npages <= rdev->gart.num_cpu_pages);
341 if (!rdev->gart.ready) {
346 radeon_gart_pre_update(rdev, gpu_pgstart, gpu_npages);
348 if (rdev->gart.pages[pgstart + pgno] == NULL)
350 rdev->gart.pages[pgstart + pgno] = NULL;
354 rdev->gart.pages_entry[t] = rdev->dummy_page.entry;
355 if (rdev->gart.ptr == NULL)
357 radeon_gart_set_page(rdev, t, rdev->dummy_page.entry);
360 radeon_gart_post_update(rdev, gpu_pgstart, gpu_npages);
366 * @rdev: radeon_device pointer
373 void radeon_gart_unbind(struct radeon_device *rdev, unsigned offset,
380 if (!rdev->gart.ready) {
387 if (rdev->gart.pages[p]) {
388 rdev->gart.pages[p] = NULL;
390 rdev->gart.pages_entry[t] = rdev->dummy_page.entry;
391 if (rdev->gart.ptr) {
392 radeon_gart_set_page(rdev, t,
393 rdev->dummy_page.entry);
398 if (rdev->gart.ptr) {
400 radeon_gart_tlb_flush(rdev);
407 radeon_gart_bind(struct radeon_device *rdev, unsigned gpu_start,
419 KASSERT(npages <= rdev->gart.num_cpu_pages);
420 KASSERT(gpu_npages <= rdev->gart.num_cpu_pages);
422 if (!rdev->gart.ready) {
427 radeon_gart_pre_update(rdev, gpu_pgstart, gpu_npages);
432 rdev->gart.pages[pgstart + pgno] = pages[pgno];
438 rdev->gart.pages_entry[i] = page_entry;
439 if (rdev->gart.ptr == NULL)
441 radeon_gart_set_page(rdev, i, page_entry);
444 radeon_gart_post_update(rdev, gpu_pgstart, gpu_npages);
452 * @rdev: radeon_device pointer
463 int radeon_gart_bind(struct radeon_device *rdev, unsigned offset,
472 if (!rdev->gart.ready) {
480 rdev->gart.pages[p] = pagelist[i];
484 rdev->gart.pages_entry[t] = page_entry;
485 if (rdev->gart.ptr) {
486 radeon_gart_set_page(rdev, t, page_entry);
491 if (rdev->gart.ptr) {
493 radeon_gart_tlb_flush(rdev);
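The unbind/bind pairs (328-360 and 373-400 for unbind, 407-444 and 463-493 for bind) keep two views in step: gart.pages[] records the CPU page backing each PAGE_SIZE slot, while gart.pages_entry[] caches the 64-bit entry for every RADEON_GPU_PAGE_SIZE page so the table can be rebuilt after a re-pin. A condensed sketch of the mainline-style bind loop follows; unbind is the mirror image that stores rdev->dummy_page.entry instead. radeon_gart_get_page_entry() is the asic wrapper the mainline driver uses to turn a DMA address plus flags into an entry; details not visible in the matches should be read as assumptions.

/* Assumes the driver's internal "radeon.h".  Each CPU page spans
 * PAGE_SIZE / RADEON_GPU_PAGE_SIZE GPU pages. */
static int
gart_bind_sketch(struct radeon_device *rdev, unsigned offset, int pages,
    struct page **pagelist, dma_addr_t *dma_addr, uint32_t flags)
{
	unsigned t, p;
	uint64_t page_base, page_entry;
	int i, j;

	if (!rdev->gart.ready)
		return -EINVAL;

	t = offset / RADEON_GPU_PAGE_SIZE;
	p = t / (PAGE_SIZE / RADEON_GPU_PAGE_SIZE);
	for (i = 0; i < pages; i++, p++) {
		rdev->gart.pages[p] = pagelist[i];
		page_base = dma_addr[i];
		for (j = 0; j < (PAGE_SIZE / RADEON_GPU_PAGE_SIZE); j++, t++) {
			page_entry = radeon_gart_get_page_entry(page_base,
			    flags);
			rdev->gart.pages_entry[t] = page_entry;
			if (rdev->gart.ptr)
				radeon_gart_set_page(rdev, t, page_entry);
			page_base += RADEON_GPU_PAGE_SIZE;
		}
	}
	if (rdev->gart.ptr)
		radeon_gart_tlb_flush(rdev);
	return 0;
}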
502 * @rdev: radeon_device pointer
507 int radeon_gart_init(struct radeon_device *rdev)
511 if (rdev->gart.pages) {
519 r = radeon_dummy_page_init(rdev);
523 rdev->gart.num_cpu_pages = rdev->mc.gtt_size / PAGE_SIZE;
524 rdev->gart.num_gpu_pages = rdev->mc.gtt_size / RADEON_GPU_PAGE_SIZE;
526 rdev->gart.num_cpu_pages, rdev->gart.num_gpu_pages);
528 rdev->gart.pages = vzalloc(array_size(sizeof(void *),
529 rdev->gart.num_cpu_pages));
530 if (rdev->gart.pages == NULL) {
531 radeon_gart_fini(rdev);
534 rdev->gart.pages_entry = vmalloc(array_size(sizeof(uint64_t),
535 rdev->gart.num_gpu_pages));
536 if (rdev->gart.pages_entry == NULL) {
537 radeon_gart_fini(rdev);
541 for (i = 0; i < rdev->gart.num_gpu_pages; i++)
542 rdev->gart.pages_entry[i] = rdev->dummy_page.entry;
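radeon_gart_init() (507-542) sizes both bookkeeping arrays from the GTT aperture: num_cpu_pages counts PAGE_SIZE slots and num_gpu_pages counts RADEON_GPU_PAGE_SIZE (4 KiB) entries, so a 512 MiB GTT on a 4 KiB-page host yields 131072 of each, while a 16 KiB-page host gets a quarter as many CPU slots for the same number of GPU entries. A condensed sketch of that setup, with the dummy-page default from line 542:

/* Sketch of radeon_gart_init() bookkeeping, condensed from the matches
 * above; assumes the driver's internal "radeon.h". */
static int
gart_init_sketch(struct radeon_device *rdev)
{
	unsigned i;

	/* CPU-sized slots vs. GPU-sized (4 KiB) entries. */
	rdev->gart.num_cpu_pages = rdev->mc.gtt_size / PAGE_SIZE;
	rdev->gart.num_gpu_pages = rdev->mc.gtt_size / RADEON_GPU_PAGE_SIZE;

	rdev->gart.pages = vzalloc(array_size(sizeof(void *),
	    rdev->gart.num_cpu_pages));
	rdev->gart.pages_entry = vmalloc(array_size(sizeof(uint64_t),
	    rdev->gart.num_gpu_pages));
	if (rdev->gart.pages == NULL || rdev->gart.pages_entry == NULL) {
		radeon_gart_fini(rdev);
		return -ENOMEM;
	}

	/* Until something is bound, every entry points at the dummy page. */
	for (i = 0; i < rdev->gart.num_gpu_pages; i++)
		rdev->gart.pages_entry[i] = rdev->dummy_page.entry;
	return 0;
}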
549 * @rdev: radeon_device pointer
553 void radeon_gart_fini(struct radeon_device *rdev)
555 if (rdev->gart.ready) {
557 radeon_gart_unbind(rdev, 0, rdev->gart.num_cpu_pages);
559 rdev->gart.ready = false;
560 vfree(rdev->gart.pages);
561 vfree(rdev->gart.pages_entry);
562 rdev->gart.pages = NULL;
563 rdev->gart.pages_entry = NULL;
565 radeon_dummy_page_fini(rdev);