/src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/ |
amdgpu_job.c |
    44  if (amdgpu_ring_soft_recovery(ring, job->vmid, s_job->s_fence->parent)) {
   117  f = job->base.s_fence ? &job->base.s_fence->finished : job->fence;
   161  *f = dma_fence_get(&job->base.s_fence->finished);
   208  &job->base.s_fence->finished,
   227  finished = &job->base.s_fence->finished;
   273  struct drm_sched_fence *s_fence = s_job->s_fence;    (local in function: amdgpu_job_stop_all_jobs_on_sched)
   275  dma_fence_signal(&s_fence->scheduled);
   276  dma_fence_set_error(&s_fence->finished, -EHWPOISON);
   285  struct drm_sched_fence *s_fence = s_job->s_fence;    (local in function: amdgpu_job_stop_all_jobs_on_sched)
  [all...] |
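The amdgpu_job.c hits show two recurring uses of the scheduler fence: preferring the scheduler's finished fence over the raw hardware fence (lines 117 and 161), and, in amdgpu_job_stop_all_jobs_on_sched, signalling the scheduled fence while poisoning the finished fence with -EHWPOISON (lines 275-276). Below is a minimal, self-contained sketch of both patterns; all types and names are simplified stand-ins for illustration, not the kernel's dma_fence/drm_sched_fence API.

#include <stdio.h>
#include <stdbool.h>

/* Mock stand-ins; the kernel structures carry locks, ops and refcounts. */
struct dma_fence { bool signaled; int error; };

struct drm_sched_fence {
	struct dma_fence scheduled;	/* job picked up by the scheduler */
	struct dma_fence finished;	/* job completed (or aborted) */
};

struct job {
	struct drm_sched_fence *s_fence;	/* like drm_sched_job::s_fence */
	struct dma_fence *hw_fence;		/* like amdgpu_job::fence */
};

/* Pattern from lines 117/161: prefer the scheduler's finished fence. */
static struct dma_fence *job_wait_fence(struct job *j)
{
	return j->s_fence ? &j->s_fence->finished : j->hw_fence;
}

/* Pattern from lines 275-276: abort a job that will never reach the HW. */
static void job_poison(struct drm_sched_fence *f, int err)
{
	f->scheduled.signaled = true;	/* dma_fence_signal(&s_fence->scheduled) */
	f->finished.error = err;	/* dma_fence_set_error(..., -EHWPOISON) */
	f->finished.signaled = true;
}

int main(void)
{
	struct drm_sched_fence sf = {0};
	struct job j = { .s_fence = &sf, .hw_fence = NULL };

	job_poison(j.s_fence, -133 /* stand-in value for -EHWPOISON */);
	printf("wait on %p, error %d\n",
	    (void *)job_wait_fence(&j), sf.finished.error);
	return 0;
}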
amdgpu_sync.c |
    72  struct drm_sched_fence *s_fence = to_drm_sched_fence(f);    (local in function: amdgpu_sync_same_dev)
    74  if (s_fence) {
    77  ring = container_of(s_fence->sched, struct amdgpu_ring, sched);
    93  struct drm_sched_fence *s_fence;    (local in function: amdgpu_sync_get_owner)
    99  s_fence = to_drm_sched_fence(f);
   100  if (s_fence)
   101  return s_fence->owner;
   288  struct drm_sched_fence *s_fence = to_drm_sched_fence(f);    (local in function: amdgpu_sync_peek_fence)
   296  if (ring && s_fence) {
   300  if (s_fence->sched == &ring->sched)
  [all...] |
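amdgpu_sync_get_owner and amdgpu_sync_same_dev both start by trying to downcast a plain dma_fence to a drm_sched_fence (to_drm_sched_fence) and only then look at s_fence->owner or s_fence->sched. The following self-contained sketch models that downcast-then-inspect pattern with container_of on mock types; the tag-based dispatch below is an illustration only (the real to_drm_sched_fence checks the fence ops).

#include <stdio.h>
#include <stddef.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Mock fence with a "kind" tag instead of the kernel's ops-pointer check. */
enum fence_kind { FENCE_PLAIN, FENCE_SCHED_SCHEDULED, FENCE_SCHED_FINISHED };

struct dma_fence { enum fence_kind kind; };

struct drm_sched_fence {
	struct dma_fence scheduled;
	struct dma_fence finished;
	void *owner;		/* what amdgpu_sync_get_owner() reports */
};

/* Sketch of to_drm_sched_fence(): NULL unless the fence came from the scheduler. */
static struct drm_sched_fence *to_sched_fence(struct dma_fence *f)
{
	if (f->kind == FENCE_SCHED_SCHEDULED)
		return container_of(f, struct drm_sched_fence, scheduled);
	if (f->kind == FENCE_SCHED_FINISHED)
		return container_of(f, struct drm_sched_fence, finished);
	return NULL;
}

/* Pattern from amdgpu_sync_get_owner() (lines 93-101). */
static void *fence_owner(struct dma_fence *f)
{
	struct drm_sched_fence *s_fence = to_sched_fence(f);

	return s_fence ? s_fence->owner : NULL;
}

int main(void)
{
	int ctx;	/* stands in for an amdgpu owner pointer */
	struct drm_sched_fence sf = {
		.scheduled.kind = FENCE_SCHED_SCHEDULED,
		.finished.kind  = FENCE_SCHED_FINISHED,
		.owner = &ctx,
	};

	printf("owner via finished fence: %p (expect %p)\n",
	    fence_owner(&sf.finished), (void *)&ctx);
	return 0;
}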
amdgpu_cs.c |
   993  struct drm_sched_fence *s_fence;    (local in function: amdgpu_cs_process_fence_dep)
   996  s_fence = to_drm_sched_fence(fence);
   997  fence = dma_fence_get(&s_fence->scheduled);
  1250  p->fence = dma_fence_get(&job->base.s_fence->finished);
|
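The amdgpu_cs.c hits contrast the two stages of a scheduler fence: in amdgpu_cs_process_fence_dep a dependency that is itself a scheduler fence is relaxed to its scheduled stage (line 997), while the fence handed back from command submission is the finished fence (line 1250). A small, self-contained sketch of that distinction on mock types; the function names and types here are illustrative, not the amdgpu API.

#include <stdio.h>
#include <stdbool.h>

/* Mock stand-ins; the kernel fences are reference counted dma_fences. */
struct dma_fence { int refcount; bool signaled; };

struct drm_sched_fence {
	struct dma_fence scheduled;	/* job handed to the hardware ring */
	struct dma_fence finished;	/* job fully completed */
};

static struct dma_fence *fence_get(struct dma_fence *f)
{
	f->refcount++;			/* models dma_fence_get() */
	return f;
}

/*
 * Line 997: the dependency only needs the producer to have been picked up
 * by its scheduler, so the weaker scheduled fence is taken.
 */
static struct dma_fence *dep_fence(struct drm_sched_fence *producer)
{
	return fence_get(&producer->scheduled);
}

/*
 * Line 1250: what command submission hands back (p->fence) is the finished
 * fence, i.e. waiters see full completion of the job.
 */
static struct dma_fence *submit_fence(struct drm_sched_fence *job_fence)
{
	return fence_get(&job_fence->finished);
}

int main(void)
{
	struct drm_sched_fence f = {0};

	printf("dependency waits on scheduled=%p, submit returns finished=%p\n",
	    (void *)dep_fence(&f), (void *)submit_fence(&f));
	return 0;
}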
/src/sys/external/bsd/drm2/dist/drm/scheduler/ |
sched_entity.c |
   243  drm_sched_fence_finished(job->s_fence);
   244  WARN_ON(job->s_fence->parent);
   262  struct drm_sched_fence *s_fence = job->s_fence;    (local in function: drm_sched_entity_kill_jobs)
   264  drm_sched_fence_scheduled(s_fence);
   265  dma_fence_set_error(&s_fence->finished, -ESRCH);
   408  struct drm_sched_fence *s_fence;    (local in function: drm_sched_entity_add_dependency_cb)
   421  s_fence = to_drm_sched_fence(fence);
   422  if (s_fence && s_fence->sched == sched)
  [all...] |
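drm_sched_entity_kill_jobs (lines 262-265, with the completion side at 243-244) disposes of jobs that will never run: the scheduled fence is still driven to its scheduled state, but the finished fence carries -ESRCH so waiters wake up with an error instead of hanging. A compact sketch of that teardown order on mock types, labelled as a simplified model rather than the kernel API:

#include <stdio.h>
#include <stdbool.h>

/* Mock stand-ins for struct dma_fence / struct drm_sched_fence. */
struct dma_fence { bool signaled; int error; };

struct drm_sched_fence {
	struct dma_fence scheduled;
	struct dma_fence finished;
};

static void fence_signal(struct dma_fence *f)            { f->signaled = true; }
static void fence_set_error(struct dma_fence *f, int e)  { f->error = e; }

/*
 * Models lines 264-265 plus the finishing step at line 243: mark the job as
 * "scheduled" so anything gated on that stage is released, fail its finished
 * fence with -ESRCH, then signal it so waiters observe the error.
 */
static void kill_job(struct drm_sched_fence *s_fence)
{
	fence_signal(&s_fence->scheduled);	/* drm_sched_fence_scheduled() */
	fence_set_error(&s_fence->finished, -3 /* stand-in for -ESRCH */);
	fence_signal(&s_fence->finished);	/* drm_sched_fence_finished() */
}

int main(void)
{
	struct drm_sched_fence f = {0};

	kill_job(&f);
	printf("finished: signaled=%d error=%d\n",
	    f.finished.signaled, f.finished.error);
	return 0;
}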
sched_main.c |
   182  struct drm_sched_fence *s_fence;    (local in function: drm_sched_dependency_optimized)
   188  s_fence = to_drm_sched_fence(fence);
   189  if (s_fence && s_fence->sched == sched)
   365  if (bad->s_fence->scheduled.context ==
   422  if (s_job->s_fence->parent &&
   423  dma_fence_remove_callback(s_job->s_fence->parent,
   441  dma_fence_wait(&s_job->s_fence->finished, false);
   485  struct dma_fence *fence = s_job->s_fence->parent;
   528  struct drm_sched_fence *s_fence = s_job->s_fence;    (local in function: drm_sched_resubmit_jobs)
   669  struct drm_sched_fence *s_fence = s_job->s_fence;    (local in function: drm_sched_process_job)
   762  struct drm_sched_fence *s_fence;    (local in function: drm_sched_main)
  [all...] |
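sched_main.c ties the pieces together: drm_sched_main records the hardware fence returned by run_job as s_fence->parent, drm_sched_process_job signals the finished fence when that parent completes, and drm_sched_dependency_optimized (lines 182-189) recognises dependencies whose scheduler fence already belongs to the same scheduler. The sketch below is a rough, self-contained model of those relationships on mock types; it is not the kernel scheduler's control flow, just the fence bookkeeping it implies.

#include <stdio.h>
#include <stdbool.h>
#include <stddef.h>

/* Mock stand-ins for the kernel types. */
struct dma_fence { bool signaled; };

struct drm_gpu_scheduler { const char *name; };	/* reduced to a label */

struct drm_sched_fence {
	struct dma_fence scheduled;
	struct dma_fence finished;
	struct dma_fence *parent;		/* HW fence backing "finished" */
	struct drm_gpu_scheduler *sched;	/* scheduler that owns the job */
};

/* Models drm_sched_main(): after pushing to the ring, remember the HW fence. */
static void job_pushed(struct drm_sched_fence *s_fence, struct dma_fence *hw)
{
	s_fence->scheduled.signaled = true;
	s_fence->parent = hw;
}

/* Models drm_sched_process_job() (line 669): HW fence fired -> finished fires. */
static void hw_fence_completed(struct drm_sched_fence *s_fence)
{
	s_fence->finished.signaled = true;
}

/*
 * Models the check at lines 188-189: a dependency whose scheduler fence lives
 * on the same scheduler can be handled without an extra callback.
 */
static bool dependency_optimized(struct drm_sched_fence *dep,
    struct drm_gpu_scheduler *sched)
{
	return dep != NULL && dep->sched == sched;
}

int main(void)
{
	struct drm_gpu_scheduler gfx = { "gfx" };
	struct dma_fence hw = {0};
	struct drm_sched_fence f = { .sched = &gfx };

	job_pushed(&f, &hw);
	hw_fence_completed(&f);
	printf("scheduled=%d finished=%d optimized=%d\n",
	    f.scheduled.signaled, f.finished.signaled,
	    dependency_optimized(&f, &gfx));
	return 0;
}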
/src/sys/external/bsd/drm2/dist/include/drm/ |
gpu_scheduler.h |
   181  * @s_fence: contains the fences for the scheduling of job.
   190  * @cb: the callback for the parent fence in s_fence.
   199  struct drm_sched_fence *s_fence;    (member in struct: drm_sched_job)
|
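The gpu_scheduler.h hit is the anchor for every dereference above: each drm_sched_job carries a drm_sched_fence (the s_fence member at line 199), and that object in turn holds the scheduled and finished dma_fences plus the parent hardware fence. Below is a condensed, illustrative model of that layout; the field set is deliberately trimmed (the real structures also have locks, ops, contexts and refcounts), so treat it as a sketch rather than the header's definition.

#include <stdio.h>

/* Illustrative reductions of the kernel structures. */
struct dma_fence { int unused; };

struct drm_gpu_scheduler { const char *name; };

struct drm_sched_fence {
	struct dma_fence scheduled;	/* fires when the job leaves the entity queue */
	struct dma_fence finished;	/* fires when the job's hardware fence completes */
	struct dma_fence *parent;	/* the hardware fence, once run_job() returned it */
	void *owner;			/* opaque owner tag used by amdgpu_sync.c */
	struct drm_gpu_scheduler *sched;
};

struct drm_sched_job {
	struct drm_sched_fence *s_fence;	/* the member documented at line 199 */
	/* ... queue node, callbacks, priority, ... */
};

int main(void)
{
	struct drm_gpu_scheduler gfx = { "gfx" };
	struct drm_sched_fence f = { .sched = &gfx };
	struct drm_sched_job job = { .s_fence = &f };

	/* The dereference chain the listings above keep using. */
	printf("job -> s_fence -> finished at %p on %s\n",
	    (void *)&job.s_fence->finished, job.s_fence->sched->name);
	return 0;
}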