HomeSort by: relevance | last modified time | path
    Searched refs:idle_work (Results 1 - 14 of 14) sorted by relevance

  /src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/
amdgpu_jpeg.c 44 INIT_DELAYED_WORK(&adev->jpeg.idle_work, amdgpu_jpeg_idle_work_handler);
53 cancel_delayed_work_sync(&adev->jpeg.idle_work);
67 cancel_delayed_work_sync(&adev->jpeg.idle_work);
80 container_of(work, struct amdgpu_device, jpeg.idle_work.work);
95 schedule_delayed_work(&adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT);
101 bool set_clocks = !cancel_delayed_work_sync(&adev->jpeg.idle_work);
110 schedule_delayed_work(&ring->adev->jpeg.idle_work, JPEG_IDLE_TIMEOUT);
amdgpu_jpeg.h 51 struct delayed_work idle_work; member in struct:amdgpu_jpeg
amdgpu_uvd.h 68 struct delayed_work idle_work; member in struct:amdgpu_uvd
amdgpu_vce.h 47 struct delayed_work idle_work; member in struct:amdgpu_vce
amdgpu_uvd.c 139 INIT_DELAYED_WORK(&adev->uvd.idle_work, amdgpu_uvd_idle_work_handler);
308 cancel_delayed_work_sync(&adev->uvd.idle_work);
361 cancel_delayed_work_sync(&adev->uvd.idle_work);
1200 container_of(work, struct amdgpu_device, uvd.idle_work.work);
1224 schedule_delayed_work(&adev->uvd.idle_work, UVD_IDLE_TIMEOUT);
1236 set_clocks = !cancel_delayed_work_sync(&adev->uvd.idle_work);
1253 schedule_delayed_work(&ring->adev->uvd.idle_work, UVD_IDLE_TIMEOUT);
amdgpu_vce.c 206 INIT_DELAYED_WORK(&adev->vce.idle_work, amdgpu_vce_idle_work_handler);
226 cancel_delayed_work_sync(&adev->vce.idle_work);
275 cancel_delayed_work_sync(&adev->vce.idle_work);
342 container_of(work, struct amdgpu_device, vce.idle_work.work);
359 schedule_delayed_work(&adev->vce.idle_work, VCE_IDLE_TIMEOUT);
379 set_clocks = !cancel_delayed_work_sync(&adev->vce.idle_work);
405 schedule_delayed_work(&ring->adev->vce.idle_work, VCE_IDLE_TIMEOUT);
amdgpu_vcn.c 70 INIT_DELAYED_WORK(&adev->vcn.idle_work, amdgpu_vcn_idle_work_handler);
195 cancel_delayed_work_sync(&adev->vcn.idle_work);
228 cancel_delayed_work_sync(&adev->vcn.idle_work);
288 container_of(work, struct amdgpu_device, vcn.idle_work.work);
320 schedule_delayed_work(&adev->vcn.idle_work, VCN_IDLE_TIMEOUT);
327 bool set_clocks = !cancel_delayed_work_sync(&adev->vcn.idle_work);
357 schedule_delayed_work(&ring->adev->vcn.idle_work, VCN_IDLE_TIMEOUT);
amdgpu_vcn.h 192 struct delayed_work idle_work; member in struct:amdgpu_vcn
amdgpu_vcn_v1_0.c 119 adev->vcn.idle_work.work.func = vcn_v1_0_idle_work_handler;
1779 container_of(work, struct amdgpu_device, vcn.idle_work.work);
1812 schedule_delayed_work(&adev->vcn.idle_work, VCN_IDLE_TIMEOUT);
1819 bool set_clocks = !cancel_delayed_work_sync(&adev->vcn.idle_work);
  /src/sys/external/bsd/drm2/dist/drm/radeon/
radeon_uvd.c 79 INIT_DELAYED_WORK(&rdev->uvd.idle_work, radeon_uvd_idle_work_handler);
880 container_of(work, struct radeon_device, uvd.idle_work.work);
891 schedule_delayed_work(&rdev->uvd.idle_work,
899 bool set_clocks = !cancel_delayed_work_sync(&rdev->uvd.idle_work);
900 set_clocks &= schedule_delayed_work(&rdev->uvd.idle_work,
radeon_vce.c 108 INIT_DELAYED_WORK(&rdev->vce.idle_work, radeon_vce_idle_work_handler);
322 container_of(work, struct radeon_device, vce.idle_work.work);
332 schedule_delayed_work(&rdev->vce.idle_work,
347 bool set_clocks = !cancel_delayed_work_sync(&rdev->vce.idle_work);
348 set_clocks &= schedule_delayed_work(&rdev->vce.idle_work,
radeon.h 1724 struct delayed_work idle_work; member in struct:radeon_uvd
1766 struct delayed_work idle_work; member in struct:radeon_vce
  /src/sys/external/bsd/drm2/dist/drm/i915/display/
intel_psr.c 591 container_of(work, typeof(*dev_priv), psr.idle_work.work);
595 if (delayed_work_pending(&dev_priv->psr.idle_work))
609 cancel_delayed_work(&dev_priv->psr.idle_work);
976 cancel_delayed_work_sync(&dev_priv->psr.idle_work);
1312 mod_delayed_work(system_wq, &dev_priv->psr.idle_work,
1397 INIT_DELAYED_WORK(&dev_priv->psr.idle_work, tgl_dc5_idle_thread);
  /src/sys/external/bsd/drm2/dist/drm/i915/
i915_drv.h 511 struct delayed_work idle_work; member in struct:i915_psr

Completed in 27 milliseconds