    Searched defs:hws (Results 1 - 19 of 19) sorted by relevancy

  /src/sys/external/bsd/drm2/dist/drm/i915/selftests/
igt_spinner.h 23 struct drm_i915_gem_object *hws; member in struct:igt_spinner
igt_spinner.c 26 spin->hws = i915_gem_object_create_internal(gt->i915, PAGE_SIZE);
27 if (IS_ERR(spin->hws)) {
28 err = PTR_ERR(spin->hws);
38 i915_gem_object_set_cache_coherency(spin->hws, I915_CACHE_LLC);
39 vaddr = i915_gem_object_pin_map(spin->hws, I915_MAP_WB);
57 i915_gem_object_unpin_map(spin->hws);
61 i915_gem_object_put(spin->hws);
71 static u64 hws_address(const struct i915_vma *hws,
74 return hws->node.start + seqno_offset(rq->fence.context);
100 struct i915_vma *hws, *vma; local in function:igt_spinner_create_request
    [all...]
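The igt_spinner matches above show a hardware status page (HWS) object being created, made cache-coherent, pinned for CPU access, and released. A hedged sketch of that lifecycle, reconstructed only from the snippet lines; the helper names spinner_hws_create/spinner_hws_destroy and the error handling layout are illustrative, not the real igt_spinner_init/fini bodies:

    /* Sketch of the spin->hws lifecycle seen in igt_spinner.c;
     * helper names and the exact error path are assumptions. */
    static int spinner_hws_create(struct igt_spinner *spin, struct intel_gt *gt)
    {
        void *vaddr;

        spin->hws = i915_gem_object_create_internal(gt->i915, PAGE_SIZE);
        if (IS_ERR(spin->hws))
            return PTR_ERR(spin->hws);

        /* keep the status page coherent with the CPU cache */
        i915_gem_object_set_cache_coherency(spin->hws, I915_CACHE_LLC);

        vaddr = i915_gem_object_pin_map(spin->hws, I915_MAP_WB);
        if (IS_ERR(vaddr)) {
            i915_gem_object_put(spin->hws);
            return PTR_ERR(vaddr);
        }
        return 0;
    }

    static void spinner_hws_destroy(struct igt_spinner *spin)
    {
        i915_gem_object_unpin_map(spin->hws);
        i915_gem_object_put(spin->hws);
    }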
  /src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dcn21/
amdgpu_dcn21_hwseq.c 45 hws->ctx
47 hws->regs->reg
51 hws->shifts->field_name, hws->masks->field_name
55 struct dce_hwseq *hws)
69 int dcn21_init_sys_ctx(struct dce_hwseq *hws, struct dc *dc, struct dc_phy_addr_space_config *pa_config)
83 mmhub_update_page_table_config(&config, hws);
92 struct dce_hwseq *hws = dc->hwseq; local in function:dcn21_s0i3_golden_init_wa
amdgpu_dcn21_resource.c 1476 struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL); local in function:dcn21_hwseq_create
1478 if (hws) {
1479 hws->ctx = ctx;
1480 hws->regs = &hwseq_reg;
1481 hws->shifts = &hwseq_shift;
1482 hws->masks = &hwseq_mask;
1483 hws->wa.DEGVIDCN21 = true;
1485 return hws;
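The resource-level constructors in this listing all follow one pattern: kzalloc a struct dce_hwseq, point it at the file-static register/shift/mask tables, and set any workaround bits. A reconstruction of dcn21_hwseq_create() from the lines above; only the parameter type of ctx is inferred rather than shown:

    /* Reconstruction of dcn21_hwseq_create() from the snippet above;
     * the dc_context parameter type is an assumption. */
    static struct dce_hwseq *dcn21_hwseq_create(struct dc_context *ctx)
    {
        struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL);

        if (hws) {
            hws->ctx = ctx;
            hws->regs = &hwseq_reg;        /* file-static register table */
            hws->shifts = &hwseq_shift;
            hws->masks = &hwseq_mask;
            hws->wa.DEGVIDCN21 = true;     /* enable the DEGVIDCN21 workaround */
        }
        return hws;                        /* NULL on allocation failure */
    }

The dce80/dce100/dce110/dce112/dce120/dcn10/dcn20 hwseq_create matches further down have the same shape and differ only in which workaround flags (and, for dce110, which register table) they set.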
  /src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dce/
amdgpu_dce_hwseq.c 37 hws->ctx
39 hws->regs->reg
43 hws->shifts->field_name, hws->masks->field_name
45 void dce_enable_fe_clock(struct dce_hwseq *hws,
58 struct dce_hwseq *hws = dc->hwseq; local in function:dce_pipe_control_lock
80 if (hws->masks->BLND_BLND_V_UPDATE_LOCK != 0)
85 if (hws->wa.blnd_crtc_trigger) {
93 void dce_set_blender_mode(struct dce_hwseq *hws,
125 if (hws->masks->BLND_ALPHA_MODE != 0)
    [all...]
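The fragments "hws->ctx", "hws->regs->reg" and "hws->shifts->field_name, hws->masks->field_name" at the top of each hwseq file are the bodies of the register-access convenience macros that the rest of the file expands. A hedged sketch of their usual shape in DC code; the macro names CTX/REG/FN follow the common DC convention but are not visible in the truncated snippets, so treat them as assumptions:

    /* Typical shape of the DC hwseq register-access macros whose
     * bodies appear in the snippets above; macro names are assumptions. */
    #define CTX \
        hws->ctx
    #define REG(reg) \
        hws->regs->reg
    #define FN(reg_name, field_name) \
        hws->shifts->field_name, hws->masks->field_name

The generic REG_SET/REG_UPDATE helpers in DC's reg_helper.h are written against macros of this shape, which is why every hwseq file redefines them against its own hws tables.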
  /src/sys/external/bsd/drm/dist/shared-core/
i915_dma.c 155 /* Clear the HWS virtual address at teardown */
801 drm_i915_hws_addr_t *hws = data; local in function:i915_set_status_page
811 DRM_DEBUG("set status page addr 0x%08x\n", (u32)hws->addr);
813 dev_priv->status_gfx_addr = hws->addr & (0x1ffff<<12);
815 dev_priv->hws_map.offset = dev->agp->base + hws->addr;
833 DRM_DEBUG("load hws HWS_PGA with gfx mem 0x%x\n",
835 DRM_DEBUG("load hws at %p\n", dev_priv->hw_status_page);
876 /* Init HWS */
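The legacy i915_set_status_page handler in shared-core/i915_dma.c takes a userspace-supplied HWS address, masks it to a page-aligned graphics offset, and maps the page through the AGP aperture. A minimal sketch of the address handling visible in the snippet; the surrounding ioctl validation, the map call, and the HWS_PGA register write are elided:

    /* Sketch of the address handling inside i915_set_status_page(). */
    drm_i915_hws_addr_t *hws = data;

    DRM_DEBUG("set status page addr 0x%08x\n", (u32)hws->addr);

    /* mask to a page-aligned offset (bits 12..28, i.e. below 512MiB) */
    dev_priv->status_gfx_addr = hws->addr & (0x1ffff << 12);

    /* CPU access to the status page goes through the AGP aperture */
    dev_priv->hws_map.offset = dev->agp->base + hws->addr;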
  /src/sys/external/bsd/drm2/dist/drm/i915/gt/
selftest_hangcheck.c 53 struct drm_i915_gem_object *hws; member in struct:hang
74 h->hws = i915_gem_object_create_internal(gt->i915, PAGE_SIZE);
75 if (IS_ERR(h->hws)) {
76 err = PTR_ERR(h->hws);
86 i915_gem_object_set_cache_coherency(h->hws, I915_CACHE_LLC);
87 vaddr = i915_gem_object_pin_map(h->hws, I915_MAP_WB);
105 i915_gem_object_unpin_map(h->hws);
109 i915_gem_object_put(h->hws);
115 static u64 hws_address(const struct i915_vma *hws,
118 return hws->node.start + offset_in_page(sizeof(u32)*rq->fence.context)
144 struct i915_vma *hws, *vma; local in function:hang_create_request
    [all...]
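The hangcheck selftest mirrors igt_spinner: struct hang carries its own hws object, and hws_address() turns a request's fence context into a per-context u32 slot inside that page. A sketch of the address calculation from the lines above; the second parameter type is inferred from the rq->fence.context use in the body:

    /* hws_address() as shown in selftest_hangcheck.c: each fence
     * context gets its own u32 slot inside the HWS page. */
    static u64 hws_address(const struct i915_vma *hws,
                           const struct i915_request *rq)
    {
        return hws->node.start +
               offset_in_page(sizeof(u32) * rq->fence.context);
    }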
intel_engine_cs.c 522 * On g33, we cannot place HWS above 256MiB, so
526 * and hang if the HWS is placed at the top of the
528 * platforms have issues with us placing the HWS
547 * Though the HWS register does support 36bit addresses, historically
549 * the HWS is placed above 4G. We only allow objects to be allocated
1340 const u32 *hws = local in function:intel_engine_print_registers
1375 idx, hws[idx * 2], hws[idx * 2 + 1]);
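The intel_engine_print_registers() match reads the engine's status page through a local hws pointer and dumps entries in pairs of dwords. A hedged sketch of that dump loop built around the indexing shown; the helper name, the buffer index constant, the entry count, and the drm_printer plumbing are assumptions:

    /* Hedged sketch of the pair-wise HWS dump around the line shown;
     * print_hws_pairs, I915_HWS_CSB_BUF0_INDEX and num_entries are assumptions. */
    static void print_hws_pairs(struct drm_printer *m,
                                const struct intel_engine_cs *engine,
                                int num_entries)
    {
        const u32 *hws = &engine->status_page.addr[I915_HWS_CSB_BUF0_INDEX];
        int idx;

        for (idx = 0; idx < num_entries; idx++)
            drm_printf(m, "\tEntry[%d]: 0x%08x, context: %u\n",
                       idx, hws[idx * 2], hws[idx * 2 + 1]);
    }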
  /src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dce100/
amdgpu_dce100_resource.c 504 struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL); local in function:dce100_hwseq_create
506 if (hws) {
507 hws->ctx = ctx;
508 hws->regs = &hwseq_reg;
509 hws->shifts = &hwseq_shift;
510 hws->masks = &hwseq_mask;
512 return hws;
  /src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dce112/
amdgpu_dce112_resource.c 531 struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL); local in function:dce112_hwseq_create
533 if (hws) {
534 hws->ctx = ctx;
535 hws->regs = &hwseq_reg;
536 hws->shifts = &hwseq_shift;
537 hws->masks = &hwseq_mask;
539 return hws;
  /src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dce120/
amdgpu_dce120_resource.c 786 struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL); local in function:dce120_hwseq_create
788 if (hws) {
789 hws->ctx = ctx;
790 hws->regs = &hwseq_reg;
791 hws->shifts = &hwseq_shift;
792 hws->masks = &hwseq_mask;
794 return hws;
800 struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL); local in function:dce121_hwseq_create
802 if (hws) {
803 hws->ctx = ctx
    [all...]
  /src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dce80/
amdgpu_dce80_resource.c 627 struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL); local in function:dce80_hwseq_create
629 if (hws) {
630 hws->ctx = ctx;
631 hws->regs = &hwseq_reg;
632 hws->shifts = &hwseq_shift;
633 hws->masks = &hwseq_mask;
635 return hws;
  /src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dcn20/
amdgpu_dcn20_hwseq.c 61 hws->ctx
63 hws->regs->reg
67 hws->shifts->field_name, hws->masks->field_name
188 struct dce_hwseq *hws,
228 void dcn20_dccg_init(struct dce_hwseq *hws)
253 struct dce_hwseq *hws)
280 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_init_blank
328 hws->funcs.wait_for_blank_complete(opp);
332 struct dce_hwseq *hws,
561 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_plane_atomic_disable
612 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_enable_stream_timing
834 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_set_input_transfer_func
1315 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_update_dchubp_dpp
1477 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_program_pipe
1561 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_program_front_end_for_ctx
1705 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_update_bandwidth
1825 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_disable_stream_gating
1840 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_enable_stream_gating
1973 struct dce_hwseq *hws = link->dc->hwseq; local in function:dcn20_unblank_stream
2089 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_reset_hw_ctx_wrap
2140 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_update_mpcc
2292 struct dce_hwseq *hws = dc->hwseq; local in function:dcn20_fpga_init_hw
    [all...]
amdgpu_dcn20_resource.c 1258 struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL); local in function:dcn20_hwseq_create
1260 if (hws) {
1261 hws->ctx = ctx;
1262 hws->regs = &hwseq_reg;
1263 hws->shifts = &hwseq_shift;
1264 hws->masks = &hwseq_mask;
1266 return hws;
  /src/sys/external/bsd/drm2/dist/drm/i915/
i915_gpu_error.h 77 u32 hws; member in struct:intel_engine_coredump
  /src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dce110/
amdgpu_dce110_hw_sequencer.c 81 hws->ctx
86 hws->regs->reg
90 hws->shifts->field_name, hws->masks->field_name
704 static bool is_panel_backlight_on(struct dce_hwseq *hws)
713 static bool is_panel_powered_on(struct dce_hwseq *hws)
895 struct dce_hwseq *hws = ctx->dc->hwseq; local in function:dce110_edp_backlight_control
904 if (enable && is_panel_backlight_on(hws)) {
1056 struct dce_hwseq *hws = link->dc->hwseq; local in function:dce110_unblank_stream
1066 hws->funcs.edp_backlight_control(link, true)
1074 struct dce_hwseq *hws = link->dc->hwseq; local in function:dce110_blank_stream
1334 struct dce_hwseq *hws = dc->hwseq; local in function:apply_single_controller_ctx_to_hw
1567 struct dce_hwseq *hws = dc->hwseq; local in function:dce110_enable_accelerated_mode
2025 struct dce_hwseq *hws = dc->hwseq; local in function:dce110_apply_ctx_to_hw
2366 struct dce_hwseq *hws = dc->hwseq; local in function:init_hw
2465 struct dce_hwseq *hws = dc->hwseq; local in function:dce110_program_front_end_for_pipe
2631 struct dce_hwseq *hws = dc->hwseq; local in function:dce110_power_down_fe
    [all...]
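The dce110 sequencer entry points above all begin by fetching the per-DC hwseq state from dc->hwseq (or link->dc->hwseq) and then either program registers through it or dispatch through the hws->funcs callbacks. A tiny hedged sketch of that access pattern; the wrapper function name and the NULL check are illustrative, while the edp_backlight_control call comes from the dce110_unblank_stream snippet:

    /* Hedged sketch of the hwseq access pattern common to the
     * dce110 functions above; example_unblank_path is illustrative. */
    static void example_unblank_path(struct dc_link *link)
    {
        struct dce_hwseq *hws = link->dc->hwseq;   /* per-DC hwseq state */

        if (hws->funcs.edp_backlight_control)
            hws->funcs.edp_backlight_control(link, true);
    }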
amdgpu_dce110_resource.c 558 struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL); local in function:dce110_hwseq_create
560 if (hws) {
561 hws->ctx = ctx;
562 hws->regs = ASIC_REV_IS_STONEY(ctx->asic_id.hw_internal_rev) ?
564 hws->shifts = &hwseq_shift;
565 hws->masks = &hwseq_mask;
566 hws->wa.blnd_crtc_trigger = true;
568 return hws;
1105 struct dce_hwseq *hws = dc->hwseq; local in function:dce110_acquire_underlay
1126 hws->funcs.enable_display_power_gating
    [all...]
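dce110_hwseq_create is the one constructor in this listing that picks its register table at runtime: Stoney parts get an alternate hwseq register map, and the blnd_crtc_trigger workaround consumed by dce_pipe_control_lock above is enabled. A hedged reconstruction of the truncated lines; the Stoney table name hwseq_stoney_reg is an assumption since the snippet cuts off after the ternary:

    /* Reconstruction of dce110_hwseq_create() from the truncated
     * snippet; the Stoney register table name is an assumption. */
    static struct dce_hwseq *dce110_hwseq_create(struct dc_context *ctx)
    {
        struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL);

        if (hws) {
            hws->ctx = ctx;
            hws->regs = ASIC_REV_IS_STONEY(ctx->asic_id.hw_internal_rev) ?
                    &hwseq_stoney_reg : &hwseq_reg;
            hws->shifts = &hwseq_shift;
            hws->masks = &hwseq_mask;
            hws->wa.blnd_crtc_trigger = true;  /* see dce_pipe_control_lock() above */
        }
        return hws;
    }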
  /src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dcn10/
amdgpu_dcn10_hw_sequencer.c 63 hws->ctx
65 hws->regs->reg
69 hws->shifts->field_name, hws->masks->field_name
121 struct dce_hwseq *hws = dc->hwseq; local in function:log_mpc_crc
484 struct dce_hwseq *hws,
506 struct dce_hwseq *hws)
539 struct dce_hwseq *hws,
546 if (hws->ctx->dc->debug.disable_dpp_power_gate)
591 struct dce_hwseq *hws,
661 struct dce_hwseq *hws = dc->hwseq; local in function:undo_DEGVIDCN10_253_wa
681 struct dce_hwseq *hws = dc->hwseq; local in function:apply_DEGVIDCN10_253_wa
711 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_bios_golden_init
1032 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_plane_atomic_disconnect
1064 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_plane_atomic_power_down
1085 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_plane_atomic_disable
1118 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_disable_plane
1135 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_init_pipes
1244 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_init_hw
1376 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_reset_hw_ctx_wrap
1579 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_pipe_control_lock
1901 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_enable_plane
2138 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_update_mpcc
2238 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_update_dchubp_dpp
2429 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_program_pipe
2458 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_program_all_pipe_in_tree
2516 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_apply_ctx_for_surface
2648 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_prepare_bandwidth
2681 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_optimize_bandwidth
2852 struct dce_hwseq *hws = dc->hwseq; local in function:dcn10_wait_for_mpcc_disconnect
3238 struct dce_hwseq *hws = link->dc->hwseq; local in function:dcn10_unblank_stream
    [all...]
amdgpu_dcn10_resource.c 884 struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL); local in function:dcn10_hwseq_create
886 if (hws) {
887 hws->ctx = ctx;
888 hws->regs = &hwseq_reg;
889 hws->shifts = &hwseq_shift;
890 hws->masks = &hwseq_mask;
891 hws->wa.DEGVIDCN10_253 = true;
892 hws->wa.false_optc_underflow = true;
893 hws->wa.DEGVIDCN10_254 = true;
895 return hws;
    [all...]
