Lines Matching refs:vgpu
80 /* GM resources owned by a vGPU */
90 /* Fences owned by a vGPU */
113 #define vgpu_cfg_space(vgpu) ((vgpu)->cfg_space.virtual_cfg_space)
127 #define vgpu_opregion(vgpu) (&(vgpu->opregion))
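The two macros above are the entry points into a vGPU's virtual PCI config space and OpRegion. A minimal sketch of a config-space dword read built on vgpu_cfg_space(); the helper name is ours, the access pattern mirrors intel_vgpu_write_pci_bar() further down, and all sketches in this listing assume the surrounding gvt.h declarations are in scope:

/* Sketch: read a dword from the vGPU's virtual config space. */
static u32 example_cfg_read32(struct intel_vgpu *vgpu, unsigned int offset)
{
	return *(u32 *)(vgpu_cfg_space(vgpu) + offset);
}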
146 int (*init)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
147 void (*clean)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
148 void (*reset)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
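These three callbacks form a per-vGPU submission ops table; the enclosing struct is elided by the match filter, so the intel_vgpu_submission_ops name below is assumed from context. A hedged sketch of driving the table over an engine mask:

/* Sketch: invoke the reset callback for every engine; ALL_ENGINES
 * is i915's all-ones intel_engine_mask_t constant. */
static void example_reset_submission(struct intel_vgpu *vgpu,
		const struct intel_vgpu_submission_ops *ops)
{
	if (ops && ops->reset)
		ops->reset(vgpu, ALL_ENGINES);
}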
174 unsigned long handle; /* vGPU handle used by hypervisor MPT modules */
181 * scheduler structure. So the two vGPU data fields below are protected
305 * not yet protected by special locks (vgpu and scheduler lock).
308 /* scheduler scope lock, protects gvt and vgpu schedule-related data */
312 struct idr vgpu_idr; /* vGPU IDR pool */
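vgpu_idr hands out the small integer IDs that identify each vGPU. A sketch of reserving one, assuming the caller holds the lock guarding the pool and that GVT_MAX_VGPU bounds the ID range (the real creation path may differ):

/* Sketch: reserve an ID for a new vGPU from gvt->vgpu_idr. */
static int example_reserve_vgpu_id(struct intel_gvt *gvt,
				   struct intel_vgpu *vgpu)
{
	int id = idr_alloc(&gvt->vgpu_idr, vgpu, 1, GVT_MAX_VGPU,
			   GFP_KERNEL);
	if (id < 0)
		return id;	/* pool exhausted or allocation failure */
	vgpu->id = id;
	return 0;
}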
401 /* Aperture/GM space definitions for vGPU */
402 #define vgpu_aperture_offset(vgpu) ((vgpu)->gm.low_gm_node.start)
403 #define vgpu_hidden_offset(vgpu) ((vgpu)->gm.high_gm_node.start)
404 #define vgpu_aperture_sz(vgpu) ((vgpu)->gm.aperture_sz)
405 #define vgpu_hidden_sz(vgpu) ((vgpu)->gm.hidden_sz)
407 #define vgpu_aperture_pa_base(vgpu) \
408 (gvt_aperture_pa_base(vgpu->gvt) + vgpu_aperture_offset(vgpu))
410 #define vgpu_ggtt_gm_sz(vgpu) ((vgpu)->gm.aperture_sz + (vgpu)->gm.hidden_sz)
412 #define vgpu_aperture_pa_end(vgpu) \
413 (vgpu_aperture_pa_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
415 #define vgpu_aperture_gmadr_base(vgpu) (vgpu_aperture_offset(vgpu))
416 #define vgpu_aperture_gmadr_end(vgpu) \
417 (vgpu_aperture_gmadr_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
419 #define vgpu_hidden_gmadr_base(vgpu) (vgpu_hidden_offset(vgpu))
420 #define vgpu_hidden_gmadr_end(vgpu) \
421 (vgpu_hidden_gmadr_base(vgpu) + vgpu_hidden_sz(vgpu) - 1)
423 #define vgpu_fence_base(vgpu) (vgpu->fence.base)
424 #define vgpu_fence_sz(vgpu) (vgpu->fence.size)
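These macros carve two per-vGPU windows out of global graphics memory: a CPU-mappable aperture and a hidden (high) region. Because vgpu_aperture_pa_base() adds the same low_gm_node.start that vgpu_aperture_gmadr_base() returns, a guest aperture address translates to a host physical address by plain offsetting, as this sketch (our helper name) shows:

/* Sketch: guest aperture gmadr -> host physical address. The
 * subtraction and re-add cancel to gvt_aperture_pa_base() + gmadr,
 * reflecting GVT-g's ballooned (identity-offset) layout. */
static u64 example_aperture_gmadr_to_pa(struct intel_vgpu *vgpu, u64 gmadr)
{
	/* caller must have checked gmadr against the window bounds */
	return vgpu_aperture_pa_base(vgpu) +
	       (gmadr - vgpu_aperture_gmadr_base(vgpu));
}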
438 int intel_vgpu_alloc_resource(struct intel_vgpu *vgpu,
440 void intel_vgpu_reset_resource(struct intel_vgpu *vgpu);
441 void intel_vgpu_free_resource(struct intel_vgpu *vgpu);
442 void intel_vgpu_write_fence(struct intel_vgpu *vgpu,
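A hedged note on where these resource entry points are expected to run; the call sites are assumptions, not shown in this listing:

/* Sketch: reset_resource() on a device-model-level vGPU reset,
 * free_resource() on destruction. */
static void example_on_vgpu_reset(struct intel_vgpu *vgpu)
{
	intel_vgpu_reset_resource(vgpu);	/* clear fence/GM state */
}

static void example_on_vgpu_destroy(struct intel_vgpu *vgpu)
{
	intel_vgpu_free_resource(vgpu);		/* return GM nodes and fences */
}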
445 /* Macros for easily accessing vGPU virtual/shadow registers.
447 #define vgpu_vreg_t(vgpu, reg) \
448 (*(u32 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
449 #define vgpu_vreg(vgpu, offset) \
450 (*(u32 *)(vgpu->mmio.vreg + (offset)))
451 #define vgpu_vreg64_t(vgpu, reg) \
452 (*(u64 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
453 #define vgpu_vreg64(vgpu, offset) \
454 (*(u64 *)(vgpu->mmio.vreg + (offset)))
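The typed (_t) forms take an i915_reg_t and are preferred where a register macro exists; the raw forms take a byte offset directly. A sketch, with the register and offset chosen purely for illustration:

/* Sketch: typed vs. raw access into the virtual register file. */
static void example_vreg_access(struct intel_vgpu *vgpu)
{
	u32 v = vgpu_vreg_t(vgpu, PIPECONF(PIPE_A));	/* typed i915_reg_t */

	vgpu_vreg(vgpu, 0x70008) = v;	/* raw byte offset (illustrative) */
}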
456 #define for_each_active_vgpu(gvt, vgpu, id) \
457 idr_for_each_entry((&(gvt)->vgpu_idr), (vgpu), (id)) \
458 for_each_if(vgpu->active)
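for_each_active_vgpu() walks the IDR and skips entries not marked active. A minimal sketch, assuming the caller holds the lock protecting vgpu_idr:

/* Sketch: count the vGPUs currently marked active. */
static int example_count_active(struct intel_gvt *gvt)
{
	struct intel_vgpu *vgpu;
	int id, n = 0;

	for_each_active_vgpu(gvt, vgpu, id)
		n++;
	return n;
}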
460 static inline void intel_vgpu_write_pci_bar(struct intel_vgpu *vgpu,
467 pval = (u32 *)(vgpu_cfg_space(vgpu) + offset);
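Only two lines of intel_vgpu_write_pci_bar() match the filter; the masking it applies on a low-dword BAR write is elided. A hedged sketch of the conventional behavior (an assumption about the elided body, not a quote of it):

/* Sketch: a low-dword BAR write normally preserves the read-only
 * attribute bits [3:0] and updates only the address bits [31:4]. */
static void example_write_bar_low(u32 *pval, u32 val)
{
	*pval = (val & GENMASK(31, 4)) | (*pval & GENMASK(3, 0));
}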
484 void intel_gvt_destroy_idle_vgpu(struct intel_vgpu *vgpu);
487 void intel_gvt_destroy_vgpu(struct intel_vgpu *vgpu);
488 void intel_gvt_release_vgpu(struct intel_vgpu *vgpu);
489 void intel_gvt_reset_vgpu_locked(struct intel_vgpu *vgpu, bool dmlr,
491 void intel_gvt_reset_vgpu(struct intel_vgpu *vgpu);
492 void intel_gvt_activate_vgpu(struct intel_vgpu *vgpu);
493 void intel_gvt_deactivate_vgpu(struct intel_vgpu *vgpu);
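A hedged sketch of how these lifecycle entry points pair up over a vGPU's lifetime; the ordering is assumed from the names, and the idle-vGPU variants follow the same create/destroy pairing:

/* Sketch: activate marks the vGPU schedulable (vgpu->active),
 * deactivate stops scheduling, release drops runtime state,
 * destroy frees the vGPU itself. */
static void example_lifecycle(struct intel_vgpu *vgpu)
{
	intel_gvt_activate_vgpu(vgpu);
	/* ... guest runs; resets may call intel_gvt_reset_vgpu() ... */
	intel_gvt_deactivate_vgpu(vgpu);
	intel_gvt_release_vgpu(vgpu);
	intel_gvt_destroy_vgpu(vgpu);
}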
496 #define vgpu_gmadr_is_aperture(vgpu, gmadr) \
497 ((gmadr >= vgpu_aperture_gmadr_base(vgpu)) && \
498 (gmadr <= vgpu_aperture_gmadr_end(vgpu)))
500 #define vgpu_gmadr_is_hidden(vgpu, gmadr) \
501 ((gmadr >= vgpu_hidden_gmadr_base(vgpu)) && \
502 (gmadr <= vgpu_hidden_gmadr_end(vgpu)))
504 #define vgpu_gmadr_is_valid(vgpu, gmadr) \
505 ((vgpu_gmadr_is_aperture(vgpu, gmadr) || \
506 (vgpu_gmadr_is_hidden(vgpu, gmadr))))
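A small sketch showing how the three checks partition guest graphics memory; the helper name and strings are ours:

/* Sketch: classify a guest gmadr against the two per-vGPU windows. */
static const char *example_gmadr_window(struct intel_vgpu *vgpu, u64 gmadr)
{
	if (vgpu_gmadr_is_aperture(vgpu, gmadr))
		return "aperture (CPU-mappable)";
	if (vgpu_gmadr_is_hidden(vgpu, gmadr))
		return "hidden (high GM)";
	return "invalid";	/* vgpu_gmadr_is_valid() would be false */
}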
520 bool intel_gvt_ggtt_validate_range(struct intel_vgpu *vgpu, u64 addr, u32 size);
521 int intel_gvt_ggtt_gmadr_g2h(struct intel_vgpu *vgpu, u64 g_addr, u64 *h_addr);
522 int intel_gvt_ggtt_gmadr_h2g(struct intel_vgpu *vgpu, u64 h_addr, u64 *g_addr);
523 int intel_gvt_ggtt_index_g2h(struct intel_vgpu *vgpu, unsigned long g_index,
525 int intel_gvt_ggtt_h2g_index(struct intel_vgpu *vgpu, unsigned long h_index,
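A sketch of round-tripping an address through the guest-to-host and host-to-guest helpers; both return non-zero for an out-of-range input:

/* Sketch: g2h then h2g should recover the original guest address. */
static int example_gmadr_roundtrip(struct intel_vgpu *vgpu, u64 g_addr)
{
	u64 h_addr, g_back;
	int ret;

	ret = intel_gvt_ggtt_gmadr_g2h(vgpu, g_addr, &h_addr);
	if (ret)
		return ret;	/* not a valid guest gmadr */
	return intel_gvt_ggtt_gmadr_h2g(vgpu, h_addr, &g_back);
}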
528 void intel_vgpu_init_cfg_space(struct intel_vgpu *vgpu,
530 void intel_vgpu_reset_cfg_space(struct intel_vgpu *vgpu);
532 int intel_vgpu_emulate_cfg_read(struct intel_vgpu *vgpu, unsigned int offset,
535 int intel_vgpu_emulate_cfg_write(struct intel_vgpu *vgpu, unsigned int offset,
538 void intel_vgpu_emulate_hotplug(struct intel_vgpu *vgpu, bool connected);
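A sketch of driving the hotplug emulation; a real caller would be event-driven rather than back-to-back:

/* Sketch: report an unplug followed by a replug to the guest. */
static void example_replug(struct intel_vgpu *vgpu)
{
	intel_vgpu_emulate_hotplug(vgpu, false);	/* disconnected */
	intel_vgpu_emulate_hotplug(vgpu, true);		/* connected */
}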
540 static inline u64 intel_vgpu_get_bar_gpa(struct intel_vgpu *vgpu, int bar)
543 return (*(u64 *)(vgpu->cfg_space.virtual_cfg_space + bar)) &
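The mask applied to the 64-bit BAR value is elided by the match filter. A sketch of a typical caller; using PCI_BASE_ADDRESS_2 as the aperture BAR index is an assumption for illustration:

/* Sketch: find the guest-physical base of the aperture BAR. */
static u64 example_aperture_gpa(struct intel_vgpu *vgpu)
{
	return intel_vgpu_get_bar_gpa(vgpu, PCI_BASE_ADDRESS_2);
}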
547 void intel_vgpu_clean_opregion(struct intel_vgpu *vgpu);
548 int intel_vgpu_init_opregion(struct intel_vgpu *vgpu);
549 int intel_vgpu_opregion_base_write_handler(struct intel_vgpu *vgpu, u32 gpa);
551 int intel_vgpu_emulate_opregion_request(struct intel_vgpu *vgpu, u32 swsci);
552 void populate_pvinfo_page(struct intel_vgpu *vgpu);
555 void enter_failsafe_mode(struct intel_vgpu *vgpu, int reason);
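A sketch of an emulation handler giving up and parking the guest in failsafe mode; the reason constant is assumed from the handler code and shown only as illustration:

/* Sketch: unsupported guest behavior -> stop emulating faithfully. */
static void example_guest_fault(struct intel_vgpu *vgpu)
{
	enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST);
}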
568 void (*vgpu_destroy)(struct intel_vgpu *vgpu);
569 void (*vgpu_release)(struct intel_vgpu *vgpu);
577 int (*vgpu_query_plane)(struct intel_vgpu *vgpu, void *);
578 int (*vgpu_get_dmabuf)(struct intel_vgpu *vgpu, unsigned int);
581 void (*emulate_hotplug)(struct intel_vgpu *vgpu, bool connected);
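These function pointers belong to the ops table the core exports to the hypervisor-facing layer; the intel_gvt_ops struct name is assumed from context. A sketch of the wiring, using the lifecycle and hotplug functions declared above:

/* Sketch: plausible ops-table wiring (designated initializers). */
static const struct intel_gvt_ops example_ops = {
	.vgpu_destroy	 = intel_gvt_destroy_vgpu,
	.vgpu_release	 = intel_gvt_release_vgpu,
	.emulate_hotplug = intel_vgpu_emulate_hotplug,
};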
691 void intel_gvt_debugfs_add_vgpu(struct intel_vgpu *vgpu);
692 void intel_gvt_debugfs_remove_vgpu(struct intel_vgpu *vgpu);