    Searched refs: sgt (Results 1 - 23 of 23), sorted by relevance

  /src/sys/external/bsd/drm2/dist/drm/i915/
i915_mm.c 42 struct sgt_iter sgt; member in struct:remap_pfn
62 return (r->sgt.dma + r->sgt.curr + r->iobase) >> PAGE_SHIFT;
64 return r->sgt.pfn + (r->sgt.curr >> PAGE_SHIFT);
71 if (GEM_WARN_ON(!r->sgt.pfn))
79 r->sgt.curr += PAGE_SIZE;
80 if (r->sgt.curr >= r->sgt.max)
81 r->sgt = __sgt_iter(__sg_next(r->sgt.sgp), use_dma(r->iobase))
    [all...]
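
Note: the remap_pfn hunks above advance a byte cursor (sgt.curr) by PAGE_SIZE and hop to the next scatterlist entry once the cursor reaches that entry's length (sgt.max). A standalone sketch of the same advance, using made-up segment types rather than the i915 structures:

/*
 * Walk a list of variable-length segments one page at a time, the way
 * remap_pfn advances sgt.curr and moves on when curr reaches max.
 * struct seg is illustrative only.
 */
#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

#define PAGE_SIZE 4096u
#define PAGE_SHIFT 12

struct seg {                 /* stand-in for one scatterlist entry */
    uintptr_t base;          /* start address of the segment */
    size_t len;              /* segment length, page aligned */
};

int
main(void)
{
    struct seg segs[] = {
        { 0x100000, 2 * PAGE_SIZE },
        { 0x300000, 1 * PAGE_SIZE },
    };
    size_t nsegs = sizeof(segs) / sizeof(segs[0]);
    size_t i, curr;

    for (i = 0; i < nsegs; i++) {
        /* curr plays the role of sgt.curr; segs[i].len is sgt.max */
        for (curr = 0; curr < segs[i].len; curr += PAGE_SIZE)
            printf("pfn %#lx\n",
                (unsigned long)((segs[i].base + curr) >> PAGE_SHIFT));
    }
    return 0;
}
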
i915_scatterlist.h 24 #define for_each_sgt_page(pp, iter, sgt) \
26 ((iter).i < (sgt)->sgl->sg_npgs \
27 ? (((pp) = (sgt)->sgl->sg_pgs[(iter).i]), 1) \
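
Note: in the drm2 port the scatterlist is a flat array of page pointers (sg_pgs/sg_npgs), so for_each_sgt_page reduces to indexing that array. A self-contained sketch of the same macro shape, with invented names:

/*
 * Iteration idea behind for_each_sgt_page when the table is a plain
 * array of page pointers.  Types and names below are made up.
 */
#include <stdio.h>
#include <stddef.h>

struct page { int id; };

struct fake_sgl {
    struct page **pgs;
    size_t npgs;
};

#define for_each_fake_page(pp, i, sgl) \
    for ((i) = 0; (i) < (sgl)->npgs && (((pp) = (sgl)->pgs[(i)]), 1); (i)++)

int
main(void)
{
    struct page a = { 1 }, b = { 2 };
    struct page *pages[] = { &a, &b };
    struct fake_sgl sgl = { pages, 2 };
    struct page *pp;
    size_t i;

    for_each_fake_page(pp, i, &sgl)
        printf("page %d\n", pp->id);
    return 0;
}
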
  /src/sys/external/bsd/drm2/linux/
linux_sgt.c 44 sg_alloc_table(struct sg_table *sgt, unsigned npgs, gfp_t gfp)
47 sgt->sgl->sg_pgs = kcalloc(npgs, sizeof(sgt->sgl->sg_pgs[0]), gfp);
48 if (sgt->sgl->sg_pgs == NULL)
50 sgt->sgl->sg_npgs = sgt->nents = npgs;
51 sgt->sgl->sg_dmamap = NULL;
57 __sg_alloc_table_from_pages(struct sg_table *sgt, struct page **pgs,
67 ret = sg_alloc_table(sgt, npgs, gfp);
72 sgt->sgl->sg_pgs[i] = pgs[i]
    [all...]
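
Note: sg_alloc_table in linux_sgt.c boils down to allocating the page-pointer array and recording the count, failing cleanly when the allocation does not succeed. A userland sketch of that pattern; fake_sg_table and calloc stand in for the drm2 structure and kcalloc:

#include <stdio.h>
#include <stdlib.h>

struct page;

struct fake_sg_table {
    struct page **pgs;       /* page-pointer array, like sg_pgs */
    unsigned npgs;           /* number of pages, like sg_npgs/nents */
};

static int
fake_sg_alloc_table(struct fake_sg_table *t, unsigned npgs)
{
    t->pgs = calloc(npgs, sizeof(t->pgs[0]));
    if (t->pgs == NULL)
        return -1;           /* -ENOMEM in the kernel version */
    t->npgs = npgs;
    return 0;
}

int
main(void)
{
    struct fake_sg_table t;

    if (fake_sg_alloc_table(&t, 16) != 0) {
        perror("calloc");
        return 1;
    }
    printf("allocated table for %u pages\n", t.npgs);
    free(t.pgs);
    return 0;
}
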
  /src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/
amdgpu_dma_buf.c 273 struct sg_table *sgt; local in function:amdgpu_dma_buf_map
280 sgt = drm_prime_pages_to_sg(bo->tbo.ttm->pages, bo->tbo.num_pages);
281 if (IS_ERR(sgt))
282 return sgt;
284 if (!dma_map_sg_attrs(attach->dev, sgt->sgl, sgt->nents, dir,
288 return sgt;
291 sg_free_table(sgt);
292 kfree(sgt);
299 * @sgt: sg_table to unmap
    [all...]
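
Note: amdgpu_dma_buf_map builds the table first, then maps it for DMA, and undoes the table (sg_free_table plus kfree) if the mapping step fails. A minimal standalone sketch of that unwind ordering, with dummy helpers in place of drm_prime_pages_to_sg() and dma_map_sg_attrs():

#include <stdio.h>
#include <stdlib.h>

struct fake_sgt { int mapped; };

static struct fake_sgt *
build_table(void)
{
    return calloc(1, sizeof(struct fake_sgt));
}

static int
map_table(struct fake_sgt *sgt)
{
    sgt->mapped = 1;         /* pretend the DMA mapping succeeded */
    return 1;                /* 0 would mean failure, as with dma_map_sg_attrs() */
}

int
main(void)
{
    struct fake_sgt *sgt = build_table();

    if (sgt == NULL)
        return 1;
    if (!map_table(sgt)) {
        free(sgt);           /* matches sg_free_table() + kfree() on the error path */
        return 1;
    }
    printf("mapped\n");
    free(sgt);
    return 0;
}
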
amdgpu_ttm.c 1340 struct sg_table *sgt; local in function:amdgpu_ttm_tt_populate
1343 sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
1344 if (IS_ERR(sgt))
1345 return PTR_ERR(sgt);
1347 ttm->sg = sgt;
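
Note: the amdgpu_ttm_tt_populate hunk relies on the kernel's ERR_PTR convention, where a single return value is either a usable pointer or a negative errno encoded as a pointer and tested with IS_ERR()/PTR_ERR(). A small standalone re-implementation of that convention for illustration (not the kernel headers):

#include <stdio.h>
#include <stdint.h>
#include <errno.h>

#define MAX_ERRNO 4095

static inline void *ERR_PTR(long err) { return (void *)(intptr_t)err; }
static inline long PTR_ERR(const void *p) { return (long)(intptr_t)p; }
static inline int IS_ERR(const void *p)
{
    return (uintptr_t)p >= (uintptr_t)-MAX_ERRNO;
}

static void *
get_mapping(int fail)
{
    static int dummy;        /* stands in for a real sg_table */

    return fail ? ERR_PTR(-ENOMEM) : &dummy;
}

int
main(void)
{
    void *sgt = get_mapping(1);

    if (IS_ERR(sgt)) {       /* caller propagates PTR_ERR(sgt), as above */
        printf("mapping failed: %ld\n", PTR_ERR(sgt));
        return 1;
    }
    return 0;
}
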
  /src/sys/external/bsd/drm2/dist/drm/i915/gem/
i915_gem_internal.c 47 struct sg_table *sgt = NULL; local in function:i915_gem_object_get_pages_internal
77 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
78 if (sgt == NULL) {
82 if (sg_alloc_table_from_bus_dmamem(sgt, dmat, obj->mm.u.internal.segs,
89 ret = i915_gem_gtt_prepare_pages(obj, sgt);
95 __i915_gem_object_set_pages(obj, sgt, i915_sg_page_sizes(sgt->sgl));
101 i915_gem_gtt_finish_pages(obj, sgt);
103 sg_free_table(sgt);
    [all...]
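
Note: i915_gem_object_get_pages_internal is the usual multi-step setup with goto-based unwinding, where a failure in a later step releases the earlier ones in reverse order. A sketch of that shape with placeholder steps; none of the names below are the driver's:

#include <stdio.h>

static int step_a(void) { return 0; }   /* e.g. allocate the sg_table */
static int step_b(void) { return 0; }   /* e.g. fill it from dmamem */
static int step_c(void) { return -1; }  /* e.g. prepare the pages; fails here */

static void undo_b(void) { puts("undo step b"); }
static void undo_a(void) { puts("undo step a"); }

int
main(void)
{
    int ret;

    if ((ret = step_a()) != 0)
        goto out;
    if ((ret = step_b()) != 0)
        goto err_a;
    if ((ret = step_c()) != 0)
        goto err_b;
    puts("all steps succeeded");
    return 0;

err_b:
    undo_b();                /* later steps are undone first */
err_a:
    undo_a();
out:
    return 1;
}
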
i915_gem_pages.c 329 struct sg_table *sgt = obj->mm.pages;
339 return kmap(sg_page(sgt->sgl));
373 for_each_sgt_page(page, iter, sgt)
384 for_each_sgt_daddr(addr, iter, sgt)
  /src/sys/external/bsd/drm2/dist/drm/xen/
xen_drm_front_gem.h 29 struct sg_table *sgt);
xen_drm_front_gem.c 193 struct sg_table *sgt)
209 xen_obj->sgt_imported = sgt;
211 ret = drm_prime_sg_to_page_addr_arrays(sgt, xen_obj->pages,
223 size, sgt->nents);
  /src/sys/external/bsd/drm2/dist/drm/
drm_prime.c 756 struct sg_table *sgt; local in function:drm_gem_map_dma_buf
762 sgt = obj->funcs->get_sg_table(obj);
764 sgt = obj->dev->driver->gem_prime_get_sg_table(obj);
766 if (!dma_map_sg_attrs(attach->dev, sgt->sgl, sgt->nents, dir,
768 sg_free_table(sgt);
769 kfree(sgt);
770 sgt = ERR_PTR(-ENOMEM);
773 return sgt;
780 * @sgt: scatterlist info of the buffer to unmap
1059 struct sg_table *sgt; local in function:drm_gem_prime_import_dev
    [all...]
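
Note: drm_gem_map_dma_buf prefers the per-object hook (obj->funcs->get_sg_table) and falls back to the driver-wide gem_prime_get_sg_table. A minimal sketch of that two-level dispatch, with invented structure and hook names:

#include <stdio.h>
#include <stddef.h>

struct obj;

struct obj_funcs {
    const char *(*get_table)(struct obj *);
};

struct driver {
    const char *(*get_table)(struct obj *);
};

struct obj {
    const struct obj_funcs *funcs;   /* per-object hooks, may be absent */
    const struct driver *drv;        /* driver-wide fallback */
};

static const char *drv_get_table(struct obj *o) { (void)o; return "driver hook"; }

static const char *
map_table(struct obj *o)
{
    if (o->funcs != NULL && o->funcs->get_table != NULL)
        return o->funcs->get_table(o);
    return o->drv->get_table(o);
}

int
main(void)
{
    struct driver drv = { drv_get_table };
    struct obj o = { NULL, &drv };

    printf("%s\n", map_table(&o));
    return 0;
}
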
  /src/sys/external/bsd/drm2/drm/
drm_gem_cma_helper.c 44 struct sg_table *sgt)
53 if (sgt) {
55 &nsegs, sgt);
92 if (!sgt)
104 if (obj->sgt)
105 drm_prime_sg_free(obj->sgt);
130 if (obj->sgt)
131 drm_prime_sg_free(obj->sgt);
249 struct dma_buf_attachment *attach, struct sg_table *sgt)
251 size_t size = drm_prime_sg_size(sgt);
    [all...]
drm_cache.c 75 drm_clflush_sg(struct sg_table *sgt)
77 drm_clflush_pages(sgt->sgl->sg_pgs, sgt->sgl->sg_npgs);
  /src/sys/dev/ic/
ld_aac.c 147 struct aac_sg_tableraw *sgt; local in function:ld_aac_dobio
157 sgt = &raw->SgMapRaw;
161 sgt->SgCount = xfer->dm_nsegs;
162 sge = sgt->SgEntryRaw;
178 struct aac_sg_table *sgt; local in function:ld_aac_dobio
191 sgt = &bw->SgMap;
200 sgt = &br->SgMap;
204 sgt->SgCount = xfer->dm_nsegs;
205 sge = sgt->SgEntry;
223 struct aac_sg_table64 *sgt; local in function:ld_aac_dobio
    [all...]
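
Note: ld_aac_dobio copies the bus_dma segment list into the controller's own SG layout: set SgCount from dm_nsegs, then fill one SgEntry per segment. A standalone sketch of that copy; the structures below are simplified stand-ins, not the aac(4) definitions:

#include <stdio.h>
#include <stdint.h>

#define MAXSEG 4

struct dma_seg {             /* stand-in for bus_dma_segment_t */
    uint64_t addr;
    uint32_t len;
};

struct hw_sg_entry {         /* stand-in for one SgEntry */
    uint32_t addr;
    uint32_t len;
};

struct hw_sg_table {         /* stand-in for struct aac_sg_table */
    uint32_t count;
    struct hw_sg_entry ent[MAXSEG];
};

int
main(void)
{
    struct dma_seg segs[2] = { { 0x1000, 512 }, { 0x3000, 1024 } };
    struct hw_sg_table sgt;
    unsigned i;

    sgt.count = 2;                       /* SgCount = xfer->dm_nsegs */
    for (i = 0; i < sgt.count; i++) {    /* copy each DMA segment in */
        sgt.ent[i].addr = (uint32_t)segs[i].addr;
        sgt.ent[i].len = segs[i].len;
    }
    for (i = 0; i < sgt.count; i++)
        printf("seg %u: %#x + %u\n", i,
            (unsigned)sgt.ent[i].addr, (unsigned)sgt.ent[i].len);
    return 0;
}
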
  /src/sys/external/bsd/drm2/dist/drm/vmwgfx/
vmwgfx_ttm_buffer.c 250 struct sg_table sgt; member in struct:vmw_ttm_tt
350 __sg_page_iter_start(&viter->iter.base, vsgt->sgt->sgl,
351 vsgt->sgt->orig_nents, p_offset);
370 dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents,
372 vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents;
393 ret = dma_map_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.orig_nents,
398 vmw_tt->sgt.nents = ret
    [all...]
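
Note: the vmwgfx hunks keep two counts per table: orig_nents (entries allocated) and nents (entries the DMA mapping actually produced, possibly fewer if segments were coalesced), and nents is restored to orig_nents on unmap. A minimal bookkeeping sketch, with fake_map() standing in for dma_map_sg():

#include <stdio.h>

struct fake_sgt {
    unsigned orig_nents;     /* how many entries the table holds */
    unsigned nents;          /* how many are valid after mapping */
};

static unsigned
fake_map(unsigned orig_nents)
{
    /* pretend the IOMMU merged two adjacent segments */
    return orig_nents > 1 ? orig_nents - 1 : orig_nents;
}

int
main(void)
{
    struct fake_sgt sgt = { 8, 8 };

    sgt.nents = fake_map(sgt.orig_nents);    /* like the dma_map_sg() call above */
    printf("mapped %u of %u entries\n", sgt.nents, sgt.orig_nents);

    sgt.nents = sgt.orig_nents;              /* restored on unmap, as above */
    return 0;
}
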
vmwgfx_drv.h 310 * @sgt: Pointer to a struct sg_table with binding information
317 struct sg_table *sgt; member in struct:vmw_sg_table
  /src/sys/external/bsd/drm2/include/drm/
drm_gem_cma_helper.h 56 struct sg_table *sgt; member in struct:drm_gem_cma_object
  /src/sys/external/bsd/drm2/dist/include/drm/
drm_prime.h 85 struct sg_table *sgt,
127 int drm_prime_sg_to_page_addr_arrays(struct sg_table *sgt, struct page **pages,
drm_drv.h 635 struct sg_table *sgt);
  /src/sys/external/bsd/drm2/dist/drm/virtio/
virtgpu_vq.c 279 struct sg_table *sgt; local in function:vmalloc_to_sgt
286 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
287 if (!sgt)
291 ret = sg_alloc_table(sgt, *sg_ents, GFP_KERNEL);
293 kfree(sgt);
297 for_each_sg(sgt->sgl, sg, *sg_ents, i) {
300 sg_free_table(sgt);
301 kfree(sgt);
312 return sgt;
368 struct sg_table *sgt = NULL; local in function:virtio_gpu_queue_fenced_ctrl_buffer
    [all...]
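
Note: vmalloc_to_sgt sizes the table from the buffer length, fills one entry per page, and tears everything down if any step fails. A sketch of the sizing and fill loop only; the entry count is computed the obvious way and the entry type is invented:

#include <stdio.h>
#include <stdlib.h>

#define PAGE_SIZE 4096u

struct ent {
    size_t off;              /* offset of this chunk in the buffer */
    size_t len;              /* chunk length, at most one page */
};

int
main(void)
{
    size_t size = 10000;                       /* buffer being described */
    size_t nents = (size + PAGE_SIZE - 1) / PAGE_SIZE;
    struct ent *ents = calloc(nents, sizeof(*ents));
    size_t i, left = size;

    if (ents == NULL)
        return 1;
    for (i = 0; i < nents; i++) {              /* one entry per page */
        ents[i].off = i * PAGE_SIZE;
        ents[i].len = left < PAGE_SIZE ? left : PAGE_SIZE;
        left -= ents[i].len;
    }
    printf("%zu entries, last one %zu bytes\n", nents, ents[nents - 1].len);
    free(ents);
    return 0;
}
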
virtgpu_drv.h 369 struct sg_table *sgt);
  /src/sys/external/bsd/drm2/dist/drm/i915/selftests/
scatterlist.c 121 struct sgt_iter sgt; local in function:expect_pfn_sgtiter
126 for_each_sgt_page(page, sgt, &pt->st) {
i915_vma.c 631 struct sgt_iter sgt; local in function:assert_partial
634 for_each_sgt_daddr(dma, sgt, vma->pages) {
  /src/sys/external/bsd/drm2/dist/drm/qxl/
qxl_drv.h 456 struct sg_table *sgt);

Completed in 60 milliseconds