Home | Sort by: relevance | last modified time | path
    Searched defs:dmaobj (Results 1 - 16 of 16) sorted by relevance

  /src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/engine/dma/
nouveau_nvkm_engine_dma_base.c 42 struct nvkm_dmaobj *dmaobj = NULL; local in function:nvkm_dma_oclass_new
45 ret = dma->func->class_new(dma, oclass, data, size, &dmaobj);
46 if (dmaobj)
47 *pobject = &dmaobj->object;
nouveau_nvkm_engine_dma_usergf119.c 48 struct gf119_dmaobj *dmaobj = gf119_dmaobj(base); local in function:gf119_dmaobj_bind
49 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
55 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0);
56 nvkm_wo32(*pgpuobj, 0x04, dmaobj->base.start >> 8);
57 nvkm_wo32(*pgpuobj, 0x08, dmaobj->base.limit >> 8);
80 struct gf119_dmaobj *dmaobj; local in function:gf119_dmaobj_new
84 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL)))
86 *pdmaobj = &dmaobj->base;
89 &data, &size, &dmaobj->base)
    [all...]
nouveau_nvkm_engine_dma_usernv04.c 49 struct nv04_dmaobj *dmaobj = nv04_dmaobj(base); local in function:nv04_dmaobj_bind
50 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
51 u64 offset = dmaobj->base.start & 0xfffff000;
52 u64 adjust = dmaobj->base.start & 0x00000fff;
53 u32 length = dmaobj->base.limit - dmaobj->base.start;
56 if (dmaobj->clone) {
59 if (!dmaobj->base.start)
70 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0 | (adjust << 20));
72 nvkm_wo32(*pgpuobj, 0x08, dmaobj->flags2 | offset)
90 struct nv04_dmaobj *dmaobj; local in function:nv04_dmaobj_new
    [all...]
nouveau_nvkm_engine_dma_base.c 42 struct nvkm_dmaobj *dmaobj = NULL; local in function:nvkm_dma_oclass_new
45 ret = dma->func->class_new(dma, oclass, data, size, &dmaobj);
46 if (dmaobj)
47 *pobject = &dmaobj->object;
nouveau_nvkm_engine_dma_usergf119.c 48 struct gf119_dmaobj *dmaobj = gf119_dmaobj(base); local in function:gf119_dmaobj_bind
49 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
55 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0);
56 nvkm_wo32(*pgpuobj, 0x04, dmaobj->base.start >> 8);
57 nvkm_wo32(*pgpuobj, 0x08, dmaobj->base.limit >> 8);
80 struct gf119_dmaobj *dmaobj; local in function:gf119_dmaobj_new
84 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL)))
86 *pdmaobj = &dmaobj->base;
89 &data, &size, &dmaobj->base)
    [all...]
nouveau_nvkm_engine_dma_usernv04.c 49 struct nv04_dmaobj *dmaobj = nv04_dmaobj(base); local in function:nv04_dmaobj_bind
50 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
51 u64 offset = dmaobj->base.start & 0xfffff000;
52 u64 adjust = dmaobj->base.start & 0x00000fff;
53 u32 length = dmaobj->base.limit - dmaobj->base.start;
56 if (dmaobj->clone) {
59 if (!dmaobj->base.start)
70 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0 | (adjust << 20));
72 nvkm_wo32(*pgpuobj, 0x08, dmaobj->flags2 | offset)
90 struct nv04_dmaobj *dmaobj; local in function:nv04_dmaobj_new
    [all...]
nouveau_nvkm_engine_dma_user.c 56 struct nvkm_dmaobj *dmaobj = nvkm_dmaobj(base); local in function:nvkm_dmaobj_bind
57 return dmaobj->func->bind(dmaobj, gpuobj, align, pgpuobj);
75 struct nvkm_dmaobj *dmaobj)
89 nvkm_object_ctor(&nvkm_dmaobj_func, oclass, &dmaobj->object);
90 dmaobj->func = func;
91 dmaobj->dma = dma;
99 dmaobj->target = args->v0.target;
100 dmaobj->access = args->v0.access;
101 dmaobj->start = args->v0.start
    [all...]
nouveau_nvkm_engine_dma_usergf100.c 49 struct gf100_dmaobj *dmaobj = gf100_dmaobj(base); local in function:gf100_dmaobj_bind
50 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
56 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0);
57 nvkm_wo32(*pgpuobj, 0x04, lower_32_bits(dmaobj->base.limit));
58 nvkm_wo32(*pgpuobj, 0x08, lower_32_bits(dmaobj->base.start));
59 nvkm_wo32(*pgpuobj, 0x0c, upper_32_bits(dmaobj->base.limit) << 24 |
60 upper_32_bits(dmaobj->base.start));
62 nvkm_wo32(*pgpuobj, 0x14, dmaobj->flags5);
82 struct gf100_dmaobj *dmaobj; local in function:gf100_dmaobj_new
86 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))
    [all...]
nouveau_nvkm_engine_dma_usergv100.c 46 struct gv100_dmaobj *dmaobj = gv100_dmaobj(base); local in function:gv100_dmaobj_bind
47 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
48 u64 start = dmaobj->base.start >> 8;
49 u64 limit = dmaobj->base.limit >> 8;
55 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0);
79 struct gv100_dmaobj *dmaobj; local in function:gv100_dmaobj_new
83 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL)))
85 *pdmaobj = &dmaobj->base;
88 &data, &size, &dmaobj->base)
    [all...]
nouveau_nvkm_engine_dma_usernv50.c 49 struct nv50_dmaobj *dmaobj = nv50_dmaobj(base); local in function:nv50_dmaobj_bind
50 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
56 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0);
57 nvkm_wo32(*pgpuobj, 0x04, lower_32_bits(dmaobj->base.limit));
58 nvkm_wo32(*pgpuobj, 0x08, lower_32_bits(dmaobj->base.start));
59 nvkm_wo32(*pgpuobj, 0x0c, upper_32_bits(dmaobj->base.limit) << 24 |
60 upper_32_bits(dmaobj->base.start));
62 nvkm_wo32(*pgpuobj, 0x14, dmaobj->flags5);
82 struct nv50_dmaobj *dmaobj; local in function:nv50_dmaobj_new
86 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))
    [all...]
nouveau_nvkm_engine_dma_user.c 56 struct nvkm_dmaobj *dmaobj = nvkm_dmaobj(base); local in function:nvkm_dmaobj_bind
57 return dmaobj->func->bind(dmaobj, gpuobj, align, pgpuobj);
75 struct nvkm_dmaobj *dmaobj)
89 nvkm_object_ctor(&nvkm_dmaobj_func, oclass, &dmaobj->object);
90 dmaobj->func = func;
91 dmaobj->dma = dma;
99 dmaobj->target = args->v0.target;
100 dmaobj->access = args->v0.access;
101 dmaobj->start = args->v0.start
    [all...]
nouveau_nvkm_engine_dma_usergf100.c 49 struct gf100_dmaobj *dmaobj = gf100_dmaobj(base); local in function:gf100_dmaobj_bind
50 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
56 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0);
57 nvkm_wo32(*pgpuobj, 0x04, lower_32_bits(dmaobj->base.limit));
58 nvkm_wo32(*pgpuobj, 0x08, lower_32_bits(dmaobj->base.start));
59 nvkm_wo32(*pgpuobj, 0x0c, upper_32_bits(dmaobj->base.limit) << 24 |
60 upper_32_bits(dmaobj->base.start));
62 nvkm_wo32(*pgpuobj, 0x14, dmaobj->flags5);
82 struct gf100_dmaobj *dmaobj; local in function:gf100_dmaobj_new
86 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))
    [all...]
nouveau_nvkm_engine_dma_usergv100.c 46 struct gv100_dmaobj *dmaobj = gv100_dmaobj(base); local in function:gv100_dmaobj_bind
47 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
48 u64 start = dmaobj->base.start >> 8;
49 u64 limit = dmaobj->base.limit >> 8;
55 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0);
79 struct gv100_dmaobj *dmaobj; local in function:gv100_dmaobj_new
83 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL)))
85 *pdmaobj = &dmaobj->base;
88 &data, &size, &dmaobj->base)
    [all...]
nouveau_nvkm_engine_dma_usernv50.c 49 struct nv50_dmaobj *dmaobj = nv50_dmaobj(base); local in function:nv50_dmaobj_bind
50 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
56 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0);
57 nvkm_wo32(*pgpuobj, 0x04, lower_32_bits(dmaobj->base.limit));
58 nvkm_wo32(*pgpuobj, 0x08, lower_32_bits(dmaobj->base.start));
59 nvkm_wo32(*pgpuobj, 0x0c, upper_32_bits(dmaobj->base.limit) << 24 |
60 upper_32_bits(dmaobj->base.start));
62 nvkm_wo32(*pgpuobj, 0x14, dmaobj->flags5);
82 struct nv50_dmaobj *dmaobj; local in function:nv50_dmaobj_new
86 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))
    [all...]
  /src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/engine/fifo/
nouveau_nvkm_engine_fifo_chan.c 424 struct nvkm_dmaobj *dmaobj; local in function:nvkm_fifo_chan_ctor
441 dmaobj = nvkm_dmaobj_search(client, push);
442 if (IS_ERR(dmaobj))
443 return PTR_ERR(dmaobj);
445 ret = nvkm_object_bind(&dmaobj->object, chan->inst, -16,
nouveau_nvkm_engine_fifo_chan.c 424 struct nvkm_dmaobj *dmaobj; local in function:nvkm_fifo_chan_ctor
441 dmaobj = nvkm_dmaobj_search(client, push);
442 if (IS_ERR(dmaobj))
443 return PTR_ERR(dmaobj);
445 ret = nvkm_object_bind(&dmaobj->object, chan->inst, -16,

Completed in 206 milliseconds