/src/sys/external/bsd/drm2/dist/drm/i915/gt/ |
intel_engine_pool_types.h | 28 struct intel_engine_pool *pool; member in struct:intel_engine_pool_node
|
intel_engine_pool.c | 18 static struct intel_engine_cs *to_engine(struct intel_engine_pool *pool) 20 return container_of(pool, struct intel_engine_cs, pool); 24 bucket_for_size(struct intel_engine_pool *pool, size_t sz) 34 if (n >= ARRAY_SIZE(pool->cache_list)) 35 n = ARRAY_SIZE(pool->cache_list) - 1; 37 return &pool->cache_list[n]; 74 struct intel_engine_pool *pool = node->pool; local in function:pool_retire 75 struct list_head *list = bucket_for_size(pool, node->obj->base.size) 130 struct intel_engine_pool *pool = lookup_pool(engine); local in function:intel_engine_get_pool [all...] |
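The bucket_for_size() hits above clamp an index n against ARRAY_SIZE(pool->cache_list), but the computation of n itself falls in the elided span (lines 24-34). A minimal standalone sketch of such clamped power-of-two bucketing; SKETCH_PAGE_SHIFT, SKETCH_NBUCKETS, bucket_index_for_size and the log2-of-pages mapping are all assumptions, not the driver's actual code:

	#include <stddef.h>

	#define SKETCH_PAGE_SHIFT 12	/* assumed 4 KiB pages */
	#define SKETCH_NBUCKETS    4	/* stand-in for ARRAY_SIZE(pool->cache_list) */

	/*
	 * Map an object size to a cache bucket: one bucket per power-of-two
	 * page count, with anything larger clamped into the last bucket,
	 * mirroring the ARRAY_SIZE clamp shown in the excerpt.
	 */
	static int
	bucket_index_for_size(size_t sz)
	{
		size_t pages = sz >> SKETCH_PAGE_SHIFT;
		int n = 0;

		while (pages > 1) {	/* integer log2 of the page count */
			pages >>= 1;
			n++;
		}
		if (n >= SKETCH_NBUCKETS)
			n = SKETCH_NBUCKETS - 1;
		return n;
	}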
/src/distrib/utils/more/ |
linenum.c | 94 #define NPOOL 50 /* Size of line number pool */ 102 static struct linenum pool[NPOOL]; /* The pool itself */ variable in typeref:struct:linenum[] 119 for (p = pool; p < &pool[NPOOL-2]; p++) 121 pool[NPOOL-2].next = NULL; 122 freelist = pool; 124 spare = &pool[NPOOL-1];
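The linenum.c hits show a fixed array acting as a free-list pool, with one entry held back as a spare. A sketch of that initialization, assuming the list is linked through the next member seen in the excerpt (the other fields of struct linenum are omitted, and the p->next = p + 1 linking step is an assumption):

	#define NPOOL 50			/* size of line number pool */

	struct linenum {
		struct linenum *next;		/* free-list link */
		/* position/line-number fields omitted in this sketch */
	};

	static struct linenum pool[NPOOL];	/* the pool itself */
	static struct linenum *freelist;	/* head of the free list */
	static struct linenum *spare;		/* emergency spare entry */

	/*
	 * Chain the first NPOOL-1 entries into a NULL-terminated free list
	 * and reserve the last entry as a spare, following the loop bounds
	 * shown in the excerpt.
	 */
	static void
	pool_init_sketch(void)
	{
		struct linenum *p;

		for (p = pool; p < &pool[NPOOL-2]; p++)
			p->next = p + 1;
		pool[NPOOL-2].next = NULL;
		freelist = pool;
		spare = &pool[NPOOL-1];
	}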
|
/src/sys/external/bsd/drm2/dist/drm/i915/gem/ |
i915_gem_object_blt.c | 26 struct intel_engine_pool_node *pool; local in function:intel_emit_vma_fill_blt 41 pool = intel_engine_get_pool(ce->engine, size); 42 if (IS_ERR(pool)) { 43 err = PTR_ERR(pool); 47 cmd = i915_gem_object_pin_map(pool->obj, I915_MAP_WC); 88 i915_gem_object_unpin_map(pool->obj); 90 batch = i915_vma_instance(pool->obj, ce->vm, NULL); 100 batch->private = pool; 104 intel_engine_pool_put(pool); 210 struct intel_engine_pool_node *pool; local in function:intel_emit_vma_copy_blt [all...] |
/src/tests/rump/kernspace/ |
threadpool.c | 158 struct threadpool *pool; local in function:rumptest_threadpool_unbound_schedule 161 error = threadpool_get(&pool, PRI_NONE); 168 threadpool_schedule_job(pool, &data.job); 176 threadpool_put(pool, PRI_NONE); 184 struct threadpool *pool; local in function:rumptest_threadpool_percpu_schedule 190 pool = threadpool_percpu_ref(pcpu); 196 threadpool_schedule_job(pool, &data.job); 211 struct threadpool *pool; local in function:rumptest_threadpool_job_cancel 215 error = threadpool_get(&pool, PRI_NONE); 221 threadpool_schedule_job(pool, &data.job) 247 struct threadpool *pool; local in function:rumptest_threadpool_job_cancelthrash [all...] |
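The rump test above exercises the unbound pool API: threadpool_get, threadpool_schedule_job, threadpool_put. A minimal kernel-side sketch of the same pattern; the job setup/teardown calls (threadpool_job_init, threadpool_job_done, threadpool_job_destroy) and the interlock handling are assumptions beyond what the excerpt shows:

	#include <sys/threadpool.h>
	#include <sys/mutex.h>

	static kmutex_t example_lock;		/* interlock assumed for job init/done */

	static void
	example_job_func(struct threadpool_job *job)
	{
		/* ... do the deferred work here ... */
		mutex_enter(&example_lock);
		threadpool_job_done(job);	/* mark the job complete */
		mutex_exit(&example_lock);
	}

	static int
	run_one_unbound_job(void)
	{
		struct threadpool *pool;
		struct threadpool_job job;
		int error;

		error = threadpool_get(&pool, PRI_NONE);	/* reference the shared pool */
		if (error)
			return error;

		mutex_init(&example_lock, MUTEX_DEFAULT, IPL_NONE);
		threadpool_job_init(&job, example_job_func, &example_lock, "example");
		threadpool_schedule_job(pool, &job);

		/* ... wait until the job has run, as the tests do ... */

		threadpool_job_destroy(&job);
		mutex_destroy(&example_lock);
		threadpool_put(pool, PRI_NONE);			/* drop the reference */
		return 0;
	}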
/src/regress/sys/kern/allocfree/ |
allocfree.c | 60 static struct pool pool; variable in typeref:struct:pool 114 p = pool_get(&pool, PR_WAITOK); 117 pool_put(&pool, p); 262 pool_init(&pool, sz, 0, 0, 0, "tpool", 267 pool_destroy(&pool);
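The allocfree benchmark times the pool(9) get/put path shown above. A sketch of the full lifecycle it relies on; the trailing pool_init() arguments are cut off in the excerpt, so the default allocator (NULL) and IPL_NONE used here are assumptions:

	#include <sys/pool.h>

	static struct pool examplepool;		/* hypothetical fixed-size item pool */

	static void
	pool_roundtrip(size_t sz)
	{
		void *p;

		/* Create the pool: item size sz, no special alignment or flags. */
		pool_init(&examplepool, sz, 0, 0, 0, "expool", NULL, IPL_NONE);

		p = pool_get(&examplepool, PR_WAITOK);	/* may sleep for memory */
		/* ... use the allocation ... */
		pool_put(&examplepool, p);

		pool_destroy(&examplepool);
	}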
|
/src/sys/fs/nfs/common/ |
nfs_fha.c | 234 SVCPOOL *pool; local in function:fha_hash_entry_lookup 238 pool = *softc->pool; 298 SVCPOOL *pool; local in function:fha_hash_entry_choose_thread 302 pool = *softc->pool; 426 * Grab the pool lock here to not let chosen thread go away before 478 SVCPOOL *pool; local in function:fhe_stats_sysctl 482 pool = NULL; 484 if (!*softc->pool) { [all...] |
nfs_fha.h | 106 SVCPOOL **pool; member in struct:fha_params
|
/src/tests/kernel/threadpool_tester/ |
threadpool_tester.c | 70 struct threadpool *pool, *opool = NULL; local in function:threadpool_tester_get_unbound 86 error = threadpool_get(&pool, val); 95 ctx->ctx_unbound[pri_to_idx(val)] = pool; 101 /* Should have gotten reference to existing pool. */ 102 TP_LOG(("%s: found existing unbound pool for pri %d (%s)\n", 103 __func__, val, opool == pool ? "match" : "NO MATCH")); 104 KASSERT(opool == pool); 105 threadpool_put(pool, val); 108 TP_LOG(("%s: created unbound pool for pri %d\n", 119 struct threadpool *pool; local in function:threadpool_tester_put_unbound 158 struct threadpool *pool; local in function:threadpool_tester_run_unbound 284 struct threadpool *pool; local in function:threadpool_tester_run_percpu 450 struct threadpool *pool = local in function:threadpool_tester_fini [all...] |
/src/tests/modules/threadpool_tester/ |
threadpool_tester.c | 70 struct threadpool *pool, *opool = NULL; local in function:threadpool_tester_get_unbound 86 error = threadpool_get(&pool, val); 95 ctx->ctx_unbound[pri_to_idx(val)] = pool; 101 /* Should have gotten reference to existing pool. */ 102 TP_LOG(("%s: found existing unbound pool for pri %d (%s)\n", 103 __func__, val, opool == pool ? "match" : "NO MATCH")); 104 KASSERT(opool == pool); 105 threadpool_put(pool, val); 108 TP_LOG(("%s: created unbound pool for pri %d\n", 119 struct threadpool *pool; local in function:threadpool_tester_put_unbound 158 struct threadpool *pool; local in function:threadpool_tester_run_unbound 284 struct threadpool *pool; local in function:threadpool_tester_run_percpu 450 struct threadpool *pool = local in function:threadpool_tester_fini [all...] |
/src/sys/dev/dm/ |
dm_target_delay.c | 43 #include <sys/pool.h> 83 static struct pool pool; variable in typeref:struct:pool 302 dp = pool_get(&pool, PR_WAITOK); 364 pool_put(&pool, dp); 499 pool_init(&pool, sizeof(struct dm_delay_buf), 0, 0, 0, 502 aprint_debug("Delay target pool initialized\n"); 511 pool_destroy(&pool); 513 aprint_debug("Delay target pool destroyed\n");
|
/src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dcn10/ |
amdgpu_dcn10_hw_sequencer_debug.c | 117 struct resource_pool *pool = dc->res_pool; local in function:dcn10_get_hubp_states 138 for (i = 0; i < pool->pipe_count; i++) { 139 struct hubp *hubp = pool->hubps[i]; 195 struct resource_pool *pool = dc->res_pool; local in function:dcn10_get_rq_states 208 for (i = 0; i < pool->pipe_count; i++) { 209 struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state); 217 pool->hubps[i]->inst, rq_regs->drq_expansion_mode, rq_regs->prq_expansion_mode, rq_regs->mrq_expansion_mode, 237 struct resource_pool *pool = dc->res_pool; local in function:dcn10_get_dlg_states 253 for (i = 0; i < pool->pipe_count; i++) { 254 struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state) 294 struct resource_pool *pool = dc->res_pool; local in function:dcn10_get_ttu_states 334 struct resource_pool *pool = dc->res_pool; local in function:dcn10_get_cm_states 389 struct resource_pool *pool = dc->res_pool; local in function:dcn10_get_mpcc_states 420 struct resource_pool *pool = dc->res_pool; local in function:dcn10_get_otg_states 495 struct resource_pool *pool = dc->res_pool; local in function:dcn10_clear_otpc_underflow 511 struct resource_pool *pool = dc->res_pool; local in function:dcn10_clear_hubp_underflow [all...] |
/src/sys/lib/libkern/ |
entpool.c | 33 * Entropy pool (`reseedable pseudorandom number generator') based on a 63 * On top of the underlying sponge state, an entropy pool maintains an 65 * the input buffer. Zeroing an entropy pool initializes it. 129 * Enter len bytes from buf into the entropy pool P, stirring as 169 * entropy pool P. Roughly corresponds to P.feed in the paper, 214 * Stir the entropy pool after entpool_enter_nostir fails. If it 234 * Extract len bytes from the entropy pool P into buf. 415 struct entpool pool, *P = &pool; local in function:entpool_selftest
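The entpool.c comments describe the pool's lifecycle: zero to initialize, enter bytes (stirring as needed), extract bytes. A sketch of that flow; the excerpt does not show prototypes, so the entpool_enter/entpool_extract signatures used here are assumptions:

	#include <string.h>
	/* #include "entpool.h"	-- in-tree header defining struct entpool */

	static void
	entpool_roundtrip(const void *seed, size_t seedlen, void *out, size_t outlen)
	{
		struct entpool P;

		memset(&P, 0, sizeof P);		/* "Zeroing an entropy pool initializes it" */
		entpool_enter(&P, seed, seedlen);	/* feed input, stirring as needed */
		entpool_extract(&P, out, outlen);	/* draw outlen bytes of output */
		explicit_memset(&P, 0, sizeof P);	/* wipe the state when finished */
	}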
|
/src/usr.sbin/makefs/ffs/ |
buf.h | 123 struct pool { struct
|
/src/sys/external/bsd/drm2/dist/drm/amd/display/dc/calcs/ |
amdgpu_dcn_calcs.c | 488 // dc->dml.logger = pool->base.logger; 513 const struct resource_pool *pool, 525 secondary_pipe->plane_res.mi = pool->mis[secondary_pipe->pipe_idx]; 526 secondary_pipe->plane_res.hubp = pool->hubps[secondary_pipe->pipe_idx]; 527 secondary_pipe->plane_res.ipp = pool->ipps[secondary_pipe->pipe_idx]; 528 secondary_pipe->plane_res.xfm = pool->transforms[secondary_pipe->pipe_idx]; 529 secondary_pipe->plane_res.dpp = pool->dpps[secondary_pipe->pipe_idx]; 530 secondary_pipe->plane_res.mpcc_inst = pool->dpps[secondary_pipe->pipe_idx]->inst; 732 const struct resource_pool *pool = dc->res_pool; local in function:dcn_validate_bandwidth 867 for (i = 0, input_idx = 0; i < pool->pipe_count; i++) [all...] |
/src/games/hack/ |
hack.mon.c | 116 inpool = (levl[mtmp->mx][mtmp->my].typ == POOL); 581 boolean pool; local in function:mfndpos 587 pool = (mon->data->mlet == ';'); 600 if ((ntyp == POOL) == pool) { 660 if (!cnt && pool && nowtyp != POOL) { 661 pool = FALSE;
|
/src/sys/arch/mac68k/include/ |
iopreg.h | 138 struct pool pool; member in struct:_s_IOP
|
/src/sys/external/bsd/drm2/dist/drm/ttm/ |
ttm_page_alloc.c | 30 /* simple list based uncached page pool 31 * - Pool collects recently freed pages for reuse 64 * struct ttm_page_pool - Pool to reuse recently allocated uc/wc pages. 66 * @lock: Protects the shared pool from concurrent access. Must be used with 67 * irqsave/irqrestore variants because pool allocator may be called from 70 * @list: Pool of free uc/wc pages for fast reuse. 72 * @npages: Number of pages in pool. 87 * Limits for the pool. They are handled without locks because only place where 103 * Manager is read only object for pool code so it doesn't need locking. 105 * @free_interval: minimum number of jiffies between freeing pages from pool 388 struct ttm_page_pool *pool; local in function:ttm_pool_shrink_scan 424 struct ttm_page_pool *pool; local in function:ttm_pool_shrink_count 716 struct ttm_page_pool *pool = ttm_get_pool(flags, false, cstate); local in function:ttm_put_pages 834 struct ttm_page_pool *pool = ttm_get_pool(flags, false, cstate); local in function:ttm_get_pages [all...]
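The kerneldoc fields quoted above outline the shape of one page pool. A sketch of just those documented members; the exact types, and any fields beyond lock, list and npages, are assumptions:

	#include <linux/spinlock.h>
	#include <linux/list.h>

	struct ttm_page_pool_sketch {
		spinlock_t	lock;	/* protects the shared pool; taken with the
					 * irqsave/irqrestore variants because the
					 * allocator may run in interrupt context */
		struct list_head list;	/* free uc/wc pages kept for fast reuse */
		unsigned	npages;	/* number of pages currently in the pool */
	};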
/src/sys/external/bsd/gnu-efi/dist/lib/ |
print.c | 377 // Grow the pool buffer 514 Prints a formatted unicode string to allocated pool using va_list argument. 545 Prints a formatted unicode string to allocated pool. The caller 561 CHAR16 *pool; local in function:PoolPrint 563 pool = VPoolPrint(fmt, args); 565 return pool; 578 Concatenates a formatted unicode string to allocated pool. 583 Str - Tracks the allocated pool, size in use, and 584 amount of pool allocated.
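PoolPrint(), per the comments above, formats a unicode string into a buffer allocated from pool and returns it. A small usage sketch; the FreePool() pairing and the %r status format are assumptions about the surrounding gnu-efi conventions:

	#include <efi.h>
	#include <efilib.h>

	/* Format a message into a pool-allocated buffer, print it, free it. */
	static VOID
	report_status(EFI_STATUS status)
	{
		CHAR16 *msg;

		msg = PoolPrint(L"operation returned %r\n", status);
		if (msg != NULL) {
			Print(L"%s", msg);
			FreePool(msg);
		}
	}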
|
/src/sys/kern/ |
kern_threadpool.c | 35 * A thread pool is a collection of worker threads idle or running 38 * a thread pool does not allocate or even sleep at all, except perhaps 43 * A per-CPU thread pool (threadpool_percpu) is a collection of thread 45 * use, there is one shared unbound thread pool (i.e., pool of threads 46 * not bound to any CPU) and one shared per-CPU thread pool. 48 * To use the unbound thread pool at priority pri, call 49 * threadpool_get(&pool, pri). When you're done, call 50 * threadpool_put(pool, pri). 54 * pool returned by threadpool_percpu_ref(pool_percpu) for the current 645 struct threadpool **poolp, *pool; local in function:threadpool_percpu_ref 658 struct threadpool **poolp, *pool; local in function:threadpool_percpu_ref_remote 967 struct threadpool *const pool = dispatcher->tpt_pool; local in function:threadpool_dispatcher_thread 1105 struct threadpool *const pool = thread->tpt_pool; local in function:threadpool_thread [all...]
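The per-CPU variant described in these comments pairs a percpu reference with threadpool_percpu_ref() to pick the current CPU's pool (as the rump test earlier also does). A sketch of that path; threadpool_percpu_get/put taking the same priority argument as the unbound calls is an assumption, since the excerpt truncates before showing them:

	#include <sys/threadpool.h>

	static int
	run_job_on_current_cpu(struct threadpool_job *job)
	{
		struct threadpool_percpu *pcpu;
		struct threadpool *pool;
		int error;

		error = threadpool_percpu_get(&pcpu, PRI_NONE);	/* reference the percpu pools */
		if (error)
			return error;

		pool = threadpool_percpu_ref(pcpu);	/* pool bound to the current CPU */
		threadpool_schedule_job(pool, job);

		/* ... wait for the job to complete before releasing ... */

		threadpool_percpu_put(pcpu, PRI_NONE);
		return 0;
	}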
/src/sys/arch/hpcsh/dev/hd64465/ |
hd64465pcmcia.c | 407 struct hd64465pcmcia_event *pe, *pool; local in function:__queue_event 416 pool = sc->sc_event_pool; 418 if (!pool[i].__queued) { 419 pe = &pool[i];
|
/src/sys/dist/pf/net/ |
pf_ioctl.c | 57 #include <sys/pool.h> 1256 struct pf_pool *pool = NULL; local in function:pfioctl 2438 pool = pf_get_pool(pp->anchor, pp->ticket, pp->r_action, 2440 if (pool == NULL) { 2444 TAILQ_FOREACH(pa, &pool->list, entries) 2453 pool = pf_get_pool(pp->anchor, pp->ticket, pp->r_action, 2455 if (pool == NULL) { 2459 pa = TAILQ_FIRST(&pool->list); 2497 pool = pf_get_pool(pca->anchor, pca->ticket, pca->r_action, 2499 if (pool == NULL) [all...] |
/src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dce100/ |
amdgpu_dce100_resource.c | 733 static void dce100_resource_destruct(struct dce110_resource_pool *pool) 737 for (i = 0; i < pool->base.pipe_count; i++) { 738 if (pool->base.opps[i] != NULL) 739 dce110_opp_destroy(&pool->base.opps[i]); 741 if (pool->base.transforms[i] != NULL) 742 dce100_transform_destroy(&pool->base.transforms[i]); 744 if (pool->base.ipps[i] != NULL) 745 dce_ipp_destroy(&pool->base.ipps[i]); 747 if (pool->base.mis[i] != NULL) { 748 kfree(TO_DCE_MEM_INPUT(pool->base.mis[i])) 1139 struct dce110_resource_pool *pool = local in function:dce100_create_resource_pool [all...] |
/src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dce112/ |
amdgpu_dce112_resource.c | 751 static void dce112_resource_destruct(struct dce110_resource_pool *pool) 755 for (i = 0; i < pool->base.pipe_count; i++) { 756 if (pool->base.opps[i] != NULL) 757 dce110_opp_destroy(&pool->base.opps[i]); 759 if (pool->base.transforms[i] != NULL) 760 dce112_transform_destroy(&pool->base.transforms[i]); 762 if (pool->base.ipps[i] != NULL) 763 dce_ipp_destroy(&pool->base.ipps[i]); 765 if (pool->base.mis[i] != NULL) { 766 kfree(TO_DCE_MEM_INPUT(pool->base.mis[i])) 1387 struct dce110_resource_pool *pool = local in function:dce112_create_resource_pool [all...] |
/src/sys/external/bsd/drm2/dist/drm/amd/display/dc/dce120/ |
amdgpu_dce120_resource.c | 595 static void dce120_resource_destruct(struct dce110_resource_pool *pool) 599 for (i = 0; i < pool->base.pipe_count; i++) { 600 if (pool->base.opps[i] != NULL) 601 dce110_opp_destroy(&pool->base.opps[i]); 603 if (pool->base.transforms[i] != NULL) 604 dce120_transform_destroy(&pool->base.transforms[i]); 606 if (pool->base.ipps[i] != NULL) 607 dce_ipp_destroy(&pool->base.ipps[i]); 609 if (pool->base.mis[i] != NULL) { 610 kfree(TO_DCE_MEM_INPUT(pool->base.mis[i])) 1254 struct dce110_resource_pool *pool = local in function:dce120_create_resource_pool [all...] |