/src/sys/external/bsd/sljit/dist/sljit_src/

sljitExecAllocator.c
     166: static sljit_uw allocated_size;                            (variable, type: sljit_uw)
     223: allocated_size += size;
     244: allocated_size += size;
     255: allocated_size += chunk_size;
     272: allocated_size -= header->size;
     299: /* If this block is freed, we still have (allocated_size / 2) free space. */
     300: if (total_size - free_block->size > (allocated_size * 3 / 2)) {
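
Read together, the occurrences above describe a single global counter: it grows by the block size when a request is carved out of an existing free block (lines 223, 244), grows by the whole chunk size when a fresh chunk has to be mapped (line 255), and shrinks by the size recorded in the block header on free (line 272). The fragment below is a minimal, self-contained sketch of that counter discipline only; malloc/free and the struct are illustrative stand-ins, not sljit's real chunk management.

/* Sketch of the allocated_size bookkeeping pattern in the listing above;
 * everything here is a simplified stand-in for sljitExecAllocator.c. */
#include <stdlib.h>

typedef unsigned long sljit_uw;

struct block_header {
    sljit_uw size;                   /* size remembered for the matching free */
};

static sljit_uw allocated_size;      /* total bytes currently handed out */

static struct block_header *exec_alloc(sljit_uw size)
{
    struct block_header *header = malloc(sizeof(*header) + size);
    if (!header)
        return NULL;
    header->size = size;
    allocated_size += size;          /* grow on every successful allocation */
    return header;
}

static void exec_free(struct block_header *header)
{
    allocated_size -= header->size;  /* shrink by the recorded size */
    free(header);
}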

sljitProtExecAllocator.c
     238: static sljit_uw allocated_size;                            (variable, type: sljit_uw)
     298: allocated_size += size;
     326: allocated_size += size;
     338: allocated_size += chunk_size;
     357: allocated_size -= header->size;
     384: /* If this block is freed, we still have (allocated_size / 2) free space. */
     385: if (total_size - free_block->size > (allocated_size * 3 / 2)) {
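
Both sljit variants gate chunk release on the same heuristic (line 300 in sljitExecAllocator.c, line 385 here): a wholly free chunk is only worth returning to the system if, after removing it, the remaining free space still exceeds half of allocated_size, which is what the comparison against allocated_size * 3 / 2 expresses. A sketch of just that predicate, assuming total_size is the total mapped size and free_block_size the size of the chunk under consideration:

/* Restatement of the release check quoted above; the surrounding
 * free-list walk is not shown. */
typedef unsigned long sljit_uw;

/* Release only if the free space left afterwards,
 * total_size - free_block_size - allocated_size, still exceeds
 * allocated_size / 2, i.e. total_size - free_block_size >
 * allocated_size * 3 / 2. */
static int may_release_chunk(sljit_uw total_size, sljit_uw allocated_size,
                             sljit_uw free_block_size)
{
    return total_size - free_block_size > (allocated_size * 3 / 2);
}

The effect is to keep roughly half of the live allocation as free headroom, rather than handing chunks back to the kernel only to map them again on the next allocation burst.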

/src/sys/external/bsd/drm2/dist/drm/amd/display/dc/inc/

compressor.h
     110: uint32_t allocated_size;                                   (member of struct compressor)

/src/sys/external/bsd/compiler_rt/dist/lib/interception/

interception_win.cc
     309: uptr allocated_size;                                       (member of struct __interception::TrampolineMemoryRegion)
     372: current->allocated_size = 0;
     376: } else if (current->max_size - current->allocated_size > size) {
     379: uptr next_address = current->content + current->allocated_size;
     395: uptr allocated_space = region->content + region->allocated_size;
     396: region->allocated_size += size;
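
In interception_win.cc, allocated_size is the bump offset of a fixed-capacity trampoline region: line 372 resets it, line 376 checks the remaining capacity, lines 379 and 395 derive the next free address from it, and line 396 advances it. Below is a self-contained sketch of that bump-allocation pattern; the struct is a simplified stand-in for __interception::TrampolineMemoryRegion, not its actual definition.

/* Simplified trampoline-region bookkeeping; field names mirror the
 * listing above, the rest is illustrative. */
#include <stdint.h>

typedef uintptr_t uptr;

struct trampoline_region {
    uptr content;          /* base address of the region's usable memory */
    uptr max_size;         /* total capacity in bytes */
    uptr allocated_size;   /* bytes already handed out (the bump offset) */
};

/* Returns the address of a fresh 'size'-byte slot, or 0 when the region
 * cannot satisfy the request. */
static uptr region_bump_alloc(struct trampoline_region *region, uptr size)
{
    if (region->max_size - region->allocated_size <= size)   /* cf. line 376 */
        return 0;
    uptr next_address = region->content + region->allocated_size;  /* cf. 379, 395 */
    region->allocated_size += size;                                 /* cf. 396 */
    return next_address;
}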

/src/sys/external/bsd/compiler_rt/dist/lib/asan/

asan_allocator.cc
     298: uptr allocated_size = allocator.GetActuallyAllocatedSize((void *)ac);   (local in __asan::Allocator::RePoisonChunk)
     301: uptr chunk_end = chunk + allocated_size;
     313: PoisonShadow(chunk, allocated_size, kAsanHeapLeftRedzoneMagic);
     471: uptr allocated_size = allocator.GetActuallyAllocatedSize(allocated);    (local in __asan::Allocator::Allocate)
     472: PoisonShadow((uptr)allocated, allocated_size, kAsanHeapLeftRedzoneMagic);
    1084: uptr allocated_size = instance.AllocationSize(ptr);                     (local in __sanitizer_get_allocated_size)
    1086: if (allocated_size == 0) {
    1090: return allocated_size;
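
In the ASan allocator, allocated_size is the size the underlying allocator actually reserved (GetActuallyAllocatedSize), not the size the caller requested; at lines 471-472 that whole region is poisoned as heap-left-redzone before the user-visible part is later made addressable again, and lines 1084-1090 show the query side, where a size of 0 marks a pointer the allocator does not own. The toy model below illustrates only the poison-then-unpoison order, with a byte-per-byte shadow array and poison_range()/unpoison_range() as stand-ins for PoisonShadow; the offsets and sizes are made up.

/* Toy shadow-memory model of the ordering at lines 471-472; not the
 * ASan implementation. */
#include <stdio.h>
#include <string.h>

#define REGION_BYTES 64
#define HEAP_LEFT_REDZONE 0xfa       /* plays the role of kAsanHeapLeftRedzoneMagic */

static unsigned char shadow[REGION_BYTES];   /* one shadow byte per data byte */

static void poison_range(size_t beg, size_t len, unsigned char magic)
{
    memset(shadow + beg, magic, len);
}

static void unpoison_range(size_t beg, size_t len)
{
    memset(shadow + beg, 0, len);
}

int main(void)
{
    size_t redzone = 16;                   /* hypothetical left redzone */
    size_t requested_size = 24;            /* what the caller asked for */
    size_t allocated_size = REGION_BYTES;  /* what the allocator really reserved */

    /* First poison everything that was actually allocated... */
    poison_range(0, allocated_size, HEAP_LEFT_REDZONE);
    /* ...then make only the user-visible bytes addressable again. */
    unpoison_range(redzone, requested_size);

    printf("redzone byte poisoned: %d, user byte poisoned: %d\n",
           shadow[0] != 0, shadow[redzone] != 0);
    return 0;
}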