| /src/sys/external/bsd/sljit/dist/test_src/ |
| sljitMain.c | 45 sljit_sw (SLJIT_CALL *func)(sljit_sw* a); 54 sljit_sw buf[4]; 66 sljit_emit_enter(compiler, 0, 1, 4, 5, 4, 0, 2 * sizeof(sljit_sw)); 75 printf("Function returned with %ld\n", (long)code.func((sljit_sw*)buf));
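The sljitMain.c hits above (lines 45, 54, 66 and 75) show the usual pattern for this library: a union converts the pointer returned by sljit_generate_code() into a SLJIT_CALL function pointer, sljit_emit_enter() emits the prologue, and the result is invoked like a normal C function. Below is a minimal sketch of that pattern, not the test program itself; it assumes the sljit API version shipped in this tree (single-argument sljit_create_compiler()/sljit_free_code(), word arguments delivered in SLJIT_S0), and the register counts and run_example() name are illustrative.

```c
#include "sljitLir.h"

/* Mirrors the sljitMain.c idiom: convert the pointer returned by
   sljit_generate_code() into a SLJIT_CALL function pointer. */
union executable_code {
	void *code;
	sljit_sw (SLJIT_CALL *func)(sljit_sw *a);
};

/* Build and run a tiny function that returns buf[0]. */
static sljit_sw run_example(sljit_sw *buf)
{
	union executable_code code;
	struct sljit_compiler *compiler = sljit_create_compiler(NULL);
	sljit_sw result;

	/* Prologue: 1 word argument, 1 scratch and 1 saved register,
	   no float registers, no local stack space (counts chosen for
	   this sketch, not copied from sljitMain.c). */
	sljit_emit_enter(compiler, 0, 1, 1, 1, 0, 0, 0);

	/* The first argument arrives in SLJIT_S0; load *a into R0. */
	sljit_emit_op1(compiler, SLJIT_MOV, SLJIT_R0, 0, SLJIT_MEM1(SLJIT_S0), 0);
	sljit_emit_return(compiler, SLJIT_MOV, SLJIT_R0, 0);

	code.code = sljit_generate_code(compiler);
	sljit_free_compiler(compiler);

	result = code.func(buf);
	sljit_free_code(code.code);
	return result;
}
```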
| sljitTest.c | 50 sljit_sw (SLJIT_CALL *func0)(void); 51 sljit_sw (SLJIT_CALL *func1)(sljit_sw a); 52 sljit_sw (SLJIT_CALL *func2)(sljit_sw a, sljit_sw b); 53 sljit_sw (SLJIT_CALL *func3)(sljit_sw a, sljit_sw b, sljit_sw c) [all...] |
| /src/sys/external/bsd/sljit/dist/sljit_src/ |
| sljitLir.h | 277 /* Must be aligned to sljit_sw. */ 291 sljit_sw flags; 366 sljit_sw cache_argw; 370 sljit_sw imm; 372 sljit_sw cache_argw; 378 sljit_sw cache_argw; 384 sljit_sw cache_argw; 389 sljit_sw cache_argw; 448 sizeof(sljit_sw) aligned. Excellent for allocating small blocks during 484 static SLJIT_INLINE sljit_sw sljit_get_executable_offset(struct sljit_compiler *compiler) { return compiler->executable_offset; (…) [all...] |
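The comment at line 448 of sljitLir.h describes the compiler-owned small-block allocator, sljit_alloc_memory(): the returned block is sizeof(sljit_sw) aligned and is released together with the compiler by sljit_free_compiler(). The hedged sketch below shows how such a block might be used during compilation; the jump_note structure and make_note() helper are illustrative, not part of sljit.

```c
#include "sljitLir.h"

/* Illustrative only: a tiny per-jump record kept for the lifetime of
   the compiler.  sljit_alloc_memory() returns a sizeof(sljit_sw)
   aligned block owned by the compiler, so it is freed automatically
   by sljit_free_compiler() and needs no explicit cleanup. */
struct jump_note {
	sljit_sw tag;
	struct sljit_jump *jump;
};

static struct jump_note *make_note(struct sljit_compiler *compiler,
	sljit_sw tag, struct sljit_jump *jump)
{
	struct jump_note *note = (struct jump_note *)
		sljit_alloc_memory(compiler, (sljit_s32)sizeof(struct jump_note));

	if (note != NULL) {	/* NULL on allocation failure */
		note->tag = tag;
		note->jump = jump;
	}
	return note;
}
```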
| sljitNativeX86_32.c | 31 static sljit_s32 emit_do_imm(struct sljit_compiler *compiler, sljit_u8 opcode, sljit_sw imm) 35 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + sizeof(sljit_sw)); 37 INC_SIZE(1 + sizeof(sljit_sw)); 43 static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type, sljit_sw executable_offset) 83 compiler->saveds_offset = 1 * sizeof(sljit_sw); 87 compiler->saveds_offset = 1 * sizeof(sljit_sw); 89 compiler->saveds_offset = ((scratches == 2) ? 2 : 3) * sizeof(sljit_sw); 93 compiler->saveds_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * sizeof(sljit_sw); 98 compiler->locals_offset += (saveds - 3) * sizeof(sljit_sw); 140 *inst++ = sizeof(sljit_sw) * (3 + 2); /* saveds >= 3 as well. * [all...] |
| sljitConfigInternal.h | 39 sljit_sw, sljit_uw : signed and unsigned machine word, enough to store a pointer 60 SLJIT_WORD_SHIFT : the shift required to apply when accessing a sljit_sw/sljit_uw array by index 360 typedef long int sljit_sw; 369 typedef int sljit_sw; 375 typedef __int64 sljit_sw; 378 typedef long int sljit_sw; 544 SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void* ptr); 595 #define SLJIT_LOCALS_OFFSET_BASE (2 * sizeof(sljit_sw)) 602 #define SLJIT_LOCALS_OFFSET_BASE ((6 + 8) * sizeof(sljit_sw)) 605 #define SLJIT_LOCALS_OFFSET_BASE ((3 + 1) * sizeof(sljit_sw)) [all...] |
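Line 60 of sljitConfigInternal.h defines SLJIT_WORD_SHIFT as the shift used when indexing a sljit_sw/sljit_uw array. In emitted code this is typically passed as the scaling argument of a SLJIT_MEM2() operand, as in the hedged sketch below (the register choices and helper name are illustrative).

```c
#include "sljitLir.h"

/* Emit a load of table[i] into SLJIT_R0, assuming the table base is in
   SLJIT_S0 and the element index i is in SLJIT_R1.  SLJIT_WORD_SHIFT
   (2 on 32-bit targets, 3 on 64-bit ones) scales the index to a byte
   offset, just like indexing a sljit_sw array in C. */
static sljit_s32 emit_word_load(struct sljit_compiler *compiler)
{
	return sljit_emit_op1(compiler, SLJIT_MOV,
		SLJIT_R0, 0,
		SLJIT_MEM2(SLJIT_S0, SLJIT_R1), SLJIT_WORD_SHIFT);
}
```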
| sljitLir.c | 247 extra) * sizeof(sljit_sw)) 368 && sizeof(sljit_p) <= sizeof(sljit_sw) 369 && (sizeof(sljit_sw) == 4 || sizeof(sljit_sw) == 8) 955 static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw) 981 static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw) 997 static SLJIT_INLINE CHECK_RETURN_TYPE check_sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw) 1036 sljit_s32 dst, sljit_sw dstw, 1037 sljit_s32 src, sljit_sw srcw) 1115 sljit_s32 dst, sljit_sw dstw [all...] |
| sljitNativeSPARC_common.c | 204 static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset) 206 sljit_sw diff; 244 diff = ((sljit_sw)target_addr - (sljit_sw)(inst - 1) - executable_offset) >> 2; 287 sljit_sw executable_offset; 364 addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2; 365 SLJIT_ASSERT((sljit_sw)addr <= 0x1fffffff && (sljit_sw)addr >= -0x20000000); 370 addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2; 371 SLJIT_ASSERT((sljit_sw)addr <= MAX_DISP && (sljit_sw)addr >= MIN_DISP) [all...] |
| sljitNativeX86_common.c | 78 w = compiler->saveds_offset - ((p) - SLJIT_R2) * (sljit_sw)sizeof(sljit_sw); \ 80 w = compiler->locals_offset + ((p) - SLJIT_S2) * (sljit_sw)sizeof(sljit_sw); \ 301 static SLJIT_INLINE void sljit_unaligned_store_sw(void *addr, sljit_sw value) 418 static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type, sljit_sw executable_offset); 423 static sljit_u8* generate_near_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_s32 type, sljit_sw executable_offset) 433 short_jump = (sljit_sw)(label_addr - (jump->addr + 2)) >= -128 && (sljit_sw)(label_addr - (jump->addr + 2)) <= 127; 436 if ((sljit_sw)(label_addr - (jump->addr + 1)) > HALFWORD_MAX || (sljit_sw)(label_addr - (jump->addr + 1)) < HALF (…) [all...] |
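Line 433 of sljitNativeX86_common.c chooses between the short and the long relative jump encodings by testing whether the displacement fits in a signed byte. The same decision in isolation, as a sketch with plain integer types instead of sljit_sw (the helper name is made up):

```c
#include <stdint.h>

/* x86 relative jumps have a 2-byte form with an 8-bit displacement
   (rel8) and a longer form with a 32-bit displacement (rel32).  The
   displacement is measured from the end of the instruction, which is
   why the code above subtracts jump->addr + 2 for the short form. */
static int fits_rel8(int64_t label_addr, int64_t jump_addr)
{
	int64_t diff = label_addr - (jump_addr + 2);
	return diff >= -128 && diff <= 127;
}
```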
| sljitNativeARM_64.c | 156 static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset) 158 sljit_sw diff; 173 diff = (sljit_sw)target_addr - (sljit_sw)(code_ptr + 4) - executable_offset; 217 sljit_sw executable_offset; 280 SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size); 289 addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2; 290 SLJIT_ASSERT((sljit_sw)addr <= 0x1ffffff && (sljit_sw)addr >= -0x2000000); 297 addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2 [all...] |
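The SLJIT_ASSERT at lines 289-290 of sljitNativeARM_64.c shifts the byte distance right by two and requires a value in [-0x2000000, 0x1ffffff], i.e. a signed 26-bit count of 4-byte instructions: the immediate field of the AArch64 B/BL instructions, giving a reach of roughly ±128 MB. A standalone version of that range check (illustrative helper name):

```c
#include <stdint.h>

/* True if a byte displacement can be encoded in the signed 26-bit,
   word-scaled immediate of an AArch64 B/BL instruction. */
static int fits_b_imm26(int64_t byte_diff)
{
	int64_t words = byte_diff >> 2;	/* instructions are 4 bytes */
	return words >= -0x2000000 && words <= 0x1ffffff;
}
```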
| sljitNativeARM_32.c | 385 static SLJIT_INLINE sljit_s32 emit_imm(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw imm) 393 static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_uw *code_ptr, sljit_uw *code, sljit_sw executable_offset) 395 sljit_sw diff; 405 diff = ((sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2) - executable_offset); 408 diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)(code_ptr + 2)); 430 diff = ((sljit_sw)jump->u.target - (sljit_sw)code_ptr - executable_offset); 433 diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)code_ptr) [all...] |
| sljitNativeMIPS_common.c | 218 static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset) 220 sljit_sw diff; 251 diff = ((sljit_sw)target_addr - (sljit_sw)inst - executable_offset) >> 2; 269 diff = ((sljit_sw)target_addr - (sljit_sw)(inst + 1) - executable_offset) >> 2; 365 sljit_sw executable_offset; 432 SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size); 441 addr = (sljit_sw)(addr - ((sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset) + sizeof(sljit_ins))) >> 2; 442 SLJIT_ASSERT((sljit_sw)addr <= SIMM_MAX && (sljit_sw)addr >= SIMM_MIN) [all...] |
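At lines 441-442 of sljitNativeMIPS_common.c the displacement is measured from the instruction after the branch (buf_ptr + sizeof(sljit_ins)), because MIPS branch offsets are relative to the delay-slot instruction, and the result must fall in the SIMM_MIN..SIMM_MAX range of the 16-bit I-type immediate. A hedged standalone check; the -0x8000..0x7fff bounds are the conventional 16-bit limits and are written out here as an assumption:

```c
#include <stdint.h>

/* True if 'target' is reachable from a MIPS conditional branch at
   'branch_addr'.  The signed 16-bit offset counts instructions and is
   relative to the delay slot (branch_addr + 4). */
static int fits_mips_branch(int64_t target, int64_t branch_addr)
{
	int64_t words = (target - (branch_addr + 4)) >> 2;
	return words >= -0x8000 && words <= 0x7fff;
}
```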
| sljitNativeX86_64.c | 31 static sljit_s32 emit_load_imm64(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw imm) 35 inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + sizeof(sljit_sw)); 37 INC_SIZE(2 + sizeof(sljit_sw)); 62 code_ptr += sizeof(sljit_sw); 84 compiler->locals_offset = 6 * sizeof(sljit_sw); 86 compiler->locals_offset = ((scratches > 2) ? 4 : 2) * sizeof(sljit_sw); 171 *inst++ = 5 * sizeof(sljit_sw); 172 local_size -= 5 * sizeof(sljit_sw); 174 *inst++ = 4 * sizeof(sljit_sw); 175 local_size -= 4 * sizeof(sljit_sw); [all...] |
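emit_load_imm64() above reserves 2 + sizeof(sljit_sw) bytes: a REX.W prefix, the 0xB8+rd opcode and an 8-byte immediate, which is the x86-64 "mov r64, imm64" (movabs) encoding. A simplified standalone sketch of that encoding follows; 'reg' is the hardware register number and the ensure_buf()/INC_SIZE() buffer handling of the real function is omitted.

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Encode "mov r64, imm64": REX.W (0x48), plus REX.B for registers 8-15,
   opcode 0xB8 + (reg & 7), then the 64-bit immediate.  The immediate is
   copied in host byte order, which matches the instruction encoding on
   a little-endian (x86) host.  Always writes 10 bytes. */
static size_t encode_mov_imm64(uint8_t *out, unsigned reg, int64_t imm)
{
	out[0] = (uint8_t)(0x48 | (reg >= 8 ? 0x01 : 0x00));
	out[1] = (uint8_t)(0xB8 | (reg & 0x7));
	memcpy(out + 2, &imm, sizeof(imm));
	return 2 + sizeof(imm);
}
```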
| /src/sys/external/bsd/sljit/dist/regex_src/ |
| regexJIT.c | 58 sljit_sw no_states; 60 sljit_sw size; 64 sljit_sw (SLJIT_CALL *call_init)(void *next, void* match); 79 sljit_sw *current; 80 sljit_sw *next; 82 sljit_sw head; 84 sljit_sw index; 86 sljit_sw best_begin; 87 sljit_sw best_end; 88 sljit_sw best_id [all...] |