Lines Matching refs:inst
33 sljit_u8 *inst;
35 inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + sizeof(sljit_sw));
36 FAIL_IF(!inst);
38 *inst++ = REX_W | ((reg_map[reg] <= 7) ? 0 : REX_B);
39 *inst++ = MOV_r_i32 + (reg_map[reg] & 0x7);
40 sljit_unaligned_store_sw(inst, imm);
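The matches at lines 33-40 above emit a 64-bit immediate load into a general register. As a rough standalone sketch (hypothetical helper, not part of the sljit API), the REX.W + B8+rd encoding of MOV r64, imm64 that these bytes correspond to can be written as:

    #include <stdint.h>
    #include <string.h>

    /* Hypothetical sketch: emit MOV r64, imm64 (REX.W + B8+rd + 8-byte immediate)
       into buf; hwreg is the hardware register number 0..15.
       Returns the number of bytes written. */
    static int encode_mov_r64_imm64(uint8_t *buf, int hwreg, int64_t imm)
    {
        buf[0] = (uint8_t)(0x48 | ((hwreg <= 7) ? 0x00 : 0x01)); /* REX.W; REX.B bit set for r8..r15 */
        buf[1] = (uint8_t)(0xB8 + (hwreg & 0x7));                /* MOV_r_i32 + low 3 register bits */
        memcpy(buf + 2, &imm, sizeof(imm));                      /* unaligned store, as sljit_unaligned_store_sw does */
        return 2 + (int)sizeof(imm);
    }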
75 sljit_u8 *inst;
95 inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
96 FAIL_IF(!inst);
99 *inst++ = REX_B;
105 inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
106 FAIL_IF(!inst);
109 *inst++ = REX_B;
115 inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
116 FAIL_IF(!inst);
122 *inst++ = REX_W;
123 *inst++ = MOV_r_rm;
124 *inst++ = MOD_REG | (reg_map[SLJIT_S0] << 3) | 0x7 /* rdi */;
127 *inst++ = REX_W | REX_R;
128 *inst++ = MOV_r_rm;
129 *inst++ = MOD_REG | (reg_lmap[SLJIT_S1] << 3) | 0x6 /* rsi */;
132 *inst++ = REX_W | REX_R;
133 *inst++ = MOV_r_rm;
134 *inst++ = MOD_REG | (reg_lmap[SLJIT_S2] << 3) | 0x2 /* rdx */;
138 *inst++ = REX_W;
139 *inst++ = MOV_r_rm;
140 *inst++ = MOD_REG | (reg_map[SLJIT_S0] << 3) | 0x1 /* rcx */;
143 *inst++ = REX_W;
144 *inst++ = MOV_r_rm;
145 *inst++ = MOD_REG | (reg_map[SLJIT_S1] << 3) | 0x2 /* rdx */;
148 *inst++ = REX_W | REX_B;
149 *inst++ = MOV_r_rm;
150 *inst++ = MOD_REG | (reg_map[SLJIT_S2] << 3) | 0x0 /* r8 */;
161 inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + (3 + sizeof(sljit_s32)));
162 FAIL_IF(!inst);
164 *inst++ = REX_W;
165 *inst++ = GROUP_BINARY_83;
166 *inst++ = MOD_REG | SUB | reg_map[SLJIT_SP];
171 *inst++ = 5 * sizeof(sljit_sw);
174 *inst++ = 4 * sizeof(sljit_sw);
179 *inst++ = REX_W;
180 *inst++ = MOV_rm_i32;
181 *inst++ = MOD_REG | reg_lmap[SLJIT_R0];
182 sljit_unaligned_store_s32(inst, local_size);
193 inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
194 FAIL_IF(!inst);
196 *inst++ = REX_W;
197 *inst++ = GROUP_BINARY_83;
198 *inst++ = MOD_REG | SUB | reg_map[SLJIT_SP];
199 *inst++ = local_size;
202 inst = (sljit_u8*)ensure_buf(compiler, 1 + 7);
203 FAIL_IF(!inst);
205 *inst++ = REX_W;
206 *inst++ = GROUP_BINARY_81;
207 *inst++ = MOD_REG | SUB | reg_map[SLJIT_SP];
208 sljit_unaligned_store_s32(inst, local_size);
209 inst += sizeof(sljit_s32);
216 inst = (sljit_u8*)ensure_buf(compiler, 1 + 5);
217 FAIL_IF(!inst);
219 *inst++ = GROUP_0F;
220 sljit_unaligned_store_s32(inst, 0x20247429);
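The enter sequence above (lines 193-209) picks between two SUB rsp, imm encodings: opcode 0x83 (GROUP_BINARY_83) with a sign-extended 8-bit immediate when local_size fits in a byte, and 0x81 (GROUP_BINARY_81) with a full 32-bit immediate otherwise. A minimal sketch of that choice, assuming a non-negative local_size and hypothetical helper names:

    #include <stdint.h>
    #include <string.h>

    /* Hypothetical sketch of the stack adjustment above: SUB rsp, imm8 is
       4 bytes, SUB rsp, imm32 is 7 bytes. ModRM 0xEC = MOD_REG | (5 << 3) | 4,
       i.e. the SUB /5 digit with rsp as the r/m operand. */
    static int encode_sub_rsp_imm(uint8_t *buf, int32_t local_size)
    {
        buf[0] = 0x48;                      /* REX_W */
        if (local_size <= 127) {
            buf[1] = 0x83;                  /* GROUP_BINARY_83 */
            buf[2] = 0xC0 | (5 << 3) | 4;   /* MOD_REG | SUB | rsp */
            buf[3] = (uint8_t)local_size;
            return 4;
        }
        buf[1] = 0x81;                      /* GROUP_BINARY_81 */
        buf[2] = 0xC0 | (5 << 3) | 4;       /* MOD_REG | SUB | rsp */
        memcpy(buf + 3, &local_size, sizeof(local_size));
        return 7;
    }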
254 sljit_u8 *inst;
264 inst = (sljit_u8*)ensure_buf(compiler, 1 + 5);
265 FAIL_IF(!inst);
267 *inst++ = GROUP_0F;
268 sljit_unaligned_store_s32(inst, 0x20247428);
274 inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
275 FAIL_IF(!inst);
277 *inst++ = REX_W;
278 *inst++ = GROUP_BINARY_83;
279 *inst++ = MOD_REG | ADD | 4;
280 *inst = compiler->local_size;
283 inst = (sljit_u8*)ensure_buf(compiler, 1 + 7);
284 FAIL_IF(!inst);
286 *inst++ = REX_W;
287 *inst++ = GROUP_BINARY_81;
288 *inst++ = MOD_REG | ADD | 4;
289 sljit_unaligned_store_s32(inst, compiler->local_size);
296 inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
297 FAIL_IF(!inst);
300 *inst++ = REX_B;
307 inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
308 FAIL_IF(!inst);
311 *inst++ = REX_B;
315 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
316 FAIL_IF(!inst);
328 sljit_u8 *inst;
331 inst = (sljit_u8*)ensure_buf(compiler, 1 + length);
332 FAIL_IF(!inst);
335 *inst++ = rex;
336 *inst++ = opcode;
337 sljit_unaligned_store_s32(inst, imm);
347 sljit_u8 *inst;
453 inst = (sljit_u8*)ensure_buf(compiler, 1 + inst_size);
454 PTR_FAIL_IF(!inst);
459 *inst++ = 0xf2;
461 *inst++ = 0xf3;
463 *inst++ = 0x66;
465 *inst++ = rex;
466 buf_ptr = inst + size;
471 *inst = (flags & EX86_BYTE_ARG) ? GROUP_BINARY_83 : GROUP_BINARY_81;
483 *inst = GROUP_SHIFT_1;
485 *inst = GROUP_SHIFT_N;
487 *inst = GROUP_SHIFT_CL;
543 return !(flags & EX86_SHIFT_INS) ? inst : (inst + 1);
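Lines 347-543 appear to belong to the instruction assembler that prepends the optional 0xF2/0xF3/0x66 prefixes and a REX byte before the opcode and ModRM byte. The MOD_REG | (reg << 3) | rm expressions seen throughout the listing build a register-direct ModRM byte; a hypothetical helper showing only that byte layout (not sljit API):

    #include <stdint.h>

    /* Register-direct ModRM: mod = 11b, 3-bit reg field, 3-bit rm field.
       The upper bits of r8..r15 do not fit here; they go into the
       REX.R / REX.B prefix bits instead. */
    static uint8_t encode_modrm_reg(int reg, int rm)
    {
        return (uint8_t)(0xC0 | ((reg & 0x7) << 3) | (rm & 0x7));
    }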
552 sljit_u8 *inst;
557 inst = (sljit_u8*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
558 FAIL_IF(!inst);
561 *inst++ = REX_W;
562 *inst++ = MOV_r_rm;
563 *inst++ = MOD_REG | (0x2 /* rdx */ << 3) | reg_lmap[SLJIT_R2];
565 *inst++ = REX_W;
566 *inst++ = MOV_r_rm;
567 *inst++ = MOD_REG | (0x7 /* rdi */ << 3) | reg_lmap[SLJIT_R0];
571 inst = (sljit_u8*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
572 FAIL_IF(!inst);
575 *inst++ = REX_W | REX_R;
576 *inst++ = MOV_r_rm;
577 *inst++ = MOD_REG | (0x0 /* r8 */ << 3) | reg_lmap[SLJIT_R2];
579 *inst++ = REX_W;
580 *inst++ = MOV_r_rm;
581 *inst++ = MOD_REG | (0x1 /* rcx */ << 3) | reg_lmap[SLJIT_R0];
588 sljit_u8 *inst;
600 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
601 FAIL_IF(!inst);
607 inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
608 FAIL_IF(!inst);
610 *inst++ = REX_B;
617 inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
618 FAIL_IF(!inst);
619 *inst++ = POP_rm;
625 sljit_u8 *inst;
638 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 1);
639 FAIL_IF(!inst);
645 inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + 1);
646 FAIL_IF(!inst);
649 *inst++ = REX_B;
656 inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
657 FAIL_IF(!inst);
658 *inst++ = GROUP_FF;
659 *inst |= PUSH_rm;
661 inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
662 FAIL_IF(!inst);
668 inst = (sljit_u8*)ensure_buf(compiler, 1 + 5 + 1);
669 FAIL_IF(!inst);
672 *inst++ = PUSH_i32;
673 sljit_unaligned_store_s32(inst, srcw);
674 inst += sizeof(sljit_s32);
690 sljit_u8* inst;
701 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
702 FAIL_IF(!inst);
703 *inst = MOV_rm_i32;
709 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, (sljit_sw)(sljit_s32)srcw, dst, dstw);
710 FAIL_IF(!inst);
711 *inst = MOV_rm_i32;
722 inst = emit_x86_instruction(compiler, 1, dst_r, 0, src, srcw);
723 FAIL_IF(!inst);
724 *inst++ = MOVSXD_r_rm;
734 inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
735 FAIL_IF(!inst);
736 *inst = MOV_rm_r;
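The final matches (lines 690-736) cover a sign-extending 32-bit to 64-bit move; the register-to-register case uses MOVSXD_r_rm. A hedged sketch of that encoding (REX.W + 63 /r), with hypothetical names and hardware register numbers 0..7 for brevity:

    #include <stdint.h>

    /* Hypothetical sketch: MOVSXD dst64, src32 for low registers only;
       extended registers would additionally need REX.R / REX.B bits. */
    static int encode_movsxd_r64_r32(uint8_t *buf, int dst, int src)
    {
        buf[0] = 0x48;                                               /* REX_W */
        buf[1] = 0x63;                                               /* MOVSXD_r_rm */
        buf[2] = (uint8_t)(0xC0 | ((dst & 0x7) << 3) | (src & 0x7)); /* MOD_REG | dst << 3 | src */
        return 3;
    }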