sljitNativeARM_T2_32.c revision 1.1
1/*
2 *    Stack-less Just-In-Time compiler
3 *
4 *    Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
5 *
6 * Redistribution and use in source and binary forms, with or without modification, are
7 * permitted provided that the following conditions are met:
8 *
9 *   1. Redistributions of source code must retain the above copyright notice, this list of
10 *      conditions and the following disclaimer.
11 *
12 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
13 *      of conditions and the following disclaimer in the documentation and/or other materials
14 *      provided with the distribution.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
17 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
19 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
22 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
24 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25 */
26
27SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
28{
29	return "ARM-Thumb2" SLJIT_CPUINFO;
30}
31
32/* Length of an instruction word. */
33typedef sljit_ui sljit_ins;
34
35/* Last register + 1. */
36#define TMP_REG1	(SLJIT_NO_REGISTERS + 1)
37#define TMP_REG2	(SLJIT_NO_REGISTERS + 2)
38#define TMP_REG3	(SLJIT_NO_REGISTERS + 3)
39#define TMP_PC		(SLJIT_NO_REGISTERS + 4)
40
41#define TMP_FREG1	(0)
42#define TMP_FREG2	(SLJIT_FLOAT_REG6 + 1)
43
44/* See sljit_emit_enter and sljit_emit_op0 if you want to change them. */
45static SLJIT_CONST sljit_ub reg_map[SLJIT_NO_REGISTERS + 5] = {
46	0, 0, 1, 2, 12, 5, 6, 7, 8, 10, 11, 13, 3, 4, 14, 15
47};
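/* Note: reg_map translates SLJIT register indices to ARM core register numbers.
   Index 0 (SLJIT_UNUSED) is a placeholder; the last four entries map TMP_REG1,
   TMP_REG2, TMP_REG3 and TMP_PC to r3, r4, lr (r14) and pc (r15), and
   SLJIT_LOCALS_REG maps to sp (r13). */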
48
49#define COPY_BITS(src, from, to, bits) \
50	((from >= to ? (src >> (from - to)) : (src << (to - from))) & (((1 << bits) - 1) << to))
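/* Illustration: COPY_BITS(imm, 12, 16, 4) takes bits 15..12 of imm and places
   them at bits 19..16 of the result; every other bit is cleared by the mask. */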
51
52/* Thumb16 encodings. */
53#define RD3(rd) (reg_map[rd])
54#define RN3(rn) (reg_map[rn] << 3)
55#define RM3(rm) (reg_map[rm] << 6)
56#define RDN3(rdn) (reg_map[rdn] << 8)
57#define IMM3(imm) (imm << 6)
58#define IMM8(imm) (imm)
59
60/* Thumb16 helpers. */
61#define SET_REGS44(rd, rn) \
62	((reg_map[rn] << 3) | (reg_map[rd] & 0x7) | ((reg_map[rd] & 0x8) << 4))
63#define IS_2_LO_REGS(reg1, reg2) \
64	(reg_map[reg1] <= 7 && reg_map[reg2] <= 7)
65#define IS_3_LO_REGS(reg1, reg2, reg3) \
66	(reg_map[reg1] <= 7 && reg_map[reg2] <= 7 && reg_map[reg3] <= 7)
67
68/* Thumb32 encodings. */
69#define RD4(rd) (reg_map[rd] << 8)
70#define RN4(rn) (reg_map[rn] << 16)
71#define RM4(rm) (reg_map[rm])
72#define RT4(rt) (reg_map[rt] << 12)
73#define DD4(dd) ((dd) << 12)
74#define DN4(dn) ((dn) << 16)
75#define DM4(dm) (dm)
76#define IMM5(imm) \
77	(COPY_BITS(imm, 2, 12, 3) | ((imm & 0x3) << 6))
78#define IMM12(imm) \
79	(COPY_BITS(imm, 11, 26, 1) | COPY_BITS(imm, 8, 12, 3) | (imm & 0xff))
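/* Note: Thumb-2 splits immediates across the instruction word. IMM5 places a
   5-bit shift amount into imm3 (bits 14..12) and imm2 (bits 7..6); IMM12
   scatters a 12-bit value into i (bit 26), imm3 (bits 14..12) and imm8
   (bits 7..0), matching the COPY_BITS calls above. */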
80
81/* --------------------------------------------------------------------- */
82/*  Instruction forms                                                    */
83/* --------------------------------------------------------------------- */
84
85/* In the instruction macro names below a dot '.' is changed to '_',
86   and the suffix I marks the immediate form (possibly followed by the number of immediate bits). */
87#define ADCI		0xf1400000
88#define ADCS		0x4140
89#define ADC_W		0xeb400000
90#define ADD		0x4400
91#define ADDS		0x1800
92#define ADDSI3		0x1c00
93#define ADDSI8		0x3000
94#define ADD_W		0xeb000000
95#define ADDWI		0xf2000000
96#define ADD_SP		0xb000
97#define ADD_W		0xeb000000
98#define ADD_WI		0xf1000000
99#define ANDI		0xf0000000
100#define ANDS		0x4000
101#define AND_W		0xea000000
102#define ASRS		0x4100
103#define ASRSI		0x1000
104#define ASR_W		0xfa40f000
105#define ASR_WI		0xea4f0020
106#define BICI		0xf0200000
107#define BKPT		0xbe00
108#define BLX		0x4780
109#define BX		0x4700
110#define CLZ		0xfab0f080
111#define CMPI		0x2800
112#define CMP_W		0xebb00f00
113#define EORI		0xf0800000
114#define EORS		0x4040
115#define EOR_W		0xea800000
116#define IT		0xbf00
117#define LSLS		0x4080
118#define LSLSI		0x0000
119#define LSL_W		0xfa00f000
120#define LSL_WI		0xea4f0000
121#define LSRS		0x40c0
122#define LSRSI		0x0800
123#define LSR_W		0xfa20f000
124#define LSR_WI		0xea4f0010
125#define MOV		0x4600
126#define MOVS		0x0000
127#define MOVSI		0x2000
128#define MOVT		0xf2c00000
129#define MOVW		0xf2400000
130#define MOV_W		0xea4f0000
131#define MOV_WI		0xf04f0000
132#define MUL		0xfb00f000
133#define MVNS		0x43c0
134#define MVN_W		0xea6f0000
135#define MVN_WI		0xf06f0000
136#define NOP		0xbf00
137#define ORNI		0xf0600000
138#define ORRI		0xf0400000
139#define ORRS		0x4300
140#define ORR_W		0xea400000
141#define POP		0xbd00
142#define POP_W		0xe8bd0000
143#define PUSH		0xb500
144#define PUSH_W		0xe92d0000
145#define RSB_WI		0xf1c00000
146#define RSBSI		0x4240
147#define SBCI		0xf1600000
148#define SBCS		0x4180
149#define SBC_W		0xeb600000
150#define SMULL		0xfb800000
151#define STR_SP		0x9000
152#define SUBS		0x1a00
153#define SUBSI3		0x1e00
154#define SUBSI8		0x3800
155#define SUB_W		0xeba00000
156#define SUBWI		0xf2a00000
157#define SUB_SP		0xb080
158#define SUB_WI		0xf1a00000
159#define SXTB		0xb240
160#define SXTB_W		0xfa4ff080
161#define SXTH		0xb200
162#define SXTH_W		0xfa0ff080
163#define TST		0x4200
164#define UMULL		0xfba00000
165#define UXTB		0xb2c0
166#define UXTB_W		0xfa5ff080
167#define UXTH		0xb280
168#define UXTH_W		0xfa1ff080
169#define VABS_F32	0xeeb00ac0
170#define VADD_F32	0xee300a00
171#define VCMP_F32	0xeeb40a40
172#define VDIV_F32	0xee800a00
173#define VMOV_F32	0xeeb00a40
174#define VMRS		0xeef1fa10
175#define VMUL_F32	0xee200a00
176#define VNEG_F32	0xeeb10a40
177#define VSTR_F32	0xed000a00
178#define VSUB_F32	0xee300a40
179
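/* The generated code is built as a stream of 16-bit halfwords: push_inst16
   emits one halfword, push_inst32 emits a 32-bit Thumb-2 instruction as two
   halfwords (most significant half first). compiler->size therefore counts
   halfwords, not bytes. */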
180static sljit_si push_inst16(struct sljit_compiler *compiler, sljit_ins inst)
181{
182	sljit_uh *ptr;
183	SLJIT_ASSERT(!(inst & 0xffff0000));
184
185	ptr = (sljit_uh*)ensure_buf(compiler, sizeof(sljit_uh));
186	FAIL_IF(!ptr);
187	*ptr = inst;
188	compiler->size++;
189	return SLJIT_SUCCESS;
190}
191
192static sljit_si push_inst32(struct sljit_compiler *compiler, sljit_ins inst)
193{
194	sljit_uh *ptr = (sljit_uh*)ensure_buf(compiler, sizeof(sljit_ins));
195	FAIL_IF(!ptr);
196	*ptr++ = inst >> 16;
197	*ptr = inst;
198	compiler->size += 2;
199	return SLJIT_SUCCESS;
200}
201
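/* Materializes an arbitrary 32-bit constant with a MOVW/MOVT pair: MOVW loads
   the low 16 bits (clearing the upper half), MOVT then writes the high 16 bits.
   For example (illustrative), 0x12345678 becomes movw dst, #0x5678 followed by
   movt dst, #0x1234. */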
202static SLJIT_INLINE sljit_si emit_imm32_const(struct sljit_compiler *compiler, sljit_si dst, sljit_uw imm)
203{
204	FAIL_IF(push_inst32(compiler, MOVW | RD4(dst) |
205		COPY_BITS(imm, 12, 16, 4) | COPY_BITS(imm, 11, 26, 1) | COPY_BITS(imm, 8, 12, 3) | (imm & 0xff)));
206	return push_inst32(compiler, MOVT | RD4(dst) |
207		COPY_BITS(imm, 12 + 16, 16, 4) | COPY_BITS(imm, 11 + 16, 26, 1) | COPY_BITS(imm, 8 + 16, 12, 3) | ((imm & 0xff0000) >> 16));
208}
209
210static SLJIT_INLINE void modify_imm32_const(sljit_uh *inst, sljit_uw new_imm)
211{
212	sljit_si dst = inst[1] & 0x0f00;
213	SLJIT_ASSERT(((inst[0] & 0xfbf0) == (MOVW >> 16)) && ((inst[2] & 0xfbf0) == (MOVT >> 16)) && dst == (inst[3] & 0x0f00));
214	inst[0] = (MOVW >> 16) | COPY_BITS(new_imm, 12, 0, 4) | COPY_BITS(new_imm, 11, 10, 1);
215	inst[1] = dst | COPY_BITS(new_imm, 8, 12, 3) | (new_imm & 0xff);
216	inst[2] = (MOVT >> 16) | COPY_BITS(new_imm, 12 + 16, 0, 4) | COPY_BITS(new_imm, 11 + 16, 10, 1);
217	inst[3] = dst | COPY_BITS(new_imm, 8 + 16, 12, 3) | ((new_imm & 0xff0000) >> 16);
218}
219
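/* Decides whether the mov32 + branch sequence reserved for this jump can be
   replaced by a shorter direct branch. The return value is the number of
   halfwords saved (0 keeps the full sequence); sljit_generate_code subtracts
   it from code_ptr, and the PATCH_* flag records which branch encoding to use. */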
220static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_uh *code_ptr, sljit_uh *code)
221{
222	sljit_sw diff;
223
224	if (jump->flags & SLJIT_REWRITABLE_JUMP)
225		return 0;
226
227	if (jump->flags & JUMP_ADDR) {
228		/* Branching to ARM code is not optimized yet. */
229		if (!(jump->u.target & 0x1))
230			return 0;
231		diff = ((sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2)) >> 1;
232	}
233	else {
234		SLJIT_ASSERT(jump->flags & JUMP_LABEL);
235		diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)(code_ptr + 2)) >> 1;
236	}
237
238	if (jump->flags & IS_COND) {
239		SLJIT_ASSERT(!(jump->flags & IS_BL));
240		if (diff <= 127 && diff >= -128) {
241			jump->flags |= PATCH_TYPE1;
242			return 5;
243		}
244		if (diff <= 524287 && diff >= -524288) {
245			jump->flags |= PATCH_TYPE2;
246			return 4;
247		}
248		/* The prefix IT instruction adds one halfword, hence the decrement below. */
249		diff--;
250		if (diff <= 8388607 && diff >= -8388608) {
251			jump->flags |= PATCH_TYPE3;
252			return 3;
253		}
254	}
255	else if (jump->flags & IS_BL) {
256		if (diff <= 8388607 && diff >= -8388608) {
257			jump->flags |= PATCH_BL;
258			return 3;
259		}
260	}
261	else {
262		if (diff <= 1023 && diff >= -1024) {
263			jump->flags |= PATCH_TYPE4;
264			return 4;
265		}
266		if (diff <= 8388607 && diff >= -8388608) {
267			jump->flags |= PATCH_TYPE5;
268			return 3;
269		}
270	}
271
272	return 0;
273}
274
275static SLJIT_INLINE void set_jump_instruction(struct sljit_jump *jump)
276{
277	sljit_si type = (jump->flags >> 4) & 0xf;
278	sljit_sw diff;
279	sljit_uh *jump_inst;
280	sljit_si s, j1, j2;
281
282	if (SLJIT_UNLIKELY(type == 0)) {
283		modify_imm32_const((sljit_uh*)jump->addr, (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target);
284		return;
285	}
286
287	if (jump->flags & JUMP_ADDR) {
288		SLJIT_ASSERT(jump->u.target & 0x1);
289		diff = ((sljit_sw)jump->u.target - (sljit_sw)(jump->addr + 4)) >> 1;
290	}
291	else
292		diff = ((sljit_sw)(jump->u.label->addr) - (sljit_sw)(jump->addr + 4)) >> 1;
293	jump_inst = (sljit_uh*)jump->addr;
294
295	switch (type) {
296	case 1:
297		/* Encoding T1 of 'B' instruction */
298		SLJIT_ASSERT(diff <= 127 && diff >= -128 && (jump->flags & IS_COND));
299		jump_inst[0] = 0xd000 | (jump->flags & 0xf00) | (diff & 0xff);
300		return;
301	case 2:
302		/* Encoding T3 of 'B' instruction */
303		SLJIT_ASSERT(diff <= 524287 && diff >= -524288 && (jump->flags & IS_COND));
304		jump_inst[0] = 0xf000 | COPY_BITS(jump->flags, 8, 6, 4) | COPY_BITS(diff, 11, 0, 6) | COPY_BITS(diff, 19, 10, 1);
305		jump_inst[1] = 0x8000 | COPY_BITS(diff, 17, 13, 1) | COPY_BITS(diff, 18, 11, 1) | (diff & 0x7ff);
306		return;
307	case 3:
308		SLJIT_ASSERT(jump->flags & IS_COND);
309		*jump_inst++ = IT | ((jump->flags >> 4) & 0xf0) | 0x8;
310		diff--;
311		type = 5;
312		break;
313	case 4:
314		/* Encoding T2 of 'B' instruction */
315		SLJIT_ASSERT(diff <= 1023 && diff >= -1024 && !(jump->flags & IS_COND));
316		jump_inst[0] = 0xe000 | (diff & 0x7ff);
317		return;
318	}
319
320	SLJIT_ASSERT(diff <= 8388607 && diff >= -8388608);
321
322	/* Really complex instruction form for branches. */
323	s = (diff >> 23) & 0x1;
324	j1 = (~(diff >> 21) ^ s) & 0x1;
325	j2 = (~(diff >> 22) ^ s) & 0x1;
326	jump_inst[0] = 0xf000 | (s << 10) | COPY_BITS(diff, 11, 0, 10);
327	jump_inst[1] = (j1 << 13) | (j2 << 11) | (diff & 0x7ff);
328
329	/* The others have a common form. */
330	if (type == 5) /* Encoding T4 of 'B' instruction */
331		jump_inst[1] |= 0x9000;
332	else if (type == 6) /* Encoding T1 of 'BL' instruction */
333		jump_inst[1] |= 0xd000;
334	else
335		SLJIT_ASSERT_STOP();
336}
337
338SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
339{
340	struct sljit_memory_fragment *buf;
341	sljit_uh *code;
342	sljit_uh *code_ptr;
343	sljit_uh *buf_ptr;
344	sljit_uh *buf_end;
345	sljit_uw half_count;
346
347	struct sljit_label *label;
348	struct sljit_jump *jump;
349	struct sljit_const *const_;
350
351	CHECK_ERROR_PTR();
352	check_sljit_generate_code(compiler);
353	reverse_buf(compiler);
354
355	code = (sljit_uh*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_uh));
356	PTR_FAIL_WITH_EXEC_IF(code);
357	buf = compiler->buf;
358
359	code_ptr = code;
360	half_count = 0;
361	label = compiler->labels;
362	jump = compiler->jumps;
363	const_ = compiler->consts;
364
365	do {
366		buf_ptr = (sljit_uh*)buf->memory;
367		buf_end = buf_ptr + (buf->used_size >> 1);
368		do {
369			*code_ptr = *buf_ptr++;
370			/* These structures are ordered by their address. */
371			SLJIT_ASSERT(!label || label->size >= half_count);
372			SLJIT_ASSERT(!jump || jump->addr >= half_count);
373			SLJIT_ASSERT(!const_ || const_->addr >= half_count);
374			if (label && label->size == half_count) {
375				label->addr = ((sljit_uw)code_ptr) | 0x1;
376				label->size = code_ptr - code;
377				label = label->next;
378			}
379			if (jump && jump->addr == half_count) {
380				jump->addr = (sljit_uw)code_ptr - ((jump->flags & IS_COND) ? 10 : 8);
381				code_ptr -= detect_jump_type(jump, code_ptr, code);
382				jump = jump->next;
383			}
384			if (const_ && const_->addr == half_count) {
385				const_->addr = (sljit_uw)code_ptr;
386				const_ = const_->next;
387			}
388			code_ptr++;
389			half_count++;
390		} while (buf_ptr < buf_end);
391
392		buf = buf->next;
393	} while (buf);
394
395	if (label && label->size == half_count) {
396		label->addr = ((sljit_uw)code_ptr) | 0x1;
397		label->size = code_ptr - code;
398		label = label->next;
399	}
400
401	SLJIT_ASSERT(!label);
402	SLJIT_ASSERT(!jump);
403	SLJIT_ASSERT(!const_);
404	SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
405
406	jump = compiler->jumps;
407	while (jump) {
408		set_jump_instruction(jump);
409		jump = jump->next;
410	}
411
412	compiler->error = SLJIT_ERR_COMPILED;
413	compiler->executable_size = (code_ptr - code) * sizeof(sljit_uh);
414	SLJIT_CACHE_FLUSH(code, code_ptr);
415	/* Set the Thumb mode flag (bit 0) in the returned code address. */
416	return (void*)((sljit_uw)code | 0x1);
417}
418
419/* --------------------------------------------------------------------- */
420/*  Core code generator functions.                                       */
421/* --------------------------------------------------------------------- */
422
423#define INVALID_IMM	0x80000000
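/* get_imm converts a constant to the Thumb-2 "modified immediate" form used by
   the *_WI instructions: either an 8-bit value replicated as 0x000000xy,
   0x00xy00xy, 0xxy00xy00 or 0xxyxyxyxy, or an 8-bit value rotated into place.
   INVALID_IMM is returned when the constant cannot be encoded. */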
424static sljit_uw get_imm(sljit_uw imm)
425{
426	/* Thumb immediate form. */
427	sljit_si counter;
428
429	if (imm <= 0xff)
430		return imm;
431
432	if ((imm & 0xffff) == (imm >> 16)) {
433		/* Some special cases. */
434		if (!(imm & 0xff00))
435			return (1 << 12) | (imm & 0xff);
436		if (!(imm & 0xff))
437			return (2 << 12) | ((imm >> 8) & 0xff);
438		if ((imm & 0xff00) == ((imm & 0xff) << 8))
439			return (3 << 12) | (imm & 0xff);
440	}
441
442	/* Assembly optimization: count leading zeroes? */
443	counter = 8;
444	if (!(imm & 0xffff0000)) {
445		counter += 16;
446		imm <<= 16;
447	}
448	if (!(imm & 0xff000000)) {
449		counter += 8;
450		imm <<= 8;
451	}
452	if (!(imm & 0xf0000000)) {
453		counter += 4;
454		imm <<= 4;
455	}
456	if (!(imm & 0xc0000000)) {
457		counter += 2;
458		imm <<= 2;
459	}
460	if (!(imm & 0x80000000)) {
461		counter += 1;
462		imm <<= 1;
463	}
464	/* Since imm >= 128, this must be true. */
465	SLJIT_ASSERT(counter <= 31);
466
467	if (imm & 0x00ffffff)
468		return INVALID_IMM; /* Cannot be encoded. */
469
470	return ((imm >> 24) & 0x7f) | COPY_BITS(counter, 4, 26, 1) | COPY_BITS(counter, 1, 12, 3) | COPY_BITS(counter, 0, 7, 1);
471}
472
473static sljit_si load_immediate(struct sljit_compiler *compiler, sljit_si dst, sljit_uw imm)
474{
475	sljit_uw tmp;
476
477	if (imm >= 0x10000) {
478		tmp = get_imm(imm);
479		if (tmp != INVALID_IMM)
480			return push_inst32(compiler, MOV_WI | RD4(dst) | tmp);
481		tmp = get_imm(~imm);
482		if (tmp != INVALID_IMM)
483			return push_inst32(compiler, MVN_WI | RD4(dst) | tmp);
484	}
485
486	/* Set the low 16 bits and clear the high 16 bits. */
487	FAIL_IF(push_inst32(compiler, MOVW | RD4(dst) |
488		COPY_BITS(imm, 12, 16, 4) | COPY_BITS(imm, 11, 26, 1) | COPY_BITS(imm, 8, 12, 3) | (imm & 0xff)));
489
490	/* Set the high 16 bits if needed. */
491	if (imm >= 0x10000)
492		return push_inst32(compiler, MOVT | RD4(dst) |
493			COPY_BITS(imm, 12 + 16, 16, 4) | COPY_BITS(imm, 11 + 16, 26, 1) | COPY_BITS(imm, 8 + 16, 12, 3) | ((imm & 0xff0000) >> 16));
494	return SLJIT_SUCCESS;
495}
496
497#define ARG1_IMM	0x0010000
498#define ARG2_IMM	0x0020000
499#define KEEP_FLAGS	0x0040000
500/* SET_FLAGS must be 0x100000 as it is also the value of S bit (can be used for optimization). */
501#define SET_FLAGS	0x0100000
502#define UNUSED_RETURN	0x0200000
503#define SLOW_DEST	0x0400000
504#define SLOW_SRC1	0x0800000
505#define SLOW_SRC2	0x1000000
506
507static sljit_si emit_op_imm(struct sljit_compiler *compiler, sljit_si flags, sljit_si dst, sljit_uw arg1, sljit_uw arg2)
508{
509	/* dst must be register, TMP_REG1
510	   arg1 must be register, TMP_REG1, imm
511	   arg2 must be register, TMP_REG2, imm */
512	sljit_si reg;
513	sljit_uw imm, nimm;
514
515	if (SLJIT_UNLIKELY((flags & (ARG1_IMM | ARG2_IMM)) == (ARG1_IMM | ARG2_IMM))) {
516		/* Both are immediates. */
517		flags &= ~ARG1_IMM;
518		FAIL_IF(load_immediate(compiler, TMP_REG1, arg1));
519		arg1 = TMP_REG1;
520	}
521
522	if (flags & (ARG1_IMM | ARG2_IMM)) {
523		reg = (flags & ARG2_IMM) ? arg1 : arg2;
524		imm = (flags & ARG2_IMM) ? arg2 : arg1;
525
526		switch (flags & 0xffff) {
527		case SLJIT_CLZ:
528		case SLJIT_MUL:
529			/* No form with immediate operand. */
530			break;
531		case SLJIT_MOV:
532			SLJIT_ASSERT(!(flags & SET_FLAGS) && (flags & ARG2_IMM) && arg1 == TMP_REG1);
533			return load_immediate(compiler, dst, imm);
534		case SLJIT_NOT:
535			if (!(flags & SET_FLAGS))
536				return load_immediate(compiler, dst, ~imm);
537			/* Since the flags should be set, we just fall back to register mode.
538			   Although some clever things could be done here, "NOT IMM" is not worth the effort. */
539			break;
540		case SLJIT_ADD:
541			nimm = -imm;
542			if (!(flags & KEEP_FLAGS) && IS_2_LO_REGS(reg, dst)) {
543				if (imm <= 0x7)
544					return push_inst16(compiler, ADDSI3 | IMM3(imm) | RD3(dst) | RN3(reg));
545				if (nimm <= 0x7)
546					return push_inst16(compiler, SUBSI3 | IMM3(nimm) | RD3(dst) | RN3(reg));
547				if (reg == dst) {
548					if (imm <= 0xff)
549						return push_inst16(compiler, ADDSI8 | IMM8(imm) | RDN3(dst));
550					if (nimm <= 0xff)
551						return push_inst16(compiler, SUBSI8 | IMM8(nimm) | RDN3(dst));
552				}
553			}
554			if (!(flags & SET_FLAGS)) {
555				if (imm <= 0xfff)
556					return push_inst32(compiler, ADDWI | RD4(dst) | RN4(reg) | IMM12(imm));
557				if (nimm <= 0xfff)
558					return push_inst32(compiler, SUBWI | RD4(dst) | RN4(reg) | IMM12(nimm));
559			}
560			imm = get_imm(imm);
561			if (imm != INVALID_IMM)
562				return push_inst32(compiler, ADD_WI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
563			break;
564		case SLJIT_ADDC:
565			imm = get_imm(imm);
566			if (imm != INVALID_IMM)
567				return push_inst32(compiler, ADCI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
568			break;
569		case SLJIT_SUB:
570			if (flags & ARG1_IMM) {
571				if (!(flags & KEEP_FLAGS) && imm == 0 && IS_2_LO_REGS(reg, dst))
572					return push_inst16(compiler, RSBSI | RD3(dst) | RN3(reg));
573				imm = get_imm(imm);
574				if (imm != INVALID_IMM)
575					return push_inst32(compiler, RSB_WI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
576				break;
577			}
578			nimm = -imm;
579			if (!(flags & KEEP_FLAGS) && IS_2_LO_REGS(reg, dst)) {
580				if (imm <= 0x7)
581					return push_inst16(compiler, SUBSI3 | IMM3(imm) | RD3(dst) | RN3(reg));
582				if (nimm <= 0x7)
583					return push_inst16(compiler, ADDSI3 | IMM3(nimm) | RD3(dst) | RN3(reg));
584				if (reg == dst) {
585					if (imm <= 0xff)
586						return push_inst16(compiler, SUBSI8 | IMM8(imm) | RDN3(dst));
587					if (nimm <= 0xff)
588						return push_inst16(compiler, ADDSI8 | IMM8(nimm) | RDN3(dst));
589				}
590				if (imm <= 0xff && (flags & UNUSED_RETURN))
591					return push_inst16(compiler, CMPI | IMM8(imm) | RDN3(reg));
592			}
593			if (!(flags & SET_FLAGS)) {
594				if (imm <= 0xfff)
595					return push_inst32(compiler, SUBWI | RD4(dst) | RN4(reg) | IMM12(imm));
596				if (nimm <= 0xfff)
597					return push_inst32(compiler, ADDWI | RD4(dst) | RN4(reg) | IMM12(nimm));
598			}
599			imm = get_imm(imm);
600			if (imm != INVALID_IMM)
601				return push_inst32(compiler, SUB_WI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
602			break;
603		case SLJIT_SUBC:
604			if (flags & ARG1_IMM)
605				break;
606			imm = get_imm(imm);
607			if (imm != INVALID_IMM)
608				return push_inst32(compiler, SBCI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
609			break;
610		case SLJIT_AND:
611			nimm = get_imm(imm);
612			if (nimm != INVALID_IMM)
613				return push_inst32(compiler, ANDI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | nimm);
614			imm = get_imm(imm);
615			if (imm != INVALID_IMM)
616				return push_inst32(compiler, BICI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
617			break;
618		case SLJIT_OR:
619			nimm = get_imm(imm);
620			if (nimm != INVALID_IMM)
621				return push_inst32(compiler, ORRI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | nimm);
622			imm = get_imm(imm);
623			if (imm != INVALID_IMM)
624				return push_inst32(compiler, ORNI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
625			break;
626		case SLJIT_XOR:
627			imm = get_imm(imm);
628			if (imm != INVALID_IMM)
629				return push_inst32(compiler, EORI | (flags & SET_FLAGS) | RD4(dst) | RN4(reg) | imm);
630			break;
631		case SLJIT_SHL:
632		case SLJIT_LSHR:
633		case SLJIT_ASHR:
634			if (flags & ARG1_IMM)
635				break;
636			imm &= 0x1f;
637			if (imm == 0) {
638				if (!(flags & SET_FLAGS))
639					return push_inst16(compiler, MOV | SET_REGS44(dst, reg));
640				if (IS_2_LO_REGS(dst, reg))
641					return push_inst16(compiler, MOVS | RD3(dst) | RN3(reg));
642				return push_inst32(compiler, MOV_W | SET_FLAGS | RD4(dst) | RM4(reg));
643			}
644			switch (flags & 0xffff) {
645			case SLJIT_SHL:
646				if (!(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, reg))
647					return push_inst16(compiler, LSLSI | RD3(dst) | RN3(reg) | (imm << 6));
648				return push_inst32(compiler, LSL_WI | (flags & SET_FLAGS) | RD4(dst) | RM4(reg) | IMM5(imm));
649			case SLJIT_LSHR:
650				if (!(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, reg))
651					return push_inst16(compiler, LSRSI | RD3(dst) | RN3(reg) | (imm << 6));
652				return push_inst32(compiler, LSR_WI | (flags & SET_FLAGS) | RD4(dst) | RM4(reg) | IMM5(imm));
653			default: /* SLJIT_ASHR */
654				if (!(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, reg))
655					return push_inst16(compiler, ASRSI | RD3(dst) | RN3(reg) | (imm << 6));
656				return push_inst32(compiler, ASR_WI | (flags & SET_FLAGS) | RD4(dst) | RM4(reg) | IMM5(imm));
657			}
658		default:
659			SLJIT_ASSERT_STOP();
660			break;
661		}
662
663		if (flags & ARG2_IMM) {
664			FAIL_IF(load_immediate(compiler, TMP_REG2, arg2));
665			arg2 = TMP_REG2;
666		}
667		else {
668			FAIL_IF(load_immediate(compiler, TMP_REG1, arg1));
669			arg1 = TMP_REG1;
670		}
671	}
672
673	/* Both arguments are registers. */
674	switch (flags & 0xffff) {
675	case SLJIT_MOV:
676	case SLJIT_MOV_UI:
677	case SLJIT_MOV_SI:
678	case SLJIT_MOV_P:
679	case SLJIT_MOVU:
680	case SLJIT_MOVU_UI:
681	case SLJIT_MOVU_SI:
682	case SLJIT_MOVU_P:
683		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
684		if (dst == arg2)
685			return SLJIT_SUCCESS;
686		return push_inst16(compiler, MOV | SET_REGS44(dst, arg2));
687	case SLJIT_MOV_UB:
688	case SLJIT_MOVU_UB:
689		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
690		if (IS_2_LO_REGS(dst, arg2))
691			return push_inst16(compiler, UXTB | RD3(dst) | RN3(arg2));
692		return push_inst32(compiler, UXTB_W | RD4(dst) | RM4(arg2));
693	case SLJIT_MOV_SB:
694	case SLJIT_MOVU_SB:
695		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
696		if (IS_2_LO_REGS(dst, arg2))
697			return push_inst16(compiler, SXTB | RD3(dst) | RN3(arg2));
698		return push_inst32(compiler, SXTB_W | RD4(dst) | RM4(arg2));
699	case SLJIT_MOV_UH:
700	case SLJIT_MOVU_UH:
701		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
702		if (IS_2_LO_REGS(dst, arg2))
703			return push_inst16(compiler, UXTH | RD3(dst) | RN3(arg2));
704		return push_inst32(compiler, UXTH_W | RD4(dst) | RM4(arg2));
705	case SLJIT_MOV_SH:
706	case SLJIT_MOVU_SH:
707		SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
708		if (IS_2_LO_REGS(dst, arg2))
709			return push_inst16(compiler, SXTH | RD3(dst) | RN3(arg2));
710		return push_inst32(compiler, SXTH_W | RD4(dst) | RM4(arg2));
711	case SLJIT_NOT:
712		SLJIT_ASSERT(arg1 == TMP_REG1);
713		if (!(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, arg2))
714			return push_inst16(compiler, MVNS | RD3(dst) | RN3(arg2));
715		return push_inst32(compiler, MVN_W | (flags & SET_FLAGS) | RD4(dst) | RM4(arg2));
716	case SLJIT_CLZ:
717		SLJIT_ASSERT(arg1 == TMP_REG1);
718		FAIL_IF(push_inst32(compiler, CLZ | RN4(arg2) | RD4(dst) | RM4(arg2)));
719		if (flags & SET_FLAGS) {
720			if (reg_map[dst] <= 7)
721				return push_inst16(compiler, CMPI | RDN3(dst));
722			return push_inst32(compiler, ADD_WI | SET_FLAGS | RN4(dst) | RD4(dst));
723		}
724		return SLJIT_SUCCESS;
725	case SLJIT_ADD:
726		if (!(flags & KEEP_FLAGS) && IS_3_LO_REGS(dst, arg1, arg2))
727			return push_inst16(compiler, ADDS | RD3(dst) | RN3(arg1) | RM3(arg2));
728		if (dst == arg1 && !(flags & SET_FLAGS))
729			return push_inst16(compiler, ADD | SET_REGS44(dst, arg2));
730		return push_inst32(compiler, ADD_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
731	case SLJIT_ADDC:
732		if (dst == arg1 && !(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, arg2))
733			return push_inst16(compiler, ADCS | RD3(dst) | RN3(arg2));
734		return push_inst32(compiler, ADC_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
735	case SLJIT_SUB:
736		if (!(flags & KEEP_FLAGS) && IS_3_LO_REGS(dst, arg1, arg2))
737			return push_inst16(compiler, SUBS | RD3(dst) | RN3(arg1) | RM3(arg2));
738		return push_inst32(compiler, SUB_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
739	case SLJIT_SUBC:
740		if (dst == arg1 && !(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, arg2))
741			return push_inst16(compiler, SBCS | RD3(dst) | RN3(arg2));
742		return push_inst32(compiler, SBC_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
743	case SLJIT_MUL:
744		if (!(flags & SET_FLAGS))
745			return push_inst32(compiler, MUL | RD4(dst) | RN4(arg1) | RM4(arg2));
746		SLJIT_ASSERT(reg_map[TMP_REG2] <= 7 && dst != TMP_REG2);
747		FAIL_IF(push_inst32(compiler, SMULL | RT4(dst) | RD4(TMP_REG2) | RN4(arg1) | RM4(arg2)));
748		/* cmp TMP_REG2, dst asr #31. */
749		return push_inst32(compiler, CMP_W | RN4(TMP_REG2) | 0x70e0 | RM4(dst));
750	case SLJIT_AND:
751		if (!(flags & KEEP_FLAGS)) {
752			if (dst == arg1 && IS_2_LO_REGS(dst, arg2))
753				return push_inst16(compiler, ANDS | RD3(dst) | RN3(arg2));
754			if ((flags & UNUSED_RETURN) && IS_2_LO_REGS(arg1, arg2))
755				return push_inst16(compiler, TST | RD3(arg1) | RN3(arg2));
756		}
757		return push_inst32(compiler, AND_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
758	case SLJIT_OR:
759		if (dst == arg1 && !(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, arg2))
760			return push_inst16(compiler, ORRS | RD3(dst) | RN3(arg2));
761		return push_inst32(compiler, ORR_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
762	case SLJIT_XOR:
763		if (dst == arg1 && !(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, arg2))
764			return push_inst16(compiler, EORS | RD3(dst) | RN3(arg2));
765		return push_inst32(compiler, EOR_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
766	case SLJIT_SHL:
767		if (dst == arg1 && !(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, arg2))
768			return push_inst16(compiler, LSLS | RD3(dst) | RN3(arg2));
769		return push_inst32(compiler, LSL_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
770	case SLJIT_LSHR:
771		if (dst == arg1 && !(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, arg2))
772			return push_inst16(compiler, LSRS | RD3(dst) | RN3(arg2));
773		return push_inst32(compiler, LSR_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
774	case SLJIT_ASHR:
775		if (dst == arg1 && !(flags & KEEP_FLAGS) && IS_2_LO_REGS(dst, arg2))
776			return push_inst16(compiler, ASRS | RD3(dst) | RN3(arg2));
777		return push_inst32(compiler, ASR_W | (flags & SET_FLAGS) | RD4(dst) | RN4(arg1) | RM4(arg2));
778	}
779
780	SLJIT_ASSERT_STOP();
781	return SLJIT_SUCCESS;
782}
783
784#define STORE		0x01
785#define SIGNED		0x02
786
787#define WORD_SIZE	0x00
788#define BYTE_SIZE	0x04
789#define HALF_SIZE	0x08
790
791#define UPDATE		0x10
792#define ARG_TEST	0x20
793
794#define IS_WORD_SIZE(flags)		(!(flags & (BYTE_SIZE | HALF_SIZE)))
795#define OFFSET_CHECK(imm, shift)	(!(argw & ~(imm << shift)))
796
797/*
798  1st letter:
799  w = word
800  b = byte
801  h = half
802
803  2nd letter:
804  s = signed
805  u = unsigned
806
807  3rd letter:
808  l = load
809  s = store
810*/
811
812static SLJIT_CONST sljit_ins sljit_mem16[12] = {
813/* w u l */ 0x5800 /* ldr */,
814/* w u s */ 0x5000 /* str */,
815/* w s l */ 0x5800 /* ldr */,
816/* w s s */ 0x5000 /* str */,
817
818/* b u l */ 0x5c00 /* ldrb */,
819/* b u s */ 0x5400 /* strb */,
820/* b s l */ 0x5600 /* ldrsb */,
821/* b s s */ 0x5400 /* strb */,
822
823/* h u l */ 0x5a00 /* ldrh */,
824/* h u s */ 0x5200 /* strh */,
825/* h s l */ 0x5e00 /* ldrsh */,
826/* h s s */ 0x5200 /* strh */,
827};
828
829static SLJIT_CONST sljit_ins sljit_mem16_imm5[12] = {
830/* w u l */ 0x6800 /* ldr imm5 */,
831/* w u s */ 0x6000 /* str imm5 */,
832/* w s l */ 0x6800 /* ldr imm5 */,
833/* w s s */ 0x6000 /* str imm5 */,
834
835/* b u l */ 0x7800 /* ldrb imm5 */,
836/* b u s */ 0x7000 /* strb imm5 */,
837/* b s l */ 0x0000 /* not allowed */,
838/* b s s */ 0x7000 /* strb imm5 */,
839
840/* h u l */ 0x8800 /* ldrh imm5 */,
841/* h u s */ 0x8000 /* strh imm5 */,
842/* h s l */ 0x0000 /* not allowed */,
843/* h s s */ 0x8000 /* strh imm5 */,
844};
845
846#define MEM_IMM8	0xc00
847#define MEM_IMM12	0x800000
848static SLJIT_CONST sljit_ins sljit_mem32[12] = {
849/* w u l */ 0xf8500000 /* ldr.w */,
850/* w u s */ 0xf8400000 /* str.w */,
851/* w s l */ 0xf8500000 /* ldr.w */,
852/* w s s */ 0xf8400000 /* str.w */,
853
854/* b u l */ 0xf8100000 /* ldrb.w */,
855/* b u s */ 0xf8000000 /* strb.w */,
856/* b s l */ 0xf9100000 /* ldrsb.w */,
857/* b s s */ 0xf8000000 /* strb.w */,
858
859/* h u l */ 0xf8300000 /* ldrh.w */,
860/* h u s */ 0xf8200000 /* strh.w */,
861/* h s l */ 0xf9300000 /* ldrsh.w */,
862/* h s s */ 0xf8200000 /* strh.w */,
863};
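/* The three tables above are indexed directly by the access flags: STORE is
   bit 0, SIGNED is bit 1, and BYTE_SIZE/HALF_SIZE select the size, giving the
   twelve combinations listed. sljit_mem16_imm5 also doubles as the test for
   whether a 16-bit instruction with an imm5 offset exists (a zero entry means
   no such encoding). */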
864
865/* Helper function. Dst should be reg + value, using at most 1 instruction; the flags are not set. */
866static sljit_si emit_set_delta(struct sljit_compiler *compiler, sljit_si dst, sljit_si reg, sljit_sw value)
867{
868	if (value >= 0) {
869		if (value <= 0xfff)
870			return push_inst32(compiler, ADDWI | RD4(dst) | RN4(reg) | IMM12(value));
871		value = get_imm(value);
872		if (value != INVALID_IMM)
873			return push_inst32(compiler, ADD_WI | RD4(dst) | RN4(reg) | value);
874	}
875	else {
876		value = -value;
877		if (value <= 0xfff)
878			return push_inst32(compiler, SUBWI | RD4(dst) | RN4(reg) | IMM12(value));
879		value = get_imm(value);
880		if (value != INVALID_IMM)
881			return push_inst32(compiler, SUB_WI | RD4(dst) | RN4(reg) | value);
882	}
883	return SLJIT_ERR_UNSUPPORTED;
884}
885
886/* Can perform an operation using at most 1 instruction. */
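/* Returns 0 when the access cannot be encoded in a single instruction. With
   ARG_TEST set it only answers that question (returning 1); otherwise it also
   emits the instruction and returns a non-zero value, and any failure is picked
   up from compiler->error by the caller. */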
887static sljit_si getput_arg_fast(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
888{
889	sljit_si other_r, shift;
890
891	SLJIT_ASSERT(arg & SLJIT_MEM);
892
893	if (SLJIT_UNLIKELY(flags & UPDATE)) {
894		if ((arg & REG_MASK) && !(arg & OFFS_REG_MASK) && argw <= 0xff && argw >= -0xff) {
895			if (SLJIT_UNLIKELY(flags & ARG_TEST))
896				return 1;
897
898			flags &= ~UPDATE;
899			arg &= 0xf;
900			if (argw >= 0)
901				argw |= 0x200;
902			else {
903				argw = -argw;
904			}
905
906			SLJIT_ASSERT(argw >= 0 && (argw & 0xff) <= 0xff);
907			FAIL_IF(push_inst32(compiler, sljit_mem32[flags] | MEM_IMM8 | RT4(reg) | RN4(arg) | 0x100 | argw));
908			return -1;
909		}
910		return 0;
911	}
912
913	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
914		if (SLJIT_UNLIKELY(flags & ARG_TEST))
915			return 1;
916
917		argw &= 0x3;
918		other_r = OFFS_REG(arg);
919		arg &= 0xf;
920
921		if (!argw && IS_3_LO_REGS(reg, arg, other_r))
922			FAIL_IF(push_inst16(compiler, sljit_mem16[flags] | RD3(reg) | RN3(arg) | RM3(other_r)));
923		else
924			FAIL_IF(push_inst32(compiler, sljit_mem32[flags] | RT4(reg) | RN4(arg) | RM4(other_r) | (argw << 4)));
925		return -1;
926	}
927
928	if (!(arg & REG_MASK) || argw > 0xfff || argw < -0xff)
929		return 0;
930
931	if (SLJIT_UNLIKELY(flags & ARG_TEST))
932		return 1;
933
934	arg &= 0xf;
935	if (IS_2_LO_REGS(reg, arg) && sljit_mem16_imm5[flags]) {
936		shift = 3;
937		if (IS_WORD_SIZE(flags)) {
938			if (OFFSET_CHECK(0x1f, 2))
939				shift = 2;
940		}
941		else if (flags & BYTE_SIZE)
942		{
943			if (OFFSET_CHECK(0x1f, 0))
944				shift = 0;
945		}
946		else {
947			SLJIT_ASSERT(flags & HALF_SIZE);
948			if (OFFSET_CHECK(0x1f, 1))
949				shift = 1;
950		}
951
952		if (shift != 3) {
953			FAIL_IF(push_inst16(compiler, sljit_mem16_imm5[flags] | RD3(reg) | RN3(arg) | (argw << (6 - shift))));
954			return -1;
955		}
956	}
957
958	/* SP based immediate. */
959	if (SLJIT_UNLIKELY(arg == SLJIT_LOCALS_REG) && OFFSET_CHECK(0xff, 2) && IS_WORD_SIZE(flags) && reg_map[reg] <= 7) {
960		FAIL_IF(push_inst16(compiler, STR_SP | ((flags & STORE) ? 0 : 0x800) | RDN3(reg) | (argw >> 2)));
961		return -1;
962	}
963
964	if (argw >= 0)
965		FAIL_IF(push_inst32(compiler, sljit_mem32[flags] | MEM_IMM12 | RT4(reg) | RN4(arg) | argw));
966	else
967		FAIL_IF(push_inst32(compiler, sljit_mem32[flags] | MEM_IMM8 | RT4(reg) | RN4(arg) | -argw));
968	return -1;
969}
970
971/* See getput_arg below.
972   Note: can_cache is called only for binary operators. Those
973   operators always use word arguments without write back. */
974static sljit_si can_cache(sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
975{
976	sljit_sw diff;
977	if ((arg & OFFS_REG_MASK) || !(next_arg & SLJIT_MEM))
978		return 0;
979
980	if (!(arg & REG_MASK)) {
981		diff = argw - next_argw;
982		if (diff <= 0xfff && diff >= -0xfff)
983			return 1;
984		return 0;
985	}
986
987	if (argw == next_argw)
988		return 1;
989
990	diff = argw - next_argw;
991	if (arg == next_arg && diff <= 0xfff && diff >= -0xfff)
992		return 1;
993
994	return 0;
995}
996
997/* Emit the necessary instructions. See can_cache above. */
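/* TMP_REG3 acts as the cache register: compiler->cache_arg / cache_argw
   describe the base (or absolute address) it currently holds, so consecutive
   memory accesses with nearby offsets can reuse it with a short displacement
   instead of reloading the full address. */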
998static sljit_si getput_arg(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg,
999	sljit_si arg, sljit_sw argw, sljit_si next_arg, sljit_sw next_argw)
1000{
1001	sljit_si tmp_r, other_r;
1002	sljit_sw diff;
1003
1004	SLJIT_ASSERT(arg & SLJIT_MEM);
1005	if (!(next_arg & SLJIT_MEM)) {
1006		next_arg = 0;
1007		next_argw = 0;
1008	}
1009
1010	tmp_r = (flags & STORE) ? TMP_REG3 : reg;
1011
1012	if (SLJIT_UNLIKELY((flags & UPDATE) && (arg & REG_MASK))) {
1013		/* Update only applies if a base register exists. */
1014		/* There is no caching here. */
1015		other_r = OFFS_REG(arg);
1016		arg &= 0xf;
1017		flags &= ~UPDATE;
1018
1019		if (!other_r) {
1020			if (!(argw & ~0xfff)) {
1021				FAIL_IF(push_inst32(compiler, sljit_mem32[flags] | MEM_IMM12 | RT4(reg) | RN4(arg) | argw));
1022				return push_inst32(compiler, ADDWI | RD4(arg) | RN4(arg) | IMM12(argw));
1023			}
1024
1025			if (compiler->cache_arg == SLJIT_MEM) {
1026				if (argw == compiler->cache_argw) {
1027					other_r = TMP_REG3;
1028					argw = 0;
1029				}
1030				else if (emit_set_delta(compiler, TMP_REG3, TMP_REG3, argw - compiler->cache_argw) != SLJIT_ERR_UNSUPPORTED) {
1031					FAIL_IF(compiler->error);
1032					compiler->cache_argw = argw;
1033					other_r = TMP_REG3;
1034					argw = 0;
1035				}
1036			}
1037
1038			if (argw) {
1039				FAIL_IF(load_immediate(compiler, TMP_REG3, argw));
1040				compiler->cache_arg = SLJIT_MEM;
1041				compiler->cache_argw = argw;
1042				other_r = TMP_REG3;
1043				argw = 0;
1044			}
1045		}
1046
1047		argw &= 0x3;
1048		if (!argw && IS_3_LO_REGS(reg, arg, other_r)) {
1049			FAIL_IF(push_inst16(compiler, sljit_mem16[flags] | RD3(reg) | RN3(arg) | RM3(other_r)));
1050			return push_inst16(compiler, ADD | SET_REGS44(arg, other_r));
1051		}
1052		FAIL_IF(push_inst32(compiler, sljit_mem32[flags] | RT4(reg) | RN4(arg) | RM4(other_r) | (argw << 4)));
1053		return push_inst32(compiler, ADD_W | RD4(arg) | RN4(arg) | RM4(other_r) | (argw << 6));
1054	}
1055	flags &= ~UPDATE;
1056
1057	SLJIT_ASSERT(!(arg & OFFS_REG_MASK));
1058
1059	if (compiler->cache_arg == arg) {
1060		diff = argw - compiler->cache_argw;
1061		if (!(diff & ~0xfff))
1062			return push_inst32(compiler, sljit_mem32[flags] | MEM_IMM12 | RT4(reg) | RN4(TMP_REG3) | diff);
1063		if (!((compiler->cache_argw - argw) & ~0xff))
1064			return push_inst32(compiler, sljit_mem32[flags] | MEM_IMM8 | RT4(reg) | RN4(TMP_REG3) | (compiler->cache_argw - argw));
1065		if (emit_set_delta(compiler, TMP_REG3, TMP_REG3, diff) != SLJIT_ERR_UNSUPPORTED) {
1066			FAIL_IF(compiler->error);
1067			return push_inst32(compiler, sljit_mem32[flags] | MEM_IMM12 | RT4(reg) | RN4(TMP_REG3) | 0);
1068		}
1069	}
1070
1071	next_arg = (arg & REG_MASK) && (arg == next_arg) && (argw != next_argw);
1072	arg &= 0xf;
1073	if (arg && compiler->cache_arg == SLJIT_MEM) {
1074		if (compiler->cache_argw == argw)
1075			return push_inst32(compiler, sljit_mem32[flags] | RT4(reg) | RN4(arg) | RM4(TMP_REG3));
1076		if (emit_set_delta(compiler, TMP_REG3, TMP_REG3, argw - compiler->cache_argw) != SLJIT_ERR_UNSUPPORTED) {
1077			FAIL_IF(compiler->error);
1078			compiler->cache_argw = argw;
1079			return push_inst32(compiler, sljit_mem32[flags] | RT4(reg) | RN4(arg) | RM4(TMP_REG3));
1080		}
1081	}
1082
1083	compiler->cache_argw = argw;
1084	if (next_arg && emit_set_delta(compiler, TMP_REG3, arg, argw) != SLJIT_ERR_UNSUPPORTED) {
1085		FAIL_IF(compiler->error);
1086		compiler->cache_arg = SLJIT_MEM | arg;
1087		arg = 0;
1088	}
1089	else {
1090		FAIL_IF(load_immediate(compiler, TMP_REG3, argw));
1091		compiler->cache_arg = SLJIT_MEM;
1092
1093		diff = argw - next_argw;
1094		if (next_arg && diff <= 0xfff && diff >= -0xfff) {
1095			FAIL_IF(push_inst16(compiler, ADD | SET_REGS44(TMP_REG3, arg)));
1096			compiler->cache_arg = SLJIT_MEM | arg;
1097			arg = 0;
1098		}
1099	}
1100
1101	if (arg)
1102		return push_inst32(compiler, sljit_mem32[flags] | RT4(reg) | RN4(arg) | RM4(TMP_REG3));
1103	return push_inst32(compiler, sljit_mem32[flags] | MEM_IMM12 | RT4(reg) | RN4(TMP_REG3) | 0);
1104}
1105
1106static SLJIT_INLINE sljit_si emit_op_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
1107{
1108	if (getput_arg_fast(compiler, flags, reg, arg, argw))
1109		return compiler->error;
1110	compiler->cache_arg = 0;
1111	compiler->cache_argw = 0;
1112	return getput_arg(compiler, flags, reg, arg, argw, 0, 0);
1113}
1114
1115static SLJIT_INLINE sljit_si emit_op_mem2(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg1, sljit_sw arg1w, sljit_si arg2, sljit_sw arg2w)
1116{
1117	if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
1118		return compiler->error;
1119	return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
1120}
1121
1122/* --------------------------------------------------------------------- */
1123/*  Entry, exit                                                          */
1124/* --------------------------------------------------------------------- */
1125
1126SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler, sljit_si args, sljit_si scratches, sljit_si saveds, sljit_si local_size)
1127{
1128	sljit_si size;
1129	sljit_ins push;
1130
1131	CHECK_ERROR();
1132	check_sljit_emit_enter(compiler, args, scratches, saveds, local_size);
1133
1134	compiler->scratches = scratches;
1135	compiler->saveds = saveds;
1136#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
1137	compiler->logical_local_size = local_size;
1138#endif
1139
1140	push = (1 << 4);
1141	if (saveds >= 5)
1142		push |= 1 << 11;
1143	if (saveds >= 4)
1144		push |= 1 << 10;
1145	if (saveds >= 3)
1146		push |= 1 << 8;
1147	if (saveds >= 2)
1148		push |= 1 << 7;
1149	if (saveds >= 1)
1150		push |= 1 << 6;
1151	if (scratches >= 5)
1152		push |= 1 << 5;
1153	FAIL_IF(saveds >= 3
1154		? push_inst32(compiler, PUSH_W | (1 << 14) | push)
1155		: push_inst16(compiler, PUSH | push));
1156
1157	/* Stack must be aligned to 8 bytes: */
1158	size = (3 + saveds) * sizeof(sljit_uw);
1159	local_size += size;
1160	local_size = (local_size + 7) & ~7;
1161	local_size -= size;
1162	compiler->local_size = local_size;
1163	if (local_size > 0) {
1164		if (local_size <= (127 << 2))
1165			FAIL_IF(push_inst16(compiler, SUB_SP | (local_size >> 2)));
1166		else
1167			FAIL_IF(emit_op_imm(compiler, SLJIT_SUB | ARG2_IMM, SLJIT_LOCALS_REG, SLJIT_LOCALS_REG, local_size));
1168	}
1169
1170	if (args >= 1)
1171		FAIL_IF(push_inst16(compiler, MOV | SET_REGS44(SLJIT_SAVED_REG1, SLJIT_SCRATCH_REG1)));
1172	if (args >= 2)
1173		FAIL_IF(push_inst16(compiler, MOV | SET_REGS44(SLJIT_SAVED_REG2, SLJIT_SCRATCH_REG2)));
1174	if (args >= 3)
1175		FAIL_IF(push_inst16(compiler, MOV | SET_REGS44(SLJIT_SAVED_REG3, SLJIT_SCRATCH_REG3)));
1176
1177	return SLJIT_SUCCESS;
1178}
1179
1180SLJIT_API_FUNC_ATTRIBUTE void sljit_set_context(struct sljit_compiler *compiler, sljit_si args, sljit_si scratches, sljit_si saveds, sljit_si local_size)
1181{
1182	sljit_si size;
1183
1184	CHECK_ERROR_VOID();
1185	check_sljit_set_context(compiler, args, scratches, saveds, local_size);
1186
1187	compiler->scratches = scratches;
1188	compiler->saveds = saveds;
1189#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
1190	compiler->logical_local_size = local_size;
1191#endif
1192
1193	size = (3 + saveds) * sizeof(sljit_uw);
1194	local_size += size;
1195	local_size = (local_size + 7) & ~7;
1196	local_size -= size;
1197	compiler->local_size = local_size;
1198}
1199
1200SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
1201{
1202	sljit_ins pop;
1203
1204	CHECK_ERROR();
1205	check_sljit_emit_return(compiler, op, src, srcw);
1206
1207	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));
1208
1209	if (compiler->local_size > 0) {
1210		if (compiler->local_size <= (127 << 2))
1211			FAIL_IF(push_inst16(compiler, ADD_SP | (compiler->local_size >> 2)));
1212		else
1213			FAIL_IF(emit_op_imm(compiler, SLJIT_ADD | ARG2_IMM, SLJIT_LOCALS_REG, SLJIT_LOCALS_REG, compiler->local_size));
1214	}
1215
1216	pop = (1 << 4);
1217	if (compiler->saveds >= 5)
1218		pop |= 1 << 11;
1219	if (compiler->saveds >= 4)
1220		pop |= 1 << 10;
1221	if (compiler->saveds >= 3)
1222		pop |= 1 << 8;
1223	if (compiler->saveds >= 2)
1224		pop |= 1 << 7;
1225	if (compiler->saveds >= 1)
1226		pop |= 1 << 6;
1227	if (compiler->scratches >= 5)
1228		pop |= 1 << 5;
1229	return compiler->saveds >= 3
1230		? push_inst32(compiler, POP_W | (1 << 15) | pop)
1231		: push_inst16(compiler, POP | pop);
1232}
1233
1234/* --------------------------------------------------------------------- */
1235/*  Operators                                                            */
1236/* --------------------------------------------------------------------- */
1237
1238#ifdef __cplusplus
1239extern "C" {
1240#endif
1241
1242#if defined(__GNUC__)
1243extern unsigned int __aeabi_uidivmod(unsigned int numerator, unsigned int denominator);
1244extern int __aeabi_idivmod(int numerator, int denominator);
1245#else
1246#error "Software divmod functions are needed"
1247#endif
1248
1249#ifdef __cplusplus
1250}
1251#endif
1252
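/* Note: the __aeabi_* helpers return the quotient in r0 and the remainder in
   r1 and, as ordinary AAPCS calls, may clobber r2, r3 and ip; this is why
   sljit_emit_op0 below saves r2 and ip around the call when they hold live
   scratch registers. */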
1253SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
1254{
1255	CHECK_ERROR();
1256	check_sljit_emit_op0(compiler, op);
1257
1258	op = GET_OPCODE(op);
1259	switch (op) {
1260	case SLJIT_BREAKPOINT:
1261		return push_inst16(compiler, BKPT);
1262	case SLJIT_NOP:
1263		return push_inst16(compiler, NOP);
1264	case SLJIT_UMUL:
1265	case SLJIT_SMUL:
1266		return push_inst32(compiler, (op == SLJIT_UMUL ? UMULL : SMULL)
1267			| (reg_map[SLJIT_SCRATCH_REG2] << 8)
1268			| (reg_map[SLJIT_SCRATCH_REG1] << 12)
1269			| (reg_map[SLJIT_SCRATCH_REG1] << 16)
1270			| reg_map[SLJIT_SCRATCH_REG2]);
1271	case SLJIT_UDIV:
1272	case SLJIT_SDIV:
1273		if (compiler->scratches >= 4) {
1274			FAIL_IF(push_inst32(compiler, 0xf84d2d04 /* str r2, [sp, #-4]! */));
1275			FAIL_IF(push_inst32(compiler, 0xf84dcd04 /* str ip, [sp, #-4]! */));
1276		} else if (compiler->scratches >= 3)
1277			FAIL_IF(push_inst32(compiler, 0xf84d2d08 /* str r2, [sp, #-8]! */));
1278#if defined(__GNUC__)
1279		FAIL_IF(sljit_emit_ijump(compiler, SLJIT_FAST_CALL, SLJIT_IMM,
1280			(op == SLJIT_UDIV ? SLJIT_FUNC_OFFSET(__aeabi_uidivmod) : SLJIT_FUNC_OFFSET(__aeabi_idivmod))));
1281#else
1282#error "Software divmod functions are needed"
1283#endif
1284		if (compiler->scratches >= 4) {
1285			FAIL_IF(push_inst32(compiler, 0xf85dcb04 /* ldr ip, [sp], #4 */));
1286			return push_inst32(compiler, 0xf85d2b04 /* ldr r2, [sp], #4 */);
1287		} else if (compiler->scratches >= 3)
1288			return push_inst32(compiler, 0xf85d2b08 /* ldr r2, [sp], #8 */);
1289		return SLJIT_SUCCESS;
1290	}
1291
1292	return SLJIT_SUCCESS;
1293}
1294
1295SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op,
1296	sljit_si dst, sljit_sw dstw,
1297	sljit_si src, sljit_sw srcw)
1298{
1299	sljit_si dst_r, flags;
1300	sljit_si op_flags = GET_ALL_FLAGS(op);
1301
1302	CHECK_ERROR();
1303	check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw);
1304	ADJUST_LOCAL_OFFSET(dst, dstw);
1305	ADJUST_LOCAL_OFFSET(src, srcw);
1306
1307	compiler->cache_arg = 0;
1308	compiler->cache_argw = 0;
1309
1310	dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
1311
1312	op = GET_OPCODE(op);
1313	if (op >= SLJIT_MOV && op <= SLJIT_MOVU_P) {
1314		switch (op) {
1315		case SLJIT_MOV:
1316		case SLJIT_MOV_UI:
1317		case SLJIT_MOV_SI:
1318		case SLJIT_MOV_P:
1319			flags = WORD_SIZE;
1320			break;
1321		case SLJIT_MOV_UB:
1322			flags = BYTE_SIZE;
1323			if (src & SLJIT_IMM)
1324				srcw = (sljit_ub)srcw;
1325			break;
1326		case SLJIT_MOV_SB:
1327			flags = BYTE_SIZE | SIGNED;
1328			if (src & SLJIT_IMM)
1329				srcw = (sljit_sb)srcw;
1330			break;
1331		case SLJIT_MOV_UH:
1332			flags = HALF_SIZE;
1333			if (src & SLJIT_IMM)
1334				srcw = (sljit_uh)srcw;
1335			break;
1336		case SLJIT_MOV_SH:
1337			flags = HALF_SIZE | SIGNED;
1338			if (src & SLJIT_IMM)
1339				srcw = (sljit_sh)srcw;
1340			break;
1341		case SLJIT_MOVU:
1342		case SLJIT_MOVU_UI:
1343		case SLJIT_MOVU_SI:
1344		case SLJIT_MOVU_P:
1345			flags = WORD_SIZE | UPDATE;
1346			break;
1347		case SLJIT_MOVU_UB:
1348			flags = BYTE_SIZE | UPDATE;
1349			if (src & SLJIT_IMM)
1350				srcw = (sljit_ub)srcw;
1351			break;
1352		case SLJIT_MOVU_SB:
1353			flags = BYTE_SIZE | SIGNED | UPDATE;
1354			if (src & SLJIT_IMM)
1355				srcw = (sljit_sb)srcw;
1356			break;
1357		case SLJIT_MOVU_UH:
1358			flags = HALF_SIZE | UPDATE;
1359			if (src & SLJIT_IMM)
1360				srcw = (sljit_uh)srcw;
1361			break;
1362		case SLJIT_MOVU_SH:
1363			flags = HALF_SIZE | SIGNED | UPDATE;
1364			if (src & SLJIT_IMM)
1365				srcw = (sljit_sh)srcw;
1366			break;
1367		default:
1368			SLJIT_ASSERT_STOP();
1369			flags = 0;
1370			break;
1371		}
1372
1373		if (src & SLJIT_IMM)
1374			FAIL_IF(emit_op_imm(compiler, SLJIT_MOV | ARG2_IMM, dst_r, TMP_REG1, srcw));
1375		else if (src & SLJIT_MEM) {
1376			if (getput_arg_fast(compiler, flags, dst_r, src, srcw))
1377				FAIL_IF(compiler->error);
1378			else
1379				FAIL_IF(getput_arg(compiler, flags, dst_r, src, srcw, dst, dstw));
1380		} else {
1381			if (dst_r != TMP_REG1)
1382				return emit_op_imm(compiler, op, dst_r, TMP_REG1, src);
1383			dst_r = src;
1384		}
1385
1386		if (dst & SLJIT_MEM) {
1387			if (getput_arg_fast(compiler, flags | STORE, dst_r, dst, dstw))
1388				return compiler->error;
1389			else
1390				return getput_arg(compiler, flags | STORE, dst_r, dst, dstw, 0, 0);
1391		}
1392		return SLJIT_SUCCESS;
1393	}
1394
1395	if (op == SLJIT_NEG) {
1396#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) || (defined SLJIT_DEBUG && SLJIT_DEBUG)
1397		compiler->skip_checks = 1;
1398#endif
1399		return sljit_emit_op2(compiler, SLJIT_SUB | op_flags, dst, dstw, SLJIT_IMM, 0, src, srcw);
1400	}
1401
1402	flags = (GET_FLAGS(op_flags) ? SET_FLAGS : 0) | ((op_flags & SLJIT_KEEP_FLAGS) ? KEEP_FLAGS : 0);
1403	if (src & SLJIT_MEM) {
1404		if (getput_arg_fast(compiler, WORD_SIZE, TMP_REG2, src, srcw))
1405			FAIL_IF(compiler->error);
1406		else
1407			FAIL_IF(getput_arg(compiler, WORD_SIZE, TMP_REG2, src, srcw, dst, dstw));
1408		src = TMP_REG2;
1409	}
1410
1411	if (src & SLJIT_IMM)
1412		flags |= ARG2_IMM;
1413	else
1414		srcw = src;
1415
1416	emit_op_imm(compiler, flags | op, dst_r, TMP_REG1, srcw);
1417
1418	if (dst & SLJIT_MEM) {
1419		if (getput_arg_fast(compiler, flags | STORE, dst_r, dst, dstw))
1420			return compiler->error;
1421		else
1422			return getput_arg(compiler, flags | STORE, dst_r, dst, dstw, 0, 0);
1423	}
1424	return SLJIT_SUCCESS;
1425}
1426
1427SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op,
1428	sljit_si dst, sljit_sw dstw,
1429	sljit_si src1, sljit_sw src1w,
1430	sljit_si src2, sljit_sw src2w)
1431{
1432	sljit_si dst_r, flags;
1433
1434	CHECK_ERROR();
1435	check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w);
1436	ADJUST_LOCAL_OFFSET(dst, dstw);
1437	ADJUST_LOCAL_OFFSET(src1, src1w);
1438	ADJUST_LOCAL_OFFSET(src2, src2w);
1439
1440	compiler->cache_arg = 0;
1441	compiler->cache_argw = 0;
1442
1443	dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
1444	flags = (GET_FLAGS(op) ? SET_FLAGS : 0) | ((op & SLJIT_KEEP_FLAGS) ? KEEP_FLAGS : 0);
1445
1446	if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, WORD_SIZE | STORE | ARG_TEST, TMP_REG1, dst, dstw))
1447		flags |= SLOW_DEST;
1448
1449	if (src1 & SLJIT_MEM) {
1450		if (getput_arg_fast(compiler, WORD_SIZE, TMP_REG1, src1, src1w))
1451			FAIL_IF(compiler->error);
1452		else
1453			flags |= SLOW_SRC1;
1454	}
1455	if (src2 & SLJIT_MEM) {
1456		if (getput_arg_fast(compiler, WORD_SIZE, TMP_REG2, src2, src2w))
1457			FAIL_IF(compiler->error);
1458		else
1459			flags |= SLOW_SRC2;
1460	}
1461
1462	if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
1463		if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
1464			FAIL_IF(getput_arg(compiler, WORD_SIZE, TMP_REG2, src2, src2w, src1, src1w));
1465			FAIL_IF(getput_arg(compiler, WORD_SIZE, TMP_REG1, src1, src1w, dst, dstw));
1466		}
1467		else {
1468			FAIL_IF(getput_arg(compiler, WORD_SIZE, TMP_REG1, src1, src1w, src2, src2w));
1469			FAIL_IF(getput_arg(compiler, WORD_SIZE, TMP_REG2, src2, src2w, dst, dstw));
1470		}
1471	}
1472	else if (flags & SLOW_SRC1)
1473		FAIL_IF(getput_arg(compiler, WORD_SIZE, TMP_REG1, src1, src1w, dst, dstw));
1474	else if (flags & SLOW_SRC2)
1475		FAIL_IF(getput_arg(compiler, WORD_SIZE, TMP_REG2, src2, src2w, dst, dstw));
1476
1477	if (src1 & SLJIT_MEM)
1478		src1 = TMP_REG1;
1479	if (src2 & SLJIT_MEM)
1480		src2 = TMP_REG2;
1481
1482	if (src1 & SLJIT_IMM)
1483		flags |= ARG1_IMM;
1484	else
1485		src1w = src1;
1486	if (src2 & SLJIT_IMM)
1487		flags |= ARG2_IMM;
1488	else
1489		src2w = src2;
1490
1491	if (dst == SLJIT_UNUSED)
1492		flags |= UNUSED_RETURN;
1493
1494	emit_op_imm(compiler, flags | GET_OPCODE(op), dst_r, src1w, src2w);
1495
1496	if (dst & SLJIT_MEM) {
1497		if (!(flags & SLOW_DEST)) {
1498			getput_arg_fast(compiler, WORD_SIZE | STORE, dst_r, dst, dstw);
1499			return compiler->error;
1500		}
1501		return getput_arg(compiler, WORD_SIZE | STORE, TMP_REG1, dst, dstw, 0, 0);
1502	}
1503	return SLJIT_SUCCESS;
1504}
1505
1506SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg)
1507{
1508	check_sljit_get_register_index(reg);
1509	return reg_map[reg];
1510}
1511
1512SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg)
1513{
1514	check_sljit_get_float_register_index(reg);
1515	return reg;
1516}
1517
1518SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler,
1519	void *instruction, sljit_si size)
1520{
1521	CHECK_ERROR();
1522	check_sljit_emit_op_custom(compiler, instruction, size);
1523	SLJIT_ASSERT(size == 2 || size == 4);
1524
1525	if (size == 2)
1526		return push_inst16(compiler, *(sljit_uh*)instruction);
1527	return push_inst32(compiler, *(sljit_ins*)instruction);
1528}
1529
1530/* --------------------------------------------------------------------- */
1531/*  Floating point operators                                             */
1532/* --------------------------------------------------------------------- */
1533
1534SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void)
1535{
1536#ifdef SLJIT_IS_FPU_AVAILABLE
1537	return SLJIT_IS_FPU_AVAILABLE;
1538#else
1539	/* Available by default. */
1540	return 1;
1541#endif
1542}
1543
1544#define FPU_LOAD (1 << 20)
1545
1546static sljit_si emit_fop_mem(struct sljit_compiler *compiler, sljit_si flags, sljit_si reg, sljit_si arg, sljit_sw argw)
1547{
1548	sljit_sw tmp;
1549	sljit_uw imm;
1550	sljit_sw inst = VSTR_F32 | (flags & (SLJIT_SINGLE_OP | FPU_LOAD));
1551
1552	SLJIT_ASSERT(arg & SLJIT_MEM);
1553
1554	/* Fast loads and stores. */
1555	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
1556		FAIL_IF(push_inst32(compiler, ADD_W | RD4(TMP_REG2) | RN4(arg & REG_MASK) | RM4(OFFS_REG(arg)) | ((argw & 0x3) << 6)));
1557		arg = SLJIT_MEM | TMP_REG2;
1558		argw = 0;
1559	}
1560
1561	if ((arg & REG_MASK) && (argw & 0x3) == 0) {
1562		if (!(argw & ~0x3fc))
1563			return push_inst32(compiler, inst | 0x800000 | RN4(arg & REG_MASK) | DD4(reg) | (argw >> 2));
1564		if (!(-argw & ~0x3fc))
1565			return push_inst32(compiler, inst | RN4(arg & REG_MASK) | DD4(reg) | (-argw >> 2));
1566	}
1567
1568	/* Slow cases */
1569	SLJIT_ASSERT(!(arg & OFFS_REG_MASK));
1570	if (compiler->cache_arg == arg) {
1571		tmp = argw - compiler->cache_argw;
1572		if (!(tmp & ~0x3fc))
1573			return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG3) | DD4(reg) | (tmp >> 2));
1574		if (!(-tmp & ~0x3fc))
1575			return push_inst32(compiler, inst | RN4(TMP_REG3) | DD4(reg) | (-tmp >> 2));
1576		if (emit_set_delta(compiler, TMP_REG3, TMP_REG3, tmp) != SLJIT_ERR_UNSUPPORTED) {
1577			FAIL_IF(compiler->error);
1578			compiler->cache_argw = argw;
1579			return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG3) | DD4(reg));
1580		}
1581	}
1582
1583	if (arg & REG_MASK) {
1584		if (emit_set_delta(compiler, TMP_REG1, arg & REG_MASK, argw) != SLJIT_ERR_UNSUPPORTED) {
1585			FAIL_IF(compiler->error);
1586			return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG1) | DD4(reg));
1587		}
1588		imm = get_imm(argw & ~0x3fc);
1589		if (imm != INVALID_IMM) {
1590			FAIL_IF(push_inst32(compiler, ADD_WI | RD4(TMP_REG1) | RN4(arg & REG_MASK) | imm));
1591			return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG1) | DD4(reg) | ((argw & 0x3fc) >> 2));
1592		}
1593		imm = get_imm(-argw & ~0x3fc);
1594		if (imm != INVALID_IMM) {
1595			argw = -argw;
1596			FAIL_IF(push_inst32(compiler, SUB_WI | RD4(TMP_REG1) | RN4(arg & REG_MASK) | imm));
1597			return push_inst32(compiler, inst | RN4(TMP_REG1) | DD4(reg) | ((argw & 0x3fc) >> 2));
1598		}
1599	}
1600
1601	compiler->cache_arg = arg;
1602	compiler->cache_argw = argw;
1603
1604	FAIL_IF(load_immediate(compiler, TMP_REG3, argw));
1605	if (arg & REG_MASK)
1606		FAIL_IF(push_inst16(compiler, ADD | SET_REGS44(TMP_REG3, (arg & REG_MASK))));
1607	return push_inst32(compiler, inst | 0x800000 | RN4(TMP_REG3) | DD4(reg));
1608}
1609
1610SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op,
1611	sljit_si dst, sljit_sw dstw,
1612	sljit_si src, sljit_sw srcw)
1613{
1614	sljit_si dst_r;
1615
1616	CHECK_ERROR();
1617	check_sljit_emit_fop1(compiler, op, dst, dstw, src, srcw);
1618	SLJIT_COMPILE_ASSERT((SLJIT_SINGLE_OP == 0x100), float_transfer_bit_error);
1619
1620	compiler->cache_arg = 0;
1621	compiler->cache_argw = 0;
1622	op ^= SLJIT_SINGLE_OP;
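	/* The VFP opcodes above are the single precision (.f32) encodings; bit 8
	   (0x100) selects double precision. Since SLJIT_SINGLE_OP == 0x100, the
	   XOR above inverts that bit, so or-ing (op & SLJIT_SINGLE_OP) into an
	   opcode yields the .f64 form exactly when the operation is double. */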
1623
1624	if (GET_OPCODE(op) == SLJIT_CMPD) {
1625		if (dst & SLJIT_MEM) {
1626			emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG1, dst, dstw);
1627			dst = TMP_FREG1;
1628		}
1629		if (src & SLJIT_MEM) {
1630			emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG2, src, srcw);
1631			src = TMP_FREG2;
1632		}
1633		FAIL_IF(push_inst32(compiler, VCMP_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst) | DM4(src)));
1634		return push_inst32(compiler, VMRS);
1635	}
1636
1637	dst_r = (dst <= REG_MASK) ? dst : TMP_FREG1;
1638	if (src & SLJIT_MEM) {
1639		emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, dst_r, src, srcw);
1640		src = dst_r;
1641	}
1642
1643	switch (GET_OPCODE(op)) {
1644	case SLJIT_MOVD:
1645		if (src != dst_r)
1646			FAIL_IF(push_inst32(compiler, VMOV_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DM4(src)));
1647		break;
1648	case SLJIT_NEGD:
1649		FAIL_IF(push_inst32(compiler, VNEG_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DM4(src)));
1650		break;
1651	case SLJIT_ABSD:
1652		FAIL_IF(push_inst32(compiler, VABS_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DM4(src)));
1653		break;
1654	}
1655
1656	if (!(dst & SLJIT_MEM))
1657		return SLJIT_SUCCESS;
1658	return emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP), TMP_FREG1, dst, dstw);
1659}
1660
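/* Floating point binary operations (ADDD, SUBD, MULD, DIVD). Memory source
   operands are loaded into TMP_FREG1/TMP_FREG2; the result is computed in
   dst or TMP_FREG1 and written back when dst is a memory operand. */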
1661SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op,
1662	sljit_si dst, sljit_sw dstw,
1663	sljit_si src1, sljit_sw src1w,
1664	sljit_si src2, sljit_sw src2w)
1665{
1666	sljit_si dst_r;
1667
1668	CHECK_ERROR();
1669	check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w);
1670
1671	compiler->cache_arg = 0;
1672	compiler->cache_argw = 0;
1673	op ^= SLJIT_SINGLE_OP;
1674
1675	dst_r = (dst <= REG_MASK) ? dst : TMP_FREG1;
1676	if (src1 & SLJIT_MEM) {
1677		emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG1, src1, src1w);
1678		src1 = TMP_FREG1;
1679	}
1680	if (src2 & SLJIT_MEM) {
1681		emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP) | FPU_LOAD, TMP_FREG2, src2, src2w);
1682		src2 = TMP_FREG2;
1683	}
1684
1685	switch (GET_OPCODE(op)) {
1686	case SLJIT_ADDD:
1687		FAIL_IF(push_inst32(compiler, VADD_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
1688		break;
1689	case SLJIT_SUBD:
1690		FAIL_IF(push_inst32(compiler, VSUB_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
1691		break;
1692	case SLJIT_MULD:
1693		FAIL_IF(push_inst32(compiler, VMUL_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
1694		break;
1695	case SLJIT_DIVD:
1696		FAIL_IF(push_inst32(compiler, VDIV_F32 | (op & SLJIT_SINGLE_OP) | DD4(dst_r) | DN4(src1) | DM4(src2)));
1697		break;
1698	}
1699
1700	if (!(dst & SLJIT_MEM))
1701		return SLJIT_SUCCESS;
1702	return emit_fop_mem(compiler, (op & SLJIT_SINGLE_OP), TMP_FREG1, dst, dstw);
1703}
1704
1705#undef FPU_LOAD
1706
1707/* --------------------------------------------------------------------- */
1708/*  Other instructions                                                   */
1709/* --------------------------------------------------------------------- */
1710
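/* Saves the return address of a fast call: TMP_REG3 is mapped to LR (see
   reg_map), so the value stored there by BLX is copied into dst. */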
1711SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
1712{
1713	CHECK_ERROR();
1714	check_sljit_emit_fast_enter(compiler, dst, dstw);
1715	ADJUST_LOCAL_OFFSET(dst, dstw);
1716
1717	/* For UNUSED dst. Uncommon, but possible. */
1718	if (dst == SLJIT_UNUSED)
1719		return SLJIT_SUCCESS;
1720
1721	if (dst <= REG_MASK)
1722		return push_inst16(compiler, MOV | SET_REGS44(dst, TMP_REG3));
1723
1724	/* Memory. */
1725	if (getput_arg_fast(compiler, WORD_SIZE | STORE, TMP_REG3, dst, dstw))
1726		return compiler->error;
1727	/* TMP_REG3 is used for caching. */
1728	FAIL_IF(push_inst16(compiler, MOV | SET_REGS44(TMP_REG2, TMP_REG3)));
1729	compiler->cache_arg = 0;
1730	compiler->cache_argw = 0;
1731	return getput_arg(compiler, WORD_SIZE | STORE, TMP_REG2, dst, dstw, 0, 0);
1732}
1733
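/* Counterpart of sljit_emit_fast_enter: moves the saved return address back
   into TMP_REG3 (LR) and branches to it. */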
1734SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
1735{
1736	CHECK_ERROR();
1737	check_sljit_emit_fast_return(compiler, src, srcw);
1738	ADJUST_LOCAL_OFFSET(src, srcw);
1739
1740	if (src <= REG_MASK)
1741		FAIL_IF(push_inst16(compiler, MOV | SET_REGS44(TMP_REG3, src)));
1742	else if (src & SLJIT_MEM) {
1743		if (getput_arg_fast(compiler, WORD_SIZE, TMP_REG3, src, srcw))
1744			FAIL_IF(compiler->error);
1745		else {
1746			compiler->cache_arg = 0;
1747			compiler->cache_argw = 0;
1748			FAIL_IF(getput_arg(compiler, WORD_SIZE, TMP_REG2, src, srcw, 0, 0));
1749			FAIL_IF(push_inst16(compiler, MOV | SET_REGS44(TMP_REG3, TMP_REG2)));
1750		}
1751	}
1752	else if (src & SLJIT_IMM)
1753		FAIL_IF(load_immediate(compiler, TMP_REG3, srcw));
1754	return push_inst16(compiler, BLX | RN3(TMP_REG3));
1755}
1756
1757/* --------------------------------------------------------------------- */
1758/*  Conditional instructions                                             */
1759/* --------------------------------------------------------------------- */
1760
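/* Maps an SLJIT condition type to the ARM condition code (EQ, NE, ..., AL)
   used by IT blocks and conditional branches. */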
1761static sljit_uw get_cc(sljit_si type)
1762{
1763	switch (type) {
1764	case SLJIT_C_EQUAL:
1765	case SLJIT_C_MUL_NOT_OVERFLOW:
1766	case SLJIT_C_FLOAT_EQUAL:
1767		return 0x0;
1768
1769	case SLJIT_C_NOT_EQUAL:
1770	case SLJIT_C_MUL_OVERFLOW:
1771	case SLJIT_C_FLOAT_NOT_EQUAL:
1772		return 0x1;
1773
1774	case SLJIT_C_LESS:
1775	case SLJIT_C_FLOAT_LESS:
1776		return 0x3;
1777
1778	case SLJIT_C_GREATER_EQUAL:
1779	case SLJIT_C_FLOAT_GREATER_EQUAL:
1780		return 0x2;
1781
1782	case SLJIT_C_GREATER:
1783	case SLJIT_C_FLOAT_GREATER:
1784		return 0x8;
1785
1786	case SLJIT_C_LESS_EQUAL:
1787	case SLJIT_C_FLOAT_LESS_EQUAL:
1788		return 0x9;
1789
1790	case SLJIT_C_SIG_LESS:
1791		return 0xb;
1792
1793	case SLJIT_C_SIG_GREATER_EQUAL:
1794		return 0xa;
1795
1796	case SLJIT_C_SIG_GREATER:
1797		return 0xc;
1798
1799	case SLJIT_C_SIG_LESS_EQUAL:
1800		return 0xd;
1801
1802	case SLJIT_C_OVERFLOW:
1803	case SLJIT_C_FLOAT_UNORDERED:
1804		return 0x6;
1805
1806	case SLJIT_C_NOT_OVERFLOW:
1807	case SLJIT_C_FLOAT_ORDERED:
1808		return 0x7;
1809
1810	default: /* SLJIT_JUMP */
1811		return 0xe;
1812	}
1813}
1814
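/* Creates a label at the current code position; if the last label was created
   at the same position, it is reused. */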
1815SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
1816{
1817	struct sljit_label *label;
1818
1819	CHECK_ERROR_PTR();
1820	check_sljit_emit_label(compiler);
1821
1822	if (compiler->last_label && compiler->last_label->size == compiler->size)
1823		return compiler->last_label;
1824
1825	label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
1826	PTR_FAIL_IF(!label);
1827	set_label(label, compiler);
1828	return label;
1829}
1830
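/* Emits a (possibly conditional) jump or call to a target that is resolved
   later: a patchable 32 bit constant is loaded into TMP_REG1, an IT
   instruction makes the following BX conditional when needed, and the
   condition code is recorded in jump->flags. */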
1831SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type)
1832{
1833	struct sljit_jump *jump;
1834	sljit_ins cc;
1835
1836	CHECK_ERROR_PTR();
1837	check_sljit_emit_jump(compiler, type);
1838
1839	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
1840	PTR_FAIL_IF(!jump);
1841	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
1842	type &= 0xff;
1843
1844	/* On ARM, the arguments do not need to be modified. */
1845	PTR_FAIL_IF(emit_imm32_const(compiler, TMP_REG1, 0));
1846	if (type < SLJIT_JUMP) {
1847		jump->flags |= IS_COND;
1848		cc = get_cc(type);
1849		jump->flags |= cc << 8;
1850		PTR_FAIL_IF(push_inst16(compiler, IT | (cc << 4) | 0x8));
1851	}
1852
1853	jump->addr = compiler->size;
1854	if (type <= SLJIT_JUMP)
1855		PTR_FAIL_IF(push_inst16(compiler, BX | RN3(TMP_REG1)));
1856	else {
1857		jump->flags |= IS_BL;
1858		PTR_FAIL_IF(push_inst16(compiler, BLX | RN3(TMP_REG1)));
1859	}
1860
1861	return jump;
1862}
1863
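/* Jump or call to an address given as a register, memory operand or
   immediate. Register targets use BX/BLX directly, memory targets are loaded
   into PC (plain jumps) or into TMP_REG1 followed by BLX (calls), and
   immediate targets go through a patchable constant load as in
   sljit_emit_jump. */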
1864SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw)
1865{
1866	struct sljit_jump *jump;
1867
1868	CHECK_ERROR();
1869	check_sljit_emit_ijump(compiler, type, src, srcw);
1870	ADJUST_LOCAL_OFFSET(src, srcw);
1871
1872	/* On ARM, the arguments do not need to be modified. */
1873	if (!(src & SLJIT_IMM)) {
1874		if (FAST_IS_REG(src))
1875			return push_inst16(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RN3(src));
1876
1877		FAIL_IF(emit_op_mem(compiler, WORD_SIZE, type <= SLJIT_JUMP ? TMP_PC : TMP_REG1, src, srcw));
1878		if (type >= SLJIT_FAST_CALL)
1879			return push_inst16(compiler, BLX | RN3(TMP_REG1));
1880	}
1881
1882	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
1883	FAIL_IF(!jump);
1884	set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_BL : 0));
1885	jump->u.target = srcw;
1886
1887	FAIL_IF(emit_imm32_const(compiler, TMP_REG1, 0));
1888	jump->addr = compiler->size;
1889	return push_inst16(compiler, (type <= SLJIT_JUMP ? BX : BLX) | RN3(TMP_REG1));
1890}
1891
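/* Materializes the condition 'type' into dst: either as a plain 0/1 value
   (move-type op) or combined with the destination using AND, OR or XOR.
   Conditional execution is implemented with IT blocks. */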
1892SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op,
1893	sljit_si dst, sljit_sw dstw,
1894	sljit_si src, sljit_sw srcw,
1895	sljit_si type)
1896{
1897	sljit_si dst_r, flags = GET_ALL_FLAGS(op);
1898	sljit_ins cc, ins;
1899
1900	CHECK_ERROR();
1901	check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type);
1902	ADJUST_LOCAL_OFFSET(dst, dstw);
1903	ADJUST_LOCAL_OFFSET(src, srcw);
1904
1905	if (dst == SLJIT_UNUSED)
1906		return SLJIT_SUCCESS;
1907
1908	op = GET_OPCODE(op);
1909	cc = get_cc(type);
1910	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
1911
1912	if (op < SLJIT_ADD) {
1913		FAIL_IF(push_inst16(compiler, IT | (cc << 4) | (((cc & 0x1) ^ 0x1) << 3) | 0x4));
1914		if (reg_map[dst_r] > 7) {
1915			FAIL_IF(push_inst32(compiler, MOV_WI | RD4(dst_r) | 1));
1916			FAIL_IF(push_inst32(compiler, MOV_WI | RD4(dst_r) | 0));
1917		} else {
1918			FAIL_IF(push_inst16(compiler, MOVSI | RDN3(dst_r) | 1));
1919			FAIL_IF(push_inst16(compiler, MOVSI | RDN3(dst_r) | 0));
1920		}
1921		if (dst_r != TMP_REG2)
1922			return SLJIT_SUCCESS;
1923		return emit_op_mem(compiler, WORD_SIZE | STORE, TMP_REG2, dst, dstw);
1924	}
1925
1926	ins = (op == SLJIT_AND ? ANDI : (op == SLJIT_OR ? ORRI : EORI));
1927	if ((op == SLJIT_OR || op == SLJIT_XOR) && FAST_IS_REG(dst) && dst == src) {
1928		/* Does not change the other bits. */
1929		FAIL_IF(push_inst16(compiler, IT | (cc << 4) | 0x8));
1930		FAIL_IF(push_inst32(compiler, ins | RN4(src) | RD4(dst) | 1));
1931		if (flags & SLJIT_SET_E) {
1932			/* The condition must always be set, even if the ORRI/EORI is not executed above. */
1933			if (reg_map[dst] <= 7)
1934				return push_inst16(compiler, MOVS | RD3(TMP_REG1) | RN3(dst));
1935			return push_inst32(compiler, MOV_W | SET_FLAGS | RD4(TMP_REG1) | RM4(dst));
1936		}
1937		return SLJIT_SUCCESS;
1938	}
1939
1940	compiler->cache_arg = 0;
1941	compiler->cache_argw = 0;
1942	if (src & SLJIT_MEM) {
1943		FAIL_IF(emit_op_mem2(compiler, WORD_SIZE, TMP_REG2, src, srcw, dst, dstw));
1944		src = TMP_REG2;
1945		srcw = 0;
1946	} else if (src & SLJIT_IMM) {
1947		FAIL_IF(load_immediate(compiler, TMP_REG2, srcw));
1948		src = TMP_REG2;
1949		srcw = 0;
1950	}
1951
1952	if (op == SLJIT_AND || src != dst_r) {
1953		FAIL_IF(push_inst16(compiler, IT | (cc << 4) | (((cc & 0x1) ^ 0x1) << 3) | 0x4));
1954		FAIL_IF(push_inst32(compiler, ins | RN4(src) | RD4(dst_r) | 1));
1955		FAIL_IF(push_inst32(compiler, ins | RN4(src) | RD4(dst_r) | 0));
1956	}
1957	else {
1958		FAIL_IF(push_inst16(compiler, IT | (cc << 4) | 0x8));
1959		FAIL_IF(push_inst32(compiler, ins | RN4(src) | RD4(dst_r) | 1));
1960	}
1961
1962	if (dst_r == TMP_REG2)
1963		FAIL_IF(emit_op_mem2(compiler, WORD_SIZE | STORE, TMP_REG2, dst, dstw, 0, 0));
1964
1965	if (flags & SLJIT_SET_E) {
1966		/* The condition must always be set, even if the ORRI/EORI is not executed above. */
1967		if (reg_map[dst_r] <= 7)
1968			return push_inst16(compiler, MOVS | RD3(TMP_REG1) | RN3(dst_r));
1969		return push_inst32(compiler, MOV_W | SET_FLAGS | RD4(TMP_REG1) | RM4(dst_r));
1970	}
1971	return SLJIT_SUCCESS;
1972}
1973
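/* Emits a patchable constant load into dst; the loaded value can be changed
   later with sljit_set_const. */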
1974SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value)
1975{
1976	struct sljit_const *const_;
1977	sljit_si dst_r;
1978
1979	CHECK_ERROR_PTR();
1980	check_sljit_emit_const(compiler, dst, dstw, init_value);
1981	ADJUST_LOCAL_OFFSET(dst, dstw);
1982
1983	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
1984	PTR_FAIL_IF(!const_);
1985	set_const(const_, compiler);
1986
1987	dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
1988	PTR_FAIL_IF(emit_imm32_const(compiler, dst_r, init_value));
1989
1990	if (dst & SLJIT_MEM)
1991		PTR_FAIL_IF(emit_op_mem(compiler, WORD_SIZE | STORE, dst_r, dst, dstw));
1992	return const_;
1993}
1994
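/* Runtime patching: both functions rewrite the 32 bit immediate encoded at
   'addr' (four 16 bit halfwords) and flush the instruction cache. */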
1995SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr)
1996{
1997	sljit_uh *inst = (sljit_uh*)addr;
1998	modify_imm32_const(inst, new_addr);
1999	SLJIT_CACHE_FLUSH(inst, inst + 4);
2000}
2001
2002SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant)
2003{
2004	sljit_uh *inst = (sljit_uh*)addr;
2005	modify_imm32_const(inst, new_constant);
2006	SLJIT_CACHE_FLUSH(inst, inst + 4);
2007}
2008