1 1.4 alnsn /* $NetBSD: sljitNativeSPARC_common.c,v 1.4 2019/01/20 23:14:16 alnsn Exp $ */ 2 1.2 alnsn 3 1.1 alnsn /* 4 1.1 alnsn * Stack-less Just-In-Time compiler 5 1.1 alnsn * 6 1.4 alnsn * Copyright Zoltan Herczeg (hzmester (at) freemail.hu). All rights reserved. 7 1.1 alnsn * 8 1.1 alnsn * Redistribution and use in source and binary forms, with or without modification, are 9 1.1 alnsn * permitted provided that the following conditions are met: 10 1.1 alnsn * 11 1.1 alnsn * 1. Redistributions of source code must retain the above copyright notice, this list of 12 1.1 alnsn * conditions and the following disclaimer. 13 1.1 alnsn * 14 1.1 alnsn * 2. Redistributions in binary form must reproduce the above copyright notice, this list 15 1.1 alnsn * of conditions and the following disclaimer in the documentation and/or other materials 16 1.1 alnsn * provided with the distribution. 17 1.1 alnsn * 18 1.1 alnsn * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY 19 1.1 alnsn * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 20 1.1 alnsn * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT 21 1.1 alnsn * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 22 1.1 alnsn * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 23 1.1 alnsn * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 24 1.1 alnsn * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 25 1.1 alnsn * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 26 1.1 alnsn * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 1.1 alnsn */ 28 1.1 alnsn 29 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void) 30 1.1 alnsn { 31 1.1 alnsn return "SPARC" SLJIT_CPUINFO; 32 1.1 alnsn } 33 1.1 alnsn 34 1.1 alnsn /* Length of an instruction word 35 1.1 alnsn Both for sparc-32 and sparc-64 */ 36 1.3 alnsn typedef sljit_u32 sljit_ins; 37 1.3 alnsn 38 1.3 alnsn #if (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL) 39 1.1 alnsn 40 1.1 alnsn static void sparc_cache_flush(sljit_ins *from, sljit_ins *to) 41 1.1 alnsn { 42 1.2 alnsn #if defined(__SUNPRO_C) && __SUNPRO_C < 0x590 43 1.2 alnsn __asm ( 44 1.2 alnsn /* if (from == to) return */ 45 1.2 alnsn "cmp %i0, %i1\n" 46 1.2 alnsn "be .leave\n" 47 1.2 alnsn "nop\n" 48 1.2 alnsn 49 1.2 alnsn /* loop until from >= to */ 50 1.2 alnsn ".mainloop:\n" 51 1.2 alnsn "flush %i0\n" 52 1.2 alnsn "add %i0, 8, %i0\n" 53 1.2 alnsn "cmp %i0, %i1\n" 54 1.2 alnsn "bcs .mainloop\n" 55 1.2 alnsn "nop\n" 56 1.2 alnsn 57 1.2 alnsn /* The comparison was done above. */ 58 1.2 alnsn "bne .leave\n" 59 1.2 alnsn /* nop is not necessary here, since the 60 1.2 alnsn sub operation has no side effect. */ 61 1.2 alnsn "sub %i0, 4, %i0\n" 62 1.2 alnsn "flush %i0\n" 63 1.2 alnsn ".leave:" 64 1.2 alnsn ); 65 1.2 alnsn #else 66 1.1 alnsn if (SLJIT_UNLIKELY(from == to)) 67 1.1 alnsn return; 68 1.1 alnsn 69 1.1 alnsn do { 70 1.1 alnsn __asm__ volatile ( 71 1.1 alnsn "flush %0\n" 72 1.1 alnsn : : "r"(from) 73 1.1 alnsn ); 74 1.1 alnsn /* Operates at least on doubleword. */ 75 1.1 alnsn from += 2; 76 1.1 alnsn } while (from < to); 77 1.1 alnsn 78 1.1 alnsn if (from == to) { 79 1.1 alnsn /* Flush the last word. 
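The loop above advances two words at a time and FLUSH is only guaranteed to cover the doubleword containing its operand, so when the buffer does not start on a doubleword boundary the final word may still be unflushed.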
*/ 80 1.2 alnsn from --; 81 1.1 alnsn __asm__ volatile ( 82 1.1 alnsn "flush %0\n" 83 1.2 alnsn : : "r"(from) 84 1.1 alnsn ); 85 1.1 alnsn } 86 1.2 alnsn #endif 87 1.1 alnsn } 88 1.1 alnsn 89 1.3 alnsn #endif /* (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL) */ 90 1.3 alnsn 91 1.1 alnsn /* TMP_REG2 is not used by getput_arg */ 92 1.3 alnsn #define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2) 93 1.3 alnsn #define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3) 94 1.3 alnsn #define TMP_REG3 (SLJIT_NUMBER_OF_REGISTERS + 4) 95 1.3 alnsn #define TMP_LINK (SLJIT_NUMBER_OF_REGISTERS + 5) 96 1.1 alnsn 97 1.2 alnsn #define TMP_FREG1 (0) 98 1.3 alnsn #define TMP_FREG2 ((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1) << 1) 99 1.1 alnsn 100 1.3 alnsn static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = { 101 1.3 alnsn 0, 8, 9, 10, 13, 29, 28, 27, 23, 22, 21, 20, 19, 18, 17, 16, 26, 25, 24, 14, 1, 11, 12, 15 102 1.1 alnsn }; 103 1.1 alnsn 104 1.1 alnsn /* --------------------------------------------------------------------- */ 105 1.1 alnsn /* Instruction forms */ 106 1.1 alnsn /* --------------------------------------------------------------------- */ 107 1.1 alnsn 108 1.1 alnsn #define D(d) (reg_map[d] << 25) 109 1.1 alnsn #define DA(d) ((d) << 25) 110 1.1 alnsn #define S1(s1) (reg_map[s1] << 14) 111 1.1 alnsn #define S2(s2) (reg_map[s2]) 112 1.1 alnsn #define S1A(s1) ((s1) << 14) 113 1.1 alnsn #define S2A(s2) (s2) 114 1.1 alnsn #define IMM_ARG 0x2000 115 1.1 alnsn #define DOP(op) ((op) << 5) 116 1.1 alnsn #define IMM(imm) (((imm) & 0x1fff) | IMM_ARG) 117 1.1 alnsn 118 1.1 alnsn #define DR(dr) (reg_map[dr]) 119 1.1 alnsn #define OPC1(opcode) ((opcode) << 30) 120 1.1 alnsn #define OPC2(opcode) ((opcode) << 22) 121 1.1 alnsn #define OPC3(opcode) ((opcode) << 19) 122 1.1 alnsn #define SET_FLAGS OPC3(0x10) 123 1.1 alnsn 124 1.1 alnsn #define ADD (OPC1(0x2) | OPC3(0x00)) 125 1.1 alnsn #define ADDC (OPC1(0x2) | OPC3(0x08)) 126 1.1 alnsn #define AND (OPC1(0x2) | OPC3(0x01)) 127 1.1 alnsn #define ANDN (OPC1(0x2) | OPC3(0x05)) 128 1.1 alnsn #define CALL (OPC1(0x1)) 129 1.1 alnsn #define FABSS (OPC1(0x2) | OPC3(0x34) | DOP(0x09)) 130 1.1 alnsn #define FADDD (OPC1(0x2) | OPC3(0x34) | DOP(0x42)) 131 1.2 alnsn #define FADDS (OPC1(0x2) | OPC3(0x34) | DOP(0x41)) 132 1.1 alnsn #define FCMPD (OPC1(0x2) | OPC3(0x35) | DOP(0x52)) 133 1.2 alnsn #define FCMPS (OPC1(0x2) | OPC3(0x35) | DOP(0x51)) 134 1.1 alnsn #define FDIVD (OPC1(0x2) | OPC3(0x34) | DOP(0x4e)) 135 1.2 alnsn #define FDIVS (OPC1(0x2) | OPC3(0x34) | DOP(0x4d)) 136 1.3 alnsn #define FDTOI (OPC1(0x2) | OPC3(0x34) | DOP(0xd2)) 137 1.3 alnsn #define FDTOS (OPC1(0x2) | OPC3(0x34) | DOP(0xc6)) 138 1.3 alnsn #define FITOD (OPC1(0x2) | OPC3(0x34) | DOP(0xc8)) 139 1.3 alnsn #define FITOS (OPC1(0x2) | OPC3(0x34) | DOP(0xc4)) 140 1.1 alnsn #define FMOVS (OPC1(0x2) | OPC3(0x34) | DOP(0x01)) 141 1.1 alnsn #define FMULD (OPC1(0x2) | OPC3(0x34) | DOP(0x4a)) 142 1.2 alnsn #define FMULS (OPC1(0x2) | OPC3(0x34) | DOP(0x49)) 143 1.1 alnsn #define FNEGS (OPC1(0x2) | OPC3(0x34) | DOP(0x05)) 144 1.3 alnsn #define FSTOD (OPC1(0x2) | OPC3(0x34) | DOP(0xc9)) 145 1.3 alnsn #define FSTOI (OPC1(0x2) | OPC3(0x34) | DOP(0xd1)) 146 1.1 alnsn #define FSUBD (OPC1(0x2) | OPC3(0x34) | DOP(0x46)) 147 1.2 alnsn #define FSUBS (OPC1(0x2) | OPC3(0x34) | DOP(0x45)) 148 1.1 alnsn #define JMPL (OPC1(0x2) | OPC3(0x38)) 149 1.1 alnsn #define NOP (OPC1(0x0) | OPC2(0x04)) 150 1.1 alnsn #define OR (OPC1(0x2) | OPC3(0x02)) 151 1.1 alnsn #define ORN (OPC1(0x2) | OPC3(0x06)) 152 1.1 alnsn #define RDY
(OPC1(0x2) | OPC3(0x28) | S1A(0)) 153 1.1 alnsn #define RESTORE (OPC1(0x2) | OPC3(0x3d)) 154 1.1 alnsn #define SAVE (OPC1(0x2) | OPC3(0x3c)) 155 1.1 alnsn #define SETHI (OPC1(0x0) | OPC2(0x04)) 156 1.1 alnsn #define SLL (OPC1(0x2) | OPC3(0x25)) 157 1.1 alnsn #define SLLX (OPC1(0x2) | OPC3(0x25) | (1 << 12)) 158 1.1 alnsn #define SRA (OPC1(0x2) | OPC3(0x27)) 159 1.1 alnsn #define SRAX (OPC1(0x2) | OPC3(0x27) | (1 << 12)) 160 1.1 alnsn #define SRL (OPC1(0x2) | OPC3(0x26)) 161 1.1 alnsn #define SRLX (OPC1(0x2) | OPC3(0x26) | (1 << 12)) 162 1.1 alnsn #define SUB (OPC1(0x2) | OPC3(0x04)) 163 1.1 alnsn #define SUBC (OPC1(0x2) | OPC3(0x0c)) 164 1.1 alnsn #define TA (OPC1(0x2) | OPC3(0x3a) | (8 << 25)) 165 1.1 alnsn #define WRY (OPC1(0x2) | OPC3(0x30) | DA(0)) 166 1.1 alnsn #define XOR (OPC1(0x2) | OPC3(0x03)) 167 1.1 alnsn #define XNOR (OPC1(0x2) | OPC3(0x07)) 168 1.1 alnsn 169 1.1 alnsn #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) 170 1.1 alnsn #define MAX_DISP (0x1fffff) 171 1.1 alnsn #define MIN_DISP (-0x200000) 172 1.1 alnsn #define DISP_MASK (0x3fffff) 173 1.1 alnsn 174 1.1 alnsn #define BICC (OPC1(0x0) | OPC2(0x2)) 175 1.1 alnsn #define FBFCC (OPC1(0x0) | OPC2(0x6)) 176 1.1 alnsn #define SLL_W SLL 177 1.1 alnsn #define SDIV (OPC1(0x2) | OPC3(0x0f)) 178 1.1 alnsn #define SMUL (OPC1(0x2) | OPC3(0x0b)) 179 1.1 alnsn #define UDIV (OPC1(0x2) | OPC3(0x0e)) 180 1.1 alnsn #define UMUL (OPC1(0x2) | OPC3(0x0a)) 181 1.1 alnsn #else 182 1.1 alnsn #define SLL_W SLLX 183 1.1 alnsn #endif 184 1.1 alnsn 185 1.1 alnsn #define SIMM_MAX (0x0fff) 186 1.1 alnsn #define SIMM_MIN (-0x1000) 187 1.1 alnsn 188 1.1 alnsn /* dest_reg is the absolute name of the register 189 1.1 alnsn Useful for reordering instructions in the delay slot. */ 190 1.3 alnsn static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_s32 delay_slot) 191 1.1 alnsn { 192 1.2 alnsn sljit_ins *ptr; 193 1.1 alnsn SLJIT_ASSERT((delay_slot & DST_INS_MASK) == UNMOVABLE_INS 194 1.1 alnsn || (delay_slot & DST_INS_MASK) == MOVABLE_INS 195 1.1 alnsn || (delay_slot & DST_INS_MASK) == ((ins >> 25) & 0x1f)); 196 1.2 alnsn ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins)); 197 1.1 alnsn FAIL_IF(!ptr); 198 1.1 alnsn *ptr = ins; 199 1.1 alnsn compiler->size++; 200 1.1 alnsn compiler->delay_slot = delay_slot; 201 1.1 alnsn return SLJIT_SUCCESS; 202 1.1 alnsn } 203 1.1 alnsn 204 1.4 alnsn static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset) 205 1.1 alnsn { 206 1.2 alnsn sljit_sw diff; 207 1.1 alnsn sljit_uw target_addr; 208 1.1 alnsn sljit_ins *inst; 209 1.1 alnsn sljit_ins saved_inst; 210 1.1 alnsn 211 1.1 alnsn if (jump->flags & SLJIT_REWRITABLE_JUMP) 212 1.1 alnsn return code_ptr; 213 1.1 alnsn 214 1.1 alnsn if (jump->flags & JUMP_ADDR) 215 1.1 alnsn target_addr = jump->u.target; 216 1.1 alnsn else { 217 1.1 alnsn SLJIT_ASSERT(jump->flags & JUMP_LABEL); 218 1.4 alnsn target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset; 219 1.1 alnsn } 220 1.1 alnsn inst = (sljit_ins*)jump->addr; 221 1.1 alnsn 222 1.1 alnsn #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) 223 1.1 alnsn if (jump->flags & IS_CALL) { 224 1.1 alnsn /* Call is always patchable on sparc 32. 
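The CALL instruction carries a 30-bit word displacement, which reaches the whole 32-bit address space, so the jump can always be patched in place and no indirect sequence is needed.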
*/ 225 1.1 alnsn jump->flags |= PATCH_CALL; 226 1.1 alnsn if (jump->flags & IS_MOVABLE) { 227 1.1 alnsn inst[0] = inst[-1]; 228 1.1 alnsn inst[-1] = CALL; 229 1.1 alnsn jump->addr -= sizeof(sljit_ins); 230 1.1 alnsn return inst; 231 1.1 alnsn } 232 1.1 alnsn inst[0] = CALL; 233 1.1 alnsn inst[1] = NOP; 234 1.1 alnsn return inst + 1; 235 1.1 alnsn } 236 1.1 alnsn #else 237 1.1 alnsn /* Both calls and BPr instructions shall not pass this point. */ 238 1.1 alnsn #error "Implementation required" 239 1.1 alnsn #endif 240 1.1 alnsn 241 1.1 alnsn if (jump->flags & IS_COND) 242 1.1 alnsn inst--; 243 1.1 alnsn 244 1.4 alnsn diff = ((sljit_sw)target_addr - (sljit_sw)(inst - 1) - executable_offset) >> 2; 245 1.4 alnsn 246 1.1 alnsn if (jump->flags & IS_MOVABLE) { 247 1.1 alnsn if (diff <= MAX_DISP && diff >= MIN_DISP) { 248 1.1 alnsn jump->flags |= PATCH_B; 249 1.1 alnsn inst--; 250 1.1 alnsn if (jump->flags & IS_COND) { 251 1.1 alnsn saved_inst = inst[0]; 252 1.1 alnsn inst[0] = inst[1] ^ (1 << 28); 253 1.1 alnsn inst[1] = saved_inst; 254 1.1 alnsn } else { 255 1.1 alnsn inst[1] = inst[0]; 256 1.1 alnsn inst[0] = BICC | DA(0x8); 257 1.1 alnsn } 258 1.1 alnsn jump->addr = (sljit_uw)inst; 259 1.1 alnsn return inst + 1; 260 1.1 alnsn } 261 1.1 alnsn } 262 1.1 alnsn 263 1.4 alnsn diff += sizeof(sljit_ins); 264 1.4 alnsn 265 1.1 alnsn if (diff <= MAX_DISP && diff >= MIN_DISP) { 266 1.1 alnsn jump->flags |= PATCH_B; 267 1.1 alnsn if (jump->flags & IS_COND) 268 1.1 alnsn inst[0] ^= (1 << 28); 269 1.1 alnsn else 270 1.1 alnsn inst[0] = BICC | DA(0x8); 271 1.1 alnsn inst[1] = NOP; 272 1.1 alnsn jump->addr = (sljit_uw)inst; 273 1.1 alnsn return inst + 1; 274 1.1 alnsn } 275 1.1 alnsn 276 1.1 alnsn return code_ptr; 277 1.1 alnsn } 278 1.1 alnsn 279 1.1 alnsn SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler) 280 1.1 alnsn { 281 1.1 alnsn struct sljit_memory_fragment *buf; 282 1.1 alnsn sljit_ins *code; 283 1.1 alnsn sljit_ins *code_ptr; 284 1.1 alnsn sljit_ins *buf_ptr; 285 1.1 alnsn sljit_ins *buf_end; 286 1.1 alnsn sljit_uw word_count; 287 1.4 alnsn sljit_sw executable_offset; 288 1.1 alnsn sljit_uw addr; 289 1.1 alnsn 290 1.1 alnsn struct sljit_label *label; 291 1.1 alnsn struct sljit_jump *jump; 292 1.1 alnsn struct sljit_const *const_; 293 1.1 alnsn 294 1.1 alnsn CHECK_ERROR_PTR(); 295 1.3 alnsn CHECK_PTR(check_sljit_generate_code(compiler)); 296 1.1 alnsn reverse_buf(compiler); 297 1.1 alnsn 298 1.1 alnsn code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins)); 299 1.1 alnsn PTR_FAIL_WITH_EXEC_IF(code); 300 1.1 alnsn buf = compiler->buf; 301 1.1 alnsn 302 1.1 alnsn code_ptr = code; 303 1.1 alnsn word_count = 0; 304 1.4 alnsn executable_offset = SLJIT_EXEC_OFFSET(code); 305 1.4 alnsn 306 1.1 alnsn label = compiler->labels; 307 1.1 alnsn jump = compiler->jumps; 308 1.1 alnsn const_ = compiler->consts; 309 1.4 alnsn 310 1.1 alnsn do { 311 1.1 alnsn buf_ptr = (sljit_ins*)buf->memory; 312 1.1 alnsn buf_end = buf_ptr + (buf->used_size >> 2); 313 1.1 alnsn do { 314 1.1 alnsn *code_ptr = *buf_ptr++; 315 1.1 alnsn SLJIT_ASSERT(!label || label->size >= word_count); 316 1.1 alnsn SLJIT_ASSERT(!jump || jump->addr >= word_count); 317 1.1 alnsn SLJIT_ASSERT(!const_ || const_->addr >= word_count); 318 1.1 alnsn /* These structures are ordered by their address. */ 319 1.1 alnsn if (label && label->size == word_count) { 320 1.1 alnsn /* Just recording the address. 
*/ 321 1.4 alnsn label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset); 322 1.1 alnsn label->size = code_ptr - code; 323 1.1 alnsn label = label->next; 324 1.1 alnsn } 325 1.1 alnsn if (jump && jump->addr == word_count) { 326 1.1 alnsn #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) 327 1.1 alnsn jump->addr = (sljit_uw)(code_ptr - 3); 328 1.1 alnsn #else 329 1.1 alnsn jump->addr = (sljit_uw)(code_ptr - 6); 330 1.1 alnsn #endif 331 1.4 alnsn code_ptr = detect_jump_type(jump, code_ptr, code, executable_offset); 332 1.1 alnsn jump = jump->next; 333 1.1 alnsn } 334 1.1 alnsn if (const_ && const_->addr == word_count) { 335 1.1 alnsn /* Just recording the address. */ 336 1.1 alnsn const_->addr = (sljit_uw)code_ptr; 337 1.1 alnsn const_ = const_->next; 338 1.1 alnsn } 339 1.1 alnsn code_ptr ++; 340 1.1 alnsn word_count ++; 341 1.1 alnsn } while (buf_ptr < buf_end); 342 1.1 alnsn 343 1.1 alnsn buf = buf->next; 344 1.1 alnsn } while (buf); 345 1.1 alnsn 346 1.1 alnsn if (label && label->size == word_count) { 347 1.4 alnsn label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset); 348 1.1 alnsn label->size = code_ptr - code; 349 1.1 alnsn label = label->next; 350 1.1 alnsn } 351 1.1 alnsn 352 1.1 alnsn SLJIT_ASSERT(!label); 353 1.1 alnsn SLJIT_ASSERT(!jump); 354 1.1 alnsn SLJIT_ASSERT(!const_); 355 1.3 alnsn SLJIT_ASSERT(code_ptr - code <= (sljit_s32)compiler->size); 356 1.1 alnsn 357 1.1 alnsn jump = compiler->jumps; 358 1.1 alnsn while (jump) { 359 1.1 alnsn do { 360 1.1 alnsn addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target; 361 1.4 alnsn buf_ptr = (sljit_ins *)jump->addr; 362 1.1 alnsn 363 1.1 alnsn if (jump->flags & PATCH_CALL) { 364 1.4 alnsn addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2; 365 1.2 alnsn SLJIT_ASSERT((sljit_sw)addr <= 0x1fffffff && (sljit_sw)addr >= -0x20000000); 366 1.1 alnsn buf_ptr[0] = CALL | (addr & 0x3fffffff); 367 1.1 alnsn break; 368 1.1 alnsn } 369 1.1 alnsn if (jump->flags & PATCH_B) { 370 1.4 alnsn addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2; 371 1.2 alnsn SLJIT_ASSERT((sljit_sw)addr <= MAX_DISP && (sljit_sw)addr >= MIN_DISP); 372 1.1 alnsn buf_ptr[0] = (buf_ptr[0] & ~DISP_MASK) | (addr & DISP_MASK); 373 1.1 alnsn break; 374 1.1 alnsn } 375 1.1 alnsn 376 1.1 alnsn /* Set the fields of immediate loads. 
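The constant was emitted as a SETHI/OR pair: the first word receives the upper 22 bits of the address, the second word the lower 10 bits.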
*/ 377 1.1 alnsn #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) 378 1.1 alnsn buf_ptr[0] = (buf_ptr[0] & 0xffc00000) | ((addr >> 10) & 0x3fffff); 379 1.1 alnsn buf_ptr[1] = (buf_ptr[1] & 0xfffffc00) | (addr & 0x3ff); 380 1.1 alnsn #else 381 1.1 alnsn #error "Implementation required" 382 1.1 alnsn #endif 383 1.1 alnsn } while (0); 384 1.1 alnsn jump = jump->next; 385 1.1 alnsn } 386 1.1 alnsn 387 1.1 alnsn 388 1.1 alnsn compiler->error = SLJIT_ERR_COMPILED; 389 1.4 alnsn compiler->executable_offset = executable_offset; 390 1.2 alnsn compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins); 391 1.4 alnsn 392 1.4 alnsn code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset); 393 1.4 alnsn code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset); 394 1.4 alnsn 395 1.1 alnsn SLJIT_CACHE_FLUSH(code, code_ptr); 396 1.1 alnsn return code; 397 1.1 alnsn } 398 1.1 alnsn 399 1.1 alnsn /* --------------------------------------------------------------------- */ 400 1.1 alnsn /* Entry, exit */ 401 1.1 alnsn /* --------------------------------------------------------------------- */ 402 1.1 alnsn 403 1.1 alnsn /* Creates an index in data_transfer_insts array. */ 404 1.1 alnsn #define LOAD_DATA 0x01 405 1.1 alnsn #define WORD_DATA 0x00 406 1.1 alnsn #define BYTE_DATA 0x02 407 1.1 alnsn #define HALF_DATA 0x04 408 1.1 alnsn #define INT_DATA 0x06 409 1.1 alnsn #define SIGNED_DATA 0x08 410 1.1 alnsn /* Separates integer and floating point registers */ 411 1.1 alnsn #define GPR_REG 0x0f 412 1.1 alnsn #define DOUBLE_DATA 0x10 413 1.3 alnsn #define SINGLE_DATA 0x12 414 1.1 alnsn 415 1.1 alnsn #define MEM_MASK 0x1f 416 1.1 alnsn 417 1.1 alnsn #define WRITE_BACK 0x00020 418 1.1 alnsn #define ARG_TEST 0x00040 419 1.2 alnsn #define ALT_KEEP_CACHE 0x00080 420 1.2 alnsn #define CUMULATIVE_OP 0x00100 421 1.2 alnsn #define IMM_OP 0x00200 422 1.2 alnsn #define SRC2_IMM 0x00400 423 1.2 alnsn 424 1.2 alnsn #define REG_DEST 0x00800 425 1.2 alnsn #define REG2_SOURCE 0x01000 426 1.2 alnsn #define SLOW_SRC1 0x02000 427 1.2 alnsn #define SLOW_SRC2 0x04000 428 1.2 alnsn #define SLOW_DEST 0x08000 429 1.2 alnsn 430 1.1 alnsn /* SET_FLAGS (0x10 << 19) also belong here! 
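It serves both as an emit_op flag and as the opcode bit that requests condition code updates; its value (0x00800000) is above SLOW_DEST, so it cannot collide with the flags defined above.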
*/ 431 1.1 alnsn 432 1.1 alnsn #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) 433 1.1 alnsn #include "sljitNativeSPARC_32.c" 434 1.1 alnsn #else 435 1.1 alnsn #include "sljitNativeSPARC_64.c" 436 1.1 alnsn #endif 437 1.1 alnsn 438 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler, 439 1.3 alnsn sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds, 440 1.3 alnsn sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size) 441 1.1 alnsn { 442 1.1 alnsn CHECK_ERROR(); 443 1.3 alnsn CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size)); 444 1.3 alnsn set_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size); 445 1.1 alnsn 446 1.3 alnsn local_size = (local_size + SLJIT_LOCALS_OFFSET + 7) & ~0x7; 447 1.1 alnsn compiler->local_size = local_size; 448 1.1 alnsn 449 1.1 alnsn if (local_size <= SIMM_MAX) { 450 1.3 alnsn FAIL_IF(push_inst(compiler, SAVE | D(SLJIT_SP) | S1(SLJIT_SP) | IMM(-local_size), UNMOVABLE_INS)); 451 1.1 alnsn } 452 1.1 alnsn else { 453 1.1 alnsn FAIL_IF(load_immediate(compiler, TMP_REG1, -local_size)); 454 1.3 alnsn FAIL_IF(push_inst(compiler, SAVE | D(SLJIT_SP) | S1(SLJIT_SP) | S2(TMP_REG1), UNMOVABLE_INS)); 455 1.1 alnsn } 456 1.1 alnsn 457 1.3 alnsn /* Arguments are in their appropriate registers. */ 458 1.1 alnsn 459 1.1 alnsn return SLJIT_SUCCESS; 460 1.1 alnsn } 461 1.1 alnsn 462 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler, 463 1.3 alnsn sljit_s32 options, sljit_s32 args, sljit_s32 scratches, sljit_s32 saveds, 464 1.3 alnsn sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size) 465 1.1 alnsn { 466 1.3 alnsn CHECK_ERROR(); 467 1.3 alnsn CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size)); 468 1.3 alnsn set_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size); 469 1.1 alnsn 470 1.3 alnsn compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 7) & ~0x7; 471 1.3 alnsn return SLJIT_SUCCESS; 472 1.1 alnsn } 473 1.1 alnsn 474 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw) 475 1.1 alnsn { 476 1.1 alnsn CHECK_ERROR(); 477 1.3 alnsn CHECK(check_sljit_emit_return(compiler, op, src, srcw)); 478 1.1 alnsn 479 1.2 alnsn if (op != SLJIT_MOV || !FAST_IS_REG(src)) { 480 1.1 alnsn FAIL_IF(emit_mov_before_return(compiler, op, src, srcw)); 481 1.3 alnsn src = SLJIT_R0; 482 1.1 alnsn } 483 1.1 alnsn 484 1.1 alnsn FAIL_IF(push_inst(compiler, JMPL | D(0) | S1A(31) | IMM(8), UNMOVABLE_INS)); 485 1.3 alnsn return push_inst(compiler, RESTORE | D(SLJIT_R0) | S1(src) | S2(0), UNMOVABLE_INS); 486 1.1 alnsn } 487 1.1 alnsn 488 1.1 alnsn /* --------------------------------------------------------------------- */ 489 1.1 alnsn /* Operators */ 490 1.1 alnsn /* --------------------------------------------------------------------- */ 491 1.1 alnsn 492 1.1 alnsn #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) 493 1.1 alnsn #define ARCH_32_64(a, b) a 494 1.1 alnsn #else 495 1.1 alnsn #define ARCH_32_64(a, b) b 496 1.1 alnsn #endif 497 1.1 alnsn 498 1.3 alnsn static const sljit_ins data_transfer_insts[16 + 4] = { 499 1.1 alnsn /* u w s */ ARCH_32_64(OPC1(3) | OPC3(0x04) /* stw */, OPC1(3) | OPC3(0x0e) /* stx */), 500 1.1 alnsn /* u w l */ ARCH_32_64(OPC1(3) | OPC3(0x00) /* lduw */, OPC1(3) 
| OPC3(0x0b) /* ldx */), 501 1.1 alnsn /* u b s */ OPC1(3) | OPC3(0x05) /* stb */, 502 1.1 alnsn /* u b l */ OPC1(3) | OPC3(0x01) /* ldub */, 503 1.1 alnsn /* u h s */ OPC1(3) | OPC3(0x06) /* sth */, 504 1.1 alnsn /* u h l */ OPC1(3) | OPC3(0x02) /* lduh */, 505 1.1 alnsn /* u i s */ OPC1(3) | OPC3(0x04) /* stw */, 506 1.1 alnsn /* u i l */ OPC1(3) | OPC3(0x00) /* lduw */, 507 1.1 alnsn 508 1.1 alnsn /* s w s */ ARCH_32_64(OPC1(3) | OPC3(0x04) /* stw */, OPC1(3) | OPC3(0x0e) /* stx */), 509 1.1 alnsn /* s w l */ ARCH_32_64(OPC1(3) | OPC3(0x00) /* lduw */, OPC1(3) | OPC3(0x0b) /* ldx */), 510 1.1 alnsn /* s b s */ OPC1(3) | OPC3(0x05) /* stb */, 511 1.1 alnsn /* s b l */ OPC1(3) | OPC3(0x09) /* ldsb */, 512 1.1 alnsn /* s h s */ OPC1(3) | OPC3(0x06) /* sth */, 513 1.1 alnsn /* s h l */ OPC1(3) | OPC3(0x0a) /* ldsh */, 514 1.1 alnsn /* s i s */ OPC1(3) | OPC3(0x04) /* stw */, 515 1.1 alnsn /* s i l */ ARCH_32_64(OPC1(3) | OPC3(0x00) /* lduw */, OPC1(3) | OPC3(0x08) /* ldsw */), 516 1.1 alnsn 517 1.1 alnsn /* d s */ OPC1(3) | OPC3(0x27), 518 1.1 alnsn /* d l */ OPC1(3) | OPC3(0x23), 519 1.2 alnsn /* s s */ OPC1(3) | OPC3(0x24), 520 1.2 alnsn /* s l */ OPC1(3) | OPC3(0x20), 521 1.1 alnsn }; 522 1.1 alnsn 523 1.1 alnsn #undef ARCH_32_64 524 1.1 alnsn 525 1.1 alnsn /* Can perform an operation using at most 1 instruction. */ 526 1.3 alnsn static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw) 527 1.1 alnsn { 528 1.1 alnsn SLJIT_ASSERT(arg & SLJIT_MEM); 529 1.1 alnsn 530 1.2 alnsn if (!(flags & WRITE_BACK) || !(arg & REG_MASK)) { 531 1.2 alnsn if ((!(arg & OFFS_REG_MASK) && argw <= SIMM_MAX && argw >= SIMM_MIN) 532 1.2 alnsn || ((arg & OFFS_REG_MASK) && (argw & 0x3) == 0)) { 533 1.1 alnsn /* Works for both absolute and relative addresses (immediate case). */ 534 1.1 alnsn if (SLJIT_UNLIKELY(flags & ARG_TEST)) 535 1.1 alnsn return 1; 536 1.1 alnsn FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK] 537 1.1 alnsn | ((flags & MEM_MASK) <= GPR_REG ? D(reg) : DA(reg)) 538 1.2 alnsn | S1(arg & REG_MASK) | ((arg & OFFS_REG_MASK) ? S2(OFFS_REG(arg)) : IMM(argw)), 539 1.1 alnsn ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) ? DR(reg) : MOVABLE_INS)); 540 1.1 alnsn return -1; 541 1.1 alnsn } 542 1.1 alnsn } 543 1.1 alnsn return 0; 544 1.1 alnsn } 545 1.1 alnsn 546 1.1 alnsn /* See getput_arg below. 547 1.1 alnsn Note: can_cache is called only for binary operators. Those 548 1.1 alnsn operators always use word arguments without write back. */ 549 1.3 alnsn static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw) 550 1.1 alnsn { 551 1.1 alnsn SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM)); 552 1.1 alnsn 553 1.1 alnsn /* Simple operation except for updates. */ 554 1.2 alnsn if (arg & OFFS_REG_MASK) { 555 1.1 alnsn argw &= 0x3; 556 1.1 alnsn SLJIT_ASSERT(argw); 557 1.1 alnsn next_argw &= 0x3; 558 1.2 alnsn if ((arg & OFFS_REG_MASK) == (next_arg & OFFS_REG_MASK) && argw == next_argw) 559 1.1 alnsn return 1; 560 1.1 alnsn return 0; 561 1.1 alnsn } 562 1.1 alnsn 563 1.1 alnsn if (((next_argw - argw) <= SIMM_MAX && (next_argw - argw) >= SIMM_MIN)) 564 1.1 alnsn return 1; 565 1.1 alnsn return 0; 566 1.1 alnsn } 567 1.1 alnsn 568 1.1 alnsn /* Emit the necessary instructions. See can_cache above.
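When can_cache predicted that the same base or offset will be needed again, the computed address part is kept in TMP_REG3 (tracked by compiler->cache_arg / cache_argw) so the following access can reuse it.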
*/ 569 1.3 alnsn static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw) 570 1.1 alnsn { 571 1.3 alnsn sljit_s32 base, arg2, delay_slot; 572 1.1 alnsn sljit_ins dest; 573 1.1 alnsn 574 1.1 alnsn SLJIT_ASSERT(arg & SLJIT_MEM); 575 1.1 alnsn if (!(next_arg & SLJIT_MEM)) { 576 1.1 alnsn next_arg = 0; 577 1.1 alnsn next_argw = 0; 578 1.1 alnsn } 579 1.1 alnsn 580 1.2 alnsn base = arg & REG_MASK; 581 1.2 alnsn if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) { 582 1.1 alnsn argw &= 0x3; 583 1.1 alnsn 584 1.1 alnsn /* Using the cache. */ 585 1.2 alnsn if (((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) && (argw == compiler->cache_argw)) 586 1.1 alnsn arg2 = TMP_REG3; 587 1.1 alnsn else { 588 1.2 alnsn if ((arg & OFFS_REG_MASK) == (next_arg & OFFS_REG_MASK) && argw == (next_argw & 0x3)) { 589 1.2 alnsn compiler->cache_arg = SLJIT_MEM | (arg & OFFS_REG_MASK); 590 1.1 alnsn compiler->cache_argw = argw; 591 1.1 alnsn arg2 = TMP_REG3; 592 1.1 alnsn } 593 1.2 alnsn else if ((flags & LOAD_DATA) && ((flags & MEM_MASK) <= GPR_REG) && reg != base && reg != OFFS_REG(arg)) 594 1.1 alnsn arg2 = reg; 595 1.1 alnsn else /* It must be a mov operation, so tmp1 must be free to use. */ 596 1.1 alnsn arg2 = TMP_REG1; 597 1.2 alnsn FAIL_IF(push_inst(compiler, SLL_W | D(arg2) | S1(OFFS_REG(arg)) | IMM_ARG | argw, DR(arg2))); 598 1.1 alnsn } 599 1.1 alnsn } 600 1.1 alnsn else { 601 1.1 alnsn /* Using the cache. */ 602 1.1 alnsn if ((compiler->cache_arg == SLJIT_MEM) && (argw - compiler->cache_argw) <= SIMM_MAX && (argw - compiler->cache_argw) >= SIMM_MIN) { 603 1.1 alnsn if (argw != compiler->cache_argw) { 604 1.1 alnsn FAIL_IF(push_inst(compiler, ADD | D(TMP_REG3) | S1(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3))); 605 1.1 alnsn compiler->cache_argw = argw; 606 1.1 alnsn } 607 1.1 alnsn arg2 = TMP_REG3; 608 1.1 alnsn } else { 609 1.1 alnsn if ((next_argw - argw) <= SIMM_MAX && (next_argw - argw) >= SIMM_MIN) { 610 1.1 alnsn compiler->cache_arg = SLJIT_MEM; 611 1.1 alnsn compiler->cache_argw = argw; 612 1.1 alnsn arg2 = TMP_REG3; 613 1.1 alnsn } 614 1.1 alnsn else if ((flags & LOAD_DATA) && ((flags & MEM_MASK) <= GPR_REG) && reg != base) 615 1.1 alnsn arg2 = reg; 616 1.1 alnsn else /* It must be a mov operation, so tmp1 must be free to use. */ 617 1.1 alnsn arg2 = TMP_REG1; 618 1.1 alnsn FAIL_IF(load_immediate(compiler, arg2, argw)); 619 1.1 alnsn } 620 1.1 alnsn } 621 1.1 alnsn 622 1.1 alnsn dest = ((flags & MEM_MASK) <= GPR_REG ? D(reg) : DA(reg)); 623 1.1 alnsn delay_slot = ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) ? 
DR(reg) : MOVABLE_INS; 624 1.1 alnsn if (!base) 625 1.1 alnsn return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | dest | S1(arg2) | IMM(0), delay_slot); 626 1.1 alnsn if (!(flags & WRITE_BACK)) 627 1.1 alnsn return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | dest | S1(base) | S2(arg2), delay_slot); 628 1.1 alnsn FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | dest | S1(base) | S2(arg2), delay_slot)); 629 1.1 alnsn return push_inst(compiler, ADD | D(base) | S1(base) | S2(arg2), DR(base)); 630 1.1 alnsn } 631 1.1 alnsn 632 1.3 alnsn static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw) 633 1.1 alnsn { 634 1.1 alnsn if (getput_arg_fast(compiler, flags, reg, arg, argw)) 635 1.1 alnsn return compiler->error; 636 1.1 alnsn compiler->cache_arg = 0; 637 1.1 alnsn compiler->cache_argw = 0; 638 1.1 alnsn return getput_arg(compiler, flags, reg, arg, argw, 0, 0); 639 1.1 alnsn } 640 1.1 alnsn 641 1.3 alnsn static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w) 642 1.1 alnsn { 643 1.1 alnsn if (getput_arg_fast(compiler, flags, reg, arg1, arg1w)) 644 1.1 alnsn return compiler->error; 645 1.1 alnsn return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w); 646 1.1 alnsn } 647 1.1 alnsn 648 1.3 alnsn static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags, 649 1.3 alnsn sljit_s32 dst, sljit_sw dstw, 650 1.3 alnsn sljit_s32 src1, sljit_sw src1w, 651 1.3 alnsn sljit_s32 src2, sljit_sw src2w) 652 1.1 alnsn { 653 1.1 alnsn /* arg1 goes to TMP_REG1 or src reg 654 1.1 alnsn arg2 goes to TMP_REG2, imm or src reg 655 1.1 alnsn TMP_REG3 can be used for caching 656 1.1 alnsn result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */ 657 1.3 alnsn sljit_s32 dst_r = TMP_REG2; 658 1.3 alnsn sljit_s32 src1_r; 659 1.2 alnsn sljit_sw src2_r = 0; 660 1.3 alnsn sljit_s32 sugg_src2_r = TMP_REG2; 661 1.1 alnsn 662 1.2 alnsn if (!(flags & ALT_KEEP_CACHE)) { 663 1.2 alnsn compiler->cache_arg = 0; 664 1.2 alnsn compiler->cache_argw = 0; 665 1.2 alnsn } 666 1.1 alnsn 667 1.2 alnsn if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) { 668 1.3 alnsn if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM)) 669 1.2 alnsn return SLJIT_SUCCESS; 670 1.2 alnsn } 671 1.2 alnsn else if (FAST_IS_REG(dst)) { 672 1.1 alnsn dst_r = dst; 673 1.1 alnsn flags |= REG_DEST; 674 1.3 alnsn if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32) 675 1.1 alnsn sugg_src2_r = dst_r; 676 1.1 alnsn } 677 1.1 alnsn else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1, dst, dstw)) 678 1.1 alnsn flags |= SLOW_DEST; 679 1.1 alnsn 680 1.1 alnsn if (flags & IMM_OP) { 681 1.1 alnsn if ((src2 & SLJIT_IMM) && src2w) { 682 1.1 alnsn if (src2w <= SIMM_MAX && src2w >= SIMM_MIN) { 683 1.1 alnsn flags |= SRC2_IMM; 684 1.1 alnsn src2_r = src2w; 685 1.1 alnsn } 686 1.1 alnsn } 687 1.1 alnsn if (!(flags & SRC2_IMM) && (flags & CUMULATIVE_OP) && (src1 & SLJIT_IMM) && src1w) { 688 1.1 alnsn if (src1w <= SIMM_MAX && src1w >= SIMM_MIN) { 689 1.1 alnsn flags |= SRC2_IMM; 690 1.1 alnsn src2_r = src1w; 691 1.1 alnsn 692 1.1 alnsn /* And swap arguments. */ 693 1.1 alnsn src1 = src2; 694 1.1 alnsn src1w = src2w; 695 1.1 alnsn src2 = SLJIT_IMM; 696 1.1 alnsn /* src2w = src2_r unneeded. 
*/ 697 1.1 alnsn } 698 1.1 alnsn } 699 1.1 alnsn } 700 1.1 alnsn 701 1.1 alnsn /* Source 1. */ 702 1.2 alnsn if (FAST_IS_REG(src1)) 703 1.1 alnsn src1_r = src1; 704 1.1 alnsn else if (src1 & SLJIT_IMM) { 705 1.1 alnsn if (src1w) { 706 1.1 alnsn FAIL_IF(load_immediate(compiler, TMP_REG1, src1w)); 707 1.1 alnsn src1_r = TMP_REG1; 708 1.1 alnsn } 709 1.1 alnsn else 710 1.1 alnsn src1_r = 0; 711 1.1 alnsn } 712 1.1 alnsn else { 713 1.1 alnsn if (getput_arg_fast(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w)) 714 1.1 alnsn FAIL_IF(compiler->error); 715 1.1 alnsn else 716 1.1 alnsn flags |= SLOW_SRC1; 717 1.1 alnsn src1_r = TMP_REG1; 718 1.1 alnsn } 719 1.1 alnsn 720 1.1 alnsn /* Source 2. */ 721 1.2 alnsn if (FAST_IS_REG(src2)) { 722 1.1 alnsn src2_r = src2; 723 1.1 alnsn flags |= REG2_SOURCE; 724 1.3 alnsn if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOVU_S32) 725 1.1 alnsn dst_r = src2_r; 726 1.1 alnsn } 727 1.1 alnsn else if (src2 & SLJIT_IMM) { 728 1.1 alnsn if (!(flags & SRC2_IMM)) { 729 1.2 alnsn if (src2w) { 730 1.1 alnsn FAIL_IF(load_immediate(compiler, sugg_src2_r, src2w)); 731 1.1 alnsn src2_r = sugg_src2_r; 732 1.1 alnsn } 733 1.2 alnsn else { 734 1.1 alnsn src2_r = 0; 735 1.3 alnsn if ((op >= SLJIT_MOV && op <= SLJIT_MOVU_S32) && (dst & SLJIT_MEM)) 736 1.2 alnsn dst_r = 0; 737 1.2 alnsn } 738 1.1 alnsn } 739 1.1 alnsn } 740 1.1 alnsn else { 741 1.1 alnsn if (getput_arg_fast(compiler, flags | LOAD_DATA, sugg_src2_r, src2, src2w)) 742 1.1 alnsn FAIL_IF(compiler->error); 743 1.1 alnsn else 744 1.1 alnsn flags |= SLOW_SRC2; 745 1.1 alnsn src2_r = sugg_src2_r; 746 1.1 alnsn } 747 1.1 alnsn 748 1.1 alnsn if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) { 749 1.1 alnsn SLJIT_ASSERT(src2_r == TMP_REG2); 750 1.1 alnsn if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) { 751 1.1 alnsn FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, src1, src1w)); 752 1.1 alnsn FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw)); 753 1.1 alnsn } 754 1.1 alnsn else { 755 1.1 alnsn FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, src2, src2w)); 756 1.1 alnsn FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, dst, dstw)); 757 1.1 alnsn } 758 1.1 alnsn } 759 1.1 alnsn else if (flags & SLOW_SRC1) 760 1.1 alnsn FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw)); 761 1.1 alnsn else if (flags & SLOW_SRC2) 762 1.1 alnsn FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, sugg_src2_r, src2, src2w, dst, dstw)); 763 1.1 alnsn 764 1.1 alnsn FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r)); 765 1.1 alnsn 766 1.1 alnsn if (dst & SLJIT_MEM) { 767 1.1 alnsn if (!(flags & SLOW_DEST)) { 768 1.1 alnsn getput_arg_fast(compiler, flags, dst_r, dst, dstw); 769 1.1 alnsn return compiler->error; 770 1.1 alnsn } 771 1.1 alnsn return getput_arg(compiler, flags, dst_r, dst, dstw, 0, 0); 772 1.1 alnsn } 773 1.1 alnsn 774 1.1 alnsn return SLJIT_SUCCESS; 775 1.1 alnsn } 776 1.1 alnsn 777 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op) 778 1.1 alnsn { 779 1.1 alnsn CHECK_ERROR(); 780 1.3 alnsn CHECK(check_sljit_emit_op0(compiler, op)); 781 1.1 alnsn 782 1.1 alnsn op = GET_OPCODE(op); 783 1.1 alnsn switch (op) { 784 1.1 alnsn case SLJIT_BREAKPOINT: 785 1.1 alnsn return push_inst(compiler, TA, UNMOVABLE_INS); 786 1.1 alnsn case SLJIT_NOP: 787 1.1 alnsn return push_inst(compiler, NOP, 
UNMOVABLE_INS); 788 1.3 alnsn case SLJIT_LMUL_UW: 789 1.3 alnsn case SLJIT_LMUL_SW: 790 1.1 alnsn #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) 791 1.3 alnsn FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? UMUL : SMUL) | D(SLJIT_R0) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R0))); 792 1.3 alnsn return push_inst(compiler, RDY | D(SLJIT_R1), DR(SLJIT_R1)); 793 1.1 alnsn #else 794 1.1 alnsn #error "Implementation required" 795 1.1 alnsn #endif 796 1.3 alnsn case SLJIT_DIVMOD_UW: 797 1.3 alnsn case SLJIT_DIVMOD_SW: 798 1.3 alnsn case SLJIT_DIV_UW: 799 1.3 alnsn case SLJIT_DIV_SW: 800 1.3 alnsn SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments); 801 1.1 alnsn #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) 802 1.3 alnsn if ((op | 0x2) == SLJIT_DIV_UW) 803 1.1 alnsn FAIL_IF(push_inst(compiler, WRY | S1(0), MOVABLE_INS)); 804 1.1 alnsn else { 805 1.3 alnsn FAIL_IF(push_inst(compiler, SRA | D(TMP_REG1) | S1(SLJIT_R0) | IMM(31), DR(TMP_REG1))); 806 1.1 alnsn FAIL_IF(push_inst(compiler, WRY | S1(TMP_REG1), MOVABLE_INS)); 807 1.1 alnsn } 808 1.3 alnsn if (op <= SLJIT_DIVMOD_SW) 809 1.3 alnsn FAIL_IF(push_inst(compiler, OR | D(TMP_REG2) | S1(0) | S2(SLJIT_R0), DR(TMP_REG2))); 810 1.3 alnsn FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? UDIV : SDIV) | D(SLJIT_R0) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R0))); 811 1.3 alnsn if (op >= SLJIT_DIV_UW) 812 1.3 alnsn return SLJIT_SUCCESS; 813 1.3 alnsn FAIL_IF(push_inst(compiler, SMUL | D(SLJIT_R1) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R1))); 814 1.3 alnsn return push_inst(compiler, SUB | D(SLJIT_R1) | S1(TMP_REG2) | S2(SLJIT_R1), DR(SLJIT_R1)); 815 1.1 alnsn #else 816 1.1 alnsn #error "Implementation required" 817 1.1 alnsn #endif 818 1.1 alnsn } 819 1.1 alnsn 820 1.1 alnsn return SLJIT_SUCCESS; 821 1.1 alnsn } 822 1.1 alnsn 823 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op, 824 1.3 alnsn sljit_s32 dst, sljit_sw dstw, 825 1.3 alnsn sljit_s32 src, sljit_sw srcw) 826 1.1 alnsn { 827 1.4 alnsn sljit_s32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0; 828 1.1 alnsn 829 1.1 alnsn CHECK_ERROR(); 830 1.3 alnsn CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw)); 831 1.1 alnsn ADJUST_LOCAL_OFFSET(dst, dstw); 832 1.1 alnsn ADJUST_LOCAL_OFFSET(src, srcw); 833 1.1 alnsn 834 1.1 alnsn op = GET_OPCODE(op); 835 1.1 alnsn switch (op) { 836 1.1 alnsn case SLJIT_MOV: 837 1.2 alnsn case SLJIT_MOV_P: 838 1.1 alnsn return emit_op(compiler, SLJIT_MOV, flags | WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw); 839 1.1 alnsn 840 1.3 alnsn case SLJIT_MOV_U32: 841 1.3 alnsn return emit_op(compiler, SLJIT_MOV_U32, flags | INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw); 842 1.1 alnsn 843 1.3 alnsn case SLJIT_MOV_S32: 844 1.3 alnsn return emit_op(compiler, SLJIT_MOV_S32, flags | INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, srcw); 845 1.1 alnsn 846 1.3 alnsn case SLJIT_MOV_U8: 847 1.3 alnsn return emit_op(compiler, SLJIT_MOV_U8, flags | BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw); 848 1.1 alnsn 849 1.3 alnsn case SLJIT_MOV_S8: 850 1.3 alnsn return emit_op(compiler, SLJIT_MOV_S8, flags | BYTE_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw); 851 1.1 alnsn 852 1.3 alnsn case SLJIT_MOV_U16: 853 1.3 alnsn return emit_op(compiler, SLJIT_MOV_U16, flags | HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? 
(sljit_u16)srcw : srcw); 854 1.1 alnsn 855 1.3 alnsn case SLJIT_MOV_S16: 856 1.3 alnsn return emit_op(compiler, SLJIT_MOV_S16, flags | HALF_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw); 857 1.1 alnsn 858 1.1 alnsn case SLJIT_MOVU: 859 1.2 alnsn case SLJIT_MOVU_P: 860 1.1 alnsn return emit_op(compiler, SLJIT_MOV, flags | WORD_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw); 861 1.1 alnsn 862 1.3 alnsn case SLJIT_MOVU_U32: 863 1.3 alnsn return emit_op(compiler, SLJIT_MOV_U32, flags | INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw); 864 1.1 alnsn 865 1.3 alnsn case SLJIT_MOVU_S32: 866 1.3 alnsn return emit_op(compiler, SLJIT_MOV_S32, flags | INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw); 867 1.1 alnsn 868 1.3 alnsn case SLJIT_MOVU_U8: 869 1.3 alnsn return emit_op(compiler, SLJIT_MOV_U8, flags | BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw); 870 1.1 alnsn 871 1.3 alnsn case SLJIT_MOVU_S8: 872 1.3 alnsn return emit_op(compiler, SLJIT_MOV_S8, flags | BYTE_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw); 873 1.1 alnsn 874 1.3 alnsn case SLJIT_MOVU_U16: 875 1.3 alnsn return emit_op(compiler, SLJIT_MOV_U16, flags | HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw); 876 1.1 alnsn 877 1.3 alnsn case SLJIT_MOVU_S16: 878 1.3 alnsn return emit_op(compiler, SLJIT_MOV_S16, flags | HALF_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw); 879 1.1 alnsn 880 1.1 alnsn case SLJIT_NOT: 881 1.1 alnsn case SLJIT_CLZ: 882 1.1 alnsn return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw); 883 1.1 alnsn 884 1.1 alnsn case SLJIT_NEG: 885 1.1 alnsn return emit_op(compiler, SLJIT_SUB, flags | IMM_OP, dst, dstw, SLJIT_IMM, 0, src, srcw); 886 1.1 alnsn } 887 1.1 alnsn 888 1.1 alnsn return SLJIT_SUCCESS; 889 1.1 alnsn } 890 1.1 alnsn 891 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op, 892 1.3 alnsn sljit_s32 dst, sljit_sw dstw, 893 1.3 alnsn sljit_s32 src1, sljit_sw src1w, 894 1.3 alnsn sljit_s32 src2, sljit_sw src2w) 895 1.1 alnsn { 896 1.4 alnsn sljit_s32 flags = HAS_FLAGS(op) ? 
SET_FLAGS : 0; 897 1.1 alnsn 898 1.1 alnsn CHECK_ERROR(); 899 1.3 alnsn CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w)); 900 1.1 alnsn ADJUST_LOCAL_OFFSET(dst, dstw); 901 1.1 alnsn ADJUST_LOCAL_OFFSET(src1, src1w); 902 1.1 alnsn ADJUST_LOCAL_OFFSET(src2, src2w); 903 1.1 alnsn 904 1.1 alnsn op = GET_OPCODE(op); 905 1.1 alnsn switch (op) { 906 1.1 alnsn case SLJIT_ADD: 907 1.1 alnsn case SLJIT_ADDC: 908 1.1 alnsn case SLJIT_MUL: 909 1.1 alnsn case SLJIT_AND: 910 1.1 alnsn case SLJIT_OR: 911 1.1 alnsn case SLJIT_XOR: 912 1.1 alnsn return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w); 913 1.1 alnsn 914 1.1 alnsn case SLJIT_SUB: 915 1.1 alnsn case SLJIT_SUBC: 916 1.1 alnsn return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w); 917 1.1 alnsn 918 1.1 alnsn case SLJIT_SHL: 919 1.1 alnsn case SLJIT_LSHR: 920 1.1 alnsn case SLJIT_ASHR: 921 1.1 alnsn #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32) 922 1.1 alnsn if (src2 & SLJIT_IMM) 923 1.1 alnsn src2w &= 0x1f; 924 1.1 alnsn #else 925 1.4 alnsn SLJIT_UNREACHABLE(); 926 1.1 alnsn #endif 927 1.1 alnsn return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w); 928 1.1 alnsn } 929 1.1 alnsn 930 1.1 alnsn return SLJIT_SUCCESS; 931 1.1 alnsn } 932 1.1 alnsn 933 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg) 934 1.1 alnsn { 935 1.3 alnsn CHECK_REG_INDEX(check_sljit_get_register_index(reg)); 936 1.1 alnsn return reg_map[reg]; 937 1.1 alnsn } 938 1.1 alnsn 939 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg) 940 1.2 alnsn { 941 1.3 alnsn CHECK_REG_INDEX(check_sljit_get_float_register_index(reg)); 942 1.2 alnsn return reg << 1; 943 1.2 alnsn } 944 1.2 alnsn 945 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler, 946 1.3 alnsn void *instruction, sljit_s32 size) 947 1.1 alnsn { 948 1.1 alnsn CHECK_ERROR(); 949 1.3 alnsn CHECK(check_sljit_emit_op_custom(compiler, instruction, size)); 950 1.1 alnsn 951 1.1 alnsn return push_inst(compiler, *(sljit_ins*)instruction, UNMOVABLE_INS); 952 1.1 alnsn } 953 1.1 alnsn 954 1.1 alnsn /* --------------------------------------------------------------------- */ 955 1.1 alnsn /* Floating point operators */ 956 1.1 alnsn /* --------------------------------------------------------------------- */ 957 1.1 alnsn 958 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void) 959 1.1 alnsn { 960 1.2 alnsn #ifdef SLJIT_IS_FPU_AVAILABLE 961 1.2 alnsn return SLJIT_IS_FPU_AVAILABLE; 962 1.2 alnsn #else 963 1.2 alnsn /* Available by default. */ 964 1.1 alnsn return 1; 965 1.2 alnsn #endif 966 1.1 alnsn } 967 1.1 alnsn 968 1.3 alnsn #define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 7)) 969 1.3 alnsn #define SELECT_FOP(op, single, double) ((op & SLJIT_F32_OP) ? 
single : double) 970 1.3 alnsn #define FLOAT_TMP_MEM_OFFSET (22 * sizeof(sljit_sw)) 971 1.3 alnsn 972 1.3 alnsn static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op, 973 1.3 alnsn sljit_s32 dst, sljit_sw dstw, 974 1.3 alnsn sljit_s32 src, sljit_sw srcw) 975 1.3 alnsn { 976 1.3 alnsn if (src & SLJIT_MEM) { 977 1.3 alnsn FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw)); 978 1.3 alnsn src = TMP_FREG1; 979 1.3 alnsn } 980 1.3 alnsn else 981 1.3 alnsn src <<= 1; 982 1.3 alnsn 983 1.3 alnsn FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSTOI, FDTOI) | DA(TMP_FREG1) | S2A(src), MOVABLE_INS)); 984 1.2 alnsn 985 1.3 alnsn if (dst == SLJIT_UNUSED) 986 1.3 alnsn return SLJIT_SUCCESS; 987 1.3 alnsn 988 1.3 alnsn if (FAST_IS_REG(dst)) { 989 1.3 alnsn FAIL_IF(emit_op_mem2(compiler, SINGLE_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET)); 990 1.3 alnsn return emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, dst, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET); 991 1.3 alnsn } 992 1.3 alnsn 993 1.3 alnsn /* Store the integer value from a VFP register. */ 994 1.3 alnsn return emit_op_mem2(compiler, SINGLE_DATA, TMP_FREG1, dst, dstw, 0, 0); 995 1.3 alnsn } 996 1.3 alnsn 997 1.3 alnsn static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op, 998 1.3 alnsn sljit_s32 dst, sljit_sw dstw, 999 1.3 alnsn sljit_s32 src, sljit_sw srcw) 1000 1.1 alnsn { 1001 1.3 alnsn sljit_s32 dst_r = FAST_IS_REG(dst) ? (dst << 1) : TMP_FREG1; 1002 1.3 alnsn 1003 1.3 alnsn if (src & SLJIT_IMM) { 1004 1.3 alnsn #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) 1005 1.3 alnsn if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) 1006 1.3 alnsn srcw = (sljit_s32)srcw; 1007 1.3 alnsn #endif 1008 1.3 alnsn FAIL_IF(load_immediate(compiler, TMP_REG1, srcw)); 1009 1.3 alnsn src = TMP_REG1; 1010 1.3 alnsn srcw = 0; 1011 1.3 alnsn } 1012 1.3 alnsn 1013 1.3 alnsn if (FAST_IS_REG(src)) { 1014 1.3 alnsn FAIL_IF(emit_op_mem2(compiler, WORD_DATA, src, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET)); 1015 1.3 alnsn src = SLJIT_MEM1(SLJIT_SP); 1016 1.3 alnsn srcw = FLOAT_TMP_MEM_OFFSET; 1017 1.3 alnsn } 1018 1.3 alnsn 1019 1.3 alnsn FAIL_IF(emit_op_mem2(compiler, SINGLE_DATA | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw)); 1020 1.3 alnsn FAIL_IF(push_inst(compiler, SELECT_FOP(op, FITOS, FITOD) | DA(dst_r) | S2A(TMP_FREG1), MOVABLE_INS)); 1021 1.3 alnsn 1022 1.3 alnsn if (dst & SLJIT_MEM) 1023 1.3 alnsn return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0); 1024 1.3 alnsn return SLJIT_SUCCESS; 1025 1.3 alnsn } 1026 1.3 alnsn 1027 1.3 alnsn static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op, 1028 1.3 alnsn sljit_s32 src1, sljit_sw src1w, 1029 1.3 alnsn sljit_s32 src2, sljit_sw src2w) 1030 1.3 alnsn { 1031 1.3 alnsn if (src1 & SLJIT_MEM) { 1032 1.3 alnsn FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w)); 1033 1.3 alnsn src1 = TMP_FREG1; 1034 1.3 alnsn } 1035 1.3 alnsn else 1036 1.3 alnsn src1 <<= 1; 1037 1.3 alnsn 1038 1.3 alnsn if (src2 & SLJIT_MEM) { 1039 1.3 alnsn FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, 0, 0)); 1040 1.3 alnsn src2 = TMP_FREG2; 1041 1.3 alnsn } 1042 1.3 alnsn else 1043 1.3 alnsn src2 <<= 1; 1044 1.3 
alnsn 1045 1.3 alnsn return push_inst(compiler, SELECT_FOP(op, FCMPS, FCMPD) | S1A(src1) | S2A(src2), FCC_IS_SET | MOVABLE_INS); 1046 1.3 alnsn } 1047 1.3 alnsn 1048 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op, 1049 1.3 alnsn sljit_s32 dst, sljit_sw dstw, 1050 1.3 alnsn sljit_s32 src, sljit_sw srcw) 1051 1.3 alnsn { 1052 1.3 alnsn sljit_s32 dst_r; 1053 1.1 alnsn 1054 1.1 alnsn CHECK_ERROR(); 1055 1.1 alnsn compiler->cache_arg = 0; 1056 1.1 alnsn compiler->cache_argw = 0; 1057 1.1 alnsn 1058 1.3 alnsn SLJIT_COMPILE_ASSERT((SLJIT_F32_OP == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error); 1059 1.3 alnsn SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw); 1060 1.1 alnsn 1061 1.3 alnsn if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) 1062 1.3 alnsn op ^= SLJIT_F32_OP; 1063 1.1 alnsn 1064 1.3 alnsn dst_r = FAST_IS_REG(dst) ? (dst << 1) : TMP_FREG1; 1065 1.1 alnsn 1066 1.2 alnsn if (src & SLJIT_MEM) { 1067 1.3 alnsn FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, dst_r, src, srcw, dst, dstw)); 1068 1.3 alnsn src = dst_r; 1069 1.1 alnsn } 1070 1.1 alnsn else 1071 1.1 alnsn src <<= 1; 1072 1.1 alnsn 1073 1.2 alnsn switch (GET_OPCODE(op)) { 1074 1.3 alnsn case SLJIT_MOV_F64: 1075 1.3 alnsn if (src != dst_r) { 1076 1.3 alnsn if (dst_r != TMP_FREG1) { 1077 1.3 alnsn FAIL_IF(push_inst(compiler, FMOVS | DA(dst_r) | S2A(src), MOVABLE_INS)); 1078 1.3 alnsn if (!(op & SLJIT_F32_OP)) 1079 1.3 alnsn FAIL_IF(push_inst(compiler, FMOVS | DA(dst_r | 1) | S2A(src | 1), MOVABLE_INS)); 1080 1.1 alnsn } 1081 1.3 alnsn else 1082 1.3 alnsn dst_r = src; 1083 1.3 alnsn } 1084 1.3 alnsn break; 1085 1.3 alnsn case SLJIT_NEG_F64: 1086 1.3 alnsn FAIL_IF(push_inst(compiler, FNEGS | DA(dst_r) | S2A(src), MOVABLE_INS)); 1087 1.3 alnsn if (dst_r != src && !(op & SLJIT_F32_OP)) 1088 1.3 alnsn FAIL_IF(push_inst(compiler, FMOVS | DA(dst_r | 1) | S2A(src | 1), MOVABLE_INS)); 1089 1.3 alnsn break; 1090 1.3 alnsn case SLJIT_ABS_F64: 1091 1.3 alnsn FAIL_IF(push_inst(compiler, FABSS | DA(dst_r) | S2A(src), MOVABLE_INS)); 1092 1.3 alnsn if (dst_r != src && !(op & SLJIT_F32_OP)) 1093 1.3 alnsn FAIL_IF(push_inst(compiler, FMOVS | DA(dst_r | 1) | S2A(src | 1), MOVABLE_INS)); 1094 1.3 alnsn break; 1095 1.3 alnsn case SLJIT_CONV_F64_FROM_F32: 1096 1.3 alnsn FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSTOD, FDTOS) | DA(dst_r) | S2A(src), MOVABLE_INS)); 1097 1.3 alnsn op ^= SLJIT_F32_OP; 1098 1.3 alnsn break; 1099 1.1 alnsn } 1100 1.1 alnsn 1101 1.3 alnsn if (dst & SLJIT_MEM) 1102 1.3 alnsn FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), dst_r, dst, dstw, 0, 0)); 1103 1.1 alnsn return SLJIT_SUCCESS; 1104 1.1 alnsn } 1105 1.1 alnsn 1106 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op, 1107 1.3 alnsn sljit_s32 dst, sljit_sw dstw, 1108 1.3 alnsn sljit_s32 src1, sljit_sw src1w, 1109 1.3 alnsn sljit_s32 src2, sljit_sw src2w) 1110 1.1 alnsn { 1111 1.3 alnsn sljit_s32 dst_r, flags = 0; 1112 1.1 alnsn 1113 1.1 alnsn CHECK_ERROR(); 1114 1.3 alnsn CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w)); 1115 1.3 alnsn ADJUST_LOCAL_OFFSET(dst, dstw); 1116 1.3 alnsn ADJUST_LOCAL_OFFSET(src1, src1w); 1117 1.3 alnsn ADJUST_LOCAL_OFFSET(src2, src2w); 1118 1.1 alnsn 1119 1.1 alnsn compiler->cache_arg = 0; 1120 1.1 alnsn compiler->cache_argw = 0; 1121 1.1 alnsn 1122 1.3 alnsn dst_r = FAST_IS_REG(dst) ? 
(dst << 1) : TMP_FREG2; 1123 1.1 alnsn 1124 1.2 alnsn if (src1 & SLJIT_MEM) { 1125 1.2 alnsn if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w)) { 1126 1.1 alnsn FAIL_IF(compiler->error); 1127 1.1 alnsn src1 = TMP_FREG1; 1128 1.1 alnsn } else 1129 1.1 alnsn flags |= SLOW_SRC1; 1130 1.1 alnsn } 1131 1.1 alnsn else 1132 1.1 alnsn src1 <<= 1; 1133 1.1 alnsn 1134 1.2 alnsn if (src2 & SLJIT_MEM) { 1135 1.2 alnsn if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w)) { 1136 1.1 alnsn FAIL_IF(compiler->error); 1137 1.1 alnsn src2 = TMP_FREG2; 1138 1.1 alnsn } else 1139 1.1 alnsn flags |= SLOW_SRC2; 1140 1.1 alnsn } 1141 1.1 alnsn else 1142 1.1 alnsn src2 <<= 1; 1143 1.1 alnsn 1144 1.1 alnsn if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) { 1145 1.1 alnsn if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) { 1146 1.2 alnsn FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, src1, src1w)); 1147 1.2 alnsn FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw)); 1148 1.1 alnsn } 1149 1.1 alnsn else { 1150 1.2 alnsn FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w)); 1151 1.2 alnsn FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw)); 1152 1.1 alnsn } 1153 1.1 alnsn } 1154 1.1 alnsn else if (flags & SLOW_SRC1) 1155 1.2 alnsn FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw)); 1156 1.1 alnsn else if (flags & SLOW_SRC2) 1157 1.2 alnsn FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw)); 1158 1.1 alnsn 1159 1.1 alnsn if (flags & SLOW_SRC1) 1160 1.1 alnsn src1 = TMP_FREG1; 1161 1.1 alnsn if (flags & SLOW_SRC2) 1162 1.1 alnsn src2 = TMP_FREG2; 1163 1.1 alnsn 1164 1.2 alnsn switch (GET_OPCODE(op)) { 1165 1.3 alnsn case SLJIT_ADD_F64: 1166 1.3 alnsn FAIL_IF(push_inst(compiler, SELECT_FOP(op, FADDS, FADDD) | DA(dst_r) | S1A(src1) | S2A(src2), MOVABLE_INS)); 1167 1.1 alnsn break; 1168 1.1 alnsn 1169 1.3 alnsn case SLJIT_SUB_F64: 1170 1.3 alnsn FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSUBS, FSUBD) | DA(dst_r) | S1A(src1) | S2A(src2), MOVABLE_INS)); 1171 1.1 alnsn break; 1172 1.1 alnsn 1173 1.3 alnsn case SLJIT_MUL_F64: 1174 1.3 alnsn FAIL_IF(push_inst(compiler, SELECT_FOP(op, FMULS, FMULD) | DA(dst_r) | S1A(src1) | S2A(src2), MOVABLE_INS)); 1175 1.1 alnsn break; 1176 1.1 alnsn 1177 1.3 alnsn case SLJIT_DIV_F64: 1178 1.3 alnsn FAIL_IF(push_inst(compiler, SELECT_FOP(op, FDIVS, FDIVD) | DA(dst_r) | S1A(src1) | S2A(src2), MOVABLE_INS)); 1179 1.1 alnsn break; 1180 1.1 alnsn } 1181 1.1 alnsn 1182 1.3 alnsn if (dst_r == TMP_FREG2) 1183 1.2 alnsn FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG2, dst, dstw, 0, 0)); 1184 1.1 alnsn 1185 1.1 alnsn return SLJIT_SUCCESS; 1186 1.1 alnsn } 1187 1.1 alnsn 1188 1.2 alnsn #undef FLOAT_DATA 1189 1.2 alnsn #undef SELECT_FOP 1190 1.2 alnsn 1191 1.1 alnsn /* --------------------------------------------------------------------- */ 1192 1.1 alnsn /* Other instructions */ 1193 1.1 alnsn /* --------------------------------------------------------------------- */ 1194 1.1 alnsn 1195 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw) 1196 1.1 alnsn { 1197 1.1 alnsn CHECK_ERROR(); 1198 1.3 alnsn CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw)); 1199 1.1 alnsn 
ADJUST_LOCAL_OFFSET(dst, dstw); 1200 1.1 alnsn 1201 1.2 alnsn /* For UNUSED dst. Uncommon, but possible. */ 1202 1.2 alnsn if (dst == SLJIT_UNUSED) 1203 1.2 alnsn return SLJIT_SUCCESS; 1204 1.2 alnsn 1205 1.2 alnsn if (FAST_IS_REG(dst)) 1206 1.2 alnsn return push_inst(compiler, OR | D(dst) | S1(0) | S2(TMP_LINK), DR(dst)); 1207 1.1 alnsn 1208 1.2 alnsn /* Memory. */ 1209 1.2 alnsn return emit_op_mem(compiler, WORD_DATA, TMP_LINK, dst, dstw); 1210 1.1 alnsn } 1211 1.1 alnsn 1212 1.3 alnsn SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw) 1213 1.1 alnsn { 1214 1.1 alnsn CHECK_ERROR(); 1215 1.3 alnsn CHECK(check_sljit_emit_fast_return(compiler, src, srcw)); 1216 1.1 alnsn ADJUST_LOCAL_OFFSET(src, srcw); 1217 1.1 alnsn 1218 1.2 alnsn if (FAST_IS_REG(src)) 1219 1.2 alnsn FAIL_IF(push_inst(compiler, OR | D(TMP_LINK) | S1(0) | S2(src), DR(TMP_LINK))); 1220 1.1 alnsn else if (src & SLJIT_MEM) 1221 1.2 alnsn FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_LINK, src, srcw)); 1222 1.1 alnsn else if (src & SLJIT_IMM) 1223 1.2 alnsn FAIL_IF(load_immediate(compiler, TMP_LINK, srcw)); 1224 1.1 alnsn 1225 1.2 alnsn FAIL_IF(push_inst(compiler, JMPL | D(0) | S1(TMP_LINK) | IMM(8), UNMOVABLE_INS)); 1226 1.1 alnsn return push_inst(compiler, NOP, UNMOVABLE_INS); 1227 1.1 alnsn } 1228 1.1 alnsn 1229 1.1 alnsn /* --------------------------------------------------------------------- */ 1230 1.1 alnsn /* Conditional instructions */ 1231 1.1 alnsn /* --------------------------------------------------------------------- */ 1232 1.1 alnsn 1233 1.1 alnsn SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler) 1234 1.1 alnsn { 1235 1.1 alnsn struct sljit_label *label; 1236 1.1 alnsn 1237 1.1 alnsn CHECK_ERROR_PTR(); 1238 1.3 alnsn CHECK_PTR(check_sljit_emit_label(compiler)); 1239 1.1 alnsn 1240 1.1 alnsn if (compiler->last_label && compiler->last_label->size == compiler->size) 1241 1.1 alnsn return compiler->last_label; 1242 1.1 alnsn 1243 1.1 alnsn label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label)); 1244 1.1 alnsn PTR_FAIL_IF(!label); 1245 1.1 alnsn set_label(label, compiler); 1246 1.1 alnsn compiler->delay_slot = UNMOVABLE_INS; 1247 1.1 alnsn return label; 1248 1.1 alnsn } 1249 1.1 alnsn 1250 1.3 alnsn static sljit_ins get_cc(sljit_s32 type) 1251 1.1 alnsn { 1252 1.1 alnsn switch (type) { 1253 1.3 alnsn case SLJIT_EQUAL: 1254 1.3 alnsn case SLJIT_MUL_NOT_OVERFLOW: 1255 1.3 alnsn case SLJIT_NOT_EQUAL_F64: /* Unordered. */ 1256 1.1 alnsn return DA(0x1); 1257 1.1 alnsn 1258 1.3 alnsn case SLJIT_NOT_EQUAL: 1259 1.3 alnsn case SLJIT_MUL_OVERFLOW: 1260 1.3 alnsn case SLJIT_EQUAL_F64: 1261 1.1 alnsn return DA(0x9); 1262 1.1 alnsn 1263 1.3 alnsn case SLJIT_LESS: 1264 1.3 alnsn case SLJIT_GREATER_F64: /* Unordered. */ 1265 1.1 alnsn return DA(0x5); 1266 1.1 alnsn 1267 1.3 alnsn case SLJIT_GREATER_EQUAL: 1268 1.3 alnsn case SLJIT_LESS_EQUAL_F64: 1269 1.1 alnsn return DA(0xd); 1270 1.1 alnsn 1271 1.3 alnsn case SLJIT_GREATER: 1272 1.3 alnsn case SLJIT_GREATER_EQUAL_F64: /* Unordered. 
		return DA(0xc);

	case SLJIT_LESS_EQUAL:
	case SLJIT_LESS_F64:
		return DA(0x4);

	case SLJIT_SIG_LESS:
		return DA(0x3);

	case SLJIT_SIG_GREATER_EQUAL:
		return DA(0xb);

	case SLJIT_SIG_GREATER:
		return DA(0xa);

	case SLJIT_SIG_LESS_EQUAL:
		return DA(0x2);

	case SLJIT_OVERFLOW:
	case SLJIT_UNORDERED_F64:
		return DA(0x7);

	case SLJIT_NOT_OVERFLOW:
	case SLJIT_ORDERED_F64:
		return DA(0xf);

	default:
		SLJIT_UNREACHABLE();
		return DA(0x8);
	}
}

SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
	struct sljit_jump *jump;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_jump(compiler, type));

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
	type &= 0xff;

	if (type < SLJIT_EQUAL_F64) {
		jump->flags |= IS_COND;
		if (((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS) && !(compiler->delay_slot & ICC_IS_SET))
			jump->flags |= IS_MOVABLE;
#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
		PTR_FAIL_IF(push_inst(compiler, BICC | get_cc(type ^ 1) | 5, UNMOVABLE_INS));
#else
#error "Implementation required"
#endif
	}
	else if (type < SLJIT_JUMP) {
		jump->flags |= IS_COND;
		if (((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS) && !(compiler->delay_slot & FCC_IS_SET))
			jump->flags |= IS_MOVABLE;
#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
		PTR_FAIL_IF(push_inst(compiler, FBFCC | get_cc(type ^ 1) | 5, UNMOVABLE_INS));
#else
#error "Implementation required"
#endif
	} else {
		if ((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS)
			jump->flags |= IS_MOVABLE;
		if (type >= SLJIT_FAST_CALL)
			jump->flags |= IS_CALL;
	}

	PTR_FAIL_IF(emit_const(compiler, TMP_REG2, 0));
	PTR_FAIL_IF(push_inst(compiler, JMPL | D(type >= SLJIT_FAST_CALL ? TMP_LINK : 0) | S1(TMP_REG2) | IMM(0), UNMOVABLE_INS));
	jump->addr = compiler->size;
	PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

	return jump;
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
	struct sljit_jump *jump = NULL;
	sljit_s32 src_r;

	CHECK_ERROR();
	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	if (FAST_IS_REG(src))
		src_r = src;
	else if (src & SLJIT_IMM) {
		jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
		FAIL_IF(!jump);
		set_jump(jump, compiler, JUMP_ADDR);
		jump->u.target = srcw;
		if ((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS)
			jump->flags |= IS_MOVABLE;
		if (type >= SLJIT_FAST_CALL)
			jump->flags |= IS_CALL;

		FAIL_IF(emit_const(compiler, TMP_REG2, 0));
		src_r = TMP_REG2;
	}
	else {
		FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG2, src, srcw));
		src_r = TMP_REG2;
	}

	FAIL_IF(push_inst(compiler, JMPL | D(type >= SLJIT_FAST_CALL ? TMP_LINK : 0) | S1(src_r) | IMM(0), UNMOVABLE_INS));
	if (jump)
		jump->addr = compiler->size;
	return push_inst(compiler, NOP, UNMOVABLE_INS);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw,
	sljit_s32 type)
{
	sljit_s32 reg, flags = HAS_FLAGS(op) ? SET_FLAGS : 0;

	CHECK_ERROR();
	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	if (dst == SLJIT_UNUSED)
		return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
	op = GET_OPCODE(op);
	reg = (op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2;

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;
	if (op >= SLJIT_ADD && (src & SLJIT_MEM)) {
		ADJUST_LOCAL_OFFSET(src, srcw);
		FAIL_IF(emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw));
		src = TMP_REG1;
		srcw = 0;
	}

	type &= 0xff;
	if (type < SLJIT_EQUAL_F64)
		FAIL_IF(push_inst(compiler, BICC | get_cc(type) | 3, UNMOVABLE_INS));
	else
		FAIL_IF(push_inst(compiler, FBFCC | get_cc(type) | 3, UNMOVABLE_INS));

	FAIL_IF(push_inst(compiler, OR | D(reg) | S1(0) | IMM(1), UNMOVABLE_INS));
	FAIL_IF(push_inst(compiler, OR | D(reg) | S1(0) | IMM(0), UNMOVABLE_INS));

	if (op >= SLJIT_ADD)
		return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP | ALT_KEEP_CACHE, dst, dstw, src, srcw, TMP_REG2, 0);

	return (reg == TMP_REG2) ? emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw) : SLJIT_SUCCESS;
#else
#error "Implementation required"
#endif
}

SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
	sljit_s32 reg;
	struct sljit_const *const_;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
	PTR_FAIL_IF(!const_);
	set_const(const_, compiler);

	reg = SLOW_IS_REG(dst) ? dst : TMP_REG2;

	PTR_FAIL_IF(emit_const(compiler, reg, init_value));

	if (dst & SLJIT_MEM)
		PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw));
	return const_;
}
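
/*
 * A minimal usage sketch, kept disabled: assuming the usual public sljit
 * entry points from sljitLir (sljit_set_label() in particular), this shows
 * how the label and conditional-jump emitters above are typically combined
 * to build a backward branch.  Illustrative only, not part of the backend;
 * the helper name below is hypothetical.
 */
#if 0
static void example_backward_branch(struct sljit_compiler *compiler)
{
	struct sljit_label *loop_head;
	struct sljit_jump *branch;

	/* Mark the top of the loop. */
	loop_head = sljit_emit_label(compiler);

	/* ... emit the loop body and an operation that sets the integer
	   condition codes here ... */

	/* Conditional branch back to the label; see sljit_emit_jump()
	   above for the instruction sequence emitted on SPARC-32. */
	branch = sljit_emit_jump(compiler, SLJIT_NOT_EQUAL);
	sljit_set_label(branch, loop_head);
}
#endif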