/[pcre]/code/trunk/sljit/sljitNativeMIPS_common.c
ViewVC logotype

Contents of /code/trunk/sljit/sljitNativeMIPS_common.c

Parent Directory Parent Directory | Revision Log Revision Log


Revision 714 - (show annotations)
Wed Sep 28 17:40:47 2011 UTC (8 years, 6 months ago) by zherczeg
File MIME type: text/plain
File size: 56093 byte(s)
JIT compiler update: MIPS III support
1 /*
2 * Stack-less Just-In-Time compiler
3 *
4 * Copyright 2009-2010 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
5 *
6 * Redistribution and use in source and binary forms, with or without modification, are
7 * permitted provided that the following conditions are met:
8 *
9 * 1. Redistributions of source code must retain the above copyright notice, this list of
10 * conditions and the following disclaimer.
11 *
12 * 2. Redistributions in binary form must reproduce the above copyright notice, this list
13 * of conditions and the following disclaimer in the documentation and/or other materials
14 * provided with the distribution.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
17 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
19 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
22 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
24 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25 */
26
27 SLJIT_CONST char* sljit_get_platform_name()
28 {
29 #if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
30 return "mips-32";
31 #else
32 #error "mips-64 is not yet supported"
33 #endif
34 }
35
/* Latest MIPS architecture. */
/* Detect SLJIT_MIPS_32_64 */

/* Length of an instruction word
   Both for mips-32 and mips-64 */
typedef sljit_ui sljit_ins;

/* Compiler-internal scratch registers, mapped right after the user
   visible registers in reg_map below. */
#define TMP_REG1 (SLJIT_NO_REGISTERS + 1)
#define TMP_REG2 (SLJIT_NO_REGISTERS + 2)
#define TMP_REG3 (SLJIT_NO_REGISTERS + 3)
/* The hardware stack pointer. */
#define REAL_STACK_PTR (SLJIT_NO_REGISTERS + 4)

/* For position independent code, t9 must contain the function address. */
#define PIC_ADDR_REG TMP_REG2

/* TMP_EREG1 is used mainly for literal encoding on 64 bit. */
#define TMP_EREG1 15
#define TMP_EREG2 24
/* Floating point status register. */
#define FCSR_REG 31
/* Return address register. */
#define RETURN_ADDR_REG 31

/* Flags are kept in volatile registers. */
#define EQUAL_FLAG 7
/* And carry flag as well. */
#define ULESS_FLAG 10
#define UGREATER_FLAG 11
#define LESS_FLAG 12
#define GREATER_FLAG 13
#define OVERFLOW_FLAG 14

/* Scratch floating point registers, after the user visible ones. */
#define TMP_FREG1 (SLJIT_FLOAT_REG4 + 1)
#define TMP_FREG2 (SLJIT_FLOAT_REG4 + 2)

/* --------------------------------------------------------------------- */
/*  Instruction forms                                                    */
/* --------------------------------------------------------------------- */

/* Register field encodings: rs is bits 25-21, rt is bits 20-16 and
   rd is bits 15-11. The S/T/D forms translate a virtual register
   number through reg_map; the SA/TA/DA forms take the hardware
   register number directly. */
#define S(s) (reg_map[s] << 21)
#define T(t) (reg_map[t] << 16)
#define D(d) (reg_map[d] << 11)
/* Absolute registers. */
#define SA(s) ((s) << 21)
#define TA(t) ((t) << 16)
#define DA(d) ((d) << 11)
/* FPU register fields; the "+ 1" shift skips the low bit, i.e. only
   even (double precision pair) registers are addressed. */
#define FT(t) ((t) << (16 + 1))
#define FS(s) ((s) << (11 + 1))
#define FD(d) ((d) << (6 + 1))
/* 16 bit immediate field (low half word of the instruction). */
#define IMM(imm) ((imm) & 0xffff)
/* 5 bit shift amount field (bits 10-6). */
#define SH_IMM(imm) ((imm & 0x1f) << 6)

#define DR(dr) (reg_map[dr])
/* Major opcode (bits 31-26) and function code (bits 5-0). */
#define HI(opcode) ((opcode) << 26)
#define LO(opcode) (opcode)
/* Double precision format field of COP1 (FPU) instructions. */
#define FMT_D (17 << 21)

/* Instruction opcodes, named after the MIPS assembler mnemonics. */
#define ABS_D (HI(17) | FMT_D | LO(5))
#define ADD_D (HI(17) | FMT_D | LO(0))
#define ADDU (HI(0) | LO(33))
#define ADDIU (HI(9))
#define AND (HI(0) | LO(36))
#define ANDI (HI(12))
#define B (HI(4))
#define BC1F (HI(17) | (8 << 21))
#define BC1T (HI(17) | (8 << 21) | (1 << 16))
#define BEQ (HI(4))
#define BGEZ (HI(1) | (1 << 16))
#define BGTZ (HI(7))
#define BLEZ (HI(6))
#define BLTZ (HI(1) | (0 << 16))
#define BNE (HI(5))
#define BREAK (HI(0) | LO(13))
#define C_UN_D (HI(17) | FMT_D | LO(49))
#define C_UEQ_D (HI(17) | FMT_D | LO(51))
#define C_ULT_D (HI(17) | FMT_D | LO(53))
#define DIV_D (HI(17) | FMT_D | LO(3))
#define J (HI(2))
#define JALR (HI(0) | LO(9))
#define JR (HI(0) | LO(8))
#define LD (HI(55))
#define LDC1 (HI(53))
#define LUI (HI(15))
#define LW (HI(35))
#define NEG_D (HI(17) | FMT_D | LO(7))
#define MFHI (HI(0) | LO(16))
#define MFLO (HI(0) | LO(18))
#define MOV_D (HI(17) | FMT_D | LO(6))
#define CFC1 (HI(17) | (2 << 21))
#define MOVN (HI(0) | LO(11))
#define MOVZ (HI(0) | LO(10))
#define MUL_D (HI(17) | FMT_D | LO(2))
#define MULT (HI(0) | LO(24))
#define NOP (HI(0) | LO(0))
#define NOR (HI(0) | LO(39))
#define OR (HI(0) | LO(37))
#define ORI (HI(13))
#define SD (HI(63))
#define SDC1 (HI(61))
#define SLT (HI(0) | LO(42))
#define SLTI (HI(10))
#define SLTIU (HI(11))
#define SLTU (HI(0) | LO(43))
#define SLL (HI(0) | LO(0))
#define SLLV (HI(0) | LO(4))
#define SRL (HI(0) | LO(2))
#define SRLV (HI(0) | LO(6))
#define SRA (HI(0) | LO(3))
#define SRAV (HI(0) | LO(7))
#define SUB_D (HI(17) | FMT_D | LO(1))
#define SUBU (HI(0) | LO(35))
#define SW (HI(43))
#define XOR (HI(0) | LO(38))
#define XORI (HI(14))

/* Instructions only available on MIPS32/MIPS64 (SLJIT_MIPS_32_64). */
#if (defined SLJIT_MIPS_32_64 && SLJIT_MIPS_32_64)
#define CLZ (HI(28) | LO(32))
#define MUL (HI(28) | LO(2))
#define SEB (HI(31) | (16 << 6) | LO(32))
#define SEH (HI(31) | (24 << 6) | LO(32))
#endif

/* Machine word sized variants: 32 bit forms on mips-32, doubleword
   (D-prefixed) forms on mips-64. */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define ADDU_W ADDU
#define ADDIU_W ADDIU
#define SLL_W SLL
#define SUBU_W SUBU
#else
#define ADDU_W DADDU
#define ADDIU_W DADDIU
#define SLL_W DSLL
#define SUBU_W DSUBU
#endif

/* Range of the 16 bit signed and unsigned immediate fields. */
#define SIMM_MAX (0x7fff)
#define SIMM_MIN (-0x8000)
#define UIMM_MAX (0xffff)
/* Virtual -> hardware register number mapping. Index 0 is the unused
   register. The tail entries cover the scratch registers defined above:
   TMP_REG2 maps to $25 (t9, see PIC_ADDR_REG) and REAL_STACK_PTR maps
   to $29 (the hardware stack pointer).
   NOTE(review): this assumes SLJIT_NO_REGISTERS + 1..4 index the last
   four initialized entries (4, 25, 9, 29) - confirm against sljitLir.h. */
static SLJIT_CONST sljit_ub reg_map[SLJIT_NO_REGISTERS + 6] = {
  0, 2, 5, 6, 3, 8, 17, 18, 19, 20, 21, 16, 4, 25, 9, 29
};
177
178 /* dest_reg is the absolute name of the register
179 Useful for reordering instructions in the delay slot. */
180 static int push_inst(struct sljit_compiler *compiler, sljit_ins ins, int delay_slot)
181 {
182 sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
183 FAIL_IF(!ptr);
184 *ptr = ins;
185 compiler->size++;
186 compiler->delay_slot = delay_slot;
187 return SLJIT_SUCCESS;
188 }
189
190 static SLJIT_INLINE sljit_ins invert_branch(int flags)
191 {
192 return (flags & IS_BIT26_COND) ? (1 << 26) : (1 << 16);
193 }
194
/* Tries to shorten a jump whose target is already known: the long
   absolute-address sequence can be replaced by a PC relative branch (B*,
   +/-128KB range) or a 256MB-region jump (J) when the target is in range.
   Returns the new end of the emitted code; the caller reclaims the unused
   tail words. Rewritable jumps and calls (JAL) must keep the long form. */
static SLJIT_INLINE sljit_ins* optimize_jump(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
{
	sljit_w diff;
	sljit_uw target_addr;
	sljit_ins *inst;
	sljit_ins saved_inst;

	if (jump->flags & (SLJIT_REWRITABLE_JUMP | IS_JAL))
		return code_ptr;

	if (jump->flags & JUMP_ADDR)
		target_addr = jump->u.target;
	else {
		SLJIT_ASSERT(jump->flags & JUMP_LABEL);
		target_addr = (sljit_uw)(code + jump->u.label->size);
	}
	/* For conditional jumps jump->addr points after the condition test,
	   so step back one word to reach the branch itself. */
	inst = (sljit_ins*)jump->addr;
	if (jump->flags & IS_COND)
		inst--;

	/* B instructions. */
	if (jump->flags & IS_MOVABLE) {
		/* The preceding instruction can be moved into the delay slot,
		   so the branch replaces its old position. */
		diff = ((sljit_w)target_addr - (sljit_w)(inst)) >> 2;
		if (diff <= SIMM_MAX && diff >= SIMM_MIN) {
			jump->flags |= PATCH_B;

			if (!(jump->flags & IS_COND)) {
				inst[0] = inst[-1];
				inst[-1] = B;
				jump->addr -= sizeof(sljit_ins);
				return inst;
			}
			/* Conditional: swap the branch with the moved instruction
			   and invert the condition, since the branch now skips the
			   remainder of the long sequence itself. */
			saved_inst = inst[0];
			inst[0] = inst[-1];
			inst[-1] = saved_inst ^ invert_branch(jump->flags);
			jump->addr -= 2 * sizeof(sljit_ins);
			return inst;
		}
	}

	/* Non-movable case: branch in place, NOP in the delay slot. */
	diff = ((sljit_w)target_addr - (sljit_w)(inst + 1)) >> 2;
	if (diff <= SIMM_MAX && diff >= SIMM_MIN) {
		jump->flags |= PATCH_B;

		if (!(jump->flags & IS_COND)) {
			inst[0] = B;
			inst[1] = NOP;
			return inst + 1;
		}
		inst[0] = inst[0] ^ invert_branch(jump->flags);
		inst[1] = NOP;
		jump->addr -= sizeof(sljit_ins);
		return inst + 1;
	}

	if (jump->flags & IS_COND) {
		/* Conditional far jump: keep the (inverted offset) branch which
		   now skips 3 words, followed by a J to the real target. */
		if ((target_addr & ~0xfffffff) == ((jump->addr + 3 * sizeof(sljit_ins)) & ~0xfffffff)) {
			jump->flags |= PATCH_J;
			inst[0] = (inst[0] & 0xffff0000) | 3;
			inst[1] = NOP;
			inst[2] = J;
			inst[3] = NOP;
			jump->addr += sizeof(sljit_ins);
			return inst + 3;
		}
		return code_ptr;
	}

	/* J instructions: target must lie in the same 256MB region as the
	   instruction following the delay slot. */
	if (jump->flags & IS_MOVABLE) {
		if ((target_addr & ~0xfffffff) == (jump->addr & ~0xfffffff)) {
			jump->flags |= PATCH_J;
			inst[0] = inst[-1];
			inst[-1] = J;
			jump->addr -= sizeof(sljit_ins);
			return inst;
		}
	}

	if ((target_addr & ~0xfffffff) == ((jump->addr + sizeof(sljit_ins)) & ~0xfffffff)) {
		jump->flags |= PATCH_J;
		inst[0] = J;
		inst[1] = NOP;
		return inst + 1;
	}

	return code_ptr;
}
283
#ifdef __GNUC__
/* Out-of-line (noinline) wrapper around the cache flush. Used instead of
   expanding SLJIT_CACHE_FLUSH at the call site to work around invalid
   code generation with gcc -O2 (see the call in sljit_generate_code). */
static __attribute__ ((noinline)) void sljit_cache_flush(void* code, void* code_ptr)
{
	SLJIT_CACHE_FLUSH(code, code_ptr);
}
#endif
290
/* Second pass of the compiler: copies the buffered instruction words into
   executable memory, records label/const addresses, shortens jumps where
   possible, patches all jump/immediate-load fields, and finally flushes
   the instruction cache. Returns the executable code or NULL on error. */
void* sljit_generate_code(struct sljit_compiler *compiler)
{
	struct sljit_memory_fragment *buf;
	sljit_ins *code;
	sljit_ins *code_ptr;
	sljit_ins *buf_ptr;
	sljit_ins *buf_end;
	sljit_uw word_count;
	sljit_uw addr;

	struct sljit_label *label;
	struct sljit_jump *jump;
	struct sljit_const *const_;

	CHECK_ERROR_PTR();
	check_sljit_generate_code(compiler);
	reverse_buf(compiler);

	code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins));
	PTR_FAIL_WITH_EXEC_IF(code);
	buf = compiler->buf;

	/* Phase 1: copy every buffer fragment, resolving labels, consts and
	   shortening jumps on the fly. word_count tracks the position in the
	   original (pre-optimization) instruction stream. */
	code_ptr = code;
	word_count = 0;
	label = compiler->labels;
	jump = compiler->jumps;
	const_ = compiler->consts;
	do {
		buf_ptr = (sljit_ins*)buf->memory;
		buf_end = buf_ptr + (buf->used_size >> 2);
		do {
			*code_ptr = *buf_ptr++;
			SLJIT_ASSERT(!label || label->size >= word_count);
			SLJIT_ASSERT(!jump || jump->addr >= word_count);
			SLJIT_ASSERT(!const_ || const_->addr >= word_count);
			/* These structures are ordered by their address. */
			if (label && label->size == word_count) {
				/* Just recording the address. */
				label->addr = (sljit_uw)code_ptr;
				label->size = code_ptr - code;
				label = label->next;
			}
			if (jump && jump->addr == word_count) {
				/* Rewind jump->addr to the start of the emitted long
				   jump sequence - presumably 3 words on mips-32 and
				   6 on mips-64 (immediate load + jump + delay slot);
				   confirm against sljitNativeMIPS_32/64.c. */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
				jump->addr = (sljit_uw)(code_ptr - 3);
#else
				jump->addr = (sljit_uw)(code_ptr - 6);
#endif
				code_ptr = optimize_jump(jump, code_ptr, code);
				jump = jump->next;
			}
			if (const_ && const_->addr == word_count) {
				/* Just recording the address. */
				const_->addr = (sljit_uw)code_ptr;
				const_ = const_->next;
			}
			code_ptr ++;
			word_count ++;
		} while (buf_ptr < buf_end);

		buf = buf->next;
	} while (buf);

	/* A label may point exactly at the end of the generated code. */
	if (label && label->size == word_count) {
		label->addr = (sljit_uw)code_ptr;
		label->size = code_ptr - code;
		label = label->next;
	}

	SLJIT_ASSERT(!label);
	SLJIT_ASSERT(!jump);
	SLJIT_ASSERT(!const_);
	SLJIT_ASSERT(code_ptr - code <= (int)compiler->size);

	/* Phase 2: now that every target address is known, patch the branch
	   offsets, jump targets and immediate loads. */
	jump = compiler->jumps;
	while (jump) {
		do {
			addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
			buf_ptr = (sljit_ins*)jump->addr;

			if (jump->flags & PATCH_B) {
				/* PC relative branch: 16 bit signed word offset from
				   the instruction after the branch. */
				addr = (sljit_w)(addr - (jump->addr + sizeof(sljit_ins))) >> 2;
				SLJIT_ASSERT((sljit_w)addr <= SIMM_MAX && (sljit_w)addr >= SIMM_MIN);
				buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | (addr & 0xffff);
				break;
			}
			if (jump->flags & PATCH_J) {
				/* J-type jump: 26 bit word offset within a 256MB region. */
				SLJIT_ASSERT((addr & ~0xfffffff) == ((jump->addr + sizeof(sljit_ins)) & ~0xfffffff));
				buf_ptr[0] |= (addr >> 2) & 0x03ffffff;
				break;
			}

			/* Set the fields of immediate loads. */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
			buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 16) & 0xffff);
			buf_ptr[1] = (buf_ptr[1] & 0xffff0000) | (addr & 0xffff);
#else
			buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 48) & 0xffff);
			buf_ptr[1] = (buf_ptr[1] & 0xffff0000) | ((addr >> 32) & 0xffff);
			buf_ptr[3] = (buf_ptr[3] & 0xffff0000) | ((addr >> 16) & 0xffff);
			buf_ptr[4] = (buf_ptr[4] & 0xffff0000) | (addr & 0xffff);
#endif
		} while (0);
		jump = jump->next;
	}

	compiler->error = SLJIT_ERR_COMPILED;
#ifndef __GNUC__
	SLJIT_CACHE_FLUSH(code, code_ptr);
#else
	/* GCC workaround for invalid code generation with -O2. */
	sljit_cache_flush(code, code_ptr);
#endif
	return code;
}
406
/* Creates an index in data_transfer_insts array. */
#define WORD_DATA 0x00
#define BYTE_DATA 0x01
#define HALF_DATA 0x02
#define INT_DATA 0x03
#define SIGNED_DATA 0x04
/* Set for loads, clear for stores. */
#define LOAD_DATA 0x08

/* Mask of the index bits above. */
#define MEM_MASK 0x0f

/* Internal flags passed between emit_op and its helpers. */
#define WRITE_BACK 0x00010
#define ARG_TEST 0x00020
#define CUMULATIVE_OP 0x00040
#define LOGICAL_OP 0x00080
#define IMM_OP 0x00100
#define SRC2_IMM 0x00200

#define UNUSED_DEST 0x00400
#define REG_DEST 0x00800
#define REG1_SOURCE 0x01000
#define REG2_SOURCE 0x02000
#define SLOW_SRC1 0x04000
#define SLOW_SRC2 0x08000
#define SLOW_DEST 0x10000

/* Only these flags are set. UNUSED_DEST is not set when no flags should be set. */
#define CHECK_FLAGS(list) \
	(!(flags & UNUSED_DEST) || (op & GET_FLAGS(~(list))))

/* Word size specific helpers (load_immediate, emit_single_op, ...). */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#include "sljitNativeMIPS_32.c"
#else
#include "sljitNativeMIPS_64.c"
#endif

/* Machine word sized store/load, used to save/restore registers. */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define STACK_STORE SW
#define STACK_LOAD LW
#else
#define STACK_STORE SD
#define STACK_LOAD LD
#endif

/* Forward declaration of the generic operation emitter defined below. */
static int emit_op(struct sljit_compiler *compiler, int op, int inp_flags,
	int dst, sljit_w dstw,
	int src1, sljit_w src1w,
	int src2, sljit_w src2w);
454
/* Emits the function prologue: allocates the stack frame, saves the
   return address, the locals pointer and the used general (callee saved)
   registers, then moves the incoming arguments into general registers. */
int sljit_emit_enter(struct sljit_compiler *compiler, int args, int temporaries, int generals, int local_size)
{
	sljit_ins base;

	CHECK_ERROR();
	check_sljit_emit_enter(compiler, args, temporaries, generals, local_size);

	compiler->temporaries = temporaries;
	compiler->generals = generals;

	compiler->has_locals = local_size > 0;
	/* Frame: locals + saved generals + 2 words (ra, locals pointer)
	   + 4 words (presumably the argument save area required by the
	   MIPS calling convention - confirm), 16 byte aligned. */
	local_size += (generals + 2 + 4) * sizeof(sljit_w);
	local_size = (local_size + 15) & ~0xf;
	compiler->local_size = local_size;

	if (local_size <= SIMM_MAX) {
		/* Frequent case. */
		FAIL_IF(push_inst(compiler, ADDIU_W | S(REAL_STACK_PTR) | T(REAL_STACK_PTR) | IMM(-local_size), DR(REAL_STACK_PTR)));
		base = S(REAL_STACK_PTR);
	}
	else {
		/* Large frame: copy the old stack pointer into TMP_REG2 and
		   address the save area relative to it (offsets become <= 0). */
		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), local_size));
		FAIL_IF(push_inst(compiler, ADDU_W | S(REAL_STACK_PTR) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
		FAIL_IF(push_inst(compiler, SUBU_W | S(REAL_STACK_PTR) | T(TMP_REG1) | D(REAL_STACK_PTR), DR(REAL_STACK_PTR)));
		base = S(TMP_REG2);
		local_size = 0;
	}

	/* Save area, from the top of the frame downwards: ra, locals
	   pointer, then the general registers. */
	FAIL_IF(push_inst(compiler, STACK_STORE | base | TA(RETURN_ADDR_REG) | IMM(local_size - 1 * (int)sizeof(sljit_w)), MOVABLE_INS));
	if (compiler->has_locals)
		FAIL_IF(push_inst(compiler, STACK_STORE | base | T(SLJIT_LOCALS_REG) | IMM(local_size - 2 * (int)sizeof(sljit_w)), MOVABLE_INS));
	if (generals >= 1)
		FAIL_IF(push_inst(compiler, STACK_STORE | base | T(SLJIT_GENERAL_REG1) | IMM(local_size - 3 * (int)sizeof(sljit_w)), MOVABLE_INS));
	if (generals >= 2)
		FAIL_IF(push_inst(compiler, STACK_STORE | base | T(SLJIT_GENERAL_REG2) | IMM(local_size - 4 * (int)sizeof(sljit_w)), MOVABLE_INS));
	if (generals >= 3)
		FAIL_IF(push_inst(compiler, STACK_STORE | base | T(SLJIT_GENERAL_REG3) | IMM(local_size - 5 * (int)sizeof(sljit_w)), MOVABLE_INS));
	if (generals >= 4)
		FAIL_IF(push_inst(compiler, STACK_STORE | base | T(SLJIT_GENERAL_EREG1) | IMM(local_size - 6 * (int)sizeof(sljit_w)), MOVABLE_INS));
	if (generals >= 5)
		FAIL_IF(push_inst(compiler, STACK_STORE | base | T(SLJIT_GENERAL_EREG2) | IMM(local_size - 7 * (int)sizeof(sljit_w)), MOVABLE_INS));

	/* The locals area starts 4 words above the new stack pointer. */
	if (compiler->has_locals)
		FAIL_IF(push_inst(compiler, ADDIU_W | S(REAL_STACK_PTR) | T(SLJIT_LOCALS_REG) | IMM(4 * sizeof(sljit_w)), DR(SLJIT_LOCALS_REG)));

	/* Move the incoming arguments ($4-$6, i.e. a0-a2) into place. */
	if (args >= 1)
		FAIL_IF(push_inst(compiler, ADDU_W | SA(4) | TA(0) | D(SLJIT_GENERAL_REG1), DR(SLJIT_GENERAL_REG1)));
	if (args >= 2)
		FAIL_IF(push_inst(compiler, ADDU_W | SA(5) | TA(0) | D(SLJIT_GENERAL_REG2), DR(SLJIT_GENERAL_REG2)));
	if (args >= 3)
		FAIL_IF(push_inst(compiler, ADDU_W | SA(6) | TA(0) | D(SLJIT_GENERAL_REG3), DR(SLJIT_GENERAL_REG3)));

	return SLJIT_SUCCESS;
}
509
510 void sljit_fake_enter(struct sljit_compiler *compiler, int args, int temporaries, int generals, int local_size)
511 {
512 CHECK_ERROR_VOID();
513 check_sljit_fake_enter(compiler, args, temporaries, generals, local_size);
514
515 compiler->temporaries = temporaries;
516 compiler->generals = generals;
517
518 compiler->has_locals = local_size > 0;
519 local_size += (generals + 2 + 4) * sizeof(sljit_w);
520 compiler->local_size = (local_size + 15) & ~0xf;
521 }
522
/* Emits the function epilogue: optionally moves src into the return
   register, restores the saved registers (in reverse order of the
   prologue) and returns; the stack frame is released by the instruction
   placed in the JR delay slot. */
int sljit_emit_return(struct sljit_compiler *compiler, int src, sljit_w srcw)
{
	int local_size;
	sljit_ins base;

	CHECK_ERROR();
	check_sljit_emit_return(compiler, src, srcw);

	local_size = compiler->local_size;

	if (src != SLJIT_UNUSED && src != SLJIT_RETURN_REG)
		FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, SLJIT_RETURN_REG, 0, TMP_REG1, 0, src, srcw));

	if (local_size <= SIMM_MAX)
		base = S(REAL_STACK_PTR);
	else {
		/* Large frame: compute the top of the frame in TMP_REG1 and
		   address the save area relative to it (offsets become <= 0). */
		FAIL_IF(load_immediate(compiler, DR(TMP_REG1), local_size));
		FAIL_IF(push_inst(compiler, ADDU_W | S(REAL_STACK_PTR) | T(TMP_REG1) | D(TMP_REG1), DR(TMP_REG1)));
		base = S(TMP_REG1);
		local_size = 0;
	}

	/* Restore in reverse order of sljit_emit_enter; the offsets must
	   match the prologue exactly. */
	FAIL_IF(push_inst(compiler, STACK_LOAD | base | TA(RETURN_ADDR_REG) | IMM(local_size - 1 * (int)sizeof(sljit_w)), RETURN_ADDR_REG));
	if (compiler->generals >= 5)
		FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(SLJIT_GENERAL_EREG2) | IMM(local_size - 7 * (int)sizeof(sljit_w)), DR(SLJIT_GENERAL_EREG2)));
	if (compiler->generals >= 4)
		FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(SLJIT_GENERAL_EREG1) | IMM(local_size - 6 * (int)sizeof(sljit_w)), DR(SLJIT_GENERAL_EREG1)));
	if (compiler->generals >= 3)
		FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(SLJIT_GENERAL_REG3) | IMM(local_size - 5 * (int)sizeof(sljit_w)), DR(SLJIT_GENERAL_REG3)));
	if (compiler->generals >= 2)
		FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(SLJIT_GENERAL_REG2) | IMM(local_size - 4 * (int)sizeof(sljit_w)), DR(SLJIT_GENERAL_REG2)));
	if (compiler->generals >= 1)
		FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(SLJIT_GENERAL_REG1) | IMM(local_size - 3 * (int)sizeof(sljit_w)), DR(SLJIT_GENERAL_REG1)));
	if (compiler->has_locals)
		FAIL_IF(push_inst(compiler, STACK_LOAD | base | T(SLJIT_LOCALS_REG) | IMM(local_size - 2 * (int)sizeof(sljit_w)), DR(SLJIT_LOCALS_REG)));

	/* The stack pointer adjustment below executes in the branch delay
	   slot of this JR, hence it is emitted after the jump. */
	FAIL_IF(push_inst(compiler, JR | SA(RETURN_ADDR_REG), UNMOVABLE_INS));
	if (compiler->local_size <= SIMM_MAX)
		return push_inst(compiler, ADDIU_W | S(REAL_STACK_PTR) | T(REAL_STACK_PTR) | IMM(compiler->local_size), UNMOVABLE_INS);
	else
		return push_inst(compiler, ADDU_W | S(TMP_REG1) | TA(0) | D(REAL_STACK_PTR), UNMOVABLE_INS);
}
565
#undef STACK_STORE
#undef STACK_LOAD

/* --------------------------------------------------------------------- */
/*  Operators                                                            */
/* --------------------------------------------------------------------- */

/* Selects the first form on mips-32 and the second on mips-64. */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define ARCH_DEPEND(a, b) a
#else
#define ARCH_DEPEND(a, b) b
#endif

/* Load/store opcodes indexed by the MEM_MASK bits:
   bit 3 load(l)/store(s), bit 2 signed(s)/unsigned(u),
   bits 1-0 size: word(w), byte(b), half(h), int(i). */
static SLJIT_CONST sljit_ins data_transfer_insts[16] = {
/* s u w */ ARCH_DEPEND(HI(43) /* sw */, HI(63) /* sd */),
/* s u b */ HI(40) /* sb */,
/* s u h */ HI(41) /* sh */,
/* s u i */ HI(43) /* sw */,

/* s s w */ ARCH_DEPEND(HI(43) /* sw */, HI(63) /* sd */),
/* s s b */ HI(40) /* sb */,
/* s s h */ HI(41) /* sh */,
/* s s i */ HI(43) /* sw */,

/* l u w */ ARCH_DEPEND(HI(35) /* lw */, HI(55) /* ld */),
/* l u b */ HI(36) /* lbu */,
/* l u h */ HI(37) /* lhu */,
/* l u i */ ARCH_DEPEND(HI(35) /* lw */, HI(39) /* lwu */),

/* l s w */ ARCH_DEPEND(HI(35) /* lw */, HI(55) /* ld */),
/* l s b */ HI(32) /* lb */,
/* l s h */ HI(33) /* lh */,
/* l s i */ HI(35) /* lw */,
};
600
/* reg_ar is an absolute register! */

/* Can perform an operation using at most 1 instruction. Returns nonzero
   when the access was emitted (or, with ARG_TEST set, when it could be
   emitted with one instruction); returns 0 when the general getput_arg
   path is required. */
static int getput_arg_fast(struct sljit_compiler *compiler, int flags, int reg_ar, int arg, sljit_w argw)
{
	SLJIT_ASSERT(arg & SLJIT_MEM);

	/* Single base register + 16 bit signed offset form; no write back
	   and no index register (arg & 0xf0). */
	if (!(flags & WRITE_BACK) && !(arg & 0xf0) && argw <= SIMM_MAX && argw >= SIMM_MIN) {
		/* Works for both absolute and relative addresses. */
		if (SLJIT_UNLIKELY(flags & ARG_TEST))
			return 1;
		FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(arg & 0xf) | TA(reg_ar) | IMM(argw), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS));
		/* -1 (still nonzero) signals "handled"; the caller reports
		   compiler->error. */
		return -1;
	}
	return (flags & ARG_TEST) ? SLJIT_SUCCESS : 0;
}
617
618 /* See getput_arg below.
619 Note: can_cache is called only for binary operators. Those
620 operators always uses word arguments without write back. */
621 static int can_cache(int arg, sljit_w argw, int next_arg, sljit_w next_argw)
622 {
623 if (!(next_arg & SLJIT_MEM))
624 return 0;
625
626 /* Simple operation except for updates. */
627 if (arg & 0xf0) {
628 argw &= 0x3;
629 next_argw &= 0x3;
630 if (argw && argw == next_argw && (arg == next_arg || (arg & 0xf0) == (next_arg & 0xf0)))
631 return 1;
632 return 0;
633 }
634
635 if (arg == next_arg) {
636 if (((sljit_uw)(next_argw - argw) <= SIMM_MAX && (sljit_uw)(next_argw - argw) >= SIMM_MIN))
637 return 1;
638 return 0;
639 }
640
641 return 0;
642 }
643
/* Emit the necessary instructions. See can_cache above.
   Emits the (possibly multi-instruction) memory access, maintaining the
   address cache in TMP_REG3 (compiler->cache_arg / cache_argw) so that a
   following access to next_arg/next_argw can reuse the computed base. */
static int getput_arg(struct sljit_compiler *compiler, int flags, int reg_ar, int arg, sljit_w argw, int next_arg, sljit_w next_argw)
{
	int tmp_ar;
	int base;

	SLJIT_ASSERT(arg & SLJIT_MEM);
	if (!(next_arg & SLJIT_MEM)) {
		next_arg = 0;
		next_argw = 0;
	}

	/* For loads the destination itself can hold the computed address. */
	tmp_ar = (flags & LOAD_DATA) ? reg_ar : DR(TMP_REG3);
	base = arg & 0xf;

	/* Indexed form: base + (index << (argw & 0x3)). */
	if (SLJIT_UNLIKELY(arg & 0xf0)) {
		argw &= 0x3;
		/* Preserve the stored value before the base is updated. */
		if ((flags & WRITE_BACK) && reg_ar == DR(base)) {
			SLJIT_ASSERT(!(flags & LOAD_DATA) && DR(TMP_REG1) != reg_ar);
			FAIL_IF(push_inst(compiler, ADDU_W | SA(reg_ar) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));
			reg_ar = DR(TMP_REG1);
		}

		/* Using the cache. */
		if (argw == compiler->cache_argw) {
			if (!(flags & WRITE_BACK)) {
				/* Full address (base + shifted index) cached. */
				if (arg == compiler->cache_arg)
					return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
				/* Only the shifted index is cached. */
				if ((SLJIT_MEM | (arg & 0xf0)) == compiler->cache_arg) {
					if (arg == next_arg && argw == (next_argw & 0x3)) {
						compiler->cache_arg = arg;
						compiler->cache_argw = argw;
						FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(TMP_REG3), DR(TMP_REG3)));
						return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
					}
					FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | DA(tmp_ar), tmp_ar));
					return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
				}
			}
			else {
				if ((SLJIT_MEM | (arg & 0xf0)) == compiler->cache_arg) {
					FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(base), DR(base)));
					return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
				}
			}
		}

		/* Cache the shifted index when a shift is needed. */
		if (SLJIT_UNLIKELY(argw)) {
			compiler->cache_arg = SLJIT_MEM | (arg & 0xf0);
			compiler->cache_argw = argw;
			FAIL_IF(push_inst(compiler, SLL_W | T((arg >> 4) & 0xf) | D(TMP_REG3) | SH_IMM(argw), DR(TMP_REG3)));
		}

		if (!(flags & WRITE_BACK)) {
			if (arg == next_arg && argw == (next_argw & 0x3)) {
				/* Cache the full address for the next access. */
				compiler->cache_arg = arg;
				compiler->cache_argw = argw;
				FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? ((arg >> 4) & 0xf) : TMP_REG3) | D(TMP_REG3), DR(TMP_REG3)));
				tmp_ar = DR(TMP_REG3);
			}
			else
				FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? ((arg >> 4) & 0xf) : TMP_REG3) | DA(tmp_ar), tmp_ar));
			return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
		}
		FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? ((arg >> 4) & 0xf) : TMP_REG3) | D(base), DR(base)));
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
	}

	if (SLJIT_UNLIKELY(flags & WRITE_BACK) && base) {
		/* Update only applies if a base register exists. */
		if (reg_ar == DR(base)) {
			SLJIT_ASSERT(!(flags & LOAD_DATA) && DR(TMP_REG1) != reg_ar);
			if (argw <= SIMM_MAX && argw >= SIMM_MIN) {
				FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar) | IMM(argw), MOVABLE_INS));
				if (argw)
					return push_inst(compiler, ADDIU_W | S(base) | T(base) | IMM(argw), DR(base));
				return SLJIT_SUCCESS;
			}
			/* Save the stored value before the base is overwritten. */
			FAIL_IF(push_inst(compiler, ADDU_W | SA(reg_ar) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));
			reg_ar = DR(TMP_REG1);
		}

		if (argw <= SIMM_MAX && argw >= SIMM_MIN) {
			if (argw)
				FAIL_IF(push_inst(compiler, ADDIU_W | S(base) | T(base) | IMM(argw), DR(base)));
		}
		else {
			/* Large offset: build it in TMP_REG3 (reusing/adjusting the
			   cached value when close enough), then add to the base. */
			if (compiler->cache_arg == SLJIT_MEM && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) {
				if (argw != compiler->cache_argw) {
					FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3)));
					compiler->cache_argw = argw;
				}
				FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(base), DR(base)));
			}
			else {
				compiler->cache_arg = SLJIT_MEM;
				compiler->cache_argw = argw;
				FAIL_IF(load_immediate(compiler, DR(TMP_REG3), argw));
				FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(base), DR(base)));
			}
		}
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
	}

	/* Full base + offset cached: adjust the cached address if needed. */
	if (compiler->cache_arg == arg && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) {
		if (argw != compiler->cache_argw) {
			FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3)));
			compiler->cache_argw = argw;
		}
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
	}

	/* Only the offset cached (SLJIT_MEM): adjust or reload it. */
	if (compiler->cache_arg == SLJIT_MEM && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) {
		if (argw != compiler->cache_argw)
			FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3)));
	}
	else {
		compiler->cache_arg = SLJIT_MEM;
		FAIL_IF(load_immediate(compiler, DR(TMP_REG3), argw));
	}
	compiler->cache_argw = argw;

	if (!base)
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);

	if (arg == next_arg && next_argw - argw <= SIMM_MAX && next_argw - argw >= SIMM_MIN) {
		/* Cache the full base + offset address for the next access. */
		compiler->cache_arg = arg;
		FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(base) | D(TMP_REG3), DR(TMP_REG3)));
		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
	}

	FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(base) | DA(tmp_ar), tmp_ar));
	return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
}
711
712 if (SLJIT_UNLIKELY(flags & WRITE_BACK) && base) {
713 /* Update only applies if a base register exists. */
714 if (reg_ar == DR(base)) {
715 SLJIT_ASSERT(!(flags & LOAD_DATA) && DR(TMP_REG1) != reg_ar);
716 if (argw <= SIMM_MAX && argw >= SIMM_MIN) {
717 FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar) | IMM(argw), MOVABLE_INS));
718 if (argw)
719 return push_inst(compiler, ADDIU_W | S(base) | T(base) | IMM(argw), DR(base));
720 return SLJIT_SUCCESS;
721 }
722 FAIL_IF(push_inst(compiler, ADDU_W | SA(reg_ar) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));
723 reg_ar = DR(TMP_REG1);
724 }
725
726 if (argw <= SIMM_MAX && argw >= SIMM_MIN) {
727 if (argw)
728 FAIL_IF(push_inst(compiler, ADDIU_W | S(base) | T(base) | IMM(argw), DR(base)));
729 }
730 else {
731 if (compiler->cache_arg == SLJIT_MEM && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) {
732 if (argw != compiler->cache_argw) {
733 FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3)));
734 compiler->cache_argw = argw;
735 }
736 FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(base), DR(base)));
737 }
738 else {
739 compiler->cache_arg = SLJIT_MEM;
740 compiler->cache_argw = argw;
741 FAIL_IF(load_immediate(compiler, DR(TMP_REG3), argw));
742 FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(base), DR(base)));
743 }
744 }
745 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
746 }
747
748 if (compiler->cache_arg == arg && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) {
749 if (argw != compiler->cache_argw) {
750 FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3)));
751 compiler->cache_argw = argw;
752 }
753 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
754 }
755
756 if (compiler->cache_arg == SLJIT_MEM && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN) {
757 if (argw != compiler->cache_argw)
758 FAIL_IF(push_inst(compiler, ADDIU_W | S(TMP_REG3) | T(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3)));
759 }
760 else {
761 compiler->cache_arg = SLJIT_MEM;
762 FAIL_IF(load_immediate(compiler, DR(TMP_REG3), argw));
763 }
764 compiler->cache_argw = argw;
765
766 if (!base)
767 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
768
769 if (arg == next_arg && next_argw - argw <= SIMM_MAX && next_argw - argw >= SIMM_MIN) {
770 compiler->cache_arg = arg;
771 FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(base) | D(TMP_REG3), DR(TMP_REG3)));
772 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
773 }
774
775 FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(base) | DA(tmp_ar), tmp_ar));
776 return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), (flags & LOAD_DATA) ? reg_ar : MOVABLE_INS);
777 }
778
779 static SLJIT_INLINE int emit_op_mem(struct sljit_compiler *compiler, int flags, int reg_ar, int arg, sljit_w argw)
780 {
781 if (getput_arg_fast(compiler, flags, reg_ar, arg, argw))
782 return compiler->error;
783 compiler->cache_arg = 0;
784 compiler->cache_argw = 0;
785 return getput_arg(compiler, flags, reg_ar, arg, argw, 0, 0);
786 }
787
/* Central helper of the operation emitters: resolves the destination and
   both sources (register / immediate / memory), loads what must be loaded,
   performs the operation through emit_single_op and stores the result.
   The flags argument carries the WORD/BYTE/... data type plus the
   IMM_OP/CUMULATIVE_OP/LOGICAL_OP/WRITE_BACK properties of the opcode. */
static int emit_op(struct sljit_compiler *compiler, int op, int flags,
	int dst, sljit_w dstw,
	int src1, sljit_w src1w,
	int src2, sljit_w src2w)
{
	/* arg1 goes to TMP_REG1 or src reg
	   arg2 goes to TMP_REG2, imm or src reg
	   TMP_REG3 can be used for caching
	   result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */
	int dst_r = TMP_REG2;
	int src1_r;
	sljit_w src2_r = 0;
	int sugg_src2_r = TMP_REG2;

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;

	/* Destination analysis. */
	if (dst >= SLJIT_TEMPORARY_REG1 && dst <= TMP_REG3) {
		dst_r = dst;
		flags |= REG_DEST;
		/* For moves, load the source directly into the destination. */
		if (GET_OPCODE(op) >= SLJIT_MOV && GET_OPCODE(op) <= SLJIT_MOVU_SI)
			sugg_src2_r = dst_r;
	}
	else if (dst == SLJIT_UNUSED) {
		/* A move without destination and without memory side effect
		   is a no-op. */
		if (op >= SLJIT_MOV && op <= SLJIT_MOVU_SI && !(src2 & SLJIT_MEM))
			return SLJIT_SUCCESS;
		if (GET_FLAGS(op))
			flags |= UNUSED_DEST;
	}
	else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, DR(TMP_REG1), dst, dstw))
		flags |= SLOW_DEST;

	/* Try to encode an immediate operand directly in the instruction:
	   16 bit signed for arithmetic, 16 bit unsigned for logical ops. */
	if (flags & IMM_OP) {
		if ((src2 & SLJIT_IMM) && src2w) {
			if ((!(flags & LOGICAL_OP) && (src2w <= SIMM_MAX && src2w >= SIMM_MIN))
				|| ((flags & LOGICAL_OP) && !(src2w & ~UIMM_MAX))) {
				flags |= SRC2_IMM;
				src2_r = src2w;
			}
		}
		/* Commutative operation: the immediate may come from src1. */
		if ((src1 & SLJIT_IMM) && src1w && (flags & CUMULATIVE_OP) && !(flags & SRC2_IMM)) {
			if ((!(flags & LOGICAL_OP) && (src1w <= SIMM_MAX && src1w >= SIMM_MIN))
				|| ((flags & LOGICAL_OP) && !(src1w & ~UIMM_MAX))) {
				flags |= SRC2_IMM;
				src2_r = src1w;

				/* And swap arguments. */
				src1 = src2;
				src1w = src2w;
				src2 = SLJIT_IMM;
				/* src2w = src2_r unneeded. */
			}
		}
	}

	/* Source 1. */
	if (src1 >= SLJIT_TEMPORARY_REG1 && src1 <= TMP_REG3) {
		src1_r = src1;
		flags |= REG1_SOURCE;
	}
	else if (src1 & SLJIT_IMM) {
		if (src1w) {
			FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w));
			src1_r = TMP_REG1;
		}
		else
			/* Zero immediate: use the hardwired zero register. */
			src1_r = 0;
	}
	else {
		/* Memory operand: single-instruction load if possible,
		   otherwise defer to getput_arg below (SLOW_SRC1). */
		if (getput_arg_fast(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w))
			FAIL_IF(compiler->error);
		else
			flags |= SLOW_SRC1;
		src1_r = TMP_REG1;
	}

	/* Source 2. */
	if (src2 >= SLJIT_TEMPORARY_REG1 && src2 <= TMP_REG3) {
		src2_r = src2;
		flags |= REG2_SOURCE;
		/* Register to register move: no work needed later. */
		if (!(flags & REG_DEST) && GET_OPCODE(op) >= SLJIT_MOV && GET_OPCODE(op) <= SLJIT_MOVU_SI)
			dst_r = src2_r;
	}
	else if (src2 & SLJIT_IMM) {
		if (!(flags & SRC2_IMM)) {
			if (src2w || (GET_OPCODE(op) >= SLJIT_MOV && GET_OPCODE(op) <= SLJIT_MOVU_SI)) {
				FAIL_IF(load_immediate(compiler, DR(sugg_src2_r), src2w));
				src2_r = sugg_src2_r;
			}
			else
				src2_r = 0;
		}
	}
	else {
		if (getput_arg_fast(compiler, flags | LOAD_DATA, DR(sugg_src2_r), src2, src2w))
			FAIL_IF(compiler->error);
		else
			flags |= SLOW_SRC2;
		src2_r = sugg_src2_r;
	}

	/* Both sources need the slow path: order the loads so that the
	   address cache (TMP_REG3) can be reused by the following access. */
	if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
		SLJIT_ASSERT(src2_r == TMP_REG2);
		if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG2), src2, src2w, src1, src1w));
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, dst, dstw));
		}
		else {
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, src2, src2w));
			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG2), src2, src2w, dst, dstw));
		}
	}
	else if (flags & SLOW_SRC1)
		FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(TMP_REG1), src1, src1w, dst, dstw));
	else if (flags & SLOW_SRC2)
		FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, DR(sugg_src2_r), src2, src2w, dst, dstw));

	FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));

	/* Store the result when the destination is in memory. */
	if (dst & SLJIT_MEM) {
		if (!(flags & SLOW_DEST)) {
			getput_arg_fast(compiler, flags, DR(dst_r), dst, dstw);
			return compiler->error;
		}
		return getput_arg(compiler, flags, DR(dst_r), dst, dstw, 0, 0);
	}

	return SLJIT_SUCCESS;
}
917
918 int sljit_emit_op0(struct sljit_compiler *compiler, int op)
919 {
920 CHECK_ERROR();
921 check_sljit_emit_op0(compiler, op);
922
923 op = GET_OPCODE(op);
924 switch (op) {
925 case SLJIT_BREAKPOINT:
926 return push_inst(compiler, BREAK, UNMOVABLE_INS);
927 case SLJIT_NOP:
928 return push_inst(compiler, NOP, UNMOVABLE_INS);
929 }
930
931 return SLJIT_SUCCESS;
932 }
933
/* Emits a single-operand operation: the MOV family (word/int/half/byte,
   signed/unsigned, plus the MOVU write-back variants), NOT, NEG and CLZ.
   All real work is delegated to emit_op with the matching data-size flags;
   immediate sources of the narrow moves are pre-truncated here (e.g.
   (unsigned char)srcw) so the loaded constant already has its final value. */
int sljit_emit_op1(struct sljit_compiler *compiler, int op,
	int dst, sljit_w dstw,
	int src, sljit_w srcw)
{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	/* No extra input flags are needed on MIPS-32; a macro keeps the
	   emit_op calls below identical for a future 64-bit code path. */
#define inp_flags 0
#endif

	CHECK_ERROR();
	check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw);

	/* The write-back (MOVU) opcodes must mirror the plain MOV opcodes
	   at a fixed offset for the mapping below to hold. */
	SLJIT_ASSERT(SLJIT_MOV + 7 == SLJIT_MOVU);

	switch (GET_OPCODE(op)) {
	case SLJIT_MOV:
		return emit_op(compiler, SLJIT_MOV, inp_flags | WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw);

	case SLJIT_MOV_UI:
		return emit_op(compiler, SLJIT_MOV_UI, inp_flags | INT_DATA, dst, dstw, TMP_REG1, 0, src, srcw);

	case SLJIT_MOV_SI:
		return emit_op(compiler, SLJIT_MOV_SI, inp_flags | INT_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, srcw);

	case SLJIT_MOV_UB:
		return emit_op(compiler, SLJIT_MOV_UB, inp_flags | BYTE_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (unsigned char)srcw : srcw);

	case SLJIT_MOV_SB:
		return emit_op(compiler, SLJIT_MOV_SB, inp_flags | BYTE_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (signed char)srcw : srcw);

	case SLJIT_MOV_UH:
		return emit_op(compiler, SLJIT_MOV_UH, inp_flags | HALF_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (unsigned short)srcw : srcw);

	case SLJIT_MOV_SH:
		return emit_op(compiler, SLJIT_MOV_SH, inp_flags | HALF_DATA | SIGNED_DATA, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (signed short)srcw : srcw);

	/* MOVU variants: same data sizes with the WRITE_BACK flag added. */
	case SLJIT_MOVU:
		return emit_op(compiler, SLJIT_MOV, inp_flags | WORD_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);

	case SLJIT_MOVU_UI:
		return emit_op(compiler, SLJIT_MOV_UI, inp_flags | INT_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);

	case SLJIT_MOVU_SI:
		return emit_op(compiler, SLJIT_MOV_SI, inp_flags | INT_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, srcw);

	case SLJIT_MOVU_UB:
		return emit_op(compiler, SLJIT_MOV_UB, inp_flags | BYTE_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (unsigned char)srcw : srcw);

	case SLJIT_MOVU_SB:
		return emit_op(compiler, SLJIT_MOV_SB, inp_flags | BYTE_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (signed char)srcw : srcw);

	case SLJIT_MOVU_UH:
		return emit_op(compiler, SLJIT_MOV_UH, inp_flags | HALF_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (unsigned short)srcw : srcw);

	case SLJIT_MOVU_SH:
		return emit_op(compiler, SLJIT_MOV_SH, inp_flags | HALF_DATA | SIGNED_DATA | WRITE_BACK, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (signed short)srcw : srcw);

	case SLJIT_NOT:
		return emit_op(compiler, op, inp_flags, dst, dstw, TMP_REG1, 0, src, srcw);

	case SLJIT_NEG:
		/* NEG is lowered to SUB with an immediate 0 as the first source. */
		return emit_op(compiler, SLJIT_SUB | GET_ALL_FLAGS(op), inp_flags | IMM_OP, dst, dstw, SLJIT_IMM, 0, src, srcw);

	case SLJIT_CLZ:
		return emit_op(compiler, op, inp_flags, dst, dstw, TMP_REG1, 0, src, srcw);
	}

	return SLJIT_SUCCESS;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#undef inp_flags
#endif
}
1005
/* Emits a two-operand operation (ADD/ADDC, SUB/SUBC, MUL, AND/OR/XOR and
   the shift group), delegating to emit_op with flags that describe which
   operands may be immediates (IMM_OP), whether operands commute
   (CUMULATIVE_OP) and whether immediates are zero-extended (LOGICAL_OP). */
int sljit_emit_op2(struct sljit_compiler *compiler, int op,
	int dst, sljit_w dstw,
	int src1, sljit_w src1w,
	int src2, sljit_w src2w)
{
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
	/* See sljit_emit_op1: placeholder for 64-bit specific input flags. */
#define inp_flags 0
#endif

	CHECK_ERROR();
	check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w);

	switch (GET_OPCODE(op)) {
	case SLJIT_ADD:
	case SLJIT_ADDC:
		return emit_op(compiler, op, inp_flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);

	case SLJIT_SUB:
	case SLJIT_SUBC:
		return emit_op(compiler, op, inp_flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);

	case SLJIT_MUL:
		return emit_op(compiler, op, inp_flags | CUMULATIVE_OP, dst, dstw, src1, src1w, src2, src2w);

	case SLJIT_AND:
	case SLJIT_OR:
	case SLJIT_XOR:
		return emit_op(compiler, op, inp_flags | CUMULATIVE_OP | LOGICAL_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);

	case SLJIT_SHL:
	case SLJIT_LSHR:
	case SLJIT_ASHR:
		/* Mask immediate shift amounts to the machine word width
		   (5 bits on MIPS-32, 6 bits on 64-bit targets). */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
		if (src2 & SLJIT_IMM)
			src2w &= 0x1f;
#else
		if (src2 & SLJIT_IMM)
			src2w &= 0x3f;
#endif
		return emit_op(compiler, op, inp_flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
	}

	return SLJIT_SUCCESS;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#undef inp_flags
#endif
}
1053
1054 /* --------------------------------------------------------------------- */
1055 /* Floating point operators */
1056 /* --------------------------------------------------------------------- */
1057
/* Returns non-zero when a usable FPU is present. Reads CP1 control
   register $0 (FIR) with cfc1 and tests bit 22.
   NOTE(review): bit 22 is the F64 bit in the MIPS32r2 FIR layout —
   confirm this is the intended capability bit for this code base. */
int sljit_is_fpu_available(void)
{
#if (defined SLJIT_QEMU && SLJIT_QEMU)
	/* Qemu says fir is 0 by default. */
	return 1;
#elif defined(__GNUC__)
	sljit_w fir;
	asm ("cfc1 %0, $0" : "=r"(fir));
	return (fir >> 22) & 0x1;
#else
#error "FIR check is not implemented for this architecture"
#endif
}
1071
/* Emits a double-precision FPU load (load != 0) or store (load == 0)
   between fpu_reg and the memory operand described by arg/argw.
   Three code paths, in order of preference:
     1. base register + 16-bit signed displacement (single LDC1/SDC1);
     2. indexed form (base + (index << shift)) computed into TMP_REG1;
     3. full address materialized in TMP_REG3, which is remembered in
        compiler->cache_arg/cache_argw so nearby transfers can reuse it
        with a small displacement. */
static int emit_fpu_data_transfer(struct sljit_compiler *compiler, int fpu_reg, int load, int arg, sljit_w argw)
{
	int hi_reg;

	SLJIT_ASSERT(arg & SLJIT_MEM);

	/* Fast loads and stores. */
	if (!(arg & 0xf0)) {
		/* Both for (arg & 0xf) == SLJIT_UNUSED and (arg & 0xf) != SLJIT_UNUSED. */
		if (argw <= SIMM_MAX && argw >= SIMM_MIN)
			return push_inst(compiler, (load ? LDC1 : SDC1) | S(arg & 0xf) | FT(fpu_reg) | IMM(argw), MOVABLE_INS);
	}

	if (arg & 0xf0) {
		/* Indexed addressing: bits 4-7 of arg hold the index register,
		   the low two bits of argw the left-shift (scale) to apply. */
		argw &= 0x3;
		hi_reg = (arg >> 4) & 0xf;
		if (argw) {
			FAIL_IF(push_inst(compiler, SLL_W | T(hi_reg) | D(TMP_REG1) | SH_IMM(argw), DR(TMP_REG1)));
			hi_reg = TMP_REG1;
		}
		FAIL_IF(push_inst(compiler, ADDU_W | S(hi_reg) | T(arg & 0xf) | D(TMP_REG1), DR(TMP_REG1)));
		return push_inst(compiler, (load ? LDC1 : SDC1) | S(TMP_REG1) | FT(fpu_reg) | IMM(0), MOVABLE_INS);
	}

	/* Use cache. */
	if (compiler->cache_arg == arg && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN)
		return push_inst(compiler, (load ? LDC1 : SDC1) | S(TMP_REG3) | FT(fpu_reg) | IMM(argw - compiler->cache_argw), MOVABLE_INS);

	/* Put value to cache. */
	compiler->cache_arg = arg;
	compiler->cache_argw = argw;

	FAIL_IF(load_immediate(compiler, DR(TMP_REG3), argw));
	if (arg & 0xf)
		FAIL_IF(push_inst(compiler, ADDU_W | S(TMP_REG3) | T(arg & 0xf) | D(TMP_REG3), DR(TMP_REG3)));
	return push_inst(compiler, (load ? LDC1 : SDC1) | S(TMP_REG3) | FT(fpu_reg) | IMM(0), MOVABLE_INS);
}
1109
1110 int sljit_emit_fop1(struct sljit_compiler *compiler, int op,
1111 int dst, sljit_w dstw,
1112 int src, sljit_w srcw)
1113 {
1114 int dst_fr;
1115
1116 CHECK_ERROR();
1117 check_sljit_emit_fop1(compiler, op, dst, dstw, src, srcw);
1118
1119 compiler->cache_arg = 0;
1120 compiler->cache_argw = 0;
1121
1122 if (GET_OPCODE(op) == SLJIT_FCMP) {
1123 if (dst > SLJIT_FLOAT_REG4) {
1124 FAIL_IF(emit_fpu_data_transfer(compiler, TMP_FREG1, 1, dst, dstw));
1125 dst = TMP_FREG1;
1126 }
1127 if (src > SLJIT_FLOAT_REG4) {
1128 FAIL_IF(emit_fpu_data_transfer(compiler, TMP_FREG2, 1, src, srcw));
1129 src = TMP_FREG2;
1130 }
1131
1132 /* src and dst are swapped. */
1133 if (op & SLJIT_SET_E) {
1134 FAIL_IF(push_inst(compiler, C_UEQ_D | FT(src) | FS(dst), UNMOVABLE_INS));
1135 FAIL_IF(push_inst(compiler, CFC1 | TA(EQUAL_FLAG) | DA(FCSR_REG), EQUAL_FLAG));
1136 FAIL_IF(push_inst(compiler, SRL | TA(EQUAL_FLAG) | DA(EQUAL_FLAG) | SH_IMM(23), EQUAL_FLAG));
1137 FAIL_IF(push_inst(compiler, ANDI | SA(EQUAL_FLAG) | TA(EQUAL_FLAG) | IMM(1), EQUAL_FLAG));
1138 }
1139 if (op & SLJIT_SET_S) {
1140 /* Mixing the instructions for the two checks. */
1141 FAIL_IF(push_inst(compiler, C_ULT_D | FT(src) | FS(dst), UNMOVABLE_INS));
1142 FAIL_IF(push_inst(compiler, CFC1 | TA(ULESS_FLAG) | DA(FCSR_REG), ULESS_FLAG));
1143 FAIL_IF(push_inst(compiler, C_ULT_D | FT(dst) | FS(src), UNMOVABLE_INS));
1144 FAIL_IF(push_inst(compiler, SRL | TA(ULESS_FLAG) | DA(ULESS_FLAG) | SH_IMM(23), ULESS_FLAG));
1145 FAIL_IF(push_inst(compiler, ANDI | SA(ULESS_FLAG) | TA(ULESS_FLAG) | IMM(1), ULESS_FLAG));
1146 FAIL_IF(push_inst(compiler, CFC1 | TA(UGREATER_FLAG) | DA(FCSR_REG), UGREATER_FLAG));
1147 FAIL_IF(push_inst(compiler, SRL | TA(UGREATER_FLAG) | DA(UGREATER_FLAG) | SH_IMM(23), UGREATER_FLAG));
1148 FAIL_IF(push_inst(compiler, ANDI | SA(UGREATER_FLAG) | TA(UGREATER_FLAG) | IMM(1), UGREATER_FLAG));
1149 }
1150 return push_inst(compiler, C_UN_D | FT(src) | FS(dst), FCSR_FCC);
1151 }
1152
1153 dst_fr = (dst > SLJIT_FLOAT_REG4) ? TMP_FREG1 : dst;
1154
1155 if (src > SLJIT_FLOAT_REG4) {
1156 FAIL_IF(emit_fpu_data_transfer(compiler, dst_fr, 1, src, srcw));
1157 src = dst_fr;
1158 }
1159
1160 switch (op) {
1161 case SLJIT_FMOV:
1162 if (src != dst_fr && dst_fr != TMP_FREG1)
1163 FAIL_IF(push_inst(compiler, MOV_D | FS(src) | FD(dst_fr), MOVABLE_INS));
1164 break;
1165 case SLJIT_FNEG:
1166 FAIL_IF(push_inst(compiler, NEG_D | FS(src) | FD(dst_fr), MOVABLE_INS));
1167 break;
1168 case SLJIT_FABS:
1169 FAIL_IF(push_inst(compiler, ABS_D | FS(src) | FD(dst_fr), MOVABLE_INS));
1170 break;
1171 }
1172
1173 if (dst_fr == TMP_FREG1)
1174 FAIL_IF(emit_fpu_data_transfer(compiler, src, 0, dst, dstw));
1175
1176 return SLJIT_SUCCESS;
1177 }
1178
/* Emits a two-operand floating-point operation (FADD/FSUB/FMUL/FDIV).
   Memory sources are loaded into TMP_FREG1/TMP_FREG2; a memory
   destination is computed in TMP_FREG1 and stored back afterwards. */
int sljit_emit_fop2(struct sljit_compiler *compiler, int op,
	int dst, sljit_w dstw,
	int src1, sljit_w src1w,
	int src2, sljit_w src2w)
{
	int dst_fr;

	CHECK_ERROR();
	check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w);

	/* Reset the address cache used by emit_fpu_data_transfer. */
	compiler->cache_arg = 0;
	compiler->cache_argw = 0;

	dst_fr = (dst > SLJIT_FLOAT_REG4) ? TMP_FREG1 : dst;

	if (src2 > SLJIT_FLOAT_REG4) {
		FAIL_IF(emit_fpu_data_transfer(compiler, TMP_FREG2, 1, src2, src2w));
		src2 = TMP_FREG2;
	}

	if (src1 > SLJIT_FLOAT_REG4) {
		FAIL_IF(emit_fpu_data_transfer(compiler, TMP_FREG1, 1, src1, src1w));
		src1 = TMP_FREG1;
	}

	switch (op) {
	case SLJIT_FADD:
		FAIL_IF(push_inst(compiler, ADD_D | FT(src2) | FS(src1) | FD(dst_fr), MOVABLE_INS));
		break;

	case SLJIT_FSUB:
		FAIL_IF(push_inst(compiler, SUB_D | FT(src2) | FS(src1) | FD(dst_fr), MOVABLE_INS));
		break;

	case SLJIT_FMUL:
		FAIL_IF(push_inst(compiler, MUL_D | FT(src2) | FS(src1) | FD(dst_fr), MOVABLE_INS));
		break;

	case SLJIT_FDIV:
		FAIL_IF(push_inst(compiler, DIV_D | FT(src2) | FS(src1) | FD(dst_fr), MOVABLE_INS));
		break;
	}

	/* Memory destination: flush the result from the scratch register. */
	if (dst_fr == TMP_FREG1)
		FAIL_IF(emit_fpu_data_transfer(compiler, TMP_FREG1, 0, dst, dstw));

	return SLJIT_SUCCESS;
}
1227
1228 /* --------------------------------------------------------------------- */
1229 /* Other instructions */
1230 /* --------------------------------------------------------------------- */
1231
1232 int sljit_emit_fast_enter(struct sljit_compiler *compiler, int dst, sljit_w dstw, int args, int temporaries, int generals, int local_size)
1233 {
1234 CHECK_ERROR();
1235 check_sljit_emit_fast_enter(compiler, dst, dstw, args, temporaries, generals, local_size);
1236
1237 compiler->temporaries = temporaries;
1238 compiler->generals = generals;
1239
1240 compiler->has_locals = local_size > 0;
1241 local_size += (generals + 2 + 4) * sizeof(sljit_w);
1242 compiler->local_size = (local_size + 15) & ~0xf;
1243
1244 if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS)
1245 return push_inst(compiler, ADDU_W | SA(RETURN_ADDR_REG) | TA(0) | D(dst), DR(dst));
1246 else if (dst & SLJIT_MEM)
1247 return emit_op_mem(compiler, WORD_DATA, RETURN_ADDR_REG, dst, dstw);
1248 return SLJIT_SUCCESS;
1249 }
1250
1251 int sljit_emit_fast_return(struct sljit_compiler *compiler, int src, sljit_w srcw)
1252 {
1253 CHECK_ERROR();
1254 check_sljit_emit_fast_return(compiler, src, srcw);
1255
1256 if (src >= SLJIT_TEMPORARY_REG1 && src <= SLJIT_NO_REGISTERS)
1257 FAIL_IF(push_inst(compiler, ADDU_W | S(src) | TA(0) | DA(RETURN_ADDR_REG), RETURN_ADDR_REG));
1258 else if (src & SLJIT_MEM)
1259 FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, RETURN_ADDR_REG, src, srcw));
1260 else if (src & SLJIT_IMM)
1261 FAIL_IF(load_immediate(compiler, RETURN_ADDR_REG, srcw));
1262
1263 FAIL_IF(push_inst(compiler, JR | SA(RETURN_ADDR_REG), UNMOVABLE_INS));
1264 return push_inst(compiler, NOP, UNMOVABLE_INS);
1265 }
1266
1267 /* --------------------------------------------------------------------- */
1268 /* Conditional instructions */
1269 /* --------------------------------------------------------------------- */
1270
1271 struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
1272 {
1273 struct sljit_label *label;
1274
1275 CHECK_ERROR_PTR();
1276 check_sljit_emit_label(compiler);
1277
1278 if (compiler->last_label && compiler->last_label->size == compiler->size)
1279 return compiler->last_label;
1280
1281 label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
1282 PTR_FAIL_IF(!label);
1283 set_label(label, compiler);
1284 compiler->delay_slot = UNMOVABLE_INS;
1285 return label;
1286 }
1287
/* Branch offset (in instruction words) that skips the far-jump sequence
   emitted after an inline conditional branch in sljit_emit_jump. */
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define JUMP_LENGTH 4
#else
#define JUMP_LENGTH 7
#endif

/* The BR_* helpers set three locals of the enclosing function:
   inst (the branch instruction), flags (which condition-encoding bit
   the patcher must use) and delay_check (the register the delay-slot
   filler must not depend on). */

/* Branch (skip the far jump) when src is zero. */
#define BR_Z(src) \
	inst = BEQ | SA(src) | TA(0) | JUMP_LENGTH; \
	flags = IS_BIT26_COND; \
	delay_check = src;

/* Branch (skip the far jump) when src is non-zero. */
#define BR_NZ(src) \
	inst = BNE | SA(src) | TA(0) | JUMP_LENGTH; \
	flags = IS_BIT26_COND; \
	delay_check = src;

/* Branch on FPU condition bit set. */
#define BR_T() \
	inst = BC1T | JUMP_LENGTH; \
	flags = IS_BIT16_COND; \
	delay_check = FCSR_FCC;

/* Branch on FPU condition bit clear. */
#define BR_F() \
	inst = BC1F | JUMP_LENGTH; \
	flags = IS_BIT16_COND; \
	delay_check = FCSR_FCC;
1313
/* Emits a (possibly conditional) jump to a yet-unknown target.
   Conditional types emit a short native branch whose condition is the
   INVERSE of the sljit condition: when the sljit condition is false the
   branch skips the JUMP_LENGTH-word absolute-jump sequence that follows.
   The target address is materialized by emit_const into TMP_REG2 and
   patched later (jump->addr records the patch position). */
struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, int type)
{
	struct sljit_jump *jump;
	sljit_ins inst;
	int flags = 0;
	int delay_check = UNMOVABLE_INS;

	CHECK_ERROR_PTR();
	check_sljit_emit_jump(compiler, type);

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
	type &= 0xff;

	/* Note the inversion: e.g. SLJIT_C_EQUAL uses BR_NZ, which skips
	   the far jump when the flag register signals "not equal taken". */
	switch (type) {
	case SLJIT_C_EQUAL:
	case SLJIT_C_FLOAT_NOT_EQUAL:
		BR_NZ(EQUAL_FLAG);
		break;
	case SLJIT_C_NOT_EQUAL:
	case SLJIT_C_FLOAT_EQUAL:
		BR_Z(EQUAL_FLAG);
		break;
	case SLJIT_C_LESS:
	case SLJIT_C_FLOAT_LESS:
		BR_Z(ULESS_FLAG);
		break;
	case SLJIT_C_GREATER_EQUAL:
	case SLJIT_C_FLOAT_GREATER_EQUAL:
		BR_NZ(ULESS_FLAG);
		break;
	case SLJIT_C_GREATER:
	case SLJIT_C_FLOAT_GREATER:
		BR_Z(UGREATER_FLAG);
		break;
	case SLJIT_C_LESS_EQUAL:
	case SLJIT_C_FLOAT_LESS_EQUAL:
		BR_NZ(UGREATER_FLAG);
		break;
	case SLJIT_C_SIG_LESS:
		BR_Z(LESS_FLAG);
		break;
	case SLJIT_C_SIG_GREATER_EQUAL:
		BR_NZ(LESS_FLAG);
		break;
	case SLJIT_C_SIG_GREATER:
		BR_Z(GREATER_FLAG);
		break;
	case SLJIT_C_SIG_LESS_EQUAL:
		BR_NZ(GREATER_FLAG);
		break;
	case SLJIT_C_OVERFLOW:
	case SLJIT_C_MUL_OVERFLOW:
		BR_Z(OVERFLOW_FLAG);
		break;
	case SLJIT_C_NOT_OVERFLOW:
	case SLJIT_C_MUL_NOT_OVERFLOW:
		BR_NZ(OVERFLOW_FLAG);
		break;
	case SLJIT_C_FLOAT_NAN:
		BR_F();
		break;
	case SLJIT_C_FLOAT_NOT_NAN:
		BR_T();
		break;
	default:
		/* Not conditional branch. */
		inst = 0;
		break;
	}

	jump->flags |= flags;
	/* The previous instruction may be hoisted into the delay slot if it
	   does not write the register/flag the branch tests. */
	if (compiler->delay_slot == MOVABLE_INS || (compiler->delay_slot != UNMOVABLE_INS && compiler->delay_slot != delay_check))
		jump->flags |= IS_MOVABLE;

	if (inst)
		PTR_FAIL_IF(push_inst(compiler, inst, UNMOVABLE_INS));

	PTR_FAIL_IF(emit_const(compiler, TMP_REG2, 0));
	if (type <= SLJIT_JUMP) {
		PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
		jump->addr = compiler->size;
		PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	} else {
		/* Cannot be optimized out. */
		SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25 && PIC_ADDR_REG == TMP_REG2);
		jump->flags |= IS_JAL;
		PTR_FAIL_IF(push_inst(compiler, JALR | S(TMP_REG2) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));
		jump->addr = compiler->size;
		/* Delay slot: copy TEMPORARY_REG1 into register 4 (presumably
		   $a0, the first call argument — confirm against emit_enter). */
		PTR_FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_TEMPORARY_REG1) | TA(0) | DA(4), UNMOVABLE_INS));
	}
	return jump;
}
1408
/* Helpers for sljit_emit_cmp: replace an immediate source operand with a
   register. A non-zero immediate is loaded into TMP_REG1/TMP_REG2; a
   zero immediate maps to register index 0 (MIPS hard-wired zero). */
#define RESOLVE_IMM1() \
	if (src1 & SLJIT_IMM) { \
		if (src1w) { \
			PTR_FAIL_IF(load_immediate(compiler, DR(TMP_REG1), src1w)); \
			src1 = TMP_REG1; \
		} \
		else \
			src1 = 0; \
	}

#define RESOLVE_IMM2() \
	if (src2 & SLJIT_IMM) { \
		if (src2w) { \
			PTR_FAIL_IF(load_immediate(compiler, DR(TMP_REG2), src2w)); \
			src2 = TMP_REG2; \
		} \
		else \
			src2 = 0; \
	}
1428
/* Emits a compare-and-jump in one step, picking the cheapest MIPS branch
   form: BEQ/BNE for (in)equality, the BLTZ/BGEZ/BLEZ/BGTZ family for
   signed comparisons against zero, and an SLT/SLTU (or SLTI/SLTIU)
   followed by BEQ/BNE against $0 for everything else. As in
   sljit_emit_jump, the inline branch condition is inverted so that it
   skips the JUMP_LENGTH-word far-jump sequence when the sljit condition
   does not hold. */
struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, int type,
	int src1, sljit_w src1w,
	int src2, sljit_w src2w)
{
	struct sljit_jump *jump;
	int flags;
	sljit_ins inst;

	CHECK_ERROR_PTR();
	check_sljit_emit_cmp(compiler, type, src1, src1w, src2, src2w);

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;
	flags = ((type & SLJIT_INT_OP) ? INT_DATA : WORD_DATA) | LOAD_DATA;
	/* Load memory operands into the temporary registers. */
	if (src1 & SLJIT_MEM) {
		if (getput_arg_fast(compiler, flags, DR(TMP_REG1), src1, src1w))
			PTR_FAIL_IF(compiler->error);
		else
			PTR_FAIL_IF(getput_arg(compiler, flags, DR(TMP_REG1), src1, src1w, src2, src2w));
		src1 = TMP_REG1;
	}
	if (src2 & SLJIT_MEM) {
		if (getput_arg_fast(compiler, flags, DR(TMP_REG2), src2, src2w))
			PTR_FAIL_IF(compiler->error);
		else
			PTR_FAIL_IF(getput_arg(compiler, flags, DR(TMP_REG2), src2, src2w, 0, 0));
		src2 = TMP_REG2;
	}

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
	type &= 0xff;

	if (type <= SLJIT_C_NOT_EQUAL) {
		/* Equality: a single register-register BEQ/BNE (inverted). */
		RESOLVE_IMM1();
		RESOLVE_IMM2();
		jump->flags |= IS_BIT26_COND;
		if (compiler->delay_slot == MOVABLE_INS || (compiler->delay_slot != UNMOVABLE_INS && compiler->delay_slot != DR(src1) && compiler->delay_slot != DR(src2)))
			jump->flags |= IS_MOVABLE;
		PTR_FAIL_IF(push_inst(compiler, (type == SLJIT_C_EQUAL ? BNE : BEQ) | S(src1) | T(src2) | JUMP_LENGTH, UNMOVABLE_INS));
	}
	else if (type >= SLJIT_C_SIG_LESS && (((src1 & SLJIT_IMM) && (src1w == 0)) || ((src2 & SLJIT_IMM) && (src2w == 0)))) {
		/* Signed comparison against zero: single compare-with-zero branch.
		   The two switches mirror each other because one side being the
		   zero immediate flips the comparison direction. */
		inst = NOP;
		if ((src1 & SLJIT_IMM) && (src1w == 0)) {
			RESOLVE_IMM2();
			switch (type) {
			case SLJIT_C_SIG_LESS:
				inst = BLEZ;
				jump->flags |= IS_BIT26_COND;
				break;
			case SLJIT_C_SIG_GREATER_EQUAL:
				inst = BGTZ;
				jump->flags |= IS_BIT26_COND;
				break;
			case SLJIT_C_SIG_GREATER:
				inst = BGEZ;
				jump->flags |= IS_BIT16_COND;
				break;
			case SLJIT_C_SIG_LESS_EQUAL:
				inst = BLTZ;
				jump->flags |= IS_BIT16_COND;
				break;
			}
			src1 = src2;
		}
		else {
			RESOLVE_IMM1();
			switch (type) {
			case SLJIT_C_SIG_LESS:
				inst = BGEZ;
				jump->flags |= IS_BIT16_COND;
				break;
			case SLJIT_C_SIG_GREATER_EQUAL:
				inst = BLTZ;
				jump->flags |= IS_BIT16_COND;
				break;
			case SLJIT_C_SIG_GREATER:
				inst = BLEZ;
				jump->flags |= IS_BIT26_COND;
				break;
			case SLJIT_C_SIG_LESS_EQUAL:
				inst = BGTZ;
				jump->flags |= IS_BIT26_COND;
				break;
			}
		}
		PTR_FAIL_IF(push_inst(compiler, inst | S(src1) | JUMP_LENGTH, UNMOVABLE_INS));
	}
	else {
		/* General ordering: set TMP_REG1 with SLT(U)/SLTI(U), then
		   branch on it being zero/non-zero. SLTI(U) is usable when the
		   immediate fits in 16 signed bits. */
		if (type == SLJIT_C_LESS || type == SLJIT_C_GREATER_EQUAL || type == SLJIT_C_SIG_LESS || type == SLJIT_C_SIG_GREATER_EQUAL) {
			RESOLVE_IMM1();
			if ((src2 & SLJIT_IMM) && src2w <= SIMM_MAX && src2w >= SIMM_MIN)
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_C_LESS_EQUAL ? SLTIU : SLTI) | S(src1) | T(TMP_REG1) | IMM(src2w), DR(TMP_REG1)));
			else {
				RESOLVE_IMM2();
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_C_LESS_EQUAL ? SLTU : SLT) | S(src1) | T(src2) | D(TMP_REG1), DR(TMP_REG1)));
			}
			type = (type == SLJIT_C_LESS || type == SLJIT_C_SIG_LESS) ? SLJIT_C_NOT_EQUAL : SLJIT_C_EQUAL;
		}
		else {
			/* GREATER/LESS_EQUAL: compare with the operands swapped. */
			RESOLVE_IMM2();
			if ((src1 & SLJIT_IMM) && src1w <= SIMM_MAX && src1w >= SIMM_MIN)
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_C_LESS_EQUAL ? SLTIU : SLTI) | S(src2) | T(TMP_REG1) | IMM(src1w), DR(TMP_REG1)));
			else {
				RESOLVE_IMM1();
				PTR_FAIL_IF(push_inst(compiler, (type <= SLJIT_C_LESS_EQUAL ? SLTU : SLT) | S(src2) | T(src1) | D(TMP_REG1), DR(TMP_REG1)));
			}
			type = (type == SLJIT_C_GREATER || type == SLJIT_C_SIG_GREATER) ? SLJIT_C_NOT_EQUAL : SLJIT_C_EQUAL;
		}

		jump->flags |= IS_BIT26_COND;
		PTR_FAIL_IF(push_inst(compiler, (type == SLJIT_C_EQUAL ? BNE : BEQ) | S(TMP_REG1) | TA(0) | JUMP_LENGTH, UNMOVABLE_INS));
	}

	/* Far jump: patchable constant in TMP_REG2, JR, delay-slot NOP. */
	PTR_FAIL_IF(emit_const(compiler, TMP_REG2, 0));
	PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
	jump->addr = compiler->size;
	PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	return jump;
}
1550
1551 #undef RESOLVE_IMM1
1552 #undef RESOLVE_IMM2
1553
1554 #undef JUMP_LENGTH
1555 #undef BR_Z
1556 #undef BR_NZ
1557 #undef BR_T
1558 #undef BR_F
1559
/* Emits an indirect jump or call through a register, memory operand or
   immediate address. Calls go through PIC_ADDR_REG ($25/t9 per the
   assert below, as the MIPS PIC calling convention requires), and every
   call's delay slot sets up register 4 with TEMPORARY_REG1 (presumably
   the first argument in $a0 — confirm against emit_enter). */
int sljit_emit_ijump(struct sljit_compiler *compiler, int type, int src, sljit_w srcw)
{
	int src_r = TMP_REG2;
	struct sljit_jump *jump = NULL;

	CHECK_ERROR();
	check_sljit_emit_ijump(compiler, type, src, srcw);

	if (src >= SLJIT_TEMPORARY_REG1 && src <= SLJIT_NO_REGISTERS) {
		/* Register 4 is overwritten in the call delay slots below, so a
		   source living there must be copied out first. */
		if (DR(src) != 4)
			src_r = src;
		else
			FAIL_IF(push_inst(compiler, ADDU_W | S(src) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
	}

	if (type >= SLJIT_CALL0) {
		SLJIT_ASSERT(DR(PIC_ADDR_REG) == 25 && PIC_ADDR_REG == TMP_REG2);
		if (src & (SLJIT_IMM | SLJIT_MEM)) {
			if (src & SLJIT_IMM)
				FAIL_IF(load_immediate(compiler, DR(PIC_ADDR_REG), srcw));
			else {
				SLJIT_ASSERT(src_r == TMP_REG2 && (src & SLJIT_MEM));
				FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, TMP_REG2, 0, TMP_REG1, 0, src, srcw));
			}
			FAIL_IF(push_inst(compiler, JALR | S(PIC_ADDR_REG) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));
			/* We need an extra instruction in any case. */
			return push_inst(compiler, ADDU_W | S(SLJIT_TEMPORARY_REG1) | TA(0) | DA(4), UNMOVABLE_INS);
		}

		/* Register input. */
		if (type >= SLJIT_CALL1)
			FAIL_IF(push_inst(compiler, ADDU_W | S(SLJIT_TEMPORARY_REG1) | TA(0) | DA(4), 4));
		FAIL_IF(push_inst(compiler, JALR | S(src_r) | DA(RETURN_ADDR_REG), UNMOVABLE_INS));
		/* Delay slot: keep the callee address in PIC_ADDR_REG ($t9). */
		return push_inst(compiler, ADDU_W | S(src_r) | TA(0) | D(PIC_ADDR_REG), UNMOVABLE_INS);
	}

	if (src & SLJIT_IMM) {
		/* Immediate target: record a patchable jump through TMP_REG2. */
		jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
		FAIL_IF(!jump);
		set_jump(jump, compiler, JUMP_ADDR);
		jump->u.target = srcw;

		if (compiler->delay_slot != UNMOVABLE_INS)
			jump->flags |= IS_MOVABLE;

		FAIL_IF(emit_const(compiler, TMP_REG2, 0));
	}
	else if (src & SLJIT_MEM)
		FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, TMP_REG2, 0, TMP_REG1, 0, src, srcw));

	FAIL_IF(push_inst(compiler, JR | S(src_r), UNMOVABLE_INS));
	if (jump)
		jump->addr = compiler->size;
	FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
	return SLJIT_SUCCESS;
}
1616
/* Materializes a condition as a 0/1 value in dst. Most conditions are
   already held as 0/1 in a dedicated flag register (dst_ar); types whose
   flag has the opposite polarity (odd type values) are inverted with a
   final XORI. With op == SLJIT_OR the value is OR-ed into dst instead of
   stored. */
int sljit_emit_cond_value(struct sljit_compiler *compiler, int op, int dst, sljit_w dstw, int type)
{
	int sugg_dst_ar, dst_ar;

	CHECK_ERROR();
	check_sljit_emit_cond_value(compiler, op, dst, dstw, type);

	if (dst == SLJIT_UNUSED)
		return SLJIT_SUCCESS;

	/* Compute directly into dst when it is a plain register move,
	   otherwise into TMP_REG2. */
	sugg_dst_ar = DR((op == SLJIT_MOV && dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) ? dst : TMP_REG2);

	switch (type) {
	case SLJIT_C_EQUAL:
	case SLJIT_C_NOT_EQUAL:
		/* EQUAL_FLAG holds a "not equal" style value; SLTIU ..., 1
		   converts it to 1-if-zero. */
		FAIL_IF(push_inst(compiler, SLTIU | SA(EQUAL_FLAG) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
		break;
	case SLJIT_C_LESS:
	case SLJIT_C_GREATER_EQUAL:
	case SLJIT_C_FLOAT_LESS:
	case SLJIT_C_FLOAT_GREATER_EQUAL:
		dst_ar = ULESS_FLAG;
		break;
	case SLJIT_C_GREATER:
	case SLJIT_C_LESS_EQUAL:
	case SLJIT_C_FLOAT_GREATER:
	case SLJIT_C_FLOAT_LESS_EQUAL:
		dst_ar = UGREATER_FLAG;
		break;
	case SLJIT_C_SIG_LESS:
	case SLJIT_C_SIG_GREATER_EQUAL:
		dst_ar = LESS_FLAG;
		break;
	case SLJIT_C_SIG_GREATER:
	case SLJIT_C_SIG_LESS_EQUAL:
		dst_ar = GREATER_FLAG;
		break;
	case SLJIT_C_OVERFLOW:
	case SLJIT_C_NOT_OVERFLOW:
		dst_ar = OVERFLOW_FLAG;
		break;
	case SLJIT_C_MUL_OVERFLOW:
	case SLJIT_C_MUL_NOT_OVERFLOW:
		FAIL_IF(push_inst(compiler, SLTIU | SA(OVERFLOW_FLAG) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
		type ^= 0x1; /* Flip type bit for the XORI below. */
		break;
	case SLJIT_C_FLOAT_EQUAL:
	case SLJIT_C_FLOAT_NOT_EQUAL:
		dst_ar = EQUAL_FLAG;
		break;

	case SLJIT_C_FLOAT_NAN:
	case SLJIT_C_FLOAT_NOT_NAN:
		/* Extract bit 23 (the condition bit) from the FCSR. */
		FAIL_IF(push_inst(compiler, CFC1 | TA(sugg_dst_ar) | DA(FCSR_REG), sugg_dst_ar));
		FAIL_IF(push_inst(compiler, SRL | TA(sugg_dst_ar) | DA(sugg_dst_ar) | SH_IMM(23), sugg_dst_ar));
		FAIL_IF(push_inst(compiler, ANDI | SA(sugg_dst_ar) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
		break;

	default:
		SLJIT_ASSERT_STOP();
		dst_ar = sugg_dst_ar;
		break;
	}

	/* Odd type values need the inverted flag. */
	if (type & 0x1) {
		FAIL_IF(push_inst(compiler, XORI | SA(dst_ar) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
	}

	if (GET_OPCODE(op) == SLJIT_OR) {
		/* OR the 0/1 value into the destination via TMP_REG2. */
		if (DR(TMP_REG2) != dst_ar)
			FAIL_IF(push_inst(compiler, ADDU_W | SA(dst_ar) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
		return emit_op(compiler, op, CUMULATIVE_OP | LOGICAL_OP | IMM_OP, dst, dstw, dst, dstw, TMP_REG2, 0);
	}

	if (dst & SLJIT_MEM)
		return emit_op_mem(compiler, WORD_DATA, dst_ar, dst, dstw);

	if (sugg_dst_ar != dst_ar)
		return push_inst(compiler, ADDU_W | SA(dst_ar) | TA(0) | DA(sugg_dst_ar), sugg_dst_ar);
	return SLJIT_SUCCESS;
}
1702
1703 struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, int dst, sljit_w dstw, sljit_w init_value)
1704 {
1705 struct sljit_const *const_;
1706 int reg;
1707
1708 CHECK_ERROR_PTR();
1709 check_sljit_emit_const(compiler, dst, dstw, init_value);
1710
1711 const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
1712 PTR_FAIL_IF(!const_);
1713 set_const(const_, compiler);
1714
1715 reg = (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) ? dst : TMP_REG2;
1716
1717 PTR_FAIL_IF(emit_const(compiler, reg, init_value));
1718
1719 if (dst & SLJIT_MEM)
1720 PTR_FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0));
1721 return const_;
1722 }

  ViewVC Help
Powered by ViewVC 1.1.5