
Contents of /code/trunk/sljit/sljitNativeX86_32.c



Revision 1195
Thu Nov 1 15:21:27 2012 UTC by zherczeg
File MIME type: text/plain
File size: 15103 byte(s)
Another huge JIT compiler update.
/*
 * Stack-less Just-In-Time compiler
 *
 * Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright notice, this list of
 *      conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
 *      of conditions and the following disclaimer in the documentation and/or other materials
 *      provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/* x86 32-bit arch dependent functions. */

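/* Emits a single opcode byte followed by a machine word immediate. For
   example, emit_do_imm(compiler, 0xb8 + reg_map[r], imm) encodes
   "mov r, imm32"; see the _WIN32 stack growing path of sljit_emit_enter
   below for such a use. */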
static sljit_si emit_do_imm(struct sljit_compiler *compiler, sljit_ub opcode, sljit_sw imm)
{
	sljit_ub *buf;

	buf = (sljit_ub*)ensure_buf(compiler, 1 + 1 + sizeof(sljit_sw));
	FAIL_IF(!buf);
	INC_SIZE(1 + sizeof(sljit_sw));
	*buf++ = opcode;
	*(sljit_sw*)buf = imm;
	return SLJIT_SUCCESS;
}

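/* Emits the opcode of a jump with a 32 bit displacement: 0xe9 (jmp rel32)
   for unconditional jumps, 0xe8 (call rel32) for fast calls, or the two
   byte 0x0f 0x8x form for conditional jumps. The displacement is either
   resolved here (known absolute target) or marked with PATCH_MW and
   patched later, when the label address becomes known. */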
static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_si type)
{
	if (type == SLJIT_JUMP) {
		*code_ptr++ = 0xe9;
		jump->addr++;
	}
	else if (type >= SLJIT_FAST_CALL) {
		*code_ptr++ = 0xe8;
		jump->addr++;
	}
	else {
		*code_ptr++ = 0x0f;
		*code_ptr++ = get_jump_code(type);
		jump->addr += 2;
	}

	if (jump->flags & JUMP_LABEL)
		jump->flags |= PATCH_MW;
	else
		*(sljit_sw*)code_ptr = jump->u.target - (jump->addr + 4);
	code_ptr += 4;

	return code_ptr;
}

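/* Function prologue: saves TMP_REGISTER and up to three saved registers,
   moves the incoming arguments into SLJIT_SAVED_REG1-3 (from the fastcall
   argument registers or from the caller's stack frame), computes the local
   area layout, and finally allocates local_size bytes by subtracting it
   from SLJIT_LOCALS_REG. */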
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler, sljit_si args, sljit_si temporaries, sljit_si saveds, sljit_si local_size)
{
	sljit_si size;
	sljit_si locals_offset;
	sljit_ub *buf;

	CHECK_ERROR();
	check_sljit_emit_enter(compiler, args, temporaries, saveds, local_size);

	compiler->temporaries = temporaries;
	compiler->saveds = saveds;
	compiler->args = args;
	compiler->flags_saved = 0;
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
	compiler->logical_local_size = local_size;
#endif

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	size = 1 + (saveds <= 3 ? saveds : 3) + (args > 0 ? (args * 2) : 0) + (args > 2 ? 2 : 0);
#else
	size = 1 + (saveds <= 3 ? saveds : 3) + (args > 0 ? (2 + args * 3) : 0);
#endif
	buf = (sljit_ub*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!buf);

	INC_SIZE(size);
	PUSH_REG(reg_map[TMP_REGISTER]);
#if !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (args > 0) {
		*buf++ = 0x8b;
		*buf++ = 0xc4 | (reg_map[TMP_REGISTER] << 3);
	}
#endif
	if (saveds > 2)
		PUSH_REG(reg_map[SLJIT_SAVED_REG3]);
	if (saveds > 1)
		PUSH_REG(reg_map[SLJIT_SAVED_REG2]);
	if (saveds > 0)
		PUSH_REG(reg_map[SLJIT_SAVED_REG1]);

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (args > 0) {
		*buf++ = 0x8b;
		*buf++ = 0xc0 | (reg_map[SLJIT_SAVED_REG1] << 3) | reg_map[SLJIT_TEMPORARY_REG3];
	}
	if (args > 1) {
		*buf++ = 0x8b;
		*buf++ = 0xc0 | (reg_map[SLJIT_SAVED_REG2] << 3) | reg_map[SLJIT_TEMPORARY_REG2];
	}
	if (args > 2) {
		*buf++ = 0x8b;
		*buf++ = 0x44 | (reg_map[SLJIT_SAVED_REG3] << 3);
		*buf++ = 0x24;
		*buf++ = sizeof(sljit_sw) * (3 + 2); /* saveds >= 3 as well. */
	}
#else
	if (args > 0) {
		*buf++ = 0x8b;
		*buf++ = 0x40 | (reg_map[SLJIT_SAVED_REG1] << 3) | reg_map[TMP_REGISTER];
		*buf++ = sizeof(sljit_sw) * 2;
	}
	if (args > 1) {
		*buf++ = 0x8b;
		*buf++ = 0x40 | (reg_map[SLJIT_SAVED_REG2] << 3) | reg_map[TMP_REGISTER];
		*buf++ = sizeof(sljit_sw) * 3;
	}
	if (args > 2) {
		*buf++ = 0x8b;
		*buf++ = 0x40 | (reg_map[SLJIT_SAVED_REG3] << 3) | reg_map[TMP_REGISTER];
		*buf++ = sizeof(sljit_sw) * 4;
	}
#endif

	locals_offset = 2 * sizeof(sljit_uw);
	compiler->temporaries_start = locals_offset;
	if (temporaries > 3)
		locals_offset += (temporaries - 3) * sizeof(sljit_uw);
	compiler->saveds_start = locals_offset;
	if (saveds > 3)
		locals_offset += (saveds - 3) * sizeof(sljit_uw);
	compiler->locals_offset = locals_offset;
	local_size = locals_offset + ((local_size + sizeof(sljit_uw) - 1) & ~(sizeof(sljit_uw) - 1));

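	/* On Windows the stack must be grown page by page. For larger frames
	   the size is loaded into SLJIT_TEMPORARY_REG1 and sljit_grow_stack is
	   called, so the pages below the stack pointer are touched in order
	   before the frame is allocated. */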
#ifdef _WIN32
	if (local_size > 1024) {
		FAIL_IF(emit_do_imm(compiler, 0xb8 + reg_map[SLJIT_TEMPORARY_REG1], local_size));
		FAIL_IF(sljit_emit_ijump(compiler, SLJIT_CALL1, SLJIT_IMM, SLJIT_FUNC_OFFSET(sljit_grow_stack)));
	}
#endif

	compiler->local_size = local_size;
	SLJIT_ASSERT(local_size > 0);
	return emit_non_cum_binary(compiler, 0x2b, 0x29, 0x5 << 3, 0x2d,
		SLJIT_LOCALS_REG, 0, SLJIT_LOCALS_REG, 0, SLJIT_IMM, local_size);

	return SLJIT_SUCCESS;
}

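/* Records the same frame layout as sljit_emit_enter without emitting any
   code, so later instructions (e.g. sljit_emit_return) can be generated
   for code whose prologue was emitted elsewhere. */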
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_context(struct sljit_compiler *compiler, sljit_si args, sljit_si temporaries, sljit_si saveds, sljit_si local_size)
{
	sljit_si locals_offset;

	CHECK_ERROR_VOID();
	check_sljit_set_context(compiler, args, temporaries, saveds, local_size);

	compiler->temporaries = temporaries;
	compiler->saveds = saveds;
	compiler->args = args;
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
	compiler->logical_local_size = local_size;
#endif

	locals_offset = 2 * sizeof(sljit_uw);
	compiler->temporaries_start = locals_offset;
	if (temporaries > 3)
		locals_offset += (temporaries - 3) * sizeof(sljit_uw);
	compiler->saveds_start = locals_offset;
	if (saveds > 3)
		locals_offset += (saveds - 3) * sizeof(sljit_uw);
	compiler->locals_offset = locals_offset;
	compiler->local_size = locals_offset + ((local_size + sizeof(sljit_uw) - 1) & ~(sizeof(sljit_uw) - 1));
}

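/* Function epilogue: moves the return value if needed, releases the local
   area by adding local_size back to SLJIT_LOCALS_REG, pops the saved
   registers and TMP_REGISTER, and returns (RET n also pops the argument
   words left on the stack). */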
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
{
	sljit_si size;
	sljit_ub *buf;

	CHECK_ERROR();
	check_sljit_emit_return(compiler, op, src, srcw);
	SLJIT_ASSERT(compiler->args >= 0);

	compiler->flags_saved = 0;
	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));

	SLJIT_ASSERT(compiler->local_size > 0);
	FAIL_IF(emit_cum_binary(compiler, 0x03, 0x01, 0x0 << 3, 0x05,
		SLJIT_LOCALS_REG, 0, SLJIT_LOCALS_REG, 0, SLJIT_IMM, compiler->local_size));

	size = 2 + (compiler->saveds <= 3 ? compiler->saveds : 3);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args > 2)
		size += 2;
#else
	if (compiler->args > 0)
		size += 2;
#endif
	buf = (sljit_ub*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!buf);

	INC_SIZE(size);

	if (compiler->saveds > 0)
		POP_REG(reg_map[SLJIT_SAVED_REG1]);
	if (compiler->saveds > 1)
		POP_REG(reg_map[SLJIT_SAVED_REG2]);
	if (compiler->saveds > 2)
		POP_REG(reg_map[SLJIT_SAVED_REG3]);
	POP_REG(reg_map[TMP_REGISTER]);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args > 2)
		RETN(sizeof(sljit_sw));
	else
		RET();
#else
	if (compiler->args > 0)
		RETN(compiler->args * sizeof(sljit_sw));
	else
		RET();
#endif

	return SLJIT_SUCCESS;
}

/* --------------------------------------------------------------------- */
/* Operators */
/* --------------------------------------------------------------------- */

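/* Reserves buffer space for one x86 instruction and encodes its prefixes,
   ModRM byte, optional SIB byte, displacement and immediate. The opcode
   byte(s) are not written here (except for the immediate binary and shift
   forms); the caller stores them through the returned pointer, as in
   sljit_emit_fast_enter below:
     buf = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
     FAIL_IF(!buf);
     *buf++ = 0x8f;
*/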
/* Size contains the flags as well. */
static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si size,
	/* The register or immediate operand. */
	sljit_si a, sljit_sw imma,
	/* The general operand (not immediate). */
	sljit_si b, sljit_sw immb)
{
	sljit_ub *buf;
	sljit_ub *buf_ptr;
	sljit_si flags = size & ~0xf;
	sljit_si inst_size;

	/* Both cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BIN_INS | EX86_SHIFT_INS)) != (EX86_BIN_INS | EX86_SHIFT_INS));
	/* Size flags not allowed for typed instructions. */
	SLJIT_ASSERT(!(flags & (EX86_BIN_INS | EX86_SHIFT_INS)) || (flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) == 0);
	/* Both size flags cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) != (EX86_BYTE_ARG | EX86_HALF_ARG));
#if (defined SLJIT_SSE2 && SLJIT_SSE2)
	/* SSE2 and immediate is not possible. */
	SLJIT_ASSERT(!(a & SLJIT_IMM) || !(flags & EX86_SSE2));
	SLJIT_ASSERT((flags & (EX86_PREF_F2 | EX86_PREF_F3)) != (EX86_PREF_F2 | EX86_PREF_F3)
		&& (flags & (EX86_PREF_F2 | EX86_PREF_66)) != (EX86_PREF_F2 | EX86_PREF_66)
		&& (flags & (EX86_PREF_F3 | EX86_PREF_66)) != (EX86_PREF_F3 | EX86_PREF_66));
#endif

	size &= 0xf;
	inst_size = size;

#if (defined SLJIT_SSE2 && SLJIT_SSE2)
	if (flags & (EX86_PREF_F2 | EX86_PREF_F3))
		inst_size++;
#endif
	if (flags & EX86_PREF_66)
		inst_size++;

	/* Calculate size of b. */
	inst_size += 1; /* mod r/m byte. */
	if (b & SLJIT_MEM) {
		if ((b & 0x0f) == SLJIT_UNUSED)
			inst_size += sizeof(sljit_sw);
		else if (immb != 0 && !(b & 0xf0)) {
			/* Immediate operand. */
			if (immb <= 127 && immb >= -128)
				inst_size += sizeof(sljit_sb);
			else
				inst_size += sizeof(sljit_sw);
		}

		if ((b & 0xf) == SLJIT_LOCALS_REG && !(b & 0xf0))
			b |= SLJIT_LOCALS_REG << 4;

		if ((b & 0xf0) != SLJIT_UNUSED)
			inst_size += 1; /* SIB byte. */
	}

	/* Calculate size of a. */
	if (a & SLJIT_IMM) {
		if (flags & EX86_BIN_INS) {
			if (imma <= 127 && imma >= -128) {
				inst_size += 1;
				flags |= EX86_BYTE_ARG;
			} else
				inst_size += 4;
		}
		else if (flags & EX86_SHIFT_INS) {
			imma &= 0x1f;
			if (imma != 1) {
				inst_size++;
				flags |= EX86_BYTE_ARG;
			}
		} else if (flags & EX86_BYTE_ARG)
			inst_size++;
		else if (flags & EX86_HALF_ARG)
			inst_size += sizeof(short);
		else
			inst_size += sizeof(sljit_sw);
	}
	else
		SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);

	buf = (sljit_ub*)ensure_buf(compiler, 1 + inst_size);
	PTR_FAIL_IF(!buf);

	/* Encoding the byte. */
	INC_SIZE(inst_size);
#if (defined SLJIT_SSE2 && SLJIT_SSE2)
	if (flags & EX86_PREF_F2)
		*buf++ = 0xf2;
	if (flags & EX86_PREF_F3)
		*buf++ = 0xf3;
#endif
	if (flags & EX86_PREF_66)
		*buf++ = 0x66;

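	/* buf points to the (still empty) opcode area of 'size' bytes; buf_ptr
	   points past it, to where the ModRM byte is encoded. */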
	buf_ptr = buf + size;

	/* Encode mod/rm byte. */
	if (!(flags & EX86_SHIFT_INS)) {
		if ((flags & EX86_BIN_INS) && (a & SLJIT_IMM))
			*buf = (flags & EX86_BYTE_ARG) ? 0x83 : 0x81;

		if ((a & SLJIT_IMM) || (a == 0))
			*buf_ptr = 0;
#if (defined SLJIT_SSE2 && SLJIT_SSE2)
		else if (!(flags & EX86_SSE2))
			*buf_ptr = reg_map[a] << 3;
		else
			*buf_ptr = a << 3;
#else
		else
			*buf_ptr = reg_map[a] << 3;
#endif
	}
	else {
		if (a & SLJIT_IMM) {
			if (imma == 1)
				*buf = 0xd1;
			else
				*buf = 0xc1;
		} else
			*buf = 0xd3;
		*buf_ptr = 0;
	}

	if (!(b & SLJIT_MEM))
#if (defined SLJIT_SSE2 && SLJIT_SSE2)
		*buf_ptr++ |= 0xc0 + ((!(flags & EX86_SSE2)) ? reg_map[b] : b);
#else
		*buf_ptr++ |= 0xc0 + reg_map[b];
#endif
	else if ((b & 0x0f) != SLJIT_UNUSED) {
		if ((b & 0xf0) == SLJIT_UNUSED || (b & 0xf0) == (SLJIT_LOCALS_REG << 4)) {
			if (immb != 0) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr |= 0x40;
				else
					*buf_ptr |= 0x80;
			}

			if ((b & 0xf0) == SLJIT_UNUSED)
				*buf_ptr++ |= reg_map[b & 0x0f];
			else {
				*buf_ptr++ |= 0x04;
				*buf_ptr++ = reg_map[b & 0x0f] | (reg_map[(b >> 4) & 0x0f] << 3);
			}

			if (immb != 0) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr++ = immb; /* 8 bit displacement. */
				else {
					*(sljit_sw*)buf_ptr = immb; /* 32 bit displacement. */
					buf_ptr += sizeof(sljit_sw);
				}
			}
		}
		else {
			*buf_ptr++ |= 0x04;
			*buf_ptr++ = reg_map[b & 0x0f] | (reg_map[(b >> 4) & 0x0f] << 3) | (immb << 6);
		}
	}
	else {
		*buf_ptr++ |= 0x05;
		*(sljit_sw*)buf_ptr = immb; /* 32 bit displacement. */
		buf_ptr += sizeof(sljit_sw);
	}

	if (a & SLJIT_IMM) {
		if (flags & EX86_BYTE_ARG)
			*buf_ptr = imma;
		else if (flags & EX86_HALF_ARG)
			*(short*)buf_ptr = imma;
		else if (!(flags & EX86_SHIFT_INS))
			*(sljit_sw*)buf_ptr = imma;
	}

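	/* Non-shift callers receive the empty opcode area and store the opcode
	   there; shift callers receive the byte after the already written shift
	   opcode (the ModRM byte) and OR the shift sub-opcode into it. */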
	return !(flags & EX86_SHIFT_INS) ? buf : (buf + 1);
}

/* --------------------------------------------------------------------- */
/* Call / return instructions */
/* --------------------------------------------------------------------- */

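/* Sets up the arguments of a SLJIT_CALLx ijump. Under fastcall the third
   argument (if any) is pushed and SLJIT_TEMPORARY_REG1 is copied into
   SLJIT_TEMPORARY_REG3, the first fastcall argument register (see
   sljit_emit_enter above); the second argument is already in place.
   Otherwise all arguments are pushed right to left. */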
static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, sljit_si type)
{
	sljit_ub *buf;

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	buf = (sljit_ub*)ensure_buf(compiler, type >= SLJIT_CALL3 ? 1 + 2 + 1 : 1 + 2);
	FAIL_IF(!buf);
	INC_SIZE(type >= SLJIT_CALL3 ? 2 + 1 : 2);

	if (type >= SLJIT_CALL3)
		PUSH_REG(reg_map[SLJIT_TEMPORARY_REG3]);
	*buf++ = 0x8b;
	*buf++ = 0xc0 | (reg_map[SLJIT_TEMPORARY_REG3] << 3) | reg_map[SLJIT_TEMPORARY_REG1];
#else
	buf = (sljit_ub*)ensure_buf(compiler, type - SLJIT_CALL0 + 1);
	FAIL_IF(!buf);
	INC_SIZE(type - SLJIT_CALL0);
	if (type >= SLJIT_CALL3)
		PUSH_REG(reg_map[SLJIT_TEMPORARY_REG3]);
	if (type >= SLJIT_CALL2)
		PUSH_REG(reg_map[SLJIT_TEMPORARY_REG2]);
	PUSH_REG(reg_map[SLJIT_TEMPORARY_REG1]);
#endif
	return SLJIT_SUCCESS;
}

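/* Entry point of a SLJIT_FAST_CALL target: the return address pushed by the
   call instruction is popped into dst (pop reg, pop r/m32 via 0x8f /0, or
   into TMP_REGISTER when dst is unused, which simply removes the address
   from the stack). */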
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw)
{
	sljit_ub *buf;

	CHECK_ERROR();
	check_sljit_emit_fast_enter(compiler, dst, dstw);
	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) {
		buf = (sljit_ub*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!buf);

		INC_SIZE(1);
		POP_REG(reg_map[dst]);
		return SLJIT_SUCCESS;
	}
	else if (dst & SLJIT_MEM) {
		buf = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
		FAIL_IF(!buf);
		*buf++ = 0x8f;
		return SLJIT_SUCCESS;
	}

	/* For UNUSED dst. Uncommon, but possible. */
	buf = (sljit_ub*)ensure_buf(compiler, 1 + 1);
	FAIL_IF(!buf);

	INC_SIZE(1);
	POP_REG(reg_map[TMP_REGISTER]);
	return SLJIT_SUCCESS;
}

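/* Returns to the address saved by sljit_emit_fast_enter: the stored return
   address is pushed back onto the stack (from a register, from memory via
   0xff /6, or as an immediate via 0x68) and a RET transfers control to it. */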
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_si src, sljit_sw srcw)
{
	sljit_ub *buf;

	CHECK_ERROR();
	check_sljit_emit_fast_return(compiler, src, srcw);
	ADJUST_LOCAL_OFFSET(src, srcw);

	CHECK_EXTRA_REGS(src, srcw, (void)0);

	if (src >= SLJIT_TEMPORARY_REG1 && src <= SLJIT_NO_REGISTERS) {
		buf = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 1);
		FAIL_IF(!buf);

		INC_SIZE(1 + 1);
		PUSH_REG(reg_map[src]);
	}
	else if (src & SLJIT_MEM) {
		buf = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
		FAIL_IF(!buf);
		*buf++ = 0xff;
		*buf |= 6 << 3;

		buf = (sljit_ub*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!buf);
		INC_SIZE(1);
	}
	else {
		/* SLJIT_IMM. */
		buf = (sljit_ub*)ensure_buf(compiler, 1 + 5 + 1);
		FAIL_IF(!buf);

		INC_SIZE(5 + 1);
		*buf++ = 0x68;
		*(sljit_sw*)buf = srcw;
		buf += sizeof(sljit_sw);
	}

	RET();
	return SLJIT_SUCCESS;
}
