/*
 *    Stack-less Just-In-Time compiler
 *
 *    Copyright 2009-2010 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright notice, this list of
 *      conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
 *      of conditions and the following disclaimer in the documentation and/or other materials
 *      provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/* x86 32-bit arch dependent functions. */

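/* Emits a one-byte opcode followed by a machine-word immediate (for example
   0xb8+reg, MOV reg, imm32, as used for the _WIN32 stack probe below). */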
static int emit_do_imm(struct sljit_compiler *compiler, sljit_ub opcode, sljit_w imm)
{
	sljit_ub *buf;

	buf = (sljit_ub*)ensure_buf(compiler, 1 + 1 + sizeof(sljit_w));
	FAIL_IF(!buf);
	INC_SIZE(1 + sizeof(sljit_w));
	*buf++ = opcode;
	*(sljit_w*)buf = imm;
	return SLJIT_SUCCESS;
}

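/* Emits the opcode of a jump or call that takes a 32-bit displacement:
   0xe9 (jmp rel32), 0xe8 (call rel32) or 0x0f 0x8x (jcc rel32). The
   displacement is written immediately for absolute targets and patched
   later (PATCH_MW) when the jump refers to a label. */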
static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, int type)
{
	if (type == SLJIT_JUMP) {
		*code_ptr++ = 0xe9;
		jump->addr++;
	}
	else if (type >= SLJIT_FAST_CALL) {
		*code_ptr++ = 0xe8;
		jump->addr++;
	}
	else {
		*code_ptr++ = 0x0f;
		*code_ptr++ = get_jump_code(type);
		jump->addr += 2;
	}

	if (jump->flags & JUMP_LABEL)
		jump->flags |= PATCH_MW;
	else
		*(sljit_w*)code_ptr = jump->u.target - (jump->addr + 4);
	code_ptr += 4;

	return code_ptr;
}

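/* Function prologue: saves TMP_REGISTER and the used general registers,
   moves the incoming arguments into the general registers and finally
   allocates the local area on the stack. */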
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_enter(struct sljit_compiler *compiler, int args, int temporaries, int generals, int local_size)
{
	int size;
	sljit_ub *buf;

	CHECK_ERROR();
	check_sljit_emit_enter(compiler, args, temporaries, generals, local_size);

	compiler->temporaries = temporaries;
	compiler->generals = generals;
	compiler->args = args;
	compiler->flags_saved = 0;

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	size = 1 + (generals <= 3 ? generals : 3) + (args > 0 ? (args * 2) : 0) + (args > 2 ? 2 : 0);
#else
	size = 1 + (generals <= 3 ? generals : 3) + (args > 0 ? (2 + args * 3) : 0);
#endif
	buf = (sljit_ub*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!buf);

	INC_SIZE(size);
	PUSH_REG(reg_map[TMP_REGISTER]);
#if !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (args > 0) {
		*buf++ = 0x8b;
		*buf++ = 0xc4 | (reg_map[TMP_REGISTER] << 3);
	}
#endif
	if (generals > 2)
		PUSH_REG(reg_map[SLJIT_GENERAL_REG3]);
	if (generals > 1)
		PUSH_REG(reg_map[SLJIT_GENERAL_REG2]);
	if (generals > 0)
		PUSH_REG(reg_map[SLJIT_GENERAL_REG1]);

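	/* Load the incoming arguments into the general registers. 0x8b is the
	   x86 MOV r32, r/m32 opcode; the following byte(s) encode the operands. */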
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (args > 0) {
		*buf++ = 0x8b;
		*buf++ = 0xc0 | (reg_map[SLJIT_GENERAL_REG1] << 3) | reg_map[SLJIT_TEMPORARY_REG3];
	}
	if (args > 1) {
		*buf++ = 0x8b;
		*buf++ = 0xc0 | (reg_map[SLJIT_GENERAL_REG2] << 3) | reg_map[SLJIT_TEMPORARY_REG2];
	}
	if (args > 2) {
		*buf++ = 0x8b;
		*buf++ = 0x44 | (reg_map[SLJIT_GENERAL_REG3] << 3);
		*buf++ = 0x24;
		*buf++ = sizeof(sljit_w) * (3 + 2); /* generals >= 3 as well. */
	}
#else
	if (args > 0) {
		*buf++ = 0x8b;
		*buf++ = 0x40 | (reg_map[SLJIT_GENERAL_REG1] << 3) | reg_map[TMP_REGISTER];
		*buf++ = sizeof(sljit_w) * 2;
	}
	if (args > 1) {
		*buf++ = 0x8b;
		*buf++ = 0x40 | (reg_map[SLJIT_GENERAL_REG2] << 3) | reg_map[TMP_REGISTER];
		*buf++ = sizeof(sljit_w) * 3;
	}
	if (args > 2) {
		*buf++ = 0x8b;
		*buf++ = 0x40 | (reg_map[SLJIT_GENERAL_REG3] << 3) | reg_map[TMP_REGISTER];
		*buf++ = sizeof(sljit_w) * 4;
	}
#endif

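	/* Round the local area up to a word boundary and reserve stack slots for
	   the temporaries and generals that are not kept in registers. */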
	local_size = (local_size + sizeof(sljit_uw) - 1) & ~(sizeof(sljit_uw) - 1);
	compiler->temporaries_start = local_size;
	if (temporaries > 3)
		local_size += (temporaries - 3) * sizeof(sljit_uw);
	compiler->generals_start = local_size;
	if (generals > 3)
		local_size += (generals - 3) * sizeof(sljit_uw);

#ifdef _WIN32
	if (local_size > 1024) {
		FAIL_IF(emit_do_imm(compiler, 0xb8 + reg_map[SLJIT_TEMPORARY_REG1], local_size));
		FAIL_IF(sljit_emit_ijump(compiler, SLJIT_CALL1, SLJIT_IMM, SLJIT_FUNC_OFFSET(sljit_touch_stack)));
	}
#endif

	compiler->local_size = local_size;
	if (local_size > 0)
		return emit_non_cum_binary(compiler, 0x2b, 0x29, 0x5 << 3, 0x2d,
			SLJIT_LOCALS_REG, 0, SLJIT_LOCALS_REG, 0, SLJIT_IMM, local_size);

	/* Mov arguments to general registers. */
	return SLJIT_SUCCESS;
}

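/* Like sljit_emit_enter, but only records the frame layout in the compiler
   without emitting any prologue code. */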
SLJIT_API_FUNC_ATTRIBUTE void sljit_fake_enter(struct sljit_compiler *compiler, int args, int temporaries, int generals, int local_size)
{
	CHECK_ERROR_VOID();
	check_sljit_fake_enter(compiler, args, temporaries, generals, local_size);

	compiler->temporaries = temporaries;
	compiler->generals = generals;
	compiler->args = args;
	compiler->local_size = (local_size + sizeof(sljit_uw) - 1) & ~(sizeof(sljit_uw) - 1);
	compiler->temporaries_start = compiler->local_size;
	if (temporaries > 3)
		compiler->local_size += (temporaries - 3) * sizeof(sljit_uw);
	compiler->generals_start = compiler->local_size;
	if (generals > 3)
		compiler->local_size += (generals - 3) * sizeof(sljit_uw);
}

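/* Function epilogue: moves the return value into SLJIT_RETURN_REG if needed,
   releases the local area, pops the saved registers and returns, using RET n
   when stack-passed arguments have to be removed by the callee. */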
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_return(struct sljit_compiler *compiler, int src, sljit_w srcw)
{
	int size;
	sljit_ub *buf;

	CHECK_ERROR();
	check_sljit_emit_return(compiler, src, srcw);
	SLJIT_ASSERT(compiler->args >= 0);

	compiler->flags_saved = 0;
	CHECK_EXTRA_REGS(src, srcw, (void)0);

	if (src != SLJIT_UNUSED && src != SLJIT_RETURN_REG)
		FAIL_IF(emit_mov(compiler, SLJIT_RETURN_REG, 0, src, srcw));

	if (compiler->local_size > 0)
		FAIL_IF(emit_cum_binary(compiler, 0x03, 0x01, 0x0 << 3, 0x05,
			SLJIT_LOCALS_REG, 0, SLJIT_LOCALS_REG, 0, SLJIT_IMM, compiler->local_size));

	size = 2 + (compiler->generals <= 3 ? compiler->generals : 3);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args > 2)
		size += 2;
#else
	if (compiler->args > 0)
		size += 2;
#endif
	buf = (sljit_ub*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!buf);

	INC_SIZE(size);

	if (compiler->generals > 0)
		POP_REG(reg_map[SLJIT_GENERAL_REG1]);
	if (compiler->generals > 1)
		POP_REG(reg_map[SLJIT_GENERAL_REG2]);
	if (compiler->generals > 2)
		POP_REG(reg_map[SLJIT_GENERAL_REG3]);
	POP_REG(reg_map[TMP_REGISTER]);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args > 2)
		RETN(sizeof(sljit_w));
	else
		RET();
#else
	if (compiler->args > 0)
		RETN(compiler->args * sizeof(sljit_w));
	else
		RET();
#endif

	return SLJIT_SUCCESS;
}

/* --------------------------------------------------------------------- */
/*  Operators                                                            */
/* --------------------------------------------------------------------- */

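/* Generic instruction emitter: first computes the encoded length (prefixes,
   mod/rm byte, optional SIB byte, displacement and immediate), then reserves
   that many bytes and encodes the operands. The returned pointer addresses
   the opcode byte(s), which the caller fills in afterwards. */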
/* Size contains the flags as well. */
static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, int size,
	/* The register or immediate operand. */
	int a, sljit_w imma,
	/* The general operand (not immediate). */
	int b, sljit_w immb)
{
	sljit_ub *buf;
	sljit_ub *buf_ptr;
	int flags = size & ~0xf;
	int inst_size;

	/* Both cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BIN_INS | EX86_SHIFT_INS)) != (EX86_BIN_INS | EX86_SHIFT_INS));
	/* Size flags not allowed for typed instructions. */
	SLJIT_ASSERT(!(flags & (EX86_BIN_INS | EX86_SHIFT_INS)) || (flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) == 0);
	/* Both size flags cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) != (EX86_BYTE_ARG | EX86_HALF_ARG));
#if (defined SLJIT_SSE2 && SLJIT_SSE2)
	/* SSE2 and immediate is not possible. */
	SLJIT_ASSERT(!(a & SLJIT_IMM) || !(flags & EX86_SSE2));
#endif

	size &= 0xf;
	inst_size = size;

#if (defined SLJIT_SSE2 && SLJIT_SSE2)
	if (flags & EX86_PREF_F2)
		inst_size++;
#endif
	if (flags & EX86_PREF_66)
		inst_size++;

	/* Calculate size of b. */
	inst_size += 1; /* mod r/m byte. */
	if (b & SLJIT_MEM) {
		if ((b & 0x0f) == SLJIT_UNUSED)
			inst_size += sizeof(sljit_w);
		else if (immb != 0 && !(b & 0xf0)) {
			/* Immediate operand. */
			if (immb <= 127 && immb >= -128)
				inst_size += sizeof(sljit_b);
			else
				inst_size += sizeof(sljit_w);
		}

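		/* SLJIT_LOCALS_REG is the stack pointer; esp as a base register always
		   needs a SIB byte, so force the SIB (index present) form here. */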
		if ((b & 0xf) == SLJIT_LOCALS_REG && !(b & 0xf0))
			b |= SLJIT_LOCALS_REG << 4;

		if ((b & 0xf0) != SLJIT_UNUSED)
			inst_size += 1; /* SIB byte. */
	}

	/* Calculate size of a. */
	if (a & SLJIT_IMM) {
		if (flags & EX86_BIN_INS) {
			if (imma <= 127 && imma >= -128) {
				inst_size += 1;
				flags |= EX86_BYTE_ARG;
			} else
				inst_size += 4;
		}
		else if (flags & EX86_SHIFT_INS) {
			imma &= 0x1f;
			if (imma != 1) {
				inst_size++;
				flags |= EX86_BYTE_ARG;
			}
		} else if (flags & EX86_BYTE_ARG)
			inst_size++;
		else if (flags & EX86_HALF_ARG)
			inst_size += sizeof(short);
		else
			inst_size += sizeof(sljit_w);
	}
	else
		SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);

	buf = (sljit_ub*)ensure_buf(compiler, 1 + inst_size);
	PTR_FAIL_IF(!buf);

	/* Encoding the byte. */
	INC_SIZE(inst_size);
#if (defined SLJIT_SSE2 && SLJIT_SSE2)
	if (flags & EX86_PREF_F2)
		*buf++ = 0xf2;
#endif
	if (flags & EX86_PREF_66)
		*buf++ = 0x66;

	buf_ptr = buf + size;

	/* Encode mod/rm byte. */
	if (!(flags & EX86_SHIFT_INS)) {
		if ((flags & EX86_BIN_INS) && (a & SLJIT_IMM))
			*buf = (flags & EX86_BYTE_ARG) ? 0x83 : 0x81;

		if ((a & SLJIT_IMM) || (a == 0))
			*buf_ptr = 0;
#if (defined SLJIT_SSE2 && SLJIT_SSE2)
		else if (!(flags & EX86_SSE2))
			*buf_ptr = reg_map[a] << 3;
		else
			*buf_ptr = a << 3;
#else
		else
			*buf_ptr = reg_map[a] << 3;
#endif
	}
	else {
		if (a & SLJIT_IMM) {
			if (imma == 1)
				*buf = 0xd1;
			else
				*buf = 0xc1;
		} else
			*buf = 0xd3;
		*buf_ptr = 0;
	}

	if (!(b & SLJIT_MEM))
#if (defined SLJIT_SSE2 && SLJIT_SSE2)
		*buf_ptr++ |= 0xc0 + ((!(flags & EX86_SSE2)) ? reg_map[b] : b);
#else
		*buf_ptr++ |= 0xc0 + reg_map[b];
#endif
	else if ((b & 0x0f) != SLJIT_UNUSED) {
		if ((b & 0xf0) == SLJIT_UNUSED || (b & 0xf0) == (SLJIT_LOCALS_REG << 4)) {
			if (immb != 0) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr |= 0x40;
				else
					*buf_ptr |= 0x80;
			}

			if ((b & 0xf0) == SLJIT_UNUSED)
				*buf_ptr++ |= reg_map[b & 0x0f];
			else {
				*buf_ptr++ |= 0x04;
				*buf_ptr++ = reg_map[b & 0x0f] | (reg_map[(b >> 4) & 0x0f] << 3);
			}

			if (immb != 0) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr++ = immb; /* 8 bit displacement. */
				else {
					*(sljit_w*)buf_ptr = immb; /* 32 bit displacement. */
					buf_ptr += sizeof(sljit_w);
				}
			}
		}
		else {
			*buf_ptr++ |= 0x04;
			*buf_ptr++ = reg_map[b & 0x0f] | (reg_map[(b >> 4) & 0x0f] << 3) | (immb << 6);
		}
	}
	else {
		*buf_ptr++ |= 0x05;
		*(sljit_w*)buf_ptr = immb; /* 32 bit displacement. */
		buf_ptr += sizeof(sljit_w);
	}

	if (a & SLJIT_IMM) {
		if (flags & EX86_BYTE_ARG)
			*buf_ptr = imma;
		else if (flags & EX86_HALF_ARG)
			*(short*)buf_ptr = imma;
		else if (!(flags & EX86_SHIFT_INS))
			*(sljit_w*)buf_ptr = imma;
	}

	return !(flags & EX86_SHIFT_INS) ? buf : (buf + 1);
}

/* --------------------------------------------------------------------- */
/*  Call / return instructions                                           */
/* --------------------------------------------------------------------- */

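/* Arranges the outgoing arguments of an SLJIT_CALLx right before the call:
   pushes and/or moves the argument registers into the places expected by the
   target calling convention. */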
static SLJIT_INLINE int call_with_args(struct sljit_compiler *compiler, int type)
{
	sljit_ub *buf;

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	buf = (sljit_ub*)ensure_buf(compiler, type >= SLJIT_CALL3 ? 1 + 2 + 1 : 1 + 2);
	FAIL_IF(!buf);
	INC_SIZE(type >= SLJIT_CALL3 ? 2 + 1 : 2);

	if (type >= SLJIT_CALL3)
		PUSH_REG(reg_map[SLJIT_TEMPORARY_REG3]);
	*buf++ = 0x8b;
	*buf++ = 0xc0 | (reg_map[SLJIT_TEMPORARY_REG3] << 3) | reg_map[SLJIT_TEMPORARY_REG1];
#else
	buf = (sljit_ub*)ensure_buf(compiler, type - SLJIT_CALL0 + 1);
	FAIL_IF(!buf);
	INC_SIZE(type - SLJIT_CALL0);
	if (type >= SLJIT_CALL3)
		PUSH_REG(reg_map[SLJIT_TEMPORARY_REG3]);
	if (type >= SLJIT_CALL2)
		PUSH_REG(reg_map[SLJIT_TEMPORARY_REG2]);
	PUSH_REG(reg_map[SLJIT_TEMPORARY_REG1]);
#endif
	return SLJIT_SUCCESS;
}

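/* Entry point of a fast call: stores the return address (pushed by the 0xe8
   call) into dst by emitting a POP. */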
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fast_enter(struct sljit_compiler *compiler, int dst, sljit_w dstw, int args, int temporaries, int generals, int local_size)
{
	sljit_ub *buf;

	CHECK_ERROR();
	check_sljit_emit_fast_enter(compiler, dst, dstw, args, temporaries, generals, local_size);

	compiler->temporaries = temporaries;
	compiler->generals = generals;
	compiler->args = args;
	compiler->local_size = (local_size + sizeof(sljit_uw) - 1) & ~(sizeof(sljit_uw) - 1);
	compiler->temporaries_start = compiler->local_size;
	if (temporaries > 3)
		compiler->local_size += (temporaries - 3) * sizeof(sljit_uw);
	compiler->generals_start = compiler->local_size;
	if (generals > 3)
		compiler->local_size += (generals - 3) * sizeof(sljit_uw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) {
		buf = (sljit_ub*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!buf);

		INC_SIZE(1);
		POP_REG(reg_map[dst]);
		return SLJIT_SUCCESS;
	}
	else if (dst & SLJIT_MEM) {
		buf = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
		FAIL_IF(!buf);
		*buf++ = 0x8f;
		return SLJIT_SUCCESS;
	}

	/* For UNUSED dst. Uncommon, but possible. */
	buf = (sljit_ub*)ensure_buf(compiler, 1 + 1);
	FAIL_IF(!buf);

	INC_SIZE(1);
	POP_REG(reg_map[TMP_REGISTER]);
	return SLJIT_SUCCESS;
}

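/* Counterpart of sljit_emit_fast_enter: pushes the return address held in src
   back onto the stack and returns to it with RET. */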
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fast_return(struct sljit_compiler *compiler, int src, sljit_w srcw)
{
	sljit_ub *buf;

	CHECK_ERROR();
	check_sljit_emit_fast_return(compiler, src, srcw);

	CHECK_EXTRA_REGS(src, srcw, (void)0);

	if (src >= SLJIT_TEMPORARY_REG1 && src <= SLJIT_NO_REGISTERS) {
		buf = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 1);
		FAIL_IF(!buf);

		INC_SIZE(1 + 1);
		PUSH_REG(reg_map[src]);
	}
	else if (src & SLJIT_MEM) {
		buf = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
		FAIL_IF(!buf);
		*buf++ = 0xff;
		*buf |= 6 << 3;

		buf = (sljit_ub*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!buf);
		INC_SIZE(1);
	}
	else {
		/* SLJIT_IMM. */
		buf = (sljit_ub*)ensure_buf(compiler, 1 + 5 + 1);
		FAIL_IF(!buf);

		INC_SIZE(5 + 1);
		*buf++ = 0x68;
		*(sljit_w*)buf = srcw;
		buf += sizeof(sljit_w);
	}

	RET();
	return SLJIT_SUCCESS;
}