Mercurial > hg > tinycc
annotate i386/asm.c @ 551:d8b3fa09ca5d
One of the members of "struct Sym" is a token. Rename it from "v" to "token", and change local variables
it's copied into from "v" to "token" as well.
author | Rob Landley <rob@landley.net> |
---|---|
date | Thu, 17 Jan 2008 23:26:25 -0600 |
parents | f304c7e3de8d |
children | 4533aa54ffcf |
rev | line source |
---|---|
217 | 1 /* |
2 * i386 specific functions for TCC assembler | |
3 * | |
4 * Copyright (c) 2001, 2002 Fabrice Bellard | |
5 * | |
499
2b451d2e68ea
Exercise LGPL clause 3 and convert more notices from LGPL to GPLv2. (If you
Rob Landley <rob@landley.net>
parents:
449
diff
changeset
|
6 * Licensed under GPLv2, see file LICENSE in this tarball. |
217 | 7 */ |
8 | |
9 #define MAX_OPERANDS 3 | |
10 | |
11 typedef struct ASMInstr { | |
12 uint16_t sym; | |
13 uint16_t opcode; | |
14 uint16_t instr_type; | |
15 #define OPC_JMP 0x01 /* jmp operand */ | |
 16 #define OPC_B 0x02 /* only used with OPC_WL */ | |
17 #define OPC_WL 0x04 /* accepts w, l or no suffix */ | |
18 #define OPC_BWL (OPC_B | OPC_WL) /* accepts b, w, l or no suffix */ | |
19 #define OPC_REG 0x08 /* register is added to opcode */ | |
20 #define OPC_MODRM 0x10 /* modrm encoding */ | |
21 #define OPC_FWAIT 0x20 /* add fwait opcode */ | |
22 #define OPC_TEST 0x40 /* test opcodes */ | |
23 #define OPC_SHIFT 0x80 /* shift opcodes */ | |
24 #define OPC_D16 0x0100 /* generate data16 prefix */ | |
25 #define OPC_ARITH 0x0200 /* arithmetic opcodes */ | |
26 #define OPC_SHORTJMP 0x0400 /* short jmp operand */ | |
27 #define OPC_FARITH 0x0800 /* FPU arithmetic opcodes */ | |
28 #define OPC_GROUP_SHIFT 13 | |
29 | |
30 /* in order to compress the operand type, we use specific operands and | |
31 we or only with EA */ | |
32 #define OPT_REG8 0 /* warning: value is hardcoded from TOK_ASM_xxx */ | |
33 #define OPT_REG16 1 /* warning: value is hardcoded from TOK_ASM_xxx */ | |
34 #define OPT_REG32 2 /* warning: value is hardcoded from TOK_ASM_xxx */ | |
35 #define OPT_MMX 3 /* warning: value is hardcoded from TOK_ASM_xxx */ | |
36 #define OPT_SSE 4 /* warning: value is hardcoded from TOK_ASM_xxx */ | |
37 #define OPT_CR 5 /* warning: value is hardcoded from TOK_ASM_xxx */ | |
38 #define OPT_TR 6 /* warning: value is hardcoded from TOK_ASM_xxx */ | |
39 #define OPT_DB 7 /* warning: value is hardcoded from TOK_ASM_xxx */ | |
40 #define OPT_SEG 8 | |
41 #define OPT_ST 9 | |
42 #define OPT_IM8 10 | |
43 #define OPT_IM8S 11 | |
44 #define OPT_IM16 12 | |
45 #define OPT_IM32 13 | |
46 #define OPT_EAX 14 /* %al, %ax or %eax register */ | |
47 #define OPT_ST0 15 /* %st(0) register */ | |
48 #define OPT_CL 16 /* %cl register */ | |
49 #define OPT_DX 17 /* %dx register */ | |
50 #define OPT_ADDR 18 /* OP_EA with only offset */ | |
51 #define OPT_INDIR 19 /* *(expr) */ | |
52 | |
53 /* composite types */ | |
54 #define OPT_COMPOSITE_FIRST 20 | |
55 #define OPT_IM 20 /* IM8 | IM16 | IM32 */ | |
56 #define OPT_REG 21 /* REG8 | REG16 | REG32 */ | |
57 #define OPT_REGW 22 /* REG16 | REG32 */ | |
58 #define OPT_IMW 23 /* IM16 | IM32 */ | |
59 | |
60 /* can be ored with any OPT_xxx */ | |
61 #define OPT_EA 0x80 | |
62 | |
63 uint8_t nb_ops; | |
64 uint8_t op_type[MAX_OPERANDS]; /* see OP_xxx */ | |
65 } ASMInstr; | |
66 | |
67 typedef struct Operand { | |
68 uint32_t type; | |
69 #define OP_REG8 (1 << OPT_REG8) | |
70 #define OP_REG16 (1 << OPT_REG16) | |
71 #define OP_REG32 (1 << OPT_REG32) | |
72 #define OP_MMX (1 << OPT_MMX) | |
73 #define OP_SSE (1 << OPT_SSE) | |
74 #define OP_CR (1 << OPT_CR) | |
75 #define OP_TR (1 << OPT_TR) | |
76 #define OP_DB (1 << OPT_DB) | |
77 #define OP_SEG (1 << OPT_SEG) | |
78 #define OP_ST (1 << OPT_ST) | |
79 #define OP_IM8 (1 << OPT_IM8) | |
80 #define OP_IM8S (1 << OPT_IM8S) | |
81 #define OP_IM16 (1 << OPT_IM16) | |
82 #define OP_IM32 (1 << OPT_IM32) | |
83 #define OP_EAX (1 << OPT_EAX) | |
84 #define OP_ST0 (1 << OPT_ST0) | |
85 #define OP_CL (1 << OPT_CL) | |
86 #define OP_DX (1 << OPT_DX) | |
87 #define OP_ADDR (1 << OPT_ADDR) | |
88 #define OP_INDIR (1 << OPT_INDIR) | |
89 | |
90 #define OP_EA 0x40000000 | |
91 #define OP_REG (OP_REG8 | OP_REG16 | OP_REG32) | |
92 #define OP_IM OP_IM32 | |
93 int8_t reg; /* register, -1 if none */ | |
94 int8_t reg2; /* second register, -1 if none */ | |
95 uint8_t shift; | |
96 ExprValue e; | |
97 } Operand; | |
98 | |
99 static const uint8_t reg_to_size[5] = { | |
100 [OP_REG8] = 0, | |
101 [OP_REG16] = 1, | |
102 [OP_REG32] = 2, | |
103 }; | |
104 | |
105 #define WORD_PREFIX_OPCODE 0x66 | |
106 | |
107 #define NB_TEST_OPCODES 30 | |
108 | |
109 static const uint8_t test_bits[NB_TEST_OPCODES] = { | |
110 0x00, /* o */ | |
111 0x01, /* no */ | |
112 0x02, /* b */ | |
113 0x02, /* c */ | |
114 0x02, /* nae */ | |
115 0x03, /* nb */ | |
116 0x03, /* nc */ | |
117 0x03, /* ae */ | |
118 0x04, /* e */ | |
119 0x04, /* z */ | |
120 0x05, /* ne */ | |
121 0x05, /* nz */ | |
122 0x06, /* be */ | |
123 0x06, /* na */ | |
124 0x07, /* nbe */ | |
125 0x07, /* a */ | |
126 0x08, /* s */ | |
127 0x09, /* ns */ | |
128 0x0a, /* p */ | |
129 0x0a, /* pe */ | |
130 0x0b, /* np */ | |
131 0x0b, /* po */ | |
132 0x0c, /* l */ | |
133 0x0c, /* nge */ | |
134 0x0d, /* nl */ | |
135 0x0d, /* ge */ | |
136 0x0e, /* le */ | |
137 0x0e, /* ng */ | |
138 0x0f, /* nle */ | |
139 0x0f, /* g */ | |
140 }; | |
141 | |
385 | 142 static const uint8_t segment_prefixes[] = { |
143 0x26, /* es */ | |
144 0x2e, /* cs */ | |
145 0x36, /* ss */ | |
146 0x3e, /* ds */ | |
147 0x64, /* fs */ | |
148 0x65 /* gs */ | |
149 }; | |
150 | |
217 | 151 static const ASMInstr asm_instrs[] = { |
152 #define ALT(x) x | |
153 #define DEF_ASM_OP0(name, opcode) | |
154 #define DEF_ASM_OP0L(name, opcode, group, instr_type) { TOK_ASM_ ## name, opcode, (instr_type | group << OPC_GROUP_SHIFT), 0 }, | |
155 #define DEF_ASM_OP1(name, opcode, group, instr_type, op0) { TOK_ASM_ ## name, opcode, (instr_type | group << OPC_GROUP_SHIFT), 1, { op0 }}, | |
156 #define DEF_ASM_OP2(name, opcode, group, instr_type, op0, op1) { TOK_ASM_ ## name, opcode, (instr_type | group << OPC_GROUP_SHIFT), 2, { op0, op1 }}, | |
157 #define DEF_ASM_OP3(name, opcode, group, instr_type, op0, op1, op2) { TOK_ASM_ ## name, opcode, (instr_type | group << OPC_GROUP_SHIFT), 3, { op0, op1, op2 }}, | |
510
f304c7e3de8d
Teach tcc.c to handle colon separated paths (semicolon separated on a win32
Rob Landley <rob@landley.net>
parents:
508
diff
changeset
|
158 #include "asm.h" |
217 | 159 |
160 /* last operation */ | |
161 { 0, }, | |
162 }; | |
163 | |
164 static const uint16_t op0_codes[] = { | |
165 #define ALT(x) | |
166 #define DEF_ASM_OP0(x, opcode) opcode, | |
167 #define DEF_ASM_OP0L(name, opcode, group, instr_type) | |
168 #define DEF_ASM_OP1(name, opcode, group, instr_type, op0) | |
169 #define DEF_ASM_OP2(name, opcode, group, instr_type, op0, op1) | |
170 #define DEF_ASM_OP3(name, opcode, group, instr_type, op0, op1, op2) | |
510
f304c7e3de8d
Teach tcc.c to handle colon separated paths (semicolon separated on a win32
Rob Landley <rob@landley.net>
parents:
508
diff
changeset
|
171 #include "asm.h" |
217 | 172 }; |
173 | |
174 static inline int get_reg_shift(TCCState *s1) | |
175 { | |
176 int shift, v; | |
177 | |
178 v = asm_int_expr(s1); | |
179 switch(v) { | |
180 case 1: | |
181 shift = 0; | |
182 break; | |
183 case 2: | |
184 shift = 1; | |
185 break; | |
186 case 4: | |
187 shift = 2; | |
188 break; | |
189 case 8: | |
190 shift = 3; | |
191 break; | |
192 default: | |
193 expect("1, 2, 4 or 8 constant"); | |
194 shift = 0; | |
195 break; | |
196 } | |
197 return shift; | |
198 } | |
199 | |
200 static int asm_parse_reg(void) | |
201 { | |
202 int reg; | |
203 if (tok != '%') | |
204 goto error_32; | |
205 next(); | |
206 if (tok >= TOK_ASM_eax && tok <= TOK_ASM_edi) { | |
207 reg = tok - TOK_ASM_eax; | |
208 next(); | |
209 return reg; | |
210 } else { | |
211 error_32: | |
212 expect("32 bit register"); | |
213 return 0; | |
214 } | |
215 } | |
216 | |
217 static void parse_operand(TCCState *s1, Operand *op) | |
218 { | |
219 ExprValue e; | |
220 int reg, indir; | |
221 const char *p; | |
222 | |
223 indir = 0; | |
224 if (tok == '*') { | |
225 next(); | |
226 indir = OP_INDIR; | |
227 } | |
228 | |
229 if (tok == '%') { | |
230 next(); | |
231 if (tok >= TOK_ASM_al && tok <= TOK_ASM_db7) { | |
232 reg = tok - TOK_ASM_al; | |
233 op->type = 1 << (reg >> 3); /* WARNING: do not change constant order */ | |
234 op->reg = reg & 7; | |
235 if ((op->type & OP_REG) && op->reg == TREG_EAX) | |
236 op->type |= OP_EAX; | |
237 else if (op->type == OP_REG8 && op->reg == TREG_ECX) | |
238 op->type |= OP_CL; | |
239 else if (op->type == OP_REG16 && op->reg == TREG_EDX) | |
240 op->type |= OP_DX; | |
241 } else if (tok >= TOK_ASM_dr0 && tok <= TOK_ASM_dr7) { | |
242 op->type = OP_DB; | |
243 op->reg = tok - TOK_ASM_dr0; | |
244 } else if (tok >= TOK_ASM_es && tok <= TOK_ASM_gs) { | |
245 op->type = OP_SEG; | |
246 op->reg = tok - TOK_ASM_es; | |
247 } else if (tok == TOK_ASM_st) { | |
248 op->type = OP_ST; | |
249 op->reg = 0; | |
250 next(); | |
251 if (tok == '(') { | |
252 next(); | |
253 if (tok != TOK_PPNUM) | |
254 goto reg_error; | |
255 p = tokc.cstr->data; | |
256 reg = p[0] - '0'; | |
257 if ((unsigned)reg >= 8 || p[1] != '\0') | |
258 goto reg_error; | |
259 op->reg = reg; | |
260 next(); | |
261 skip(')'); | |
262 } | |
263 if (op->reg == 0) | |
264 op->type |= OP_ST0; | |
265 goto no_skip; | |
266 } else { | |
267 reg_error: | |
268 error("unknown register"); | |
269 } | |
270 next(); | |
271 no_skip: ; | |
272 } else if (tok == '$') { | |
273 /* constant value */ | |
274 next(); | |
275 asm_expr(s1, &e); | |
276 op->type = OP_IM32; | |
277 op->e.v = e.v; | |
278 op->e.sym = e.sym; | |
279 if (!op->e.sym) { | |
280 if (op->e.v == (uint8_t)op->e.v) | |
281 op->type |= OP_IM8; | |
282 if (op->e.v == (int8_t)op->e.v) | |
283 op->type |= OP_IM8S; | |
284 if (op->e.v == (uint16_t)op->e.v) | |
285 op->type |= OP_IM16; | |
286 } | |
287 } else { | |
288 /* address(reg,reg2,shift) with all variants */ | |
289 op->type = OP_EA; | |
290 op->reg = -1; | |
291 op->reg2 = -1; | |
292 op->shift = 0; | |
293 if (tok != '(') { | |
294 asm_expr(s1, &e); | |
295 op->e.v = e.v; | |
296 op->e.sym = e.sym; | |
297 } else { | |
298 op->e.v = 0; | |
299 op->e.sym = NULL; | |
300 } | |
301 if (tok == '(') { | |
302 next(); | |
303 if (tok != ',') { | |
304 op->reg = asm_parse_reg(); | |
305 } | |
306 if (tok == ',') { | |
307 next(); | |
308 if (tok != ',') { | |
309 op->reg2 = asm_parse_reg(); | |
310 } | |
395 | 311 if (tok == ',') { |
312 next(); | |
313 op->shift = get_reg_shift(s1); | |
314 } | |
217 | 315 } |
316 skip(')'); | |
317 } | |
318 if (op->reg == -1 && op->reg2 == -1) | |
319 op->type |= OP_ADDR; | |
320 } | |
321 op->type |= indir; | |
322 } | |
323 | |
324 /* XXX: unify with C code output ? */ | |
325 static void gen_expr32(ExprValue *pe) | |
326 { | |
327 if (pe->sym) | |
328 greloc(cur_text_section, pe->sym, ind, R_386_32); | |
329 gen_le32(pe->v); | |
330 } | |
331 | |
332 /* XXX: unify with C code output ? */ | |
333 static void gen_disp32(ExprValue *pe) | |
334 { | |
335 Sym *sym; | |
336 sym = pe->sym; | |
337 if (sym) { | |
338 if (sym->r == cur_text_section->sh_num) { | |
339 /* same section: we can output an absolute value. Note | |
340 that the TCC compiler behaves differently here because | |
341 it always outputs a relocation to ease (future) code | |
342 elimination in the linker */ | |
343 gen_le32(pe->v + (long)sym->next - ind - 4); | |
344 } else { | |
345 greloc(cur_text_section, sym, ind, R_386_PC32); | |
346 gen_le32(pe->v - 4); | |
347 } | |
348 } else { | |
349 /* put an empty PC32 relocation */ | |
350 put_elf_reloc(symtab_section, cur_text_section, | |
351 ind, R_386_PC32, 0); | |
352 gen_le32(pe->v - 4); | |
353 } | |
354 } | |
355 | |
356 | |
357 static void gen_le16(int v) | |
358 { | |
359 g(v); | |
360 g(v >> 8); | |
361 } | |
362 | |
363 /* generate the modrm operand */ | |
364 static inline void asm_modrm(int reg, Operand *op) | |
365 { | |
314 | 366 int mod, reg1, reg2, sib_reg1; |
217 | 367 |
368 if (op->type & (OP_REG | OP_MMX | OP_SSE)) { | |
369 g(0xc0 + (reg << 3) + op->reg); | |
370 } else if (op->reg == -1 && op->reg2 == -1) { | |
371 /* displacement only */ | |
372 g(0x05 + (reg << 3)); | |
373 gen_expr32(&op->e); | |
374 } else { | |
314 | 375 sib_reg1 = op->reg; |
217 | 376 /* fist compute displacement encoding */ |
314 | 377 if (sib_reg1 == -1) { |
378 sib_reg1 = 5; | |
379 mod = 0x00; | |
380 } else if (op->e.v == 0 && !op->e.sym && op->reg != 5) { | |
217 | 381 mod = 0x00; |
382 } else if (op->e.v == (int8_t)op->e.v && !op->e.sym) { | |
383 mod = 0x40; | |
384 } else { | |
385 mod = 0x80; | |
386 } | |
387 /* compute if sib byte needed */ | |
388 reg1 = op->reg; | |
389 if (op->reg2 != -1) | |
390 reg1 = 4; | |
391 g(mod + (reg << 3) + reg1); | |
392 if (reg1 == 4) { | |
393 /* add sib byte */ | |
394 reg2 = op->reg2; | |
395 if (reg2 == -1) | |
396 reg2 = 4; /* indicate no index */ | |
314 | 397 g((op->shift << 6) + (reg2 << 3) + sib_reg1); |
217 | 398 } |
399 | |
400 /* add offset */ | |
401 if (mod == 0x40) { | |
402 g(op->e.v); | |
314 | 403 } else if (mod == 0x80 || op->reg == -1) { |
217 | 404 gen_expr32(&op->e); |
405 } | |
406 } | |
407 } | |
408 | |
409 static void asm_opcode(TCCState *s1, int opcode) | |
410 { | |
411 const ASMInstr *pa; | |
385 | 412 int i, modrm_index, reg, v, op1, is_short_jmp, has_seg_prefix; |
217 | 413 int nb_ops, s, ss; |
385 | 414 Operand ops[MAX_OPERANDS], *pop, seg_prefix; |
217 | 415 int op_type[3]; /* decoded op type */ |
416 | |
417 /* get operands */ | |
418 pop = ops; | |
419 nb_ops = 0; | |
385 | 420 has_seg_prefix = 0; |
217 | 421 for(;;) { |
422 if (tok == ';' || tok == TOK_LINEFEED) | |
423 break; | |
424 if (nb_ops >= MAX_OPERANDS) { | |
425 error("incorrect number of operands"); | |
426 } | |
427 parse_operand(s1, pop); | |
385 | 428 if (tok == ':') { |
429 if (pop->type != OP_SEG || has_seg_prefix) { | |
430 error("incorrect prefix"); | |
431 } | |
432 seg_prefix = *pop; | |
433 has_seg_prefix = 1; | |
434 next(); | |
435 parse_operand(s1, pop); | |
436 if (!(pop->type & OP_EA)) { | |
437 error("segment prefix must be followed by memory reference"); | |
438 } | |
439 } | |
217 | 440 pop++; |
441 nb_ops++; | |
442 if (tok != ',') | |
443 break; | |
444 next(); | |
445 } | |
446 | |
447 is_short_jmp = 0; | |
448 s = 0; /* avoid warning */ | |
449 | |
450 /* optimize matching by using a lookup table (no hashing is needed | |
451 !) */ | |
452 for(pa = asm_instrs; pa->sym != 0; pa++) { | |
453 s = 0; | |
454 if (pa->instr_type & OPC_FARITH) { | |
455 v = opcode - pa->sym; | |
456 if (!((unsigned)v < 8 * 6 && (v % 6) == 0)) | |
457 continue; | |
458 } else if (pa->instr_type & OPC_ARITH) { | |
459 if (!(opcode >= pa->sym && opcode < pa->sym + 8 * 4)) | |
460 continue; | |
461 goto compute_size; | |
462 } else if (pa->instr_type & OPC_SHIFT) { | |
463 if (!(opcode >= pa->sym && opcode < pa->sym + 7 * 4)) | |
464 continue; | |
465 goto compute_size; | |
466 } else if (pa->instr_type & OPC_TEST) { | |
467 if (!(opcode >= pa->sym && opcode < pa->sym + NB_TEST_OPCODES)) | |
468 continue; | |
469 } else if (pa->instr_type & OPC_B) { | |
470 if (!(opcode >= pa->sym && opcode <= pa->sym + 3)) | |
471 continue; | |
472 compute_size: | |
473 s = (opcode - pa->sym) & 3; | |
474 } else if (pa->instr_type & OPC_WL) { | |
475 if (!(opcode >= pa->sym && opcode <= pa->sym + 2)) | |
476 continue; | |
477 s = opcode - pa->sym + 1; | |
478 } else { | |
479 if (pa->sym != opcode) | |
480 continue; | |
481 } | |
482 if (pa->nb_ops != nb_ops) | |
483 continue; | |
484 /* now decode and check each operand */ | |
485 for(i = 0; i < nb_ops; i++) { | |
486 int op1, op2; | |
487 op1 = pa->op_type[i]; | |
488 op2 = op1 & 0x1f; | |
489 switch(op2) { | |
490 case OPT_IM: | |
491 v = OP_IM8 | OP_IM16 | OP_IM32; | |
492 break; | |
493 case OPT_REG: | |
494 v = OP_REG8 | OP_REG16 | OP_REG32; | |
495 break; | |
496 case OPT_REGW: | |
497 v = OP_REG16 | OP_REG32; | |
498 break; | |
499 case OPT_IMW: | |
500 v = OP_IM16 | OP_IM32; | |
501 break; | |
502 default: | |
503 v = 1 << op2; | |
504 break; | |
505 } | |
506 if (op1 & OPT_EA) | |
507 v |= OP_EA; | |
508 op_type[i] = v; | |
509 if ((ops[i].type & v) == 0) | |
510 goto next; | |
511 } | |
512 /* all is matching ! */ | |
513 break; | |
514 next: ; | |
515 } | |
516 if (pa->sym == 0) { | |
517 if (opcode >= TOK_ASM_pusha && opcode <= TOK_ASM_emms) { | |
518 int b; | |
519 b = op0_codes[opcode - TOK_ASM_pusha]; | |
520 if (b & 0xff00) | |
521 g(b >> 8); | |
522 g(b); | |
523 return; | |
524 } else { | |
525 error("unknown opcode '%s'", | |
526 get_tok_str(opcode, NULL)); | |
527 } | |
528 } | |
529 /* if the size is unknown, then evaluate it (OPC_B or OPC_WL case) */ | |
530 if (s == 3) { | |
531 for(i = 0; s == 3 && i < nb_ops; i++) { | |
532 if ((ops[i].type & OP_REG) && !(op_type[i] & (OP_CL | OP_DX))) | |
533 s = reg_to_size[ops[i].type & OP_REG]; | |
534 } | |
535 if (s == 3) { | |
314 | 536 if ((opcode == TOK_ASM_push || opcode == TOK_ASM_pop) && |
537 (ops[0].type & (OP_SEG | OP_IM8S | OP_IM32))) | |
538 s = 2; | |
539 else | |
540 error("cannot infer opcode suffix"); | |
217 | 541 } |
542 } | |
543 | |
544 /* generate data16 prefix if needed */ | |
545 ss = s; | |
546 if (s == 1 || (pa->instr_type & OPC_D16)) | |
547 g(WORD_PREFIX_OPCODE); | |
548 else if (s == 2) | |
549 s = 1; | |
550 /* now generates the operation */ | |
551 if (pa->instr_type & OPC_FWAIT) | |
552 g(0x9b); | |
385 | 553 if (has_seg_prefix) |
554 g(segment_prefixes[seg_prefix.reg]); | |
217 | 555 |
556 v = pa->opcode; | |
557 if (v == 0x69 || v == 0x69) { | |
558 /* kludge for imul $im, %reg */ | |
559 nb_ops = 3; | |
560 ops[2] = ops[1]; | |
561 } else if (v == 0xcd && ops[0].e.v == 3 && !ops[0].e.sym) { | |
562 v--; /* int $3 case */ | |
563 nb_ops = 0; | |
564 } else if ((v == 0x06 || v == 0x07)) { | |
565 if (ops[0].reg >= 4) { | |
566 /* push/pop %fs or %gs */ | |
567 v = 0x0fa0 + (v - 0x06) + ((ops[0].reg - 4) << 3); | |
568 } else { | |
569 v += ops[0].reg << 3; | |
570 } | |
571 nb_ops = 0; | |
572 } else if (v <= 0x05) { | |
573 /* arith case */ | |
574 v += ((opcode - TOK_ASM_addb) >> 2) << 3; | |
575 } else if ((pa->instr_type & (OPC_FARITH | OPC_MODRM)) == OPC_FARITH) { | |
576 /* fpu arith case */ | |
577 v += ((opcode - pa->sym) / 6) << 3; | |
578 } | |
579 if (pa->instr_type & OPC_REG) { | |
580 for(i = 0; i < nb_ops; i++) { | |
581 if (op_type[i] & (OP_REG | OP_ST)) { | |
582 v += ops[i].reg; | |
583 break; | |
584 } | |
585 } | |
586 /* mov $im, %reg case */ | |
587 if (pa->opcode == 0xb0 && s >= 1) | |
588 v += 7; | |
589 } | |
590 if (pa->instr_type & OPC_B) | |
591 v += s; | |
592 if (pa->instr_type & OPC_TEST) | |
593 v += test_bits[opcode - pa->sym]; | |
594 if (pa->instr_type & OPC_SHORTJMP) { | |
595 Sym *sym; | |
596 int jmp_disp; | |
597 | |
598 /* see if we can really generate the jump with a byte offset */ | |
599 sym = ops[0].e.sym; | |
600 if (!sym) | |
601 goto no_short_jump; | |
602 if (sym->r != cur_text_section->sh_num) | |
603 goto no_short_jump; | |
604 jmp_disp = ops[0].e.v + (long)sym->next - ind - 2; | |
605 if (jmp_disp == (int8_t)jmp_disp) { | |
606 /* OK to generate jump */ | |
607 is_short_jmp = 1; | |
608 ops[0].e.v = jmp_disp; | |
609 } else { | |
610 no_short_jump: | |
611 if (pa->instr_type & OPC_JMP) { | |
612 /* long jump will be allowed. need to modify the | |
613 opcode slightly */ | |
614 if (v == 0xeb) | |
615 v = 0xe9; | |
616 else | |
617 v += 0x0f10; | |
618 } else { | |
619 error("invalid displacement"); | |
620 } | |
621 } | |
622 } | |
623 op1 = v >> 8; | |
624 if (op1) | |
625 g(op1); | |
626 g(v); | |
627 | |
628 /* search which operand will used for modrm */ | |
629 modrm_index = 0; | |
630 if (pa->instr_type & OPC_SHIFT) { | |
631 reg = (opcode - pa->sym) >> 2; | |
632 if (reg == 6) | |
633 reg = 7; | |
634 } else if (pa->instr_type & OPC_ARITH) { | |
635 reg = (opcode - pa->sym) >> 2; | |
636 } else if (pa->instr_type & OPC_FARITH) { | |
637 reg = (opcode - pa->sym) / 6; | |
638 } else { | |
639 reg = (pa->instr_type >> OPC_GROUP_SHIFT) & 7; | |
640 } | |
641 if (pa->instr_type & OPC_MODRM) { | |
642 /* first look for an ea operand */ | |
643 for(i = 0;i < nb_ops; i++) { | |
644 if (op_type[i] & OP_EA) | |
645 goto modrm_found; | |
646 } | |
647 /* then if not found, a register or indirection (shift instructions) */ | |
648 for(i = 0;i < nb_ops; i++) { | |
649 if (op_type[i] & (OP_REG | OP_MMX | OP_SSE | OP_INDIR)) | |
650 goto modrm_found; | |
651 } | |
652 #ifdef ASM_DEBUG | |
653 error("bad op table"); | |
654 #endif | |
655 modrm_found: | |
656 modrm_index = i; | |
657 /* if a register is used in another operand then it is | |
658 used instead of group */ | |
659 for(i = 0;i < nb_ops; i++) { | |
660 v = op_type[i]; | |
661 if (i != modrm_index && | |
662 (v & (OP_REG | OP_MMX | OP_SSE | OP_CR | OP_TR | OP_DB | OP_SEG))) { | |
663 reg = ops[i].reg; | |
664 break; | |
665 } | |
666 } | |
667 | |
668 asm_modrm(reg, &ops[modrm_index]); | |
669 } | |
670 | |
671 /* emit constants */ | |
672 if (pa->opcode == 0x9a || pa->opcode == 0xea) { | |
673 /* ljmp or lcall kludge */ | |
674 gen_expr32(&ops[1].e); | |
675 if (ops[0].e.sym) | |
676 error("cannot relocate"); | |
677 gen_le16(ops[0].e.v); | |
678 } else { | |
679 for(i = 0;i < nb_ops; i++) { | |
680 v = op_type[i]; | |
681 if (v & (OP_IM8 | OP_IM16 | OP_IM32 | OP_IM8S | OP_ADDR)) { | |
682 /* if multiple sizes are given it means we must look | |
683 at the op size */ | |
684 if (v == (OP_IM8 | OP_IM16 | OP_IM32) || | |
685 v == (OP_IM16 | OP_IM32)) { | |
686 if (ss == 0) | |
687 v = OP_IM8; | |
688 else if (ss == 1) | |
689 v = OP_IM16; | |
690 else | |
691 v = OP_IM32; | |
692 } | |
693 if (v & (OP_IM8 | OP_IM8S)) { | |
694 if (ops[i].e.sym) | |
695 goto error_relocate; | |
696 g(ops[i].e.v); | |
697 } else if (v & OP_IM16) { | |
698 if (ops[i].e.sym) { | |
699 error_relocate: | |
700 error("cannot relocate"); | |
701 } | |
702 gen_le16(ops[i].e.v); | |
703 } else { | |
704 if (pa->instr_type & (OPC_JMP | OPC_SHORTJMP)) { | |
705 if (is_short_jmp) | |
706 g(ops[i].e.v); | |
707 else | |
708 gen_disp32(&ops[i].e); | |
709 } else { | |
710 gen_expr32(&ops[i].e); | |
711 } | |
712 } | |
713 } | |
714 } | |
715 } | |
716 } | |
717 | |
718 #define NB_SAVED_REGS 3 | |
719 #define NB_ASM_REGS 8 | |
720 | |
721 /* return the constraint priority (we allocate first the lowest | |
722 numbered constraints) */ | |
723 static inline int constraint_priority(const char *str) | |
724 { | |
725 int priority, c, pr; | |
726 | |
727 /* we take the lowest priority */ | |
728 priority = 0; | |
729 for(;;) { | |
730 c = *str; | |
731 if (c == '\0') | |
732 break; | |
733 str++; | |
734 switch(c) { | |
243 | 735 case 'A': |
736 pr = 0; | |
737 break; | |
217 | 738 case 'a': |
739 case 'b': | |
740 case 'c': | |
741 case 'd': | |
742 case 'S': | |
743 case 'D': | |
744 pr = 1; | |
745 break; | |
243 | 746 case 'q': |
747 pr = 2; | |
748 break; | |
217 | 749 case 'r': |
243 | 750 pr = 3; |
217 | 751 break; |
752 case 'N': | |
753 case 'M': | |
754 case 'I': | |
755 case 'i': | |
756 case 'm': | |
757 case 'g': | |
243 | 758 pr = 4; |
217 | 759 break; |
760 default: | |
761 error("unknown constraint '%c'", c); | |
762 pr = 0; | |
763 } | |
764 if (pr > priority) | |
765 priority = pr; | |
766 } | |
767 return priority; | |
768 } | |
769 | |
257 | 770 static const char *skip_constraint_modifiers(const char *p) |
771 { | |
772 while (*p == '=' || *p == '&' || *p == '+' || *p == '%') | |
773 p++; | |
774 return p; | |
775 } | |
776 | |
314 | 777 #define REG_OUT_MASK 0x01 |
778 #define REG_IN_MASK 0x02 | |
779 | |
780 #define is_reg_allocated(reg) (regs_allocated[reg] & reg_mask) | |
781 | |
782 static void asm_compute_constraints(ASMOperand *operands, | |
783 int nb_operands, int nb_outputs, | |
784 const uint8_t *clobber_regs, | |
785 int *pout_reg) | |
217 | 786 { |
787 ASMOperand *op; | |
788 int sorted_op[MAX_ASM_OPERANDS]; | |
314 | 789 int i, j, k, p1, p2, tmp, reg, c, reg_mask; |
217 | 790 const char *str; |
314 | 791 uint8_t regs_allocated[NB_ASM_REGS]; |
217 | 792 |
314 | 793 /* init fields */ |
794 for(i=0;i<nb_operands;i++) { | |
795 op = &operands[i]; | |
796 op->input_index = -1; | |
797 op->ref_index = -1; | |
798 op->reg = -1; | |
799 op->is_memory = 0; | |
800 op->is_rw = 0; | |
217 | 801 } |
802 /* compute constraint priority and evaluate references to output | |
803 constraints if input constraints */ | |
804 for(i=0;i<nb_operands;i++) { | |
314 | 805 op = &operands[i]; |
217 | 806 str = op->constraint; |
257 | 807 str = skip_constraint_modifiers(str); |
314 | 808 if (isnum(*str) || *str == '[') { |
217 | 809 /* this is a reference to another constraint */ |
314 | 810 k = find_constraint(operands, nb_operands, str, NULL); |
811 if ((unsigned)k >= i || i < nb_outputs) | |
217 | 812 error("invalid reference in constraint %d ('%s')", |
314 | 813 i, str); |
217 | 814 op->ref_index = k; |
314 | 815 if (operands[k].input_index >= 0) |
816 error("cannot reference twice the same operand"); | |
817 operands[k].input_index = i; | |
818 op->priority = 5; | |
819 } else { | |
820 op->priority = constraint_priority(str); | |
217 | 821 } |
822 } | |
823 | |
824 /* sort operands according to their priority */ | |
825 for(i=0;i<nb_operands;i++) | |
314 | 826 sorted_op[i] = i; |
217 | 827 for(i=0;i<nb_operands - 1;i++) { |
828 for(j=i+1;j<nb_operands;j++) { | |
829 p1 = operands[sorted_op[i]].priority; | |
830 p2 = operands[sorted_op[j]].priority; | |
831 if (p2 < p1) { | |
832 tmp = sorted_op[i]; | |
833 sorted_op[i] = sorted_op[j]; | |
834 sorted_op[j] = tmp; | |
835 } | |
836 } | |
837 } | |
838 | |
314 | 839 for(i = 0;i < NB_ASM_REGS; i++) { |
840 if (clobber_regs[i]) | |
841 regs_allocated[i] = REG_IN_MASK | REG_OUT_MASK; | |
842 else | |
843 regs_allocated[i] = 0; | |
844 } | |
845 /* esp cannot be used */ | |
846 regs_allocated[4] = REG_IN_MASK | REG_OUT_MASK; | |
847 /* ebp cannot be used yet */ | |
848 regs_allocated[5] = REG_IN_MASK | REG_OUT_MASK; | |
849 | |
217 | 850 /* allocate registers and generate corresponding asm moves */ |
851 for(i=0;i<nb_operands;i++) { | |
852 j = sorted_op[i]; | |
853 op = &operands[j]; | |
854 str = op->constraint; | |
314 | 855 /* no need to allocate references */ |
856 if (op->ref_index >= 0) | |
857 continue; | |
858 /* select if register is used for output, input or both */ | |
859 if (op->input_index >= 0) { | |
860 reg_mask = REG_IN_MASK | REG_OUT_MASK; | |
861 } else if (j < nb_outputs) { | |
862 reg_mask = REG_OUT_MASK; | |
863 } else { | |
864 reg_mask = REG_IN_MASK; | |
217 | 865 } |
866 try_next: | |
867 c = *str++; | |
868 switch(c) { | |
314 | 869 case '=': |
870 goto try_next; | |
871 case '+': | |
872 op->is_rw = 1; | |
873 /* FALL THRU */ | |
874 case '&': | |
875 if (j >= nb_outputs) | |
876 error("'%c' modifier can only be applied to outputs", c); | |
877 reg_mask = REG_IN_MASK | REG_OUT_MASK; | |
878 goto try_next; | |
243 | 879 case 'A': |
880 /* allocate both eax and edx */ | |
314 | 881 if (is_reg_allocated(TREG_EAX) || |
882 is_reg_allocated(TREG_EDX)) | |
243 | 883 goto try_next; |
884 op->is_llong = 1; | |
885 op->reg = TREG_EAX; | |
314 | 886 regs_allocated[TREG_EAX] |= reg_mask; |
887 regs_allocated[TREG_EDX] |= reg_mask; | |
243 | 888 break; |
217 | 889 case 'a': |
890 reg = TREG_EAX; | |
891 goto alloc_reg; | |
892 case 'b': | |
893 reg = 3; | |
894 goto alloc_reg; | |
895 case 'c': | |
896 reg = TREG_ECX; | |
897 goto alloc_reg; | |
898 case 'd': | |
899 reg = TREG_EDX; | |
900 goto alloc_reg; | |
901 case 'S': | |
902 reg = 6; | |
903 goto alloc_reg; | |
904 case 'D': | |
905 reg = 7; | |
906 alloc_reg: | |
314 | 907 if (is_reg_allocated(reg)) |
217 | 908 goto try_next; |
909 goto reg_found; | |
910 case 'q': | |
911 /* eax, ebx, ecx or edx */ | |
912 for(reg = 0; reg < 4; reg++) { | |
314 | 913 if (!is_reg_allocated(reg)) |
217 | 914 goto reg_found; |
915 } | |
916 goto try_next; | |
917 case 'r': | |
918 /* any general register */ | |
919 for(reg = 0; reg < 8; reg++) { | |
314 | 920 if (!is_reg_allocated(reg)) |
217 | 921 goto reg_found; |
922 } | |
923 goto try_next; | |
924 reg_found: | |
925 /* now we can reload in the register */ | |
243 | 926 op->is_llong = 0; |
217 | 927 op->reg = reg; |
314 | 928 regs_allocated[reg] |= reg_mask; |
217 | 929 break; |
930 case 'i': | |
931 if (!((op->vt->r & (VT_VALMASK | VT_LVAL)) == VT_CONST)) | |
932 goto try_next; | |
933 break; | |
934 case 'I': | |
935 case 'N': | |
936 case 'M': | |
937 if (!((op->vt->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST)) | |
938 goto try_next; | |
939 break; | |
940 case 'm': | |
941 case 'g': | |
314 | 942 /* nothing special to do because the operand is already in |
943 memory, except if the pointer itself is stored in a | |
944 memory variable (VT_LLOCAL case) */ | |
217 | 945 /* XXX: fix constant case */ |
314 | 946 /* if it is a reference to a memory zone, it must lie |
947 in a register, so we reserve the register in the | |
948 input registers and a load will be generated | |
949 later */ | |
950 if (j < nb_outputs || c == 'm') { | |
217 | 951 if ((op->vt->r & VT_VALMASK) == VT_LLOCAL) { |
952 /* any general register */ | |
953 for(reg = 0; reg < 8; reg++) { | |
314 | 954 if (!(regs_allocated[reg] & REG_IN_MASK)) |
217 | 955 goto reg_found1; |
956 } | |
957 goto try_next; | |
958 reg_found1: | |
959 /* now we can reload in the register */ | |
314 | 960 regs_allocated[reg] |= REG_IN_MASK; |
217 | 961 op->reg = reg; |
314 | 962 op->is_memory = 1; |
217 | 963 } |
964 } | |
965 break; | |
966 default: | |
967 error("asm constraint %d ('%s') could not be satisfied", | |
968 j, op->constraint); | |
969 break; | |
970 } | |
314 | 971 /* if a reference is present for that operand, we assign it too */ |
972 if (op->input_index >= 0) { | |
973 operands[op->input_index].reg = op->reg; | |
974 operands[op->input_index].is_llong = op->is_llong; | |
975 } | |
217 | 976 } |
314 | 977 |
978 /* compute out_reg. It is used to store outputs registers to memory | |
979 locations references by pointers (VT_LLOCAL case) */ | |
980 *pout_reg = -1; | |
981 for(i=0;i<nb_operands;i++) { | |
982 op = &operands[i]; | |
983 if (op->reg >= 0 && | |
984 (op->vt->r & VT_VALMASK) == VT_LLOCAL && | |
985 !op->is_memory) { | |
986 for(reg = 0; reg < 8; reg++) { | |
987 if (!(regs_allocated[reg] & REG_OUT_MASK)) | |
988 goto reg_found2; | |
989 } | |
990 error("could not find free output register for reloading"); | |
991 reg_found2: | |
992 *pout_reg = reg; | |
993 break; | |
994 } | |
995 } | |
996 | |
217 | 997 /* print sorted constraints */ |
998 #ifdef ASM_DEBUG | |
999 for(i=0;i<nb_operands;i++) { | |
1000 j = sorted_op[i]; | |
1001 op = &operands[j]; | |
1002 printf("%%%d [%s]: \"%s\" r=0x%04x reg=%d\n", | |
1003 j, | |
1004 op->id ? get_tok_str(op->id, NULL) : "", | |
1005 op->constraint, | |
1006 op->vt->r, | |
1007 op->reg); | |
1008 } | |
314 | 1009 if (*pout_reg >= 0) |
1010 printf("out_reg=%d\n", *pout_reg); | |
217 | 1011 #endif |
1012 } | |
1013 | |
1014 static void subst_asm_operand(CString *add_str, | |
1015 SValue *sv, int modifier) | |
1016 { | |
1017 int r, reg, size, val; | |
1018 char buf[64]; | |
1019 | |
1020 r = sv->r; | |
1021 if ((r & VT_VALMASK) == VT_CONST) { | |
1022 if (!(r & VT_LVAL) && modifier != 'c' && modifier != 'n') | |
1023 cstr_ccat(add_str, '$'); | |
1024 if (r & VT_SYM) { | |
551
d8b3fa09ca5d
One of the members of "struct Sym" is a token. Rename it from "v" to "token", and change local variables
Rob Landley <rob@landley.net>
parents:
510
diff
changeset
|
1025 cstr_cat(add_str, get_tok_str(sv->sym->token, NULL)); |
217 | 1026 if (sv->c.i != 0) { |
1027 cstr_ccat(add_str, '+'); | |
1028 } else { | |
1029 return; | |
1030 } | |
1031 } | |
1032 val = sv->c.i; | |
1033 if (modifier == 'n') | |
1034 val = -val; | |
1035 snprintf(buf, sizeof(buf), "%d", sv->c.i); | |
1036 cstr_cat(add_str, buf); | |
1037 } else if ((r & VT_VALMASK) == VT_LOCAL) { | |
1038 snprintf(buf, sizeof(buf), "%d(%%ebp)", sv->c.i); | |
1039 cstr_cat(add_str, buf); | |
1040 } else if (r & VT_LVAL) { | |
1041 reg = r & VT_VALMASK; | |
1042 if (reg >= VT_CONST) | |
1043 error("internal compiler error"); | |
1044 snprintf(buf, sizeof(buf), "(%%%s)", | |
1045 get_tok_str(TOK_ASM_eax + reg, NULL)); | |
1046 cstr_cat(add_str, buf); | |
1047 } else { | |
1048 /* register case */ | |
1049 reg = r & VT_VALMASK; | |
1050 if (reg >= VT_CONST) | |
1051 error("internal compiler error"); | |
1052 | |
1053 /* choose register operand size */ | |
1054 if ((sv->type.t & VT_BTYPE) == VT_BYTE) | |
1055 size = 1; | |
1056 else if ((sv->type.t & VT_BTYPE) == VT_SHORT) | |
1057 size = 2; | |
1058 else | |
1059 size = 4; | |
1060 if (size == 1 && reg >= 4) | |
1061 size = 4; | |
1062 | |
1063 if (modifier == 'b') { | |
1064 if (reg >= 4) | |
1065 error("cannot use byte register"); | |
1066 size = 1; | |
1067 } else if (modifier == 'h') { | |
1068 if (reg >= 4) | |
1069 error("cannot use byte register"); | |
1070 size = -1; | |
1071 } else if (modifier == 'w') { | |
1072 size = 2; | |
1073 } | |
1074 | |
1075 switch(size) { | |
1076 case -1: | |
1077 reg = TOK_ASM_ah + reg; | |
1078 break; | |
1079 case 1: | |
1080 reg = TOK_ASM_al + reg; | |
1081 break; | |
1082 case 2: | |
1083 reg = TOK_ASM_ax + reg; | |
1084 break; | |
1085 default: | |
1086 reg = TOK_ASM_eax + reg; | |
1087 break; | |
1088 } | |
1089 snprintf(buf, sizeof(buf), "%%%s", get_tok_str(reg, NULL)); | |
1090 cstr_cat(add_str, buf); | |
1091 } | |
1092 } | |
1093 | |
1094 /* generate prolog and epilog code for asm statment */ | |
1095 static void asm_gen_code(ASMOperand *operands, int nb_operands, | |
1096 int nb_outputs, int is_output, | |
314 | 1097 uint8_t *clobber_regs, |
1098 int out_reg) | |
217 | 1099 { |
1100 uint8_t regs_allocated[NB_ASM_REGS]; | |
1101 ASMOperand *op; | |
1102 int i, reg; | |
1103 static uint8_t reg_saved[NB_SAVED_REGS] = { 3, 6, 7 }; | |
1104 | |
1105 /* mark all used registers */ | |
1106 memcpy(regs_allocated, clobber_regs, sizeof(regs_allocated)); | |
1107 for(i = 0; i < nb_operands;i++) { | |
1108 op = &operands[i]; | |
1109 if (op->reg >= 0) | |
1110 regs_allocated[op->reg] = 1; | |
1111 } | |
1112 if (!is_output) { | |
1113 /* generate reg save code */ | |
1114 for(i = 0; i < NB_SAVED_REGS; i++) { | |
1115 reg = reg_saved[i]; | |
1116 if (regs_allocated[reg]) | |
1117 g(0x50 + reg); | |
1118 } | |
1119 | |
1120 /* generate load code */ | |
314 | 1121 for(i = 0; i < nb_operands; i++) { |
217 | 1122 op = &operands[i]; |
1123 if (op->reg >= 0) { | |
314 | 1124 if ((op->vt->r & VT_VALMASK) == VT_LLOCAL && |
1125 op->is_memory) { | |
1126 /* memory reference case (for both input and | |
1127 output cases) */ | |
243 | 1128 SValue sv; |
1129 sv = *op->vt; | |
314 | 1130 sv.r = (sv.r & ~VT_VALMASK) | VT_LOCAL; |
1131 load(op->reg, &sv); | |
1132 } else if (i >= nb_outputs || op->is_rw) { | |
1133 /* load value in register */ | |
1134 load(op->reg, op->vt); | |
1135 if (op->is_llong) { | |
1136 SValue sv; | |
1137 sv = *op->vt; | |
1138 sv.c.ul += 4; | |
1139 load(TREG_EDX, &sv); | |
1140 } | |
243 | 1141 } |
217 | 1142 } |
1143 } | |
1144 } else { | |
1145 /* generate save code */ | |
1146 for(i = 0 ; i < nb_outputs; i++) { | |
1147 op = &operands[i]; | |
314 | 1148 if (op->reg >= 0) { |
1149 if ((op->vt->r & VT_VALMASK) == VT_LLOCAL) { | |
1150 if (!op->is_memory) { | |
1151 SValue sv; | |
1152 sv = *op->vt; | |
1153 sv.r = (sv.r & ~VT_VALMASK) | VT_LOCAL; | |
1154 load(out_reg, &sv); | |
1155 | |
1156 sv.r = (sv.r & ~VT_VALMASK) | out_reg; | |
1157 store(op->reg, &sv); | |
1158 } | |
1159 } else { | |
1160 store(op->reg, op->vt); | |
1161 if (op->is_llong) { | |
1162 SValue sv; | |
1163 sv = *op->vt; | |
1164 sv.c.ul += 4; | |
1165 store(TREG_EDX, &sv); | |
1166 } | |
243 | 1167 } |
217 | 1168 } |
1169 } | |
1170 /* generate reg restore code */ | |
1171 for(i = NB_SAVED_REGS - 1; i >= 0; i--) { | |
1172 reg = reg_saved[i]; | |
1173 if (regs_allocated[reg]) | |
1174 g(0x58 + reg); | |
1175 } | |
1176 } | |
1177 } | |
1178 | |
1179 static void asm_clobber(uint8_t *clobber_regs, const char *str) | |
1180 { | |
1181 int reg; | |
1182 TokenSym *ts; | |
1183 | |
1184 if (!strcmp(str, "memory") || | |
1185 !strcmp(str, "cc")) | |
1186 return; | |
1187 ts = tok_alloc(str, strlen(str)); | |
1188 reg = ts->tok; | |
1189 if (reg >= TOK_ASM_eax && reg <= TOK_ASM_edi) { | |
1190 reg -= TOK_ASM_eax; | |
1191 } else if (reg >= TOK_ASM_ax && reg <= TOK_ASM_di) { | |
1192 reg -= TOK_ASM_ax; | |
1193 } else { | |
1194 error("invalid clobber register '%s'", str); | |
1195 } | |
1196 clobber_regs[reg] = 1; | |
1197 } |