1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
31 */
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
49 ST_DATA char debug_modes;
51 ST_DATA SValue *vtop;
52 static SValue _vstack[1 + VSTACK_SIZE];
53 #define vstack (_vstack + 1)
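/* vtop points at the top of the value stack; the stack is empty when
   vtop == vstack - 1, which is why _vstack reserves one extra guard slot
   (see tccgen_init() and check_vstack() below). */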
55 ST_DATA int const_wanted; /* true if constant wanted */
56 ST_DATA int nocode_wanted; /* no code generation wanted */
57 #define unevalmask 0xffff /* unevaluated subexpression */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
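/* nocode_wanted doubles as a flag word: the low 16 bits (unevalmask) track
   unevaluated subexpressions, 0x20000000 is set by CODE_OFF() after
   unconditional jumps, and 0x80000000 marks global scope (set in
   tccgen_compile()), where only static data is emitted. */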
65 static void tcc_tcov_block_begin(void);
67 /* Clear 'nocode_wanted' at label if it was used */
68 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
69 static int gind(void) { int t = ind; CODE_ON(); if (debug_modes) tcc_tcov_block_begin(); return t; }
71 /* Set 'nocode_wanted' after unconditional jumps */
72 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
73 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
75 /* These are #undef'd at the end of this file */
76 #define gjmp_addr gjmp_addr_acs
77 #define gjmp gjmp_acs
78 /* <---- */
80 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
81 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
82 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
83 ST_DATA int func_vc;
84 static int last_line_num, new_file, func_ind; /* debug info control */
85 ST_DATA const char *funcname;
86 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
87 static CString initstr;
89 #if PTR_SIZE == 4
90 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_INT
92 #elif LONG_SIZE == 4
93 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T VT_LLONG
95 #else
96 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
97 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
98 #endif
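/* size_t/ptrdiff_t map to (unsigned) int on 32-bit pointers, to (unsigned)
   long long where long is 32-bit, and otherwise to (unsigned) long, which
   TCC represents as VT_LONG | VT_LLONG. */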
100 ST_DATA struct switch_t {
101 struct case_t {
102 int64_t v1, v2;
103 int sym;
104 } **p; int n; /* list of case ranges */
105 int def_sym; /* default symbol */
106 int *bsym;
107 struct scope *scope;
108 struct switch_t *prev;
109 SValue sv;
110 } *cur_switch; /* current switch */
112 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
113 /* list of temporary local variables on the stack in the current function */
114 ST_DATA struct temp_local_variable {
115 int location; // offset on stack, stored in SValue.c.i
116 short size;
117 short align;
118 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
119 short nb_temp_local_vars;
121 static struct scope {
122 struct scope *prev;
123 struct { int loc, locorig, num; } vla;
124 struct { Sym *s; int n; } cl;
125 int *bsym, *csym;
126 Sym *lstk, *llstk;
127 } *cur_scope, *loop_scope, *root_scope;
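/* cur_scope is the innermost block scope, loop_scope the scope of the
   enclosing loop (its bsym/csym collect pending break/continue jumps),
   and root_scope the scope of the current function body. */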
129 typedef struct {
130 Section *sec;
131 int local_offset;
132 Sym *flex_array_ref;
133 } init_params;
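/* init_params describes where an initializer is written: a data section
   (or NULL when the initializer is evaluated at run time into automatic
   storage), plus bookkeeping for flexible array members. */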
135 /********************************************************/
136 /* stab debug support */
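/* Basic types are emitted as stabs range strings of the form
   "name:t<id>=r<id>;<low>;<high>;". The index into default_debug[] plus 1
   is the stabs type id; debug_next_type continues numbering from there
   (see tcc_get_debug_info() below). */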
138 static const struct {
139 int type;
140 const char *name;
141 } default_debug[] = {
142 { VT_INT, "int:t1=r1;-2147483648;2147483647;" },
143 { VT_BYTE, "char:t2=r2;0;127;" },
144 #if LONG_SIZE == 4
145 { VT_LONG | VT_INT, "long int:t3=r3;-2147483648;2147483647;" },
146 #else
147 { VT_LLONG | VT_LONG, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
148 #endif
149 { VT_INT | VT_UNSIGNED, "unsigned int:t4=r4;0;037777777777;" },
150 #if LONG_SIZE == 4
151 { VT_LONG | VT_INT | VT_UNSIGNED, "long unsigned int:t5=r5;0;037777777777;" },
152 #else
153 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
154 { VT_LLONG | VT_LONG | VT_UNSIGNED, "long unsigned int:t5=r5;0;01777777777777777777777;" },
155 #endif
156 { VT_QLONG, "__int128:t6=r6;0;-1;" },
157 { VT_QLONG | VT_UNSIGNED, "__int128 unsigned:t7=r7;0;-1;" },
158 { VT_LLONG, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
159 { VT_LLONG | VT_UNSIGNED, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
160 { VT_SHORT, "short int:t10=r10;-32768;32767;" },
161 { VT_SHORT | VT_UNSIGNED, "short unsigned int:t11=r11;0;65535;" },
162 { VT_BYTE | VT_DEFSIGN, "signed char:t12=r12;-128;127;" },
163 { VT_BYTE | VT_DEFSIGN | VT_UNSIGNED, "unsigned char:t13=r13;0;255;" },
164 { VT_FLOAT, "float:t14=r1;4;0;" },
165 { VT_DOUBLE, "double:t15=r1;8;0;" },
166 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
167 { VT_DOUBLE | VT_LONG, "long double:t16=r1;8;0;" },
168 #else
169 { VT_LDOUBLE, "long double:t16=r1;16;0;" },
170 #endif
171 { -1, "_Float32:t17=r1;4;0;" },
172 { -1, "_Float64:t18=r1;8;0;" },
173 { -1, "_Float128:t19=r1;16;0;" },
174 { -1, "_Float32x:t20=r1;8;0;" },
175 { -1, "_Float64x:t21=r1;16;0;" },
176 { -1, "_Decimal32:t22=r1;4;0;" },
177 { -1, "_Decimal64:t23=r1;8;0;" },
178 { -1, "_Decimal128:t24=r1;16;0;" },
179 /* if default char is unsigned */
180 { VT_BYTE | VT_UNSIGNED, "unsigned char:t25=r25;0;255;" },
181 /* boolean type */
182 { VT_BOOL, "bool:t26=r26;0;255;" },
183 { VT_VOID, "void:t27=27" },
184 };
186 static int debug_next_type;
188 static struct debug_hash {
189 int debug_type;
190 Sym *type;
191 } *debug_hash;
193 static int n_debug_hash;
195 static struct debug_info {
196 int start;
197 int end;
198 int n_sym;
199 struct debug_sym {
200 int type;
201 unsigned long value;
202 char *str;
203 Section *sec;
204 int sym_index;
205 } *sym;
206 struct debug_info *child, *next, *last, *parent;
207 } *debug_info, *debug_info_root;
209 static struct {
210 unsigned long offset;
211 unsigned long last_file_name;
212 unsigned long last_func_name;
213 int ind;
214 int line;
215 } tcov_data;
217 /********************************************************/
218 #if 1
219 #define precedence_parser
220 static void init_prec(void);
221 #endif
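/* When precedence_parser is defined, binary expressions are parsed with a
   table-driven operator-precedence parser set up by init_prec() instead of
   a fixed cascade of recursive parsing functions. */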
222 /********************************************************/
223 #ifndef CONFIG_TCC_ASM
224 ST_FUNC void asm_instr(void)
225 {
226 tcc_error("inline asm() not supported");
227 }
228 ST_FUNC void asm_global_instr(void)
229 {
230 tcc_error("inline asm() not supported");
231 }
232 #endif
234 /* ------------------------------------------------------------------------- */
235 static void gen_cast(CType *type);
236 static void gen_cast_s(int t);
237 static inline CType *pointed_type(CType *type);
238 static int is_compatible_types(CType *type1, CType *type2);
239 static int parse_btype(CType *type, AttributeDef *ad);
240 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
241 static void parse_expr_type(CType *type);
242 static void init_putv(init_params *p, CType *type, unsigned long c);
243 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
244 static void block(int is_expr);
245 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
246 static void decl(int l);
247 static int decl0(int l, int is_for_loop_init, Sym *);
248 static void expr_eq(void);
249 static void vla_runtime_type_size(CType *type, int *a);
250 static int is_compatible_unqualified_types(CType *type1, CType *type2);
251 static inline int64_t expr_const64(void);
252 static void vpush64(int ty, unsigned long long v);
253 static void vpush(CType *type);
254 static int gvtst(int inv, int t);
255 static void gen_inline_functions(TCCState *s);
256 static void free_inline_functions(TCCState *s);
257 static void skip_or_save_block(TokenString **str);
258 static void gv_dup(void);
259 static int get_temp_local_var(int size,int align);
260 static void clear_temp_local_var_list();
261 static void cast_error(CType *st, CType *dt);
263 ST_INLN int is_float(int t)
265 int bt = t & VT_BTYPE;
266 return bt == VT_LDOUBLE
267 || bt == VT_DOUBLE
268 || bt == VT_FLOAT
269 || bt == VT_QFLOAT;
272 static inline int is_integer_btype(int bt)
274 return bt == VT_BYTE
275 || bt == VT_BOOL
276 || bt == VT_SHORT
277 || bt == VT_INT
278 || bt == VT_LLONG;
281 static int btype_size(int bt)
283 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
284 bt == VT_SHORT ? 2 :
285 bt == VT_INT ? 4 :
286 bt == VT_LLONG ? 8 :
287 bt == VT_PTR ? PTR_SIZE : 0;
290 /* returns function return register from type */
291 static int R_RET(int t)
293 if (!is_float(t))
294 return REG_IRET;
295 #ifdef TCC_TARGET_X86_64
296 if ((t & VT_BTYPE) == VT_LDOUBLE)
297 return TREG_ST0;
298 #elif defined TCC_TARGET_RISCV64
299 if ((t & VT_BTYPE) == VT_LDOUBLE)
300 return REG_IRET;
301 #endif
302 return REG_FRET;
305 /* returns 2nd function return register, if any */
306 static int R2_RET(int t)
308 t &= VT_BTYPE;
309 #if PTR_SIZE == 4
310 if (t == VT_LLONG)
311 return REG_IRE2;
312 #elif defined TCC_TARGET_X86_64
313 if (t == VT_QLONG)
314 return REG_IRE2;
315 if (t == VT_QFLOAT)
316 return REG_FRE2;
317 #elif defined TCC_TARGET_RISCV64
318 if (t == VT_LDOUBLE)
319 return REG_IRE2;
320 #endif
321 return VT_CONST;
324 /* returns true for two-word types */
325 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
327 /* put function return registers to stack value */
328 static void PUT_R_RET(SValue *sv, int t)
330 sv->r = R_RET(t), sv->r2 = R2_RET(t);
333 /* returns function return register class for type t */
334 static int RC_RET(int t)
336 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
339 /* returns generic register class for type t */
340 static int RC_TYPE(int t)
342 if (!is_float(t))
343 return RC_INT;
344 #ifdef TCC_TARGET_X86_64
345 if ((t & VT_BTYPE) == VT_LDOUBLE)
346 return RC_ST0;
347 if ((t & VT_BTYPE) == VT_QFLOAT)
348 return RC_FRET;
349 #elif defined TCC_TARGET_RISCV64
350 if ((t & VT_BTYPE) == VT_LDOUBLE)
351 return RC_INT;
352 #endif
353 return RC_FLOAT;
356 /* returns 2nd register class corresponding to t and rc */
357 static int RC2_TYPE(int t, int rc)
359 if (!USING_TWO_WORDS(t))
360 return 0;
361 #ifdef RC_IRE2
362 if (rc == RC_IRET)
363 return RC_IRE2;
364 #endif
365 #ifdef RC_FRE2
366 if (rc == RC_FRET)
367 return RC_FRE2;
368 #endif
369 if (rc & RC_FLOAT)
370 return RC_FLOAT;
371 return RC_INT;
374 /* we use our own 'finite' function to avoid potential problems with
375 non standard math libs */
376 /* XXX: endianness dependent */
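/* (finite iff the 11 exponent bits of the high 32-bit word are not all
   ones: if they are, the OR/add below wraps to 0 and the shift yields 0) */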
377 ST_FUNC int ieee_finite(double d)
379 int p[4];
380 memcpy(p, &d, sizeof(double));
381 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
384 /* compiling intel long double natively */
385 #if (defined __i386__ || defined __x86_64__) \
386 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
387 # define TCC_IS_NATIVE_387
388 #endif
390 ST_FUNC void test_lvalue(void)
392 if (!(vtop->r & VT_LVAL))
393 expect("lvalue");
396 ST_FUNC void check_vstack(void)
398 if (vtop != vstack - 1)
399 tcc_error("internal compiler error: vstack leak (%d)",
400 (int)(vtop - vstack + 1));
403 /* ------------------------------------------------------------------------- */
404 /* vstack debugging aid */
406 #if 0
407 void pv (const char *lbl, int a, int b)
409 int i;
410 for (i = a; i < a + b; ++i) {
411 SValue *p = &vtop[-i];
412 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
413 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
416 #endif
418 /* ------------------------------------------------------------------------- */
419 /* start of translation unit info */
420 ST_FUNC void tcc_debug_start(TCCState *s1)
422 if (s1->do_debug) {
423 int i;
424 char buf[512];
426 /* file info: full path + filename */
427 section_sym = put_elf_sym(symtab_section, 0, 0,
428 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
429 text_section->sh_num, NULL);
430 getcwd(buf, sizeof(buf));
431 #ifdef _WIN32
432 normalize_slashes(buf);
433 #endif
434 pstrcat(buf, sizeof(buf), "/");
435 put_stabs_r(s1, buf, N_SO, 0, 0,
436 text_section->data_offset, text_section, section_sym);
437 put_stabs_r(s1, file->prev ? file->prev->filename : file->filename,
438 N_SO, 0, 0,
439 text_section->data_offset, text_section, section_sym);
440 for (i = 0; i < sizeof (default_debug) / sizeof (default_debug[0]); i++)
441 put_stabs(s1, default_debug[i].name, N_LSYM, 0, 0, 0);
443 new_file = last_line_num = 0;
444 func_ind = -1;
445 debug_next_type = sizeof(default_debug) / sizeof(default_debug[0]);
446 debug_hash = NULL;
447 n_debug_hash = 0;
449 /* we're currently 'including' the <command line> */
450 tcc_debug_bincl(s1);
453 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
454 symbols can be safely used */
455 put_elf_sym(symtab_section, 0, 0,
456 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
457 SHN_ABS, file->filename);
460 /* put end of translation unit info */
461 ST_FUNC void tcc_debug_end(TCCState *s1)
463 if (!s1->do_debug)
464 return;
465 put_stabs_r(s1, NULL, N_SO, 0, 0,
466 text_section->data_offset, text_section, section_sym);
467 tcc_free(debug_hash);
470 static BufferedFile* put_new_file(TCCState *s1)
472 BufferedFile *f = file;
473 /* use upper file if from inline ":asm:" */
474 if (f->filename[0] == ':')
475 f = f->prev;
476 if (f && new_file) {
477 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
478 new_file = last_line_num = 0;
480 return f;
483 /* put alternative filename */
484 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
486 if (0 == strcmp(file->filename, filename))
487 return;
488 pstrcpy(file->filename, sizeof(file->filename), filename);
489 new_file = 1;
492 /* begin of #include */
493 ST_FUNC void tcc_debug_bincl(TCCState *s1)
495 if (!s1->do_debug)
496 return;
497 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
498 new_file = 1;
501 /* end of #include */
502 ST_FUNC void tcc_debug_eincl(TCCState *s1)
504 if (!s1->do_debug)
505 return;
506 put_stabn(s1, N_EINCL, 0, 0, 0);
507 new_file = 1;
510 /* generate line number info */
511 static void tcc_debug_line(TCCState *s1)
513 BufferedFile *f;
514 if (!s1->do_debug
515 || cur_text_section != text_section
516 || !(f = put_new_file(s1))
517 || last_line_num == f->line_num)
518 return;
519 if (func_ind != -1) {
520 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
521 } else {
522 /* from tcc_assemble */
523 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
525 last_line_num = f->line_num;
528 static void tcc_debug_stabs (TCCState *s1, const char *str, int type, unsigned long value,
529 Section *sec, int sym_index)
531 struct debug_sym *s;
533 if (debug_info) {
534 debug_info->sym =
535 (struct debug_sym *)tcc_realloc (debug_info->sym,
536 sizeof(struct debug_sym) *
537 (debug_info->n_sym + 1));
538 s = debug_info->sym + debug_info->n_sym++;
539 s->type = type;
540 s->value = value;
541 s->str = tcc_strdup(str);
542 s->sec = sec;
543 s->sym_index = sym_index;
545 else if (sec)
546 put_stabs_r (s1, str, type, 0, 0, value, sec, sym_index);
547 else
548 put_stabs (s1, str, type, 0, 0, value);
551 static void tcc_debug_stabn(TCCState *s1, int type, int value)
553 if (!s1->do_debug)
554 return;
555 if (type == N_LBRAC) {
556 struct debug_info *info =
557 (struct debug_info *) tcc_mallocz(sizeof (*info));
559 info->start = value;
560 info->parent = debug_info;
561 if (debug_info) {
562 if (debug_info->child) {
563 if (debug_info->child->last)
564 debug_info->child->last->next = info;
565 else
566 debug_info->child->next = info;
567 debug_info->child->last = info;
569 else
570 debug_info->child = info;
572 else
573 debug_info_root = info;
574 debug_info = info;
576 else {
577 debug_info->end = value;
578 debug_info = debug_info->parent;
582 static void tcc_get_debug_info(TCCState *s1, Sym *s, CString *result)
584 int type;
585 int n = 0;
586 int debug_type = -1;
587 Sym *t = s;
588 CString str;
590 for (;;) {
591 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
592 if ((type & VT_BTYPE) != VT_BYTE)
593 type &= ~VT_DEFSIGN;
594 if (type == VT_PTR || type == (VT_PTR | VT_ARRAY))
595 n++, t = t->type.ref;
596 else
597 break;
599 if ((type & VT_BTYPE) == VT_STRUCT) {
600 int i;
602 t = t->type.ref;
603 for (i = 0; i < n_debug_hash; i++) {
604 if (t == debug_hash[i].type) {
605 debug_type = debug_hash[i].debug_type;
606 break;
609 if (debug_type == -1) {
610 debug_type = ++debug_next_type;
611 debug_hash = (struct debug_hash *)
612 tcc_realloc (debug_hash,
613 (n_debug_hash + 1) * sizeof(*debug_hash));
614 debug_hash[n_debug_hash].debug_type = debug_type;
615 debug_hash[n_debug_hash++].type = t;
616 cstr_new (&str);
617 cstr_printf (&str, "%s:T%d=%c%d",
618 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
619 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
620 debug_type,
621 IS_UNION (t->type.t) ? 'u' : 's',
622 t->c);
623 while (t->next) {
624 int pos, size, align;
626 t = t->next;
627 cstr_printf (&str, "%s:",
628 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
629 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
630 tcc_get_debug_info (s1, t, &str);
631 if (t->type.t & VT_BITFIELD) {
632 pos = t->c * 8 + BIT_POS(t->type.t);
633 size = BIT_SIZE(t->type.t);
635 else {
636 pos = t->c * 8;
637 size = type_size(&t->type, &align) * 8;
639 cstr_printf (&str, ",%d,%d;", pos, size);
641 cstr_printf (&str, ";");
642 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
643 cstr_free (&str);
646 else if (IS_ENUM(type)) {
647 Sym *e = t = t->type.ref;
649 debug_type = ++debug_next_type;
650 cstr_new (&str);
651 cstr_printf (&str, "%s:T%d=e",
652 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
653 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
654 debug_type);
655 while (t->next) {
656 t = t->next;
657 cstr_printf (&str, "%s:",
658 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
659 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
660 cstr_printf (&str, e->type.t & VT_UNSIGNED ? "%u," : "%d,",
661 (int)t->enum_val);
663 cstr_printf (&str, ";");
664 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
665 cstr_free (&str);
667 else if ((type & VT_BTYPE) != VT_FUNC) {
668 type &= ~VT_STRUCT_MASK;
669 for (debug_type = 1;
670 debug_type <= sizeof(default_debug) / sizeof(default_debug[0]);
671 debug_type++)
672 if (default_debug[debug_type - 1].type == type)
673 break;
674 if (debug_type > sizeof(default_debug) / sizeof(default_debug[0]))
675 return;
677 if (n > 0)
678 cstr_printf (result, "%d=", ++debug_next_type);
679 t = s;
680 for (;;) {
681 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
682 if ((type & VT_BTYPE) != VT_BYTE)
683 type &= ~VT_DEFSIGN;
684 if (type == VT_PTR)
685 cstr_printf (result, "%d=*", ++debug_next_type);
686 else if (type == (VT_PTR | VT_ARRAY))
687 cstr_printf (result, "%d=ar1;0;%d;",
688 ++debug_next_type, t->type.ref->c - 1);
689 else if (type == VT_FUNC) {
690 cstr_printf (result, "%d=f", ++debug_next_type);
691 tcc_get_debug_info (s1, t->type.ref, result);
692 return;
694 else
695 break;
696 t = t->type.ref;
698 cstr_printf (result, "%d", debug_type);
701 static void tcc_debug_finish (TCCState *s1, struct debug_info *cur)
703 while (cur) {
704 int i;
705 struct debug_info *next = cur->next;
707 for (i = 0; i < cur->n_sym; i++) {
708 struct debug_sym *s = &cur->sym[i];
710 if (s->sec)
711 put_stabs_r(s1, s->str, s->type, 0, 0, s->value,
712 s->sec, s->sym_index);
713 else
714 put_stabs(s1, s->str, s->type, 0, 0, s->value);
715 tcc_free (s->str);
717 tcc_free (cur->sym);
718 put_stabn(s1, N_LBRAC, 0, 0, cur->start);
719 tcc_debug_finish (s1, cur->child);
720 put_stabn(s1, N_RBRAC, 0, 0, cur->end);
721 tcc_free (cur);
722 cur = next;
726 static void tcc_add_debug_info(TCCState *s1, int param, Sym *s, Sym *e)
728 CString debug_str;
729 if (!s1->do_debug)
730 return;
731 cstr_new (&debug_str);
732 for (; s != e; s = s->prev) {
733 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
734 continue;
735 cstr_reset (&debug_str);
736 cstr_printf (&debug_str, "%s:%s", get_tok_str(s->v, NULL), param ? "p" : "");
737 tcc_get_debug_info(s1, s, &debug_str);
738 tcc_debug_stabs(s1, debug_str.data, param ? N_PSYM : N_LSYM, s->c, NULL, 0);
740 cstr_free (&debug_str);
743 /* put function symbol */
744 static void tcc_debug_funcstart(TCCState *s1, Sym *sym)
746 CString debug_str;
747 BufferedFile *f;
748 if (!s1->do_debug)
749 return;
750 debug_info_root = NULL;
751 debug_info = NULL;
752 tcc_debug_stabn(s1, N_LBRAC, ind - func_ind);
753 if (!(f = put_new_file(s1)))
754 return;
755 cstr_new (&debug_str);
756 cstr_printf(&debug_str, "%s:%c", funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
757 tcc_get_debug_info(s1, sym->type.ref, &debug_str);
758 put_stabs_r(s1, debug_str.data, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
759 cstr_free (&debug_str);
761 tcc_debug_line(s1);
764 /* put function size */
765 static void tcc_debug_funcend(TCCState *s1, int size)
767 if (!s1->do_debug)
768 return;
769 tcc_debug_stabn(s1, N_RBRAC, size);
770 tcc_debug_finish (s1, debug_info_root);
774 static void tcc_debug_extern_sym(TCCState *s1, Sym *sym, int sh_num, int sym_bind, int sym_type)
776 Section *s;
777 CString str;
779 if (!s1->do_debug)
780 return;
781 if (sym_type == STT_FUNC || sym->v >= SYM_FIRST_ANOM)
782 return;
783 s = s1->sections[sh_num];
785 cstr_new (&str);
786 cstr_printf (&str, "%s:%c",
787 get_tok_str(sym->v, NULL),
788 sym_bind == STB_GLOBAL ? 'G' : local_scope ? 'V' : 'S'
789 );
790 tcc_get_debug_info(s1, sym, &str);
791 if (sym_bind == STB_GLOBAL)
792 tcc_debug_stabs(s1, str.data, N_GSYM, 0, NULL, 0);
793 else
794 tcc_debug_stabs(s1, str.data,
795 (sym->type.t & VT_STATIC) && data_section == s
796 ? N_STSYM : N_LCSYM, 0, s, sym->c);
797 cstr_free (&str);
800 static void tcc_debug_typedef(TCCState *s1, Sym *sym)
802 CString str;
804 if (!s1->do_debug)
805 return;
806 cstr_new (&str);
807 cstr_printf (&str, "%s:t",
808 (sym->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
809 ? "" : get_tok_str(sym->v & ~SYM_FIELD, NULL));
810 tcc_get_debug_info(s1, sym, &str);
811 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
812 cstr_free (&str);
815 /* ------------------------------------------------------------------------- */
816 /* for section layout see lib/tcov.c */
818 static void tcc_tcov_block_end(int line);
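/* tcc_tcov_block_begin() appends file/function name records to the .tcov
   section, reserves a 16-byte record holding the line range and a 64-bit
   counter, and emits code that increments that counter at run time. */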
820 static void tcc_tcov_block_begin(void)
822 SValue sv;
823 void *ptr;
824 unsigned long last_offset = tcov_data.offset;
826 tcc_tcov_block_end (0);
827 if (tcc_state->test_coverage == 0 || nocode_wanted)
828 return;
830 if (tcov_data.last_file_name == 0 ||
831 strcmp ((const char *)(tcov_section->data + tcov_data.last_file_name),
832 file->true_filename) != 0) {
833 char wd[1024];
834 CString cstr;
836 if (tcov_data.last_func_name)
837 section_ptr_add(tcov_section, 1);
838 if (tcov_data.last_file_name)
839 section_ptr_add(tcov_section, 1);
840 tcov_data.last_func_name = 0;
841 cstr_new (&cstr);
842 if (file->true_filename[0] == '/') {
843 tcov_data.last_file_name = tcov_section->data_offset;
844 cstr_printf (&cstr, "%s", file->true_filename);
846 else {
847 getcwd (wd, sizeof(wd));
848 tcov_data.last_file_name = tcov_section->data_offset + strlen(wd) + 1;
849 cstr_printf (&cstr, "%s/%s", wd, file->true_filename);
851 ptr = section_ptr_add(tcov_section, cstr.size + 1);
852 strcpy((char *)ptr, cstr.data);
853 #ifdef _WIN32
854 normalize_slashes((char *)ptr);
855 #endif
856 cstr_free (&cstr);
858 if (tcov_data.last_func_name == 0 ||
859 strcmp ((const char *)(tcov_section->data + tcov_data.last_func_name),
860 funcname) != 0) {
861 size_t len;
863 if (tcov_data.last_func_name)
864 section_ptr_add(tcov_section, 1);
865 tcov_data.last_func_name = tcov_section->data_offset;
866 len = strlen (funcname);
867 ptr = section_ptr_add(tcov_section, len + 1);
868 strcpy((char *)ptr, funcname);
869 section_ptr_add(tcov_section, -tcov_section->data_offset & 7);
870 ptr = section_ptr_add(tcov_section, 8);
871 write64le (ptr, file->line_num);
873 if (ind == tcov_data.ind && tcov_data.line == file->line_num)
874 tcov_data.offset = last_offset;
875 else {
876 Sym label = {0};
877 label.type.t = VT_LLONG | VT_STATIC;
879 ptr = section_ptr_add(tcov_section, 16);
880 tcov_data.line = file->line_num;
881 write64le (ptr, (tcov_data.line << 8) | 0xff);
882 put_extern_sym(&label, tcov_section,
883 ((unsigned char *)ptr - tcov_section->data) + 8, 0);
884 sv.type = label.type;
885 sv.r = VT_SYM | VT_LVAL | VT_CONST;
886 sv.r2 = VT_CONST;
887 sv.c.i = 0;
888 sv.sym = &label;
889 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || \
890 defined TCC_TARGET_ARM || defined TCC_TARGET_ARM64 || \
891 defined TCC_TARGET_RISCV64
892 gen_increment_tcov (&sv);
893 #else
894 vpushv(&sv);
895 inc(0, TOK_INC);
896 vpop();
897 #endif
898 tcov_data.offset = (unsigned char *)ptr - tcov_section->data;
899 tcov_data.ind = ind;
903 static void tcc_tcov_block_end(int line)
905 if (tcc_state->test_coverage == 0)
906 return;
907 if (tcov_data.offset) {
908 void *ptr = tcov_section->data + tcov_data.offset;
909 unsigned long long nline = line ? line : file->line_num;
911 write64le (ptr, (read64le (ptr) & 0xfffffffffull) | (nline << 36));
912 tcov_data.offset = 0;
916 static void tcc_tcov_check_line(int start)
918 if (tcc_state->test_coverage == 0)
919 return;
920 if (tcov_data.line != file->line_num) {
921 if ((tcov_data.line + 1) != file->line_num) {
922 tcc_tcov_block_end (tcov_data.line);
923 if (start)
924 tcc_tcov_block_begin ();
926 else
927 tcov_data.line = file->line_num;
931 static void tcc_tcov_start(void)
933 if (tcc_state->test_coverage == 0)
934 return;
935 memset (&tcov_data, 0, sizeof (tcov_data));
936 if (tcov_section == NULL) {
937 tcov_section = new_section(tcc_state, ".tcov", SHT_PROGBITS,
938 SHF_ALLOC | SHF_WRITE);
939 section_ptr_add(tcov_section, 4); // pointer to executable name
943 static void tcc_tcov_end(void)
945 if (tcc_state->test_coverage == 0)
946 return;
947 if (tcov_data.last_func_name)
948 section_ptr_add(tcov_section, 1);
949 if (tcov_data.last_file_name)
950 section_ptr_add(tcov_section, 1);
953 /* ------------------------------------------------------------------------- */
954 /* initialize vstack and types. This must also be done for tcc -E */
955 ST_FUNC void tccgen_init(TCCState *s1)
957 vtop = vstack - 1;
958 memset(vtop, 0, sizeof *vtop);
960 /* define some often used types */
961 int_type.t = VT_INT;
963 char_type.t = VT_BYTE;
964 if (s1->char_is_unsigned)
965 char_type.t |= VT_UNSIGNED;
966 char_pointer_type = char_type;
967 mk_pointer(&char_pointer_type);
969 func_old_type.t = VT_FUNC;
970 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
971 func_old_type.ref->f.func_call = FUNC_CDECL;
972 func_old_type.ref->f.func_type = FUNC_OLD;
973 #ifdef precedence_parser
974 init_prec();
975 #endif
976 cstr_new(&initstr);
979 ST_FUNC int tccgen_compile(TCCState *s1)
981 cur_text_section = NULL;
982 funcname = "";
983 anon_sym = SYM_FIRST_ANOM;
984 section_sym = 0;
985 const_wanted = 0;
986 nocode_wanted = 0x80000000;
987 local_scope = 0;
988 debug_modes = s1->do_debug | s1->test_coverage << 1;
990 tcc_debug_start(s1);
991 tcc_tcov_start ();
992 #ifdef TCC_TARGET_ARM
993 arm_init(s1);
994 #endif
995 #ifdef INC_DEBUG
996 printf("%s: **** new file\n", file->filename);
997 #endif
998 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
999 next();
1000 decl(VT_CONST);
1001 gen_inline_functions(s1);
1002 check_vstack();
1003 /* end of translation unit info */
1004 tcc_debug_end(s1);
1005 tcc_tcov_end ();
1006 return 0;
1009 ST_FUNC void tccgen_finish(TCCState *s1)
1011 cstr_free(&initstr);
1012 free_inline_functions(s1);
1013 sym_pop(&global_stack, NULL, 0);
1014 sym_pop(&local_stack, NULL, 0);
1015 /* free preprocessor macros */
1016 free_defines(NULL);
1017 /* free sym_pools */
1018 dynarray_reset(&sym_pools, &nb_sym_pools);
1019 sym_free_first = NULL;
1022 /* ------------------------------------------------------------------------- */
1023 ST_FUNC ElfSym *elfsym(Sym *s)
1025 if (!s || !s->c)
1026 return NULL;
1027 return &((ElfSym *)symtab_section->data)[s->c];
1030 /* apply storage attributes to Elf symbol */
1031 ST_FUNC void update_storage(Sym *sym)
1033 ElfSym *esym;
1034 int sym_bind, old_sym_bind;
1036 esym = elfsym(sym);
1037 if (!esym)
1038 return;
1040 if (sym->a.visibility)
1041 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
1042 | sym->a.visibility;
1044 if (sym->type.t & (VT_STATIC | VT_INLINE))
1045 sym_bind = STB_LOCAL;
1046 else if (sym->a.weak)
1047 sym_bind = STB_WEAK;
1048 else
1049 sym_bind = STB_GLOBAL;
1050 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
1051 if (sym_bind != old_sym_bind) {
1052 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
1055 #ifdef TCC_TARGET_PE
1056 if (sym->a.dllimport)
1057 esym->st_other |= ST_PE_IMPORT;
1058 if (sym->a.dllexport)
1059 esym->st_other |= ST_PE_EXPORT;
1060 #endif
1062 #if 0
1063 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
1064 get_tok_str(sym->v, NULL),
1065 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
1066 sym->a.visibility,
1067 sym->a.dllexport,
1068 sym->a.dllimport
1070 #endif
1073 /* ------------------------------------------------------------------------- */
1074 /* update sym->c so that it points to an external symbol in section
1075 'section' with value 'value' */
1077 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
1078 addr_t value, unsigned long size,
1079 int can_add_underscore)
1081 int sym_type, sym_bind, info, other, t;
1082 ElfSym *esym;
1083 const char *name;
1084 char buf1[256];
1086 if (!sym->c) {
1087 name = get_tok_str(sym->v, NULL);
1088 t = sym->type.t;
1089 if ((t & VT_BTYPE) == VT_FUNC) {
1090 sym_type = STT_FUNC;
1091 } else if ((t & VT_BTYPE) == VT_VOID) {
1092 sym_type = STT_NOTYPE;
1093 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
1094 sym_type = STT_FUNC;
1095 } else {
1096 sym_type = STT_OBJECT;
1098 if (t & (VT_STATIC | VT_INLINE))
1099 sym_bind = STB_LOCAL;
1100 else
1101 sym_bind = STB_GLOBAL;
1102 other = 0;
1104 #ifdef TCC_TARGET_PE
1105 if (sym_type == STT_FUNC && sym->type.ref) {
1106 Sym *ref = sym->type.ref;
1107 if (ref->a.nodecorate) {
1108 can_add_underscore = 0;
1110 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
1111 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
1112 name = buf1;
1113 other |= ST_PE_STDCALL;
1114 can_add_underscore = 0;
1117 #endif
1119 if (sym->asm_label) {
1120 name = get_tok_str(sym->asm_label, NULL);
1121 can_add_underscore = 0;
1124 if (tcc_state->leading_underscore && can_add_underscore) {
1125 buf1[0] = '_';
1126 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
1127 name = buf1;
1130 info = ELFW(ST_INFO)(sym_bind, sym_type);
1131 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
1133 if (debug_modes)
1134 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
1136 } else {
1137 esym = elfsym(sym);
1138 esym->st_value = value;
1139 esym->st_size = size;
1140 esym->st_shndx = sh_num;
1142 update_storage(sym);
1145 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
1146 addr_t value, unsigned long size)
1148 int sh_num = section ? section->sh_num : SHN_UNDEF;
1149 put_extern_sym2(sym, sh_num, value, size, 1);
1152 /* add a new relocation entry to symbol 'sym' in section 's' */
1153 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
1154 addr_t addend)
1156 int c = 0;
1158 if (nocode_wanted && s == cur_text_section)
1159 return;
1161 if (sym) {
1162 if (0 == sym->c)
1163 put_extern_sym(sym, NULL, 0, 0);
1164 c = sym->c;
1167 /* now we can add ELF relocation info */
1168 put_elf_reloca(symtab_section, s, offset, type, c, addend);
1171 #if PTR_SIZE == 4
1172 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
1174 greloca(s, sym, offset, type, 0);
1176 #endif
1178 /* ------------------------------------------------------------------------- */
1179 /* symbol allocator */
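/* Syms are allocated from SYM_POOL_NB-sized pools kept in sym_pools and
   recycled through the sym_free_first free list; defining SYM_DEBUG falls
   back to plain malloc/free. */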
1180 static Sym *__sym_malloc(void)
1182 Sym *sym_pool, *sym, *last_sym;
1183 int i;
1185 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
1186 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
1188 last_sym = sym_free_first;
1189 sym = sym_pool;
1190 for(i = 0; i < SYM_POOL_NB; i++) {
1191 sym->next = last_sym;
1192 last_sym = sym;
1193 sym++;
1195 sym_free_first = last_sym;
1196 return last_sym;
1199 static inline Sym *sym_malloc(void)
1201 Sym *sym;
1202 #ifndef SYM_DEBUG
1203 sym = sym_free_first;
1204 if (!sym)
1205 sym = __sym_malloc();
1206 sym_free_first = sym->next;
1207 return sym;
1208 #else
1209 sym = tcc_malloc(sizeof(Sym));
1210 return sym;
1211 #endif
1214 ST_INLN void sym_free(Sym *sym)
1216 #ifndef SYM_DEBUG
1217 sym->next = sym_free_first;
1218 sym_free_first = sym;
1219 #else
1220 tcc_free(sym);
1221 #endif
1224 /* push, without hashing */
1225 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
1227 Sym *s;
1229 s = sym_malloc();
1230 memset(s, 0, sizeof *s);
1231 s->v = v;
1232 s->type.t = t;
1233 s->c = c;
1234 /* add in stack */
1235 s->prev = *ps;
1236 *ps = s;
1237 return s;
1240 /* find a symbol and return its associated structure. 's' is the top
1241 of the symbol stack */
1242 ST_FUNC Sym *sym_find2(Sym *s, int v)
1244 while (s) {
1245 if (s->v == v)
1246 return s;
1247 else if (s->v == -1)
1248 return NULL;
1249 s = s->prev;
1251 return NULL;
1254 /* structure lookup */
1255 ST_INLN Sym *struct_find(int v)
1257 v -= TOK_IDENT;
1258 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1259 return NULL;
1260 return table_ident[v]->sym_struct;
1263 /* find an identifier */
1264 ST_INLN Sym *sym_find(int v)
1266 v -= TOK_IDENT;
1267 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1268 return NULL;
1269 return table_ident[v]->sym_identifier;
1272 static int sym_scope(Sym *s)
1274 if (IS_ENUM_VAL (s->type.t))
1275 return s->type.ref->sym_scope;
1276 else
1277 return s->sym_scope;
1280 /* push a given symbol on the symbol stack */
1281 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
1283 Sym *s, **ps;
1284 TokenSym *ts;
1286 if (local_stack)
1287 ps = &local_stack;
1288 else
1289 ps = &global_stack;
1290 s = sym_push2(ps, v, type->t, c);
1291 s->type.ref = type->ref;
1292 s->r = r;
1293 /* don't record fields or anonymous symbols */
1294 /* XXX: simplify */
1295 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1296 /* record symbol in token array */
1297 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1298 if (v & SYM_STRUCT)
1299 ps = &ts->sym_struct;
1300 else
1301 ps = &ts->sym_identifier;
1302 s->prev_tok = *ps;
1303 *ps = s;
1304 s->sym_scope = local_scope;
1305 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
1306 tcc_error("redeclaration of '%s'",
1307 get_tok_str(v & ~SYM_STRUCT, NULL));
1309 return s;
1312 /* push a global identifier */
1313 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
1315 Sym *s, **ps;
1316 s = sym_push2(&global_stack, v, t, c);
1317 s->r = VT_CONST | VT_SYM;
1318 /* don't record anonymous symbol */
1319 if (v < SYM_FIRST_ANOM) {
1320 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
1321 /* modify the topmost local identifier, so that sym_identifier will
1322 point to 's' when popped; happens when called from inline asm */
1323 while (*ps != NULL && (*ps)->sym_scope)
1324 ps = &(*ps)->prev_tok;
1325 s->prev_tok = *ps;
1326 *ps = s;
1328 return s;
1331 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1332 pop them yet from the list, but do remove them from the token array. */
1333 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
1335 Sym *s, *ss, **ps;
1336 TokenSym *ts;
1337 int v;
1339 s = *ptop;
1340 while(s != b) {
1341 ss = s->prev;
1342 v = s->v;
1343 /* remove symbol in token array */
1344 /* XXX: simplify */
1345 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1346 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1347 if (v & SYM_STRUCT)
1348 ps = &ts->sym_struct;
1349 else
1350 ps = &ts->sym_identifier;
1351 *ps = s->prev_tok;
1353 if (!keep)
1354 sym_free(s);
1355 s = ss;
1357 if (!keep)
1358 *ptop = b;
1361 /* ------------------------------------------------------------------------- */
1362 static void vcheck_cmp(void)
1364 /* cannot leave cpu flags set if other instructions are generated. Also
1365 avoid leaving VT_JMP anywhere except on the top of the stack
1366 because it would complicate the code generator.
1368 Don't do this when nocode_wanted. vtop might come from
1369 !nocode_wanted regions (see 88_codeopt.c) and transforming
1370 it to a register without actually generating code is wrong
1371 as their value might still be used for real. All values
1372 we push under nocode_wanted will eventually be popped
1373 again, so that the VT_CMP/VT_JMP value will be in vtop
1374 when code is unsuppressed again. */
1376 if (vtop->r == VT_CMP && !nocode_wanted)
1377 gv(RC_INT);
1380 static void vsetc(CType *type, int r, CValue *vc)
1382 if (vtop >= vstack + (VSTACK_SIZE - 1))
1383 tcc_error("memory full (vstack)");
1384 vcheck_cmp();
1385 vtop++;
1386 vtop->type = *type;
1387 vtop->r = r;
1388 vtop->r2 = VT_CONST;
1389 vtop->c = *vc;
1390 vtop->sym = NULL;
1393 ST_FUNC void vswap(void)
1395 SValue tmp;
1397 vcheck_cmp();
1398 tmp = vtop[0];
1399 vtop[0] = vtop[-1];
1400 vtop[-1] = tmp;
1403 /* pop stack value */
1404 ST_FUNC void vpop(void)
1406 int v;
1407 v = vtop->r & VT_VALMASK;
1408 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1409 /* for x86, we need to pop the FP stack */
1410 if (v == TREG_ST0) {
1411 o(0xd8dd); /* fstp %st(0) */
1412 } else
1413 #endif
1414 if (v == VT_CMP) {
1415 /* need to put correct jump if && or || without test */
1416 gsym(vtop->jtrue);
1417 gsym(vtop->jfalse);
1419 vtop--;
1422 /* push constant of type "type" with useless value */
1423 static void vpush(CType *type)
1425 vset(type, VT_CONST, 0);
1428 /* push arbitrary 64bit constant */
1429 static void vpush64(int ty, unsigned long long v)
1431 CValue cval;
1432 CType ctype;
1433 ctype.t = ty;
1434 ctype.ref = NULL;
1435 cval.i = v;
1436 vsetc(&ctype, VT_CONST, &cval);
1439 /* push integer constant */
1440 ST_FUNC void vpushi(int v)
1442 vpush64(VT_INT, v);
1445 /* push a pointer sized constant */
1446 static void vpushs(addr_t v)
1448 vpush64(VT_SIZE_T, v);
1451 /* push long long constant */
1452 static inline void vpushll(long long v)
1454 vpush64(VT_LLONG, v);
1457 ST_FUNC void vset(CType *type, int r, int v)
1459 CValue cval;
1460 cval.i = v;
1461 vsetc(type, r, &cval);
1464 static void vseti(int r, int v)
1466 CType type;
1467 type.t = VT_INT;
1468 type.ref = NULL;
1469 vset(&type, r, v);
1472 ST_FUNC void vpushv(SValue *v)
1474 if (vtop >= vstack + (VSTACK_SIZE - 1))
1475 tcc_error("memory full (vstack)");
1476 vtop++;
1477 *vtop = *v;
1480 static void vdup(void)
1482 vpushv(vtop);
1485 /* rotate n first stack elements to the bottom
1486 I1 ... In -> I2 ... In I1 [top is right]
1487 */
1488 ST_FUNC void vrotb(int n)
1490 int i;
1491 SValue tmp;
1493 vcheck_cmp();
1494 tmp = vtop[-n + 1];
1495 for(i=-n+1;i!=0;i++)
1496 vtop[i] = vtop[i+1];
1497 vtop[0] = tmp;
1500 /* rotate the n elements before entry e towards the top
1501 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1502 */
1503 ST_FUNC void vrote(SValue *e, int n)
1505 int i;
1506 SValue tmp;
1508 vcheck_cmp();
1509 tmp = *e;
1510 for(i = 0;i < n - 1; i++)
1511 e[-i] = e[-i - 1];
1512 e[-n + 1] = tmp;
1515 /* rotate n first stack elements to the top
1516 I1 ... In -> In I1 ... I(n-1) [top is right]
1517 */
1518 ST_FUNC void vrott(int n)
1520 vrote(vtop, n);
1523 /* ------------------------------------------------------------------------- */
1524 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1526 /* called from generators to set the result from relational ops */
1527 ST_FUNC void vset_VT_CMP(int op)
1529 vtop->r = VT_CMP;
1530 vtop->cmp_op = op;
1531 vtop->jfalse = 0;
1532 vtop->jtrue = 0;
1535 /* called once before asking generators to load VT_CMP to a register */
1536 static void vset_VT_JMP(void)
1538 int op = vtop->cmp_op;
1540 if (vtop->jtrue || vtop->jfalse) {
1541 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1542 int inv = op & (op < 2); /* small optimization */
1543 vseti(VT_JMP+inv, gvtst(inv, 0));
1544 } else {
1545 /* otherwise convert flags (resp. 0/1) to register */
1546 vtop->c.i = op;
1547 if (op < 2) /* doesn't seem to happen */
1548 vtop->r = VT_CONST;
1552 /* Set CPU Flags, doesn't yet jump */
1553 static void gvtst_set(int inv, int t)
1555 int *p;
1557 if (vtop->r != VT_CMP) {
1558 vpushi(0);
1559 gen_op(TOK_NE);
1560 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1561 vset_VT_CMP(vtop->c.i != 0);
1564 p = inv ? &vtop->jfalse : &vtop->jtrue;
1565 *p = gjmp_append(*p, t);
1568 /* Generate value test
1570 * Generate a test for any value (jump, comparison and integers) */
1571 static int gvtst(int inv, int t)
1573 int op, x, u;
1575 gvtst_set(inv, t);
1576 t = vtop->jtrue, u = vtop->jfalse;
1577 if (inv)
1578 x = u, u = t, t = x;
1579 op = vtop->cmp_op;
1581 /* jump to the wanted target */
1582 if (op > 1)
1583 t = gjmp_cond(op ^ inv, t);
1584 else if (op != inv)
1585 t = gjmp(t);
1586 /* resolve complementary jumps to here */
1587 gsym(u);
1589 vtop--;
1590 return t;
1593 /* generate a zero or nonzero test */
1594 static void gen_test_zero(int op)
1596 if (vtop->r == VT_CMP) {
1597 int j;
1598 if (op == TOK_EQ) {
1599 j = vtop->jfalse;
1600 vtop->jfalse = vtop->jtrue;
1601 vtop->jtrue = j;
1602 vtop->cmp_op ^= 1;
1604 } else {
1605 vpushi(0);
1606 gen_op(op);
1610 /* ------------------------------------------------------------------------- */
1611 /* push a symbol value of TYPE */
1612 ST_FUNC void vpushsym(CType *type, Sym *sym)
1614 CValue cval;
1615 cval.i = 0;
1616 vsetc(type, VT_CONST | VT_SYM, &cval);
1617 vtop->sym = sym;
1620 /* Return a static symbol pointing to a section */
1621 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1623 int v;
1624 Sym *sym;
1626 v = anon_sym++;
1627 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1628 sym->type.t |= VT_STATIC;
1629 put_extern_sym(sym, sec, offset, size);
1630 return sym;
1633 /* push a reference to a section offset by adding a dummy symbol */
1634 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1636 vpushsym(type, get_sym_ref(type, sec, offset, size));
1639 /* define a new external reference to a symbol 'v' of type 'u' */
1640 ST_FUNC Sym *external_global_sym(int v, CType *type)
1642 Sym *s;
1644 s = sym_find(v);
1645 if (!s) {
1646 /* push forward reference */
1647 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1648 s->type.ref = type->ref;
1649 } else if (IS_ASM_SYM(s)) {
1650 s->type.t = type->t | (s->type.t & VT_EXTERN);
1651 s->type.ref = type->ref;
1652 update_storage(s);
1654 return s;
1657 /* create an external reference with no specific type similar to asm labels.
1658 This avoids type conflicts if the symbol is used from C too */
1659 ST_FUNC Sym *external_helper_sym(int v)
1661 CType ct = { VT_ASM_FUNC, NULL };
1662 return external_global_sym(v, &ct);
1665 /* push a reference to a helper function (such as memmove) */
1666 ST_FUNC void vpush_helper_func(int v)
1668 vpushsym(&func_old_type, external_helper_sym(v));
1671 /* Merge symbol attributes. */
1672 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1674 if (sa1->aligned && !sa->aligned)
1675 sa->aligned = sa1->aligned;
1676 sa->packed |= sa1->packed;
1677 sa->weak |= sa1->weak;
1678 if (sa1->visibility != STV_DEFAULT) {
1679 int vis = sa->visibility;
1680 if (vis == STV_DEFAULT
1681 || vis > sa1->visibility)
1682 vis = sa1->visibility;
1683 sa->visibility = vis;
1685 sa->dllexport |= sa1->dllexport;
1686 sa->nodecorate |= sa1->nodecorate;
1687 sa->dllimport |= sa1->dllimport;
1690 /* Merge function attributes. */
1691 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1693 if (fa1->func_call && !fa->func_call)
1694 fa->func_call = fa1->func_call;
1695 if (fa1->func_type && !fa->func_type)
1696 fa->func_type = fa1->func_type;
1697 if (fa1->func_args && !fa->func_args)
1698 fa->func_args = fa1->func_args;
1699 if (fa1->func_noreturn)
1700 fa->func_noreturn = 1;
1701 if (fa1->func_ctor)
1702 fa->func_ctor = 1;
1703 if (fa1->func_dtor)
1704 fa->func_dtor = 1;
1707 /* Merge attributes. */
1708 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1710 merge_symattr(&ad->a, &ad1->a);
1711 merge_funcattr(&ad->f, &ad1->f);
1713 if (ad1->section)
1714 ad->section = ad1->section;
1715 if (ad1->alias_target)
1716 ad->alias_target = ad1->alias_target;
1717 if (ad1->asm_label)
1718 ad->asm_label = ad1->asm_label;
1719 if (ad1->attr_mode)
1720 ad->attr_mode = ad1->attr_mode;
1723 /* Merge some type attributes. */
1724 static void patch_type(Sym *sym, CType *type)
1726 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1727 if (!(sym->type.t & VT_EXTERN))
1728 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1729 sym->type.t &= ~VT_EXTERN;
1732 if (IS_ASM_SYM(sym)) {
1733 /* stay static if both are static */
1734 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1735 sym->type.ref = type->ref;
1738 if (!is_compatible_types(&sym->type, type)) {
1739 tcc_error("incompatible types for redefinition of '%s'",
1740 get_tok_str(sym->v, NULL));
1742 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1743 int static_proto = sym->type.t & VT_STATIC;
1744 /* warn if static follows non-static function declaration */
1745 if ((type->t & VT_STATIC) && !static_proto
1746 /* XXX this test for inline shouldn't be here. Until we
1747 implement gnu-inline mode again it silences a warning for
1748 mingw caused by our workarounds. */
1749 && !((type->t | sym->type.t) & VT_INLINE))
1750 tcc_warning("static storage ignored for redefinition of '%s'",
1751 get_tok_str(sym->v, NULL));
1753 /* set 'inline' if both agree or if one has static */
1754 if ((type->t | sym->type.t) & VT_INLINE) {
1755 if (!((type->t ^ sym->type.t) & VT_INLINE)
1756 || ((type->t | sym->type.t) & VT_STATIC))
1757 static_proto |= VT_INLINE;
1760 if (0 == (type->t & VT_EXTERN)) {
1761 struct FuncAttr f = sym->type.ref->f;
1762 /* put complete type, use static from prototype */
1763 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1764 sym->type.ref = type->ref;
1765 merge_funcattr(&sym->type.ref->f, &f);
1766 } else {
1767 sym->type.t &= ~VT_INLINE | static_proto;
1770 if (sym->type.ref->f.func_type == FUNC_OLD
1771 && type->ref->f.func_type != FUNC_OLD) {
1772 sym->type.ref = type->ref;
1775 } else {
1776 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1777 /* set array size if it was omitted in extern declaration */
1778 sym->type.ref->c = type->ref->c;
1780 if ((type->t ^ sym->type.t) & VT_STATIC)
1781 tcc_warning("storage mismatch for redefinition of '%s'",
1782 get_tok_str(sym->v, NULL));
1786 /* Merge some storage attributes. */
1787 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1789 if (type)
1790 patch_type(sym, type);
1792 #ifdef TCC_TARGET_PE
1793 if (sym->a.dllimport != ad->a.dllimport)
1794 tcc_error("incompatible dll linkage for redefinition of '%s'",
1795 get_tok_str(sym->v, NULL));
1796 #endif
1797 merge_symattr(&sym->a, &ad->a);
1798 if (ad->asm_label)
1799 sym->asm_label = ad->asm_label;
1800 update_storage(sym);
1803 /* copy sym to other stack */
1804 static Sym *sym_copy(Sym *s0, Sym **ps)
1806 Sym *s;
1807 s = sym_malloc(), *s = *s0;
1808 s->prev = *ps, *ps = s;
1809 if (s->v < SYM_FIRST_ANOM) {
1810 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1811 s->prev_tok = *ps, *ps = s;
1813 return s;
1816 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1817 static void sym_copy_ref(Sym *s, Sym **ps)
1819 int bt = s->type.t & VT_BTYPE;
1820 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1821 Sym **sp = &s->type.ref;
1822 for (s = *sp, *sp = NULL; s; s = s->next) {
1823 Sym *s2 = sym_copy(s, ps);
1824 sp = &(*sp = s2)->next;
1825 sym_copy_ref(s2, ps);
1830 /* define a new external reference to a symbol 'v' */
1831 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1833 Sym *s;
1835 /* look for global symbol */
1836 s = sym_find(v);
1837 while (s && s->sym_scope)
1838 s = s->prev_tok;
1840 if (!s) {
1841 /* push forward reference */
1842 s = global_identifier_push(v, type->t, 0);
1843 s->r |= r;
1844 s->a = ad->a;
1845 s->asm_label = ad->asm_label;
1846 s->type.ref = type->ref;
1847 /* copy type to the global stack */
1848 if (local_stack)
1849 sym_copy_ref(s, &global_stack);
1850 } else {
1851 patch_storage(s, ad, type);
1853 /* push variables on local_stack if any */
1854 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1855 s = sym_copy(s, &local_stack);
1856 return s;
1859 /* save registers up to (vtop - n) stack entry */
1860 ST_FUNC void save_regs(int n)
1862 SValue *p, *p1;
1863 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1864 save_reg(p->r);
1867 /* save r to the memory stack, and mark it as being free */
1868 ST_FUNC void save_reg(int r)
1870 save_reg_upstack(r, 0);
1873 /* save r to the memory stack, and mark it as being free,
1874 if seen up to (vtop - n) stack entry */
1875 ST_FUNC void save_reg_upstack(int r, int n)
1877 int l, size, align, bt;
1878 SValue *p, *p1, sv;
1880 if ((r &= VT_VALMASK) >= VT_CONST)
1881 return;
1882 if (nocode_wanted)
1883 return;
1884 l = 0;
1885 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1886 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1887 /* must save value on stack if not already done */
1888 if (!l) {
1889 bt = p->type.t & VT_BTYPE;
1890 if (bt == VT_VOID)
1891 continue;
1892 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1893 bt = VT_PTR;
1894 sv.type.t = bt;
1895 size = type_size(&sv.type, &align);
1896 l = get_temp_local_var(size,align);
1897 sv.r = VT_LOCAL | VT_LVAL;
1898 sv.c.i = l;
1899 store(p->r & VT_VALMASK, &sv);
1900 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1901 /* x86 specific: need to pop fp register ST0 if saved */
1902 if (r == TREG_ST0) {
1903 o(0xd8dd); /* fstp %st(0) */
1905 #endif
1906 /* special long long case */
1907 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1908 sv.c.i += PTR_SIZE;
1909 store(p->r2, &sv);
1912 /* mark that stack entry as being saved on the stack */
1913 if (p->r & VT_LVAL) {
1914 /* also clear the bounded flag because the
1915 relocation address of the function was stored in
1916 p->c.i */
1917 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1918 } else {
1919 p->r = VT_LVAL | VT_LOCAL;
1921 p->sym = NULL;
1922 p->r2 = VT_CONST;
1923 p->c.i = l;
1928 #ifdef TCC_TARGET_ARM
1929 /* find a register of class 'rc2' with at most one reference on stack.
1930 * If none, call get_reg(rc) */
1931 ST_FUNC int get_reg_ex(int rc, int rc2)
1933 int r;
1934 SValue *p;
1936 for(r=0;r<NB_REGS;r++) {
1937 if (reg_classes[r] & rc2) {
1938 int n;
1939 n=0;
1940 for(p = vstack; p <= vtop; p++) {
1941 if ((p->r & VT_VALMASK) == r ||
1942 p->r2 == r)
1943 n++;
1945 if (n <= 1)
1946 return r;
1949 return get_reg(rc);
1951 #endif
1953 /* find a free register of class 'rc'. If none, save one register */
1954 ST_FUNC int get_reg(int rc)
1956 int r;
1957 SValue *p;
1959 /* find a free register */
1960 for(r=0;r<NB_REGS;r++) {
1961 if (reg_classes[r] & rc) {
1962 if (nocode_wanted)
1963 return r;
1964 for(p=vstack;p<=vtop;p++) {
1965 if ((p->r & VT_VALMASK) == r ||
1966 p->r2 == r)
1967 goto notfound;
1969 return r;
1971 notfound: ;
1974 /* no register left : free the first one on the stack (VERY
1975 IMPORTANT to start from the bottom to ensure that we don't
1976 spill registers used in gen_opi()) */
1977 for(p=vstack;p<=vtop;p++) {
1978 /* look at second register (if long long) */
1979 r = p->r2;
1980 if (r < VT_CONST && (reg_classes[r] & rc))
1981 goto save_found;
1982 r = p->r & VT_VALMASK;
1983 if (r < VT_CONST && (reg_classes[r] & rc)) {
1984 save_found:
1985 save_reg(r);
1986 return r;
1989 /* Should never come here */
1990 return -1;
1993 /* find a free temporary local variable (return its stack offset) matching the requested size and align. If none, add a new temporary stack variable */
1994 static int get_temp_local_var(int size,int align){
1995 int i;
1996 struct temp_local_variable *temp_var;
1997 int found_var;
1998 SValue *p;
1999 int r;
2000 char free;
2001 char found;
2002 found=0;
2003 for(i=0;i<nb_temp_local_vars;i++){
2004 temp_var=&arr_temp_local_vars[i];
2005 if(temp_var->size<size||align!=temp_var->align){
2006 continue;
2008 /*check if temp_var is free*/
2009 free=1;
2010 for(p=vstack;p<=vtop;p++) {
2011 r=p->r&VT_VALMASK;
2012 if(r==VT_LOCAL||r==VT_LLOCAL){
2013 if(p->c.i==temp_var->location){
2014 free=0;
2015 break;
2019 if(free){
2020 found_var=temp_var->location;
2021 found=1;
2022 break;
2025 if(!found){
2026 loc = (loc - size) & -align;
2027 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
2028 temp_var=&arr_temp_local_vars[i];
2029 temp_var->location=loc;
2030 temp_var->size=size;
2031 temp_var->align=align;
2032 nb_temp_local_vars++;
2034 found_var=loc;
2036 return found_var;
2039 static void clear_temp_local_var_list(){
2040 nb_temp_local_vars=0;
2043 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
2044 if needed */
2045 static void move_reg(int r, int s, int t)
2047 SValue sv;
2049 if (r != s) {
2050 save_reg(r);
2051 sv.type.t = t;
2052 sv.type.ref = NULL;
2053 sv.r = s;
2054 sv.c.i = 0;
2055 load(r, &sv);
2059 /* get address of vtop (vtop MUST BE an lvalue) */
2060 ST_FUNC void gaddrof(void)
2062 vtop->r &= ~VT_LVAL;
2063 /* tricky: if saved lvalue, then we can go back to lvalue */
2064 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
2065 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
2068 #ifdef CONFIG_TCC_BCHECK
2069 /* generate a bounded pointer addition */
2070 static void gen_bounded_ptr_add(void)
2072 int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
2073 if (save) {
2074 vpushv(&vtop[-1]);
2075 vrott(3);
2077 vpush_helper_func(TOK___bound_ptr_add);
2078 vrott(3);
2079 gfunc_call(2);
2080 vtop -= save;
2081 vpushi(0);
2082 /* returned pointer is in REG_IRET */
2083 vtop->r = REG_IRET | VT_BOUNDED;
2084 if (nocode_wanted)
2085 return;
2086 /* relocation offset of the bounding function call point */
2087 vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
2090 /* patch pointer addition in vtop so that pointer dereferencing is
2091 also tested */
2092 static void gen_bounded_ptr_deref(void)
2094 addr_t func;
2095 int size, align;
2096 ElfW_Rel *rel;
2097 Sym *sym;
2099 if (nocode_wanted)
2100 return;
2102 size = type_size(&vtop->type, &align);
2103 switch(size) {
2104 case 1: func = TOK___bound_ptr_indir1; break;
2105 case 2: func = TOK___bound_ptr_indir2; break;
2106 case 4: func = TOK___bound_ptr_indir4; break;
2107 case 8: func = TOK___bound_ptr_indir8; break;
2108 case 12: func = TOK___bound_ptr_indir12; break;
2109 case 16: func = TOK___bound_ptr_indir16; break;
2110 default:
2111 /* may happen with struct member access */
2112 return;
2114 sym = external_helper_sym(func);
2115 if (!sym->c)
2116 put_extern_sym(sym, NULL, 0, 0);
2117 /* patch relocation */
2118 /* XXX: find a better solution ? */
2119 rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
2120 rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
2123 /* generate lvalue bound code */
2124 static void gbound(void)
2126 CType type1;
2128 vtop->r &= ~VT_MUSTBOUND;
2129 /* if lvalue, then use checking code before dereferencing */
2130 if (vtop->r & VT_LVAL) {
2131 /* if not VT_BOUNDED value, then make one */
2132 if (!(vtop->r & VT_BOUNDED)) {
2133 /* must save type because we must set it to int to get pointer */
2134 type1 = vtop->type;
2135 vtop->type.t = VT_PTR;
2136 gaddrof();
2137 vpushi(0);
2138 gen_bounded_ptr_add();
2139 vtop->r |= VT_LVAL;
2140 vtop->type = type1;
2142 /* then check for dereferencing */
2143 gen_bounded_ptr_deref();
2147 /* we need to call __bound_ptr_add before we start to load function
2148 args into registers */
2149 ST_FUNC void gbound_args(int nb_args)
2151 int i, v;
2152 SValue *sv;
2154 for (i = 1; i <= nb_args; ++i)
2155 if (vtop[1 - i].r & VT_MUSTBOUND) {
2156 vrotb(i);
2157 gbound();
2158 vrott(i);
2161 sv = vtop - nb_args;
2162 if (sv->r & VT_SYM) {
2163 v = sv->sym->v;
2164 if (v == TOK_setjmp
2165 || v == TOK__setjmp
2166 #ifndef TCC_TARGET_PE
2167 || v == TOK_sigsetjmp
2168 || v == TOK___sigsetjmp
2169 #endif
2171 vpush_helper_func(TOK___bound_setjmp);
2172 vpushv(sv + 1);
2173 gfunc_call(1);
2174 func_bound_add_epilog = 1;
2176 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2177 if (v == TOK_alloca)
2178 func_bound_add_epilog = 1;
2179 #endif
2180 #if TARGETOS_NetBSD
2181 if (v == TOK_longjmp) /* undo rename to __longjmp14 */
2182 sv->sym->asm_label = TOK___bound_longjmp;
2183 #endif
2187 /* Add bounds for local symbols from S to E (via ->prev) */
2188 static void add_local_bounds(Sym *s, Sym *e)
2190 for (; s != e; s = s->prev) {
2191 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
2192 continue;
2193 /* Add arrays/structs/unions because we always take address */
2194 if ((s->type.t & VT_ARRAY)
2195 || (s->type.t & VT_BTYPE) == VT_STRUCT
2196 || s->a.addrtaken) {
2197 /* add local bound info */
2198 int align, size = type_size(&s->type, &align);
2199 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
2200 2 * sizeof(addr_t));
2201 bounds_ptr[0] = s->c;
2202 bounds_ptr[1] = size;
2206 #endif
2208 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2209 static void pop_local_syms(Sym *b, int keep)
2211 #ifdef CONFIG_TCC_BCHECK
2212 if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
2213 add_local_bounds(local_stack, b);
2214 #endif
2215 if (debug_modes)
2216 tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
2217 sym_pop(&local_stack, b, keep);
2220 static void incr_bf_adr(int o)
2222 vtop->type = char_pointer_type;
2223 gaddrof();
2224 vpushs(o);
2225 gen_op('+');
2226 vtop->type.t = VT_BYTE | VT_UNSIGNED;
2227 vtop->r |= VT_LVAL;
2230 /* single-byte load mode for packed or otherwise unaligned bitfields */
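/* Illustrative note (not part of the original sources): a field such as
       struct __attribute__((packed)) S { unsigned a : 3, b : 13; };
   can straddle byte boundaries, so 'b' is read one byte at a time: each
   byte is shifted and masked into place, OR-ed into an accumulator, and
   the result is finally sign- or zero-extended according to the type. */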
2231 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
2233 int n, o, bits;
2234 save_reg_upstack(vtop->r, 1);
2235 vpush64(type->t & VT_BTYPE, 0); // B X
2236 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2237 do {
2238 vswap(); // X B
2239 incr_bf_adr(o);
2240 vdup(); // X B B
2241 n = 8 - bit_pos;
2242 if (n > bit_size)
2243 n = bit_size;
2244 if (bit_pos)
2245 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
2246 if (n < 8)
2247 vpushi((1 << n) - 1), gen_op('&');
2248 gen_cast(type);
2249 if (bits)
2250 vpushi(bits), gen_op(TOK_SHL);
2251 vrotb(3); // B Y X
2252 gen_op('|'); // B X
2253 bits += n, bit_size -= n, o = 1;
2254 } while (bit_size);
2255 vswap(), vpop();
2256 if (!(type->t & VT_UNSIGNED)) {
2257 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
2258 vpushi(n), gen_op(TOK_SHL);
2259 vpushi(n), gen_op(TOK_SAR);
2263 /* single-byte store mode for packed or otherwise unaligned bitfields */
2264 static void store_packed_bf(int bit_pos, int bit_size)
2266 int bits, n, o, m, c;
2267 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2268 vswap(); // X B
2269 save_reg_upstack(vtop->r, 1);
2270 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2271 do {
2272 incr_bf_adr(o); // X B
2273 vswap(); //B X
2274 c ? vdup() : gv_dup(); // B V X
2275 vrott(3); // X B V
2276 if (bits)
2277 vpushi(bits), gen_op(TOK_SHR);
2278 if (bit_pos)
2279 vpushi(bit_pos), gen_op(TOK_SHL);
2280 n = 8 - bit_pos;
2281 if (n > bit_size)
2282 n = bit_size;
2283 if (n < 8) {
2284 m = ((1 << n) - 1) << bit_pos;
2285 vpushi(m), gen_op('&'); // X B V1
2286 vpushv(vtop-1); // X B V1 B
2287 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
2288 gen_op('&'); // X B V1 B1
2289 gen_op('|'); // X B V2
2291 vdup(), vtop[-1] = vtop[-2]; // X B B V2
2292 vstore(), vpop(); // X B
2293 bits += n, bit_size -= n, bit_pos = 0, o = 1;
2294 } while (bit_size);
2295 vpop(), vpop();
2298 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
2300 int t;
2301 if (0 == sv->type.ref)
2302 return 0;
2303 t = sv->type.ref->auxtype;
2304 if (t != -1 && t != VT_STRUCT) {
2305 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
2306 sv->r |= VT_LVAL;
2308 return t;
2311 /* store vtop in a register belonging to class 'rc'. lvalues are
2312 converted to values. Cannot be used if the value cannot be converted
2313 to a register value (such as structures). */
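/* Illustrative note (not part of the original sources): 'rc' is a
   register-class mask; e.g. gv(RC_INT) forces the value on top of the
   value stack into some integer register, reloading it from memory or
   materializing a constant if necessary, and returns the register number. */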
2314 ST_FUNC int gv(int rc)
2316 int r, r2, r_ok, r2_ok, rc2, bt;
2317 int bit_pos, bit_size, size, align;
2319 /* NOTE: get_reg can modify vstack[] */
2320 if (vtop->type.t & VT_BITFIELD) {
2321 CType type;
2323 bit_pos = BIT_POS(vtop->type.t);
2324 bit_size = BIT_SIZE(vtop->type.t);
2325 /* remove bit field info to avoid loops */
2326 vtop->type.t &= ~VT_STRUCT_MASK;
2328 type.ref = NULL;
2329 type.t = vtop->type.t & VT_UNSIGNED;
2330 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
2331 type.t |= VT_UNSIGNED;
2333 r = adjust_bf(vtop, bit_pos, bit_size);
2335 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2336 type.t |= VT_LLONG;
2337 else
2338 type.t |= VT_INT;
2340 if (r == VT_STRUCT) {
2341 load_packed_bf(&type, bit_pos, bit_size);
2342 } else {
2343 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
2344 /* cast to int to propagate signedness in following ops */
2345 gen_cast(&type);
2346 /* generate shifts */
2347 vpushi(bits - (bit_pos + bit_size));
2348 gen_op(TOK_SHL);
2349 vpushi(bits - bit_size);
2350 /* NOTE: transformed to SHR if unsigned */
2351 gen_op(TOK_SAR);
2353 r = gv(rc);
2354 } else {
2355 if (is_float(vtop->type.t) &&
2356 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2357 /* CPUs usually cannot use float constants, so we store them
2358 generically in data segment */
2359 init_params p = { rodata_section };
2360 unsigned long offset;
2361 size = type_size(&vtop->type, &align);
2362 if (NODATA_WANTED)
2363 size = 0, align = 1;
2364 offset = section_add(p.sec, size, align);
2365 vpush_ref(&vtop->type, p.sec, offset, size);
2366 vswap();
2367 init_putv(&p, &vtop->type, offset);
2368 vtop->r |= VT_LVAL;
2370 #ifdef CONFIG_TCC_BCHECK
2371 if (vtop->r & VT_MUSTBOUND)
2372 gbound();
2373 #endif
2375 bt = vtop->type.t & VT_BTYPE;
2377 #ifdef TCC_TARGET_RISCV64
2378 /* XXX mega hack */
2379 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
2380 rc = RC_INT;
2381 #endif
2382 rc2 = RC2_TYPE(bt, rc);
2384 /* need to reload if:
2385 - constant
2386 - lvalue (need to dereference pointer)
2387 - already a register, but not in the right class */
2388 r = vtop->r & VT_VALMASK;
2389 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
2390 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
2392 if (!r_ok || !r2_ok) {
2393 if (!r_ok)
2394 r = get_reg(rc);
2395 if (rc2) {
2396 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
2397 int original_type = vtop->type.t;
2399 /* two register type load :
2400 expand to two words temporarily */
2401 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2402 /* load constant */
2403 unsigned long long ll = vtop->c.i;
2404 vtop->c.i = ll; /* first word */
2405 load(r, vtop);
2406 vtop->r = r; /* save register value */
2407 vpushi(ll >> 32); /* second word */
2408 } else if (vtop->r & VT_LVAL) {
2409 /* We do not want to modify the long long pointer here.
2410 So we save any other instances down the stack */
2411 save_reg_upstack(vtop->r, 1);
2412 /* load from memory */
2413 vtop->type.t = load_type;
2414 load(r, vtop);
2415 vdup();
2416 vtop[-1].r = r; /* save register value */
2417 /* increment pointer to get second word */
2418 vtop->type.t = VT_PTRDIFF_T;
2419 gaddrof();
2420 vpushs(PTR_SIZE);
2421 gen_op('+');
2422 vtop->r |= VT_LVAL;
2423 vtop->type.t = load_type;
2424 } else {
2425 /* move registers */
2426 if (!r_ok)
2427 load(r, vtop);
2428 if (r2_ok && vtop->r2 < VT_CONST)
2429 goto done;
2430 vdup();
2431 vtop[-1].r = r; /* save register value */
2432 vtop->r = vtop[-1].r2;
2434 /* Allocate second register. Here we rely on the fact that
2435 get_reg() tries first to free r2 of an SValue. */
2436 r2 = get_reg(rc2);
2437 load(r2, vtop);
2438 vpop();
2439 /* write second register */
2440 vtop->r2 = r2;
2441 done:
2442 vtop->type.t = original_type;
2443 } else {
2444 if (vtop->r == VT_CMP)
2445 vset_VT_JMP();
2446 /* one register type load */
2447 load(r, vtop);
2450 vtop->r = r;
2451 #ifdef TCC_TARGET_C67
2452 /* uses register pairs for doubles */
2453 if (bt == VT_DOUBLE)
2454 vtop->r2 = r+1;
2455 #endif
2457 return r;
2460 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2461 ST_FUNC void gv2(int rc1, int rc2)
2463 /* generate more generic register first. But VT_JMP or VT_CMP
2464 values must be generated first in all cases to avoid possible
2465 reload errors */
2466 if (vtop->r != VT_CMP && rc1 <= rc2) {
2467 vswap();
2468 gv(rc1);
2469 vswap();
2470 gv(rc2);
2471 /* test if reload is needed for first register */
2472 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
2473 vswap();
2474 gv(rc1);
2475 vswap();
2477 } else {
2478 gv(rc2);
2479 vswap();
2480 gv(rc1);
2481 vswap();
2482 /* test if reload is needed for first register */
2483 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
2484 gv(rc2);
2489 #if PTR_SIZE == 4
2490 /* expand a 64-bit value on the stack into two ints */
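/* Illustrative note (not part of the original sources): on a 32-bit
   target a constant like 0x1122334455667788 ends up with vtop[-1]
   holding the low word (0x55667788) and vtop[0] the high word
   (0x11223344), both retyped as (possibly unsigned) VT_INT. */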
2491 ST_FUNC void lexpand(void)
2493 int u, v;
2494 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
2495 v = vtop->r & (VT_VALMASK | VT_LVAL);
2496 if (v == VT_CONST) {
2497 vdup();
2498 vtop[0].c.i >>= 32;
2499 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
2500 vdup();
2501 vtop[0].c.i += 4;
2502 } else {
2503 gv(RC_INT);
2504 vdup();
2505 vtop[0].r = vtop[-1].r2;
2506 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
2508 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
2510 #endif
2512 #if PTR_SIZE == 4
2513 /* build a long long from two ints */
2514 static void lbuild(int t)
2516 gv2(RC_INT, RC_INT);
2517 vtop[-1].r2 = vtop[0].r;
2518 vtop[-1].type.t = t;
2519 vpop();
2521 #endif
2523 /* convert stack entry to register and duplicate its value in another
2524 register */
2525 static void gv_dup(void)
2527 int t, rc, r;
2529 t = vtop->type.t;
2530 #if PTR_SIZE == 4
2531 if ((t & VT_BTYPE) == VT_LLONG) {
2532 if (t & VT_BITFIELD) {
2533 gv(RC_INT);
2534 t = vtop->type.t;
2536 lexpand();
2537 gv_dup();
2538 vswap();
2539 vrotb(3);
2540 gv_dup();
2541 vrotb(4);
2542 /* stack: H L L1 H1 */
2543 lbuild(t);
2544 vrotb(3);
2545 vrotb(3);
2546 vswap();
2547 lbuild(t);
2548 vswap();
2549 return;
2551 #endif
2552 /* duplicate value */
2553 rc = RC_TYPE(t);
2554 gv(rc);
2555 r = get_reg(rc);
2556 vdup();
2557 load(r, vtop);
2558 vtop->r = r;
2561 #if PTR_SIZE == 4
2562 /* generate CPU-independent (unsigned) long long operations */
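/* Illustrative note (not part of the original sources): for '*' the
   64x64 product is rebuilt from 32-bit halves, roughly
       (H1:L1) * (H2:L2)  ->  UMULL(L1, L2) + ((H1*L2 + H2*L1) << 32),
   i.e. the two cross products are added into the high half before
   lbuild() reassembles the 64-bit result. */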
2563 static void gen_opl(int op)
2565 int t, a, b, op1, c, i;
2566 int func;
2567 unsigned short reg_iret = REG_IRET;
2568 unsigned short reg_lret = REG_IRE2;
2569 SValue tmp;
2571 switch(op) {
2572 case '/':
2573 case TOK_PDIV:
2574 func = TOK___divdi3;
2575 goto gen_func;
2576 case TOK_UDIV:
2577 func = TOK___udivdi3;
2578 goto gen_func;
2579 case '%':
2580 func = TOK___moddi3;
2581 goto gen_mod_func;
2582 case TOK_UMOD:
2583 func = TOK___umoddi3;
2584 gen_mod_func:
2585 #ifdef TCC_ARM_EABI
2586 reg_iret = TREG_R2;
2587 reg_lret = TREG_R3;
2588 #endif
2589 gen_func:
2590 /* call generic long long function */
2591 vpush_helper_func(func);
2592 vrott(3);
2593 gfunc_call(2);
2594 vpushi(0);
2595 vtop->r = reg_iret;
2596 vtop->r2 = reg_lret;
2597 break;
2598 case '^':
2599 case '&':
2600 case '|':
2601 case '*':
2602 case '+':
2603 case '-':
2604 //pv("gen_opl A",0,2);
2605 t = vtop->type.t;
2606 vswap();
2607 lexpand();
2608 vrotb(3);
2609 lexpand();
2610 /* stack: L1 H1 L2 H2 */
2611 tmp = vtop[0];
2612 vtop[0] = vtop[-3];
2613 vtop[-3] = tmp;
2614 tmp = vtop[-2];
2615 vtop[-2] = vtop[-3];
2616 vtop[-3] = tmp;
2617 vswap();
2618 /* stack: H1 H2 L1 L2 */
2619 //pv("gen_opl B",0,4);
2620 if (op == '*') {
2621 vpushv(vtop - 1);
2622 vpushv(vtop - 1);
2623 gen_op(TOK_UMULL);
2624 lexpand();
2625 /* stack: H1 H2 L1 L2 ML MH */
2626 for(i=0;i<4;i++)
2627 vrotb(6);
2628 /* stack: ML MH H1 H2 L1 L2 */
2629 tmp = vtop[0];
2630 vtop[0] = vtop[-2];
2631 vtop[-2] = tmp;
2632 /* stack: ML MH H1 L2 H2 L1 */
2633 gen_op('*');
2634 vrotb(3);
2635 vrotb(3);
2636 gen_op('*');
2637 /* stack: ML MH M1 M2 */
2638 gen_op('+');
2639 gen_op('+');
2640 } else if (op == '+' || op == '-') {
2641 /* XXX: add non-carry method too (for MIPS or alpha) */
2642 if (op == '+')
2643 op1 = TOK_ADDC1;
2644 else
2645 op1 = TOK_SUBC1;
2646 gen_op(op1);
2647 /* stack: H1 H2 (L1 op L2) */
2648 vrotb(3);
2649 vrotb(3);
2650 gen_op(op1 + 1); /* TOK_xxxC2 */
2651 } else {
2652 gen_op(op);
2653 /* stack: H1 H2 (L1 op L2) */
2654 vrotb(3);
2655 vrotb(3);
2656 /* stack: (L1 op L2) H1 H2 */
2657 gen_op(op);
2658 /* stack: (L1 op L2) (H1 op H2) */
2660 /* stack: L H */
2661 lbuild(t);
2662 break;
2663 case TOK_SAR:
2664 case TOK_SHR:
2665 case TOK_SHL:
2666 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2667 t = vtop[-1].type.t;
2668 vswap();
2669 lexpand();
2670 vrotb(3);
2671 /* stack: L H shift */
2672 c = (int)vtop->c.i;
2673 /* constant: simpler */
2674 /* NOTE: all comments are for SHL. the other cases are
2675 done by swapping words */
2676 vpop();
2677 if (op != TOK_SHL)
2678 vswap();
2679 if (c >= 32) {
2680 /* stack: L H */
2681 vpop();
2682 if (c > 32) {
2683 vpushi(c - 32);
2684 gen_op(op);
2686 if (op != TOK_SAR) {
2687 vpushi(0);
2688 } else {
2689 gv_dup();
2690 vpushi(31);
2691 gen_op(TOK_SAR);
2693 vswap();
2694 } else {
2695 vswap();
2696 gv_dup();
2697 /* stack: H L L */
2698 vpushi(c);
2699 gen_op(op);
2700 vswap();
2701 vpushi(32 - c);
2702 if (op == TOK_SHL)
2703 gen_op(TOK_SHR);
2704 else
2705 gen_op(TOK_SHL);
2706 vrotb(3);
2707 /* stack: L L H */
2708 vpushi(c);
2709 if (op == TOK_SHL)
2710 gen_op(TOK_SHL);
2711 else
2712 gen_op(TOK_SHR);
2713 gen_op('|');
2715 if (op != TOK_SHL)
2716 vswap();
2717 lbuild(t);
2718 } else {
2719 /* XXX: should provide a faster fallback on x86 ? */
2720 switch(op) {
2721 case TOK_SAR:
2722 func = TOK___ashrdi3;
2723 goto gen_func;
2724 case TOK_SHR:
2725 func = TOK___lshrdi3;
2726 goto gen_func;
2727 case TOK_SHL:
2728 func = TOK___ashldi3;
2729 goto gen_func;
2732 break;
2733 default:
2734 /* compare operations */
2735 t = vtop->type.t;
2736 vswap();
2737 lexpand();
2738 vrotb(3);
2739 lexpand();
2740 /* stack: L1 H1 L2 H2 */
2741 tmp = vtop[-1];
2742 vtop[-1] = vtop[-2];
2743 vtop[-2] = tmp;
2744 /* stack: L1 L2 H1 H2 */
2745 save_regs(4);
2746 /* compare high */
2747 op1 = op;
2748 /* when values are equal, we need to compare low words. since
2749 the jump is inverted, we invert the test too. */
2750 if (op1 == TOK_LT)
2751 op1 = TOK_LE;
2752 else if (op1 == TOK_GT)
2753 op1 = TOK_GE;
2754 else if (op1 == TOK_ULT)
2755 op1 = TOK_ULE;
2756 else if (op1 == TOK_UGT)
2757 op1 = TOK_UGE;
2758 a = 0;
2759 b = 0;
2760 gen_op(op1);
2761 if (op == TOK_NE) {
2762 b = gvtst(0, 0);
2763 } else {
2764 a = gvtst(1, 0);
2765 if (op != TOK_EQ) {
2766 /* generate non equal test */
2767 vpushi(0);
2768 vset_VT_CMP(TOK_NE);
2769 b = gvtst(0, 0);
2772 /* compare low. Always unsigned */
2773 op1 = op;
2774 if (op1 == TOK_LT)
2775 op1 = TOK_ULT;
2776 else if (op1 == TOK_LE)
2777 op1 = TOK_ULE;
2778 else if (op1 == TOK_GT)
2779 op1 = TOK_UGT;
2780 else if (op1 == TOK_GE)
2781 op1 = TOK_UGE;
2782 gen_op(op1);
2783 #if 0//def TCC_TARGET_I386
2784 if (op == TOK_NE) { gsym(b); break; }
2785 if (op == TOK_EQ) { gsym(a); break; }
2786 #endif
2787 gvtst_set(1, a);
2788 gvtst_set(0, b);
2789 break;
2792 #endif
2794 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2796 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2797 return (a ^ b) >> 63 ? -x : x;
2800 static int gen_opic_lt(uint64_t a, uint64_t b)
2802 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
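/* Illustrative note (not part of the original sources): the two helpers
   above emulate signed 64-bit arithmetic on unsigned operands:
   gen_opic_sdiv(-6, 4) divides the magnitudes (6/4 == 1) and restores
   the sign (-1), matching C's truncation toward zero; gen_opic_lt flips
   the sign bit of both operands so an unsigned '<' yields the signed
   ordering (e.g. -1 < 0 becomes 0x7fff... < 0x8000...). */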
2805 /* handle integer constant folding and various
2806 machine-independent optimizations */
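/* Illustrative note (not part of the original sources): besides folding
   fully-constant operands (2 + 3 -> 5), the code below also performs
   strength reduction and NOP elimination when only one operand is
   constant, e.g. x * 8 -> x << 3, x / 1 -> x, x + 0 -> x, x & 0 -> 0. */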
2807 static void gen_opic(int op)
2809 SValue *v1 = vtop - 1;
2810 SValue *v2 = vtop;
2811 int t1 = v1->type.t & VT_BTYPE;
2812 int t2 = v2->type.t & VT_BTYPE;
2813 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2814 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2815 uint64_t l1 = c1 ? v1->c.i : 0;
2816 uint64_t l2 = c2 ? v2->c.i : 0;
2817 int shm = (t1 == VT_LLONG) ? 63 : 31;
2819 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2820 l1 = ((uint32_t)l1 |
2821 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2822 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2823 l2 = ((uint32_t)l2 |
2824 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2826 if (c1 && c2) {
2827 switch(op) {
2828 case '+': l1 += l2; break;
2829 case '-': l1 -= l2; break;
2830 case '&': l1 &= l2; break;
2831 case '^': l1 ^= l2; break;
2832 case '|': l1 |= l2; break;
2833 case '*': l1 *= l2; break;
2835 case TOK_PDIV:
2836 case '/':
2837 case '%':
2838 case TOK_UDIV:
2839 case TOK_UMOD:
2840 /* if division by zero, generate explicit division */
2841 if (l2 == 0) {
2842 if (const_wanted && !(nocode_wanted & unevalmask))
2843 tcc_error("division by zero in constant");
2844 goto general_case;
2846 switch(op) {
2847 default: l1 = gen_opic_sdiv(l1, l2); break;
2848 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2849 case TOK_UDIV: l1 = l1 / l2; break;
2850 case TOK_UMOD: l1 = l1 % l2; break;
2852 break;
2853 case TOK_SHL: l1 <<= (l2 & shm); break;
2854 case TOK_SHR: l1 >>= (l2 & shm); break;
2855 case TOK_SAR:
2856 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2857 break;
2858 /* tests */
2859 case TOK_ULT: l1 = l1 < l2; break;
2860 case TOK_UGE: l1 = l1 >= l2; break;
2861 case TOK_EQ: l1 = l1 == l2; break;
2862 case TOK_NE: l1 = l1 != l2; break;
2863 case TOK_ULE: l1 = l1 <= l2; break;
2864 case TOK_UGT: l1 = l1 > l2; break;
2865 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2866 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2867 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2868 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2869 /* logical */
2870 case TOK_LAND: l1 = l1 && l2; break;
2871 case TOK_LOR: l1 = l1 || l2; break;
2872 default:
2873 goto general_case;
2875 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2876 l1 = ((uint32_t)l1 |
2877 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2878 v1->c.i = l1;
2879 vtop--;
2880 } else {
2881 /* if commutative ops, put c2 as constant */
2882 if (c1 && (op == '+' || op == '&' || op == '^' ||
2883 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2884 vswap();
2885 c2 = c1; //c = c1, c1 = c2, c2 = c;
2886 l2 = l1; //l = l1, l1 = l2, l2 = l;
2888 if (!const_wanted &&
2889 c1 && ((l1 == 0 &&
2890 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2891 (l1 == -1 && op == TOK_SAR))) {
2892 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2893 vtop--;
2894 } else if (!const_wanted &&
2895 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2896 (op == '|' &&
2897 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2898 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2899 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2900 if (l2 == 1)
2901 vtop->c.i = 0;
2902 vswap();
2903 vtop--;
2904 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2905 op == TOK_PDIV) &&
2906 l2 == 1) ||
2907 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2908 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2909 l2 == 0) ||
2910 (op == '&' &&
2911 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2912 /* filter out NOP operations like x*1, x-0, x&-1... */
2913 vtop--;
2914 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2915 /* try to use shifts instead of muls or divs */
2916 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2917 int n = -1;
2918 while (l2) {
2919 l2 >>= 1;
2920 n++;
2922 vtop->c.i = n;
2923 if (op == '*')
2924 op = TOK_SHL;
2925 else if (op == TOK_PDIV)
2926 op = TOK_SAR;
2927 else
2928 op = TOK_SHR;
2930 goto general_case;
2931 } else if (c2 && (op == '+' || op == '-') &&
2932 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2933 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2934 /* symbol + constant case */
2935 if (op == '-')
2936 l2 = -l2;
2937 l2 += vtop[-1].c.i;
2938 /* The backends can't always deal with addends to symbols
2939 larger than +-1<<31. Don't construct such. */
2940 if ((int)l2 != l2)
2941 goto general_case;
2942 vtop--;
2943 vtop->c.i = l2;
2944 } else {
2945 general_case:
2946 /* call low level op generator */
2947 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2948 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2949 gen_opl(op);
2950 else
2951 gen_opi(op);
2956 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2957 # define gen_negf gen_opf
2958 #elif defined TCC_TARGET_ARM
2959 void gen_negf(int op)
2961 /* arm will detect 0-x and replace by vneg */
2962 vpushi(0), vswap(), gen_op('-');
2964 #else
2965 /* XXX: implement in gen_opf() for other backends too */
2966 void gen_negf(int op)
2968 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2969 subtract(-0, x), but with them it's really a sign flip
2970 operation. We implement this with bit manipulation and have
2971 to do some type reinterpretation for this, which TCC can do
2972 only via memory. */
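/* Illustrative note (not part of the original sources): the value is
   spilled to memory and the XOR with 0x80 below flips the top bit of
   the highest-addressed byte of the object (the IEEE sign bit on a
   little-endian target), so for example negating -0.0 yields +0.0 and
   negating a NaN keeps it a NaN with the opposite sign. */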
2974 int align, size, bt;
2976 size = type_size(&vtop->type, &align);
2977 bt = vtop->type.t & VT_BTYPE;
2978 save_reg(gv(RC_TYPE(bt)));
2979 vdup();
2980 incr_bf_adr(size - 1);
2981 vdup();
2982 vpushi(0x80); /* flip sign */
2983 gen_op('^');
2984 vstore();
2985 vpop();
2987 #endif
2989 /* generate a floating point operation with constant propagation */
2990 static void gen_opif(int op)
2992 int c1, c2;
2993 SValue *v1, *v2;
2994 #if defined _MSC_VER && defined __x86_64__
2995 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2996 volatile
2997 #endif
2998 long double f1, f2;
3000 v1 = vtop - 1;
3001 v2 = vtop;
3002 if (op == TOK_NEG)
3003 v1 = v2;
3005 /* currently, we cannot do computations with forward symbols */
3006 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3007 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3008 if (c1 && c2) {
3009 if (v1->type.t == VT_FLOAT) {
3010 f1 = v1->c.f;
3011 f2 = v2->c.f;
3012 } else if (v1->type.t == VT_DOUBLE) {
3013 f1 = v1->c.d;
3014 f2 = v2->c.d;
3015 } else {
3016 f1 = v1->c.ld;
3017 f2 = v2->c.ld;
3019 /* NOTE: we only do constant propagation if finite number (not
3020 NaN or infinity) (ANSI spec) */
3021 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
3022 goto general_case;
3023 switch(op) {
3024 case '+': f1 += f2; break;
3025 case '-': f1 -= f2; break;
3026 case '*': f1 *= f2; break;
3027 case '/':
3028 if (f2 == 0.0) {
3029 union { float f; unsigned u; } x1, x2, y;
3030 /* If not in initializer we need to potentially generate
3031 FP exceptions at runtime, otherwise we want to fold. */
3032 if (!const_wanted)
3033 goto general_case;
3034 /* the run-time result of 0.0/0.0 on x87, also of other compilers
3035 when used to compile the f1 /= f2 below, would be -nan */
3036 x1.f = f1, x2.f = f2;
3037 if (f1 == 0.0)
3038 y.u = 0x7fc00000; /* nan */
3039 else
3040 y.u = 0x7f800000; /* infinity */
3041 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
3042 f1 = y.f;
3043 break;
3045 f1 /= f2;
3046 break;
3047 case TOK_NEG:
3048 f1 = -f1;
3049 goto unary_result;
3050 /* XXX: also handles tests ? */
3051 default:
3052 goto general_case;
3054 vtop--;
3055 unary_result:
3056 /* XXX: overflow test ? */
3057 if (v1->type.t == VT_FLOAT) {
3058 v1->c.f = f1;
3059 } else if (v1->type.t == VT_DOUBLE) {
3060 v1->c.d = f1;
3061 } else {
3062 v1->c.ld = f1;
3064 } else {
3065 general_case:
3066 if (op == TOK_NEG) {
3067 gen_negf(op);
3068 } else {
3069 gen_opf(op);
3074 /* print a type. If 'varstr' is not NULL, then the variable is also
3075 printed in the type */
3076 /* XXX: union */
3077 /* XXX: add array and function pointers */
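/* Illustrative note (not part of the original sources): for a
   declaration like 'unsigned int (*fp)(char, ...)' this is expected to
   print roughly "unsigned int (*fp)(char, ...)", recursing through the
   pointer, function and parameter cases below; it is used for
   diagnostics such as the type-incompatibility messages further down. */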
3078 static void type_to_str(char *buf, int buf_size,
3079 CType *type, const char *varstr)
3081 int bt, v, t;
3082 Sym *s, *sa;
3083 char buf1[256];
3084 const char *tstr;
3086 t = type->t;
3087 bt = t & VT_BTYPE;
3088 buf[0] = '\0';
3090 if (t & VT_EXTERN)
3091 pstrcat(buf, buf_size, "extern ");
3092 if (t & VT_STATIC)
3093 pstrcat(buf, buf_size, "static ");
3094 if (t & VT_TYPEDEF)
3095 pstrcat(buf, buf_size, "typedef ");
3096 if (t & VT_INLINE)
3097 pstrcat(buf, buf_size, "inline ");
3098 if (bt != VT_PTR) {
3099 if (t & VT_VOLATILE)
3100 pstrcat(buf, buf_size, "volatile ");
3101 if (t & VT_CONSTANT)
3102 pstrcat(buf, buf_size, "const ");
3104 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3105 || ((t & VT_UNSIGNED)
3106 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3107 && !IS_ENUM(t)
3109 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3111 buf_size -= strlen(buf);
3112 buf += strlen(buf);
3114 switch(bt) {
3115 case VT_VOID:
3116 tstr = "void";
3117 goto add_tstr;
3118 case VT_BOOL:
3119 tstr = "_Bool";
3120 goto add_tstr;
3121 case VT_BYTE:
3122 tstr = "char";
3123 goto add_tstr;
3124 case VT_SHORT:
3125 tstr = "short";
3126 goto add_tstr;
3127 case VT_INT:
3128 tstr = "int";
3129 goto maybe_long;
3130 case VT_LLONG:
3131 tstr = "long long";
3132 maybe_long:
3133 if (t & VT_LONG)
3134 tstr = "long";
3135 if (!IS_ENUM(t))
3136 goto add_tstr;
3137 tstr = "enum ";
3138 goto tstruct;
3139 case VT_FLOAT:
3140 tstr = "float";
3141 goto add_tstr;
3142 case VT_DOUBLE:
3143 tstr = "double";
3144 if (!(t & VT_LONG))
3145 goto add_tstr;
3146 case VT_LDOUBLE:
3147 tstr = "long double";
3148 add_tstr:
3149 pstrcat(buf, buf_size, tstr);
3150 break;
3151 case VT_STRUCT:
3152 tstr = "struct ";
3153 if (IS_UNION(t))
3154 tstr = "union ";
3155 tstruct:
3156 pstrcat(buf, buf_size, tstr);
3157 v = type->ref->v & ~SYM_STRUCT;
3158 if (v >= SYM_FIRST_ANOM)
3159 pstrcat(buf, buf_size, "<anonymous>");
3160 else
3161 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3162 break;
3163 case VT_FUNC:
3164 s = type->ref;
3165 buf1[0]=0;
3166 if (varstr && '*' == *varstr) {
3167 pstrcat(buf1, sizeof(buf1), "(");
3168 pstrcat(buf1, sizeof(buf1), varstr);
3169 pstrcat(buf1, sizeof(buf1), ")");
3171 pstrcat(buf1, buf_size, "(");
3172 sa = s->next;
3173 while (sa != NULL) {
3174 char buf2[256];
3175 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3176 pstrcat(buf1, sizeof(buf1), buf2);
3177 sa = sa->next;
3178 if (sa)
3179 pstrcat(buf1, sizeof(buf1), ", ");
3181 if (s->f.func_type == FUNC_ELLIPSIS)
3182 pstrcat(buf1, sizeof(buf1), ", ...");
3183 pstrcat(buf1, sizeof(buf1), ")");
3184 type_to_str(buf, buf_size, &s->type, buf1);
3185 goto no_var;
3186 case VT_PTR:
3187 s = type->ref;
3188 if (t & VT_ARRAY) {
3189 if (varstr && '*' == *varstr)
3190 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3191 else
3192 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3193 type_to_str(buf, buf_size, &s->type, buf1);
3194 goto no_var;
3196 pstrcpy(buf1, sizeof(buf1), "*");
3197 if (t & VT_CONSTANT)
3198 pstrcat(buf1, buf_size, "const ");
3199 if (t & VT_VOLATILE)
3200 pstrcat(buf1, buf_size, "volatile ");
3201 if (varstr)
3202 pstrcat(buf1, sizeof(buf1), varstr);
3203 type_to_str(buf, buf_size, &s->type, buf1);
3204 goto no_var;
3206 if (varstr) {
3207 pstrcat(buf, buf_size, " ");
3208 pstrcat(buf, buf_size, varstr);
3210 no_var: ;
3213 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
3215 char buf1[256], buf2[256];
3216 type_to_str(buf1, sizeof(buf1), st, NULL);
3217 type_to_str(buf2, sizeof(buf2), dt, NULL);
3218 tcc_error(fmt, buf1, buf2);
3221 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
3223 char buf1[256], buf2[256];
3224 type_to_str(buf1, sizeof(buf1), st, NULL);
3225 type_to_str(buf2, sizeof(buf2), dt, NULL);
3226 tcc_warning(fmt, buf1, buf2);
3229 static int pointed_size(CType *type)
3231 int align;
3232 return type_size(pointed_type(type), &align);
3235 static void vla_runtime_pointed_size(CType *type)
3237 int align;
3238 vla_runtime_type_size(pointed_type(type), &align);
3241 static inline int is_null_pointer(SValue *p)
3243 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
3244 return 0;
3245 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
3246 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
3247 ((p->type.t & VT_BTYPE) == VT_PTR &&
3248 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
3249 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
3250 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
3254 /* compare function types. OLD functions match any new functions */
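/* Illustrative note (not part of the original sources): an old-style
   declaration is compatible with any prototype that has the same return
   type and calling convention, e.g.
       int f();        and        int f(int, char *);
   compare as equal here, while 'int f(int)' vs 'int f(long)' do not. */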
3255 static int is_compatible_func(CType *type1, CType *type2)
3257 Sym *s1, *s2;
3259 s1 = type1->ref;
3260 s2 = type2->ref;
3261 if (s1->f.func_call != s2->f.func_call)
3262 return 0;
3263 if (s1->f.func_type != s2->f.func_type
3264 && s1->f.func_type != FUNC_OLD
3265 && s2->f.func_type != FUNC_OLD)
3266 return 0;
3267 for (;;) {
3268 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3269 return 0;
3270 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
3271 return 1;
3272 s1 = s1->next;
3273 s2 = s2->next;
3274 if (!s1)
3275 return !s2;
3276 if (!s2)
3277 return 0;
3281 /* return true if type1 and type2 are the same. If unqualified is
3282 true, qualifiers on the types are ignored. */
3284 static int compare_types(CType *type1, CType *type2, int unqualified)
3286 int bt1, t1, t2;
3288 t1 = type1->t & VT_TYPE;
3289 t2 = type2->t & VT_TYPE;
3290 if (unqualified) {
3291 /* strip qualifiers before comparing */
3292 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3293 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3296 /* Default vs. explicit signedness only matters for char */
3297 if ((t1 & VT_BTYPE) != VT_BYTE) {
3298 t1 &= ~VT_DEFSIGN;
3299 t2 &= ~VT_DEFSIGN;
3301 /* XXX: bitfields ? */
3302 if (t1 != t2)
3303 return 0;
3305 if ((t1 & VT_ARRAY)
3306 && !(type1->ref->c < 0
3307 || type2->ref->c < 0
3308 || type1->ref->c == type2->ref->c))
3309 return 0;
3311 /* test more complicated cases */
3312 bt1 = t1 & VT_BTYPE;
3313 if (bt1 == VT_PTR) {
3314 type1 = pointed_type(type1);
3315 type2 = pointed_type(type2);
3316 return is_compatible_types(type1, type2);
3317 } else if (bt1 == VT_STRUCT) {
3318 return (type1->ref == type2->ref);
3319 } else if (bt1 == VT_FUNC) {
3320 return is_compatible_func(type1, type2);
3321 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
3322 /* If both are enums then they must be the same, if only one is then
3323 t1 and t2 must be equal, which was checked above already. */
3324 return type1->ref == type2->ref;
3325 } else {
3326 return 1;
3330 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3331 type is stored in DEST if non-null (except for pointer plus/minus). */
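/* Illustrative note (not part of the original sources): this implements
   the usual arithmetic conversions and the C99 6.5.15 rules for '?:',
   e.g. int + unsigned int -> unsigned int, int + long long -> long long,
   and 'cond ? (void *)p : (int *)q' yields void *. */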
3332 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
3334 CType *type1 = &op1->type, *type2 = &op2->type, type;
3335 int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
3336 int ret = 1;
3338 type.t = VT_VOID;
3339 type.ref = NULL;
3341 if (bt1 == VT_VOID || bt2 == VT_VOID) {
3342 ret = op == '?' ? 1 : 0;
3343 /* NOTE: as an extension, we accept void on only one side */
3344 type.t = VT_VOID;
3345 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3346 if (op == '+') ; /* Handled in caller */
3347 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3348 /* If one is a null ptr constant the result type is the other. */
3349 else if (is_null_pointer (op2)) type = *type1;
3350 else if (is_null_pointer (op1)) type = *type2;
3351 else if (bt1 != bt2) {
3352 /* accept comparison or cond-expr between pointer and integer
3353 with a warning */
3354 if ((op == '?' || TOK_ISCOND(op))
3355 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
3356 tcc_warning("pointer/integer mismatch in %s",
3357 op == '?' ? "conditional expression" : "comparison");
3358 else if (op != '-' || !is_integer_btype(bt2))
3359 ret = 0;
3360 type = *(bt1 == VT_PTR ? type1 : type2);
3361 } else {
3362 CType *pt1 = pointed_type(type1);
3363 CType *pt2 = pointed_type(type2);
3364 int pbt1 = pt1->t & VT_BTYPE;
3365 int pbt2 = pt2->t & VT_BTYPE;
3366 int newquals, copied = 0;
3367 if (pbt1 != VT_VOID && pbt2 != VT_VOID
3368 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
3369 if (op != '?' && !TOK_ISCOND(op))
3370 ret = 0;
3371 else
3372 type_incompatibility_warning(type1, type2,
3373 op == '?'
3374 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3375 : "pointer type mismatch in comparison('%s' and '%s')");
3377 if (op == '?') {
3378 /* pointers to void get preferred, otherwise the
3379 pointed to types minus qualifs should be compatible */
3380 type = *((pbt1 == VT_VOID) ? type1 : type2);
3381 /* combine qualifs */
3382 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
3383 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
3384 & newquals)
3386 /* copy the pointer target symbol */
3387 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3388 0, type.ref->c);
3389 copied = 1;
3390 pointed_type(&type)->t |= newquals;
3392 /* pointers to incomplete arrays get converted to
3393 pointers to completed ones if possible */
3394 if (pt1->t & VT_ARRAY
3395 && pt2->t & VT_ARRAY
3396 && pointed_type(&type)->ref->c < 0
3397 && (pt1->ref->c > 0 || pt2->ref->c > 0))
3399 if (!copied)
3400 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3401 0, type.ref->c);
3402 pointed_type(&type)->ref =
3403 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
3404 0, pointed_type(&type)->ref->c);
3405 pointed_type(&type)->ref->c =
3406 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
3410 if (TOK_ISCOND(op))
3411 type.t = VT_SIZE_T;
3412 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
3413 if (op != '?' || !compare_types(type1, type2, 1))
3414 ret = 0;
3415 type = *type1;
3416 } else if (is_float(bt1) || is_float(bt2)) {
3417 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
3418 type.t = VT_LDOUBLE;
3419 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
3420 type.t = VT_DOUBLE;
3421 } else {
3422 type.t = VT_FLOAT;
3424 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
3425 /* cast to biggest op */
3426 type.t = VT_LLONG | VT_LONG;
3427 if (bt1 == VT_LLONG)
3428 type.t &= t1;
3429 if (bt2 == VT_LLONG)
3430 type.t &= t2;
3431 /* convert to unsigned if it does not fit in a long long */
3432 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
3433 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
3434 type.t |= VT_UNSIGNED;
3435 } else {
3436 /* integer operations */
3437 type.t = VT_INT | (VT_LONG & (t1 | t2));
3438 /* convert to unsigned if it does not fit in an integer */
3439 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
3440 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
3441 type.t |= VT_UNSIGNED;
3443 if (dest)
3444 *dest = type;
3445 return ret;
3449 /* generic gen_op: handles operand type problems */
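/* Illustrative note (not part of the original sources): for pointer
   arithmetic the integer operand is scaled by the element size, so with
   'int *p' (4-byte int) the expression 'p + n' is generated as
   p + n*4, and 'p - q' is generated as (p - q) / 4 via TOK_PDIV. */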
3449 ST_FUNC void gen_op(int op)
3451 int u, t1, t2, bt1, bt2, t;
3452 CType type1, combtype;
3454 redo:
3455 t1 = vtop[-1].type.t;
3456 t2 = vtop[0].type.t;
3457 bt1 = t1 & VT_BTYPE;
3458 bt2 = t2 & VT_BTYPE;
3460 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
3461 if (bt2 == VT_FUNC) {
3462 mk_pointer(&vtop->type);
3463 gaddrof();
3465 if (bt1 == VT_FUNC) {
3466 vswap();
3467 mk_pointer(&vtop->type);
3468 gaddrof();
3469 vswap();
3471 goto redo;
3472 } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
3473 tcc_error_noabort("invalid operand types for binary operation");
3474 vpop();
3475 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3476 /* at least one operand is a pointer */
3477 /* relational op: must be both pointers */
3478 if (TOK_ISCOND(op))
3479 goto std_op;
3480 /* if both pointers, then it must be the '-' op */
3481 if (bt1 == VT_PTR && bt2 == VT_PTR) {
3482 if (op != '-')
3483 tcc_error("cannot use pointers here");
3484 if (vtop[-1].type.t & VT_VLA) {
3485 vla_runtime_pointed_size(&vtop[-1].type);
3486 } else {
3487 vpushi(pointed_size(&vtop[-1].type));
3489 vrott(3);
3490 gen_opic(op);
3491 vtop->type.t = VT_PTRDIFF_T;
3492 vswap();
3493 gen_op(TOK_PDIV);
3494 } else {
3495 /* exactly one pointer : must be '+' or '-'. */
3496 if (op != '-' && op != '+')
3497 tcc_error("cannot use pointers here");
3498 /* Put pointer as first operand */
3499 if (bt2 == VT_PTR) {
3500 vswap();
3501 t = t1, t1 = t2, t2 = t;
3503 #if PTR_SIZE == 4
3504 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
3505 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3506 gen_cast_s(VT_INT);
3507 #endif
3508 type1 = vtop[-1].type;
3509 if (vtop[-1].type.t & VT_VLA)
3510 vla_runtime_pointed_size(&vtop[-1].type);
3511 else {
3512 u = pointed_size(&vtop[-1].type);
3513 if (u < 0)
3514 tcc_error("unknown array element size");
3515 #if PTR_SIZE == 8
3516 vpushll(u);
3517 #else
3518 /* XXX: cast to int ? (long long case) */
3519 vpushi(u);
3520 #endif
3522 gen_op('*');
3523 #ifdef CONFIG_TCC_BCHECK
3524 if (tcc_state->do_bounds_check && !const_wanted) {
3525 /* if bounded pointers, we generate a special code to
3526 test bounds */
3527 if (op == '-') {
3528 vpushi(0);
3529 vswap();
3530 gen_op('-');
3532 gen_bounded_ptr_add();
3533 } else
3534 #endif
3536 gen_opic(op);
3538 type1.t &= ~VT_ARRAY;
3539 /* restore the type in case gen_opic() swapped the operands */
3540 vtop->type = type1;
3542 } else {
3543 /* floats can only be used for a few operations */
3544 if (is_float(combtype.t)
3545 && op != '+' && op != '-' && op != '*' && op != '/'
3546 && !TOK_ISCOND(op))
3547 tcc_error("invalid operands for binary operation");
3548 else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
3549 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
3550 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
3551 t |= VT_UNSIGNED;
3552 t |= (VT_LONG & t1);
3553 combtype.t = t;
3555 std_op:
3556 t = t2 = combtype.t;
3557 /* XXX: currently, some unsigned operations are explicit, so
3558 we modify them here */
3559 if (t & VT_UNSIGNED) {
3560 if (op == TOK_SAR)
3561 op = TOK_SHR;
3562 else if (op == '/')
3563 op = TOK_UDIV;
3564 else if (op == '%')
3565 op = TOK_UMOD;
3566 else if (op == TOK_LT)
3567 op = TOK_ULT;
3568 else if (op == TOK_GT)
3569 op = TOK_UGT;
3570 else if (op == TOK_LE)
3571 op = TOK_ULE;
3572 else if (op == TOK_GE)
3573 op = TOK_UGE;
3575 vswap();
3576 gen_cast_s(t);
3577 vswap();
3578 /* special case for shifts and long long: we keep the shift as
3579 an integer */
3580 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
3581 t2 = VT_INT;
3582 gen_cast_s(t2);
3583 if (is_float(t))
3584 gen_opif(op);
3585 else
3586 gen_opic(op);
3587 if (TOK_ISCOND(op)) {
3588 /* relational op: the result is an int */
3589 vtop->type.t = VT_INT;
3590 } else {
3591 vtop->type.t = t;
3594 // Make sure that we have converted to an rvalue:
3595 if (vtop->r & VT_LVAL)
3596 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
3599 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3600 #define gen_cvt_itof1 gen_cvt_itof
3601 #else
3602 /* generic itof for unsigned long long case */
3603 static void gen_cvt_itof1(int t)
3605 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3606 (VT_LLONG | VT_UNSIGNED)) {
3608 if (t == VT_FLOAT)
3609 vpush_helper_func(TOK___floatundisf);
3610 #if LDOUBLE_SIZE != 8
3611 else if (t == VT_LDOUBLE)
3612 vpush_helper_func(TOK___floatundixf);
3613 #endif
3614 else
3615 vpush_helper_func(TOK___floatundidf);
3616 vrott(2);
3617 gfunc_call(1);
3618 vpushi(0);
3619 PUT_R_RET(vtop, t);
3620 } else {
3621 gen_cvt_itof(t);
3624 #endif
3626 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3627 #define gen_cvt_ftoi1 gen_cvt_ftoi
3628 #else
3629 /* generic ftoi for unsigned long long case */
3630 static void gen_cvt_ftoi1(int t)
3632 int st;
3633 if (t == (VT_LLONG | VT_UNSIGNED)) {
3634 /* not handled natively */
3635 st = vtop->type.t & VT_BTYPE;
3636 if (st == VT_FLOAT)
3637 vpush_helper_func(TOK___fixunssfdi);
3638 #if LDOUBLE_SIZE != 8
3639 else if (st == VT_LDOUBLE)
3640 vpush_helper_func(TOK___fixunsxfdi);
3641 #endif
3642 else
3643 vpush_helper_func(TOK___fixunsdfdi);
3644 vrott(2);
3645 gfunc_call(1);
3646 vpushi(0);
3647 PUT_R_RET(vtop, t);
3648 } else {
3649 gen_cvt_ftoi(t);
3652 #endif
3654 /* special delayed cast for char/short */
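/* Illustrative note (not part of the original sources): when an int is
   stored into a char/short lvalue, vstore() may leave the value
   un-narrowed in a register and only tag it with VT_MUSTCAST; this
   function performs the postponed truncation / sign extension once the
   value is actually needed with its declared type. */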
3655 static void force_charshort_cast(void)
3657 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3658 int dbt = vtop->type.t;
3659 vtop->r &= ~VT_MUSTCAST;
3660 vtop->type.t = sbt;
3661 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3662 vtop->type.t = dbt;
3665 static void gen_cast_s(int t)
3667 CType type;
3668 type.t = t;
3669 type.ref = NULL;
3670 gen_cast(&type);
3673 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3674 static void gen_cast(CType *type)
3676 int sbt, dbt, sf, df, c;
3677 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3679 /* special delayed cast for char/short */
3680 if (vtop->r & VT_MUSTCAST)
3681 force_charshort_cast();
3683 /* bitfields first get cast to ints */
3684 if (vtop->type.t & VT_BITFIELD)
3685 gv(RC_INT);
3687 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3688 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3689 if (sbt == VT_FUNC)
3690 sbt = VT_PTR;
3692 again:
3693 if (sbt != dbt) {
3694 sf = is_float(sbt);
3695 df = is_float(dbt);
3696 dbt_bt = dbt & VT_BTYPE;
3697 sbt_bt = sbt & VT_BTYPE;
3698 if (dbt_bt == VT_VOID)
3699 goto done;
3700 if (sbt_bt == VT_VOID) {
3701 error:
3702 cast_error(&vtop->type, type);
3705 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3706 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3707 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
3708 #endif
3709 if (c) {
3710 /* constant case: we can do it now */
3711 /* XXX: in ISOC, cannot do it if error in convert */
3712 if (sbt == VT_FLOAT)
3713 vtop->c.ld = vtop->c.f;
3714 else if (sbt == VT_DOUBLE)
3715 vtop->c.ld = vtop->c.d;
3717 if (df) {
3718 if (sbt_bt == VT_LLONG) {
3719 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3720 vtop->c.ld = vtop->c.i;
3721 else
3722 vtop->c.ld = -(long double)-vtop->c.i;
3723 } else if(!sf) {
3724 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3725 vtop->c.ld = (uint32_t)vtop->c.i;
3726 else
3727 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3730 if (dbt == VT_FLOAT)
3731 vtop->c.f = (float)vtop->c.ld;
3732 else if (dbt == VT_DOUBLE)
3733 vtop->c.d = (double)vtop->c.ld;
3734 } else if (sf && dbt == VT_BOOL) {
3735 vtop->c.i = (vtop->c.ld != 0);
3736 } else {
3737 if(sf)
3738 vtop->c.i = vtop->c.ld;
3739 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3741 else if (sbt & VT_UNSIGNED)
3742 vtop->c.i = (uint32_t)vtop->c.i;
3743 else
3744 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3746 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3748 else if (dbt == VT_BOOL)
3749 vtop->c.i = (vtop->c.i != 0);
3750 else {
3751 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3752 dbt_bt == VT_SHORT ? 0xffff :
3753 0xffffffff;
3754 vtop->c.i &= m;
3755 if (!(dbt & VT_UNSIGNED))
3756 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3759 goto done;
3761 } else if (dbt == VT_BOOL
3762 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3763 == (VT_CONST | VT_SYM)) {
3764 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3765 vtop->r = VT_CONST;
3766 vtop->c.i = 1;
3767 goto done;
3770 /* cannot generate code for global or static initializers */
3771 if (STATIC_DATA_WANTED)
3772 goto done;
3774 /* non constant case: generate code */
3775 if (dbt == VT_BOOL) {
3776 gen_test_zero(TOK_NE);
3777 goto done;
3780 if (sf || df) {
3781 if (sf && df) {
3782 /* convert from fp to fp */
3783 gen_cvt_ftof(dbt);
3784 } else if (df) {
3785 /* convert int to fp */
3786 gen_cvt_itof1(dbt);
3787 } else {
3788 /* convert fp to int */
3789 sbt = dbt;
3790 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3791 sbt = VT_INT;
3792 gen_cvt_ftoi1(sbt);
3793 goto again; /* may need char/short cast */
3795 goto done;
3798 ds = btype_size(dbt_bt);
3799 ss = btype_size(sbt_bt);
3800 if (ds == 0 || ss == 0)
3801 goto error;
3803 if (IS_ENUM(type->t) && type->ref->c < 0)
3804 tcc_error("cast to incomplete type");
3806 /* same size and no sign conversion needed */
3807 if (ds == ss && ds >= 4)
3808 goto done;
3809 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3810 tcc_warning("cast between pointer and integer of different size");
3811 if (sbt_bt == VT_PTR) {
3812 /* put integer type to allow logical operations below */
3813 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3817 /* processor allows { int a = 0, b = *(char*)&a; }
3818 That means that if we cast to a smaller width, we can just
3819 change the type and still read it later. */
3820 #define ALLOW_SUBTYPE_ACCESS 1
3822 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3823 /* value still in memory */
3824 if (ds <= ss)
3825 goto done;
3826 /* ss <= 4 here */
3827 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3828 gv(RC_INT);
3829 goto done; /* no 64bit involved */
3832 gv(RC_INT);
3834 trunc = 0;
3835 #if PTR_SIZE == 4
3836 if (ds == 8) {
3837 /* generate high word */
3838 if (sbt & VT_UNSIGNED) {
3839 vpushi(0);
3840 gv(RC_INT);
3841 } else {
3842 gv_dup();
3843 vpushi(31);
3844 gen_op(TOK_SAR);
3846 lbuild(dbt);
3847 } else if (ss == 8) {
3848 /* from long long: just take low order word */
3849 lexpand();
3850 vpop();
3852 ss = 4;
3854 #elif PTR_SIZE == 8
3855 if (ds == 8) {
3856 /* need to convert from 32bit to 64bit */
3857 if (sbt & VT_UNSIGNED) {
3858 #if defined(TCC_TARGET_RISCV64)
3859 /* RISC-V keeps 32bit vals in registers sign-extended.
3860 So here we need a zero-extension. */
3861 trunc = 32;
3862 #else
3863 goto done;
3864 #endif
3865 } else {
3866 gen_cvt_sxtw();
3867 goto done;
3869 ss = ds, ds = 4, dbt = sbt;
3870 } else if (ss == 8) {
3871 /* RISC-V keeps 32bit vals in registers sign-extended.
3872 So here we need a sign-extension for signed types and
3873 zero-extension. for unsigned types. */
3874 #if !defined(TCC_TARGET_RISCV64)
3875 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3876 #endif
3877 } else {
3878 ss = 4;
3880 #endif
3882 if (ds >= ss)
3883 goto done;
3884 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3885 if (ss == 4) {
3886 gen_cvt_csti(dbt);
3887 goto done;
3889 #endif
3890 bits = (ss - ds) * 8;
3891 /* for unsigned, gen_op will convert SAR to SHR */
3892 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3893 vpushi(bits);
3894 gen_op(TOK_SHL);
3895 vpushi(bits - trunc);
3896 gen_op(TOK_SAR);
3897 vpushi(trunc);
3898 gen_op(TOK_SHR);
3900 done:
3901 vtop->type = *type;
3902 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
3905 /* return type size as known at compile time. Put alignment at 'a' */
3906 ST_FUNC int type_size(CType *type, int *a)
3908 Sym *s;
3909 int bt;
3911 bt = type->t & VT_BTYPE;
3912 if (bt == VT_STRUCT) {
3913 /* struct/union */
3914 s = type->ref;
3915 *a = s->r;
3916 return s->c;
3917 } else if (bt == VT_PTR) {
3918 if (type->t & VT_ARRAY) {
3919 int ts;
3921 s = type->ref;
3922 ts = type_size(&s->type, a);
3924 if (ts < 0 && s->c < 0)
3925 ts = -ts;
3927 return ts * s->c;
3928 } else {
3929 *a = PTR_SIZE;
3930 return PTR_SIZE;
3932 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3933 return -1; /* incomplete enum */
3934 } else if (bt == VT_LDOUBLE) {
3935 *a = LDOUBLE_ALIGN;
3936 return LDOUBLE_SIZE;
3937 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3938 #ifdef TCC_TARGET_I386
3939 #ifdef TCC_TARGET_PE
3940 *a = 8;
3941 #else
3942 *a = 4;
3943 #endif
3944 #elif defined(TCC_TARGET_ARM)
3945 #ifdef TCC_ARM_EABI
3946 *a = 8;
3947 #else
3948 *a = 4;
3949 #endif
3950 #else
3951 *a = 8;
3952 #endif
3953 return 8;
3954 } else if (bt == VT_INT || bt == VT_FLOAT) {
3955 *a = 4;
3956 return 4;
3957 } else if (bt == VT_SHORT) {
3958 *a = 2;
3959 return 2;
3960 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3961 *a = 8;
3962 return 16;
3963 } else {
3964 /* char, void, function, _Bool */
3965 *a = 1;
3966 return 1;
3970 /* push type size as known at run time on top of value stack. Put
3971 alignment at 'a' */
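/* Illustrative note (not part of the original sources): for a VLA such
   as 'int a[n];' the byte size is not a compile-time constant; it has
   been computed at run time and saved in a hidden local variable
   (type->ref->c is its stack offset), so that saved value is pushed
   instead of an immediate. */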
3972 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
3974 if (type->t & VT_VLA) {
3975 type_size(&type->ref->type, a);
3976 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3977 } else {
3978 vpushi(type_size(type, a));
3982 /* return the pointed type of t */
3983 static inline CType *pointed_type(CType *type)
3985 return &type->ref->type;
3988 /* modify type so that it is a pointer to the original type. */
3989 ST_FUNC void mk_pointer(CType *type)
3991 Sym *s;
3992 s = sym_push(SYM_FIELD, type, 0, -1);
3993 type->t = VT_PTR | (type->t & VT_STORAGE);
3994 type->ref = s;
3997 /* return true if type1 and type2 are exactly the same (including
3998 qualifiers). */
4000 static int is_compatible_types(CType *type1, CType *type2)
4002 return compare_types(type1,type2,0);
4005 /* return true if type1 and type2 are the same (ignoring qualifiers). */
4007 static int is_compatible_unqualified_types(CType *type1, CType *type2)
4009 return compare_types(type1,type2,1);
4012 static void cast_error(CType *st, CType *dt)
4014 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
4017 /* verify type compatibility to store vtop in 'dt' type */
4018 static void verify_assign_cast(CType *dt)
4020 CType *st, *type1, *type2;
4021 int dbt, sbt, qualwarn, lvl;
4023 st = &vtop->type; /* source type */
4024 dbt = dt->t & VT_BTYPE;
4025 sbt = st->t & VT_BTYPE;
4026 if (dt->t & VT_CONSTANT)
4027 tcc_warning("assignment of read-only location");
4028 switch(dbt) {
4029 case VT_VOID:
4030 if (sbt != dbt)
4031 tcc_error("assignment to void expression");
4032 break;
4033 case VT_PTR:
4034 /* special cases for pointers */
4035 /* '0' can also be a pointer */
4036 if (is_null_pointer(vtop))
4037 break;
4038 /* accept implicit pointer to integer cast with warning */
4039 if (is_integer_btype(sbt)) {
4040 tcc_warning("assignment makes pointer from integer without a cast");
4041 break;
4043 type1 = pointed_type(dt);
4044 if (sbt == VT_PTR)
4045 type2 = pointed_type(st);
4046 else if (sbt == VT_FUNC)
4047 type2 = st; /* a function is implicitly a function pointer */
4048 else
4049 goto error;
4050 if (is_compatible_types(type1, type2))
4051 break;
4052 for (qualwarn = lvl = 0;; ++lvl) {
4053 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
4054 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
4055 qualwarn = 1;
4056 dbt = type1->t & (VT_BTYPE|VT_LONG);
4057 sbt = type2->t & (VT_BTYPE|VT_LONG);
4058 if (dbt != VT_PTR || sbt != VT_PTR)
4059 break;
4060 type1 = pointed_type(type1);
4061 type2 = pointed_type(type2);
4063 if (!is_compatible_unqualified_types(type1, type2)) {
4064 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
4065 /* void * can match anything */
4066 } else if (dbt == sbt
4067 && is_integer_btype(sbt & VT_BTYPE)
4068 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
4069 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
4070 /* Like GCC, don't warn by default for mere changes
4071 in pointer target signedness. Do warn for different
4072 base types, though, in particular for unsigned enums
4073 and signed int targets. */
4074 } else {
4075 tcc_warning("assignment from incompatible pointer type");
4076 break;
4079 if (qualwarn)
4080 tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
4081 break;
4082 case VT_BYTE:
4083 case VT_SHORT:
4084 case VT_INT:
4085 case VT_LLONG:
4086 if (sbt == VT_PTR || sbt == VT_FUNC) {
4087 tcc_warning("assignment makes integer from pointer without a cast");
4088 } else if (sbt == VT_STRUCT) {
4089 goto case_VT_STRUCT;
4091 /* XXX: more tests */
4092 break;
4093 case VT_STRUCT:
4094 case_VT_STRUCT:
4095 if (!is_compatible_unqualified_types(dt, st)) {
4096 error:
4097 cast_error(st, dt);
4099 break;
4103 static void gen_assign_cast(CType *dt)
4105 verify_assign_cast(dt);
4106 gen_cast(dt);
4109 /* store vtop in lvalue pushed on stack */
4110 ST_FUNC void vstore(void)
4112 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
4114 ft = vtop[-1].type.t;
4115 sbt = vtop->type.t & VT_BTYPE;
4116 dbt = ft & VT_BTYPE;
4118 verify_assign_cast(&vtop[-1].type);
4120 if (sbt == VT_STRUCT) {
4121 /* if structure, only generate pointer */
4122 /* structure assignment : generate memcpy */
4123 /* XXX: optimize if small size */
4124 size = type_size(&vtop->type, &align);
4126 /* destination */
4127 vswap();
4128 #ifdef CONFIG_TCC_BCHECK
4129 if (vtop->r & VT_MUSTBOUND)
4130 gbound(); /* check would be wrong after gaddrof() */
4131 #endif
4132 vtop->type.t = VT_PTR;
4133 gaddrof();
4135 /* address of memcpy() */
4136 #ifdef TCC_ARM_EABI
4137 if(!(align & 7))
4138 vpush_helper_func(TOK_memmove8);
4139 else if(!(align & 3))
4140 vpush_helper_func(TOK_memmove4);
4141 else
4142 #endif
4143 /* Use memmove, rather than memcpy, as dest and src may be the same: */
4144 vpush_helper_func(TOK_memmove);
4146 vswap();
4147 /* source */
4148 vpushv(vtop - 2);
4149 #ifdef CONFIG_TCC_BCHECK
4150 if (vtop->r & VT_MUSTBOUND)
4151 gbound();
4152 #endif
4153 vtop->type.t = VT_PTR;
4154 gaddrof();
4155 /* type size */
4156 vpushi(size);
4157 gfunc_call(3);
4158 /* leave source on stack */
4160 } else if (ft & VT_BITFIELD) {
4161 /* bitfield store handling */
4163 /* save lvalue as expression result (example: s.b = s.a = n;) */
4164 vdup(), vtop[-1] = vtop[-2];
4166 bit_pos = BIT_POS(ft);
4167 bit_size = BIT_SIZE(ft);
4168 /* remove bit field info to avoid loops */
4169 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
4171 if (dbt == VT_BOOL) {
4172 gen_cast(&vtop[-1].type);
4173 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
4175 r = adjust_bf(vtop - 1, bit_pos, bit_size);
4176 if (dbt != VT_BOOL) {
4177 gen_cast(&vtop[-1].type);
4178 dbt = vtop[-1].type.t & VT_BTYPE;
4180 if (r == VT_STRUCT) {
4181 store_packed_bf(bit_pos, bit_size);
4182 } else {
4183 unsigned long long mask = (1ULL << bit_size) - 1;
4184 if (dbt != VT_BOOL) {
4185 /* mask source */
4186 if (dbt == VT_LLONG)
4187 vpushll(mask);
4188 else
4189 vpushi((unsigned)mask);
4190 gen_op('&');
4192 /* shift source */
4193 vpushi(bit_pos);
4194 gen_op(TOK_SHL);
4195 vswap();
4196 /* duplicate destination */
4197 vdup();
4198 vrott(3);
4199 /* load destination, mask and or with source */
4200 if (dbt == VT_LLONG)
4201 vpushll(~(mask << bit_pos));
4202 else
4203 vpushi(~((unsigned)mask << bit_pos));
4204 gen_op('&');
4205 gen_op('|');
4206 /* store result */
4207 vstore();
4208 /* ... and discard */
4209 vpop();
4211 } else if (dbt == VT_VOID) {
4212 --vtop;
4213 } else {
4214 /* optimize char/short casts */
4215 delayed_cast = 0;
4216 if ((dbt == VT_BYTE || dbt == VT_SHORT)
4217 && is_integer_btype(sbt)
4219 if ((vtop->r & VT_MUSTCAST)
4220 && btype_size(dbt) > btype_size(sbt)
4222 force_charshort_cast();
4223 delayed_cast = 1;
4224 } else {
4225 gen_cast(&vtop[-1].type);
4228 #ifdef CONFIG_TCC_BCHECK
4229 /* bound check case */
4230 if (vtop[-1].r & VT_MUSTBOUND) {
4231 vswap();
4232 gbound();
4233 vswap();
4235 #endif
4236 gv(RC_TYPE(dbt)); /* generate value */
4238 if (delayed_cast) {
4239 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
4240 //tcc_warning("delay cast %x -> %x", sbt, dbt);
4241 vtop->type.t = ft & VT_TYPE;
4244 /* if lvalue was saved on stack, must read it */
4245 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
4246 SValue sv;
4247 r = get_reg(RC_INT);
4248 sv.type.t = VT_PTRDIFF_T;
4249 sv.r = VT_LOCAL | VT_LVAL;
4250 sv.c.i = vtop[-1].c.i;
4251 load(r, &sv);
4252 vtop[-1].r = r | VT_LVAL;
4255 r = vtop->r & VT_VALMASK;
4256 /* two word case handling :
4257 store second register at word + 4 (or +8 for x86-64) */
4258 if (USING_TWO_WORDS(dbt)) {
4259 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
4260 vtop[-1].type.t = load_type;
4261 store(r, vtop - 1);
4262 vswap();
4263 /* convert to int to increment easily */
4264 vtop->type.t = VT_PTRDIFF_T;
4265 gaddrof();
4266 vpushs(PTR_SIZE);
4267 gen_op('+');
4268 vtop->r |= VT_LVAL;
4269 vswap();
4270 vtop[-1].type.t = load_type;
4271 /* XXX: it works because r2 is spilled last ! */
4272 store(vtop->r2, vtop - 1);
4273 } else {
4274 /* single word */
4275 store(r, vtop - 1);
4277 vswap();
4278 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
4282 /* post selects post- (1) or pre- (0) increment/decrement; c is the token ++ or -- */
4283 ST_FUNC void inc(int post, int c)
4285 test_lvalue();
4286 vdup(); /* save lvalue */
4287 if (post) {
4288 gv_dup(); /* duplicate value */
4289 vrotb(3);
4290 vrotb(3);
4292 /* add constant */
4293 vpushi(c - TOK_MID);
4294 gen_op('+');
4295 vstore(); /* store value */
4296 if (post)
4297 vpop(); /* if post op, return saved value */
4300 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
4302 /* read the string */
4303 if (tok != TOK_STR)
4304 expect(msg);
4305 cstr_new(astr);
4306 while (tok == TOK_STR) {
4307 /* XXX: add \0 handling too ? */
4308 cstr_cat(astr, tokc.str.data, -1);
4309 next();
4311 cstr_ccat(astr, '\0');
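/* Illustrative sketch (assumed example names, not from the source): parse_mult_str()
   accepts a sequence of adjacent string literals, so both declarations below name
   the same section. */
static int a __attribute__((section(".mysec")));
static int b __attribute__((section(".my" "sec")));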
4314 /* If I is >= 1 and a power of two, returns log2(i)+1.
4315 If I is 0 returns 0. */
4316 ST_FUNC int exact_log2p1(int i)
4318 int ret;
4319 if (!i)
4320 return 0;
4321 for (ret = 1; i >= 1 << 8; ret += 8)
4322 i >>= 8;
4323 if (i >= 1 << 4)
4324 ret += 4, i >>= 4;
4325 if (i >= 1 << 2)
4326 ret += 2, i >>= 2;
4327 if (i >= 1 << 1)
4328 ret++;
4329 return ret;
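/* Illustrative sketch: exact_log2p1() maps 0->0, 1->1, 2->2, 4->3, 8->4, 16->5.
   For the declaration below the aligned attribute is recorded as
   a.aligned == exact_log2p1(16) == 5 and decoded later as 1 << (5 - 1). */
struct aligned16 { char c; } __attribute__((aligned(16)));
_Static_assert(_Alignof(struct aligned16) == 16, "aligned(16) applied");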
4332 /* Parse __attribute__((...)) GNUC extension. */
4333 static void parse_attribute(AttributeDef *ad)
4335 int t, n;
4336 CString astr;
4338 redo:
4339 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
4340 return;
4341 next();
4342 skip('(');
4343 skip('(');
4344 while (tok != ')') {
4345 if (tok < TOK_IDENT)
4346 expect("attribute name");
4347 t = tok;
4348 next();
4349 switch(t) {
4350 case TOK_CLEANUP1:
4351 case TOK_CLEANUP2:
4353 Sym *s;
4355 skip('(');
4356 s = sym_find(tok);
4357 if (!s) {
4358 tcc_warning_c(warn_implicit_function_declaration)(
4359 "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
4360 s = external_global_sym(tok, &func_old_type);
4361 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
4362 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
4363 ad->cleanup_func = s;
4364 next();
4365 skip(')');
4366 break;
4368 case TOK_CONSTRUCTOR1:
4369 case TOK_CONSTRUCTOR2:
4370 ad->f.func_ctor = 1;
4371 break;
4372 case TOK_DESTRUCTOR1:
4373 case TOK_DESTRUCTOR2:
4374 ad->f.func_dtor = 1;
4375 break;
4376 case TOK_ALWAYS_INLINE1:
4377 case TOK_ALWAYS_INLINE2:
4378 ad->f.func_alwinl = 1;
4379 break;
4380 case TOK_SECTION1:
4381 case TOK_SECTION2:
4382 skip('(');
4383 parse_mult_str(&astr, "section name");
4384 ad->section = find_section(tcc_state, (char *)astr.data);
4385 skip(')');
4386 cstr_free(&astr);
4387 break;
4388 case TOK_ALIAS1:
4389 case TOK_ALIAS2:
4390 skip('(');
4391 parse_mult_str(&astr, "alias(\"target\")");
4392 ad->alias_target = /* save string as token, for later */
4393 tok_alloc((char*)astr.data, astr.size-1)->tok;
4394 skip(')');
4395 cstr_free(&astr);
4396 break;
4397 case TOK_VISIBILITY1:
4398 case TOK_VISIBILITY2:
4399 skip('(');
4400 parse_mult_str(&astr,
4401 "visibility(\"default|hidden|internal|protected\")");
4402 if (!strcmp (astr.data, "default"))
4403 ad->a.visibility = STV_DEFAULT;
4404 else if (!strcmp (astr.data, "hidden"))
4405 ad->a.visibility = STV_HIDDEN;
4406 else if (!strcmp (astr.data, "internal"))
4407 ad->a.visibility = STV_INTERNAL;
4408 else if (!strcmp (astr.data, "protected"))
4409 ad->a.visibility = STV_PROTECTED;
4410 else
4411 expect("visibility(\"default|hidden|internal|protected\")");
4412 skip(')');
4413 cstr_free(&astr);
4414 break;
4415 case TOK_ALIGNED1:
4416 case TOK_ALIGNED2:
4417 if (tok == '(') {
4418 next();
4419 n = expr_const();
4420 if (n <= 0 || (n & (n - 1)) != 0)
4421 tcc_error("alignment must be a positive power of two");
4422 skip(')');
4423 } else {
4424 n = MAX_ALIGN;
4426 ad->a.aligned = exact_log2p1(n);
4427 if (n != 1 << (ad->a.aligned - 1))
4428 tcc_error("alignment of %d is larger than implemented", n);
4429 break;
4430 case TOK_PACKED1:
4431 case TOK_PACKED2:
4432 ad->a.packed = 1;
4433 break;
4434 case TOK_WEAK1:
4435 case TOK_WEAK2:
4436 ad->a.weak = 1;
4437 break;
4438 case TOK_UNUSED1:
4439 case TOK_UNUSED2:
4440 /* currently, no need to handle it because tcc does not
4441 track unused objects */
4442 break;
4443 case TOK_NORETURN1:
4444 case TOK_NORETURN2:
4445 ad->f.func_noreturn = 1;
4446 break;
4447 case TOK_CDECL1:
4448 case TOK_CDECL2:
4449 case TOK_CDECL3:
4450 ad->f.func_call = FUNC_CDECL;
4451 break;
4452 case TOK_STDCALL1:
4453 case TOK_STDCALL2:
4454 case TOK_STDCALL3:
4455 ad->f.func_call = FUNC_STDCALL;
4456 break;
4457 #ifdef TCC_TARGET_I386
4458 case TOK_REGPARM1:
4459 case TOK_REGPARM2:
4460 skip('(');
4461 n = expr_const();
4462 if (n > 3)
4463 n = 3;
4464 else if (n < 0)
4465 n = 0;
4466 if (n > 0)
4467 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
4468 skip(')');
4469 break;
4470 case TOK_FASTCALL1:
4471 case TOK_FASTCALL2:
4472 case TOK_FASTCALL3:
4473 ad->f.func_call = FUNC_FASTCALLW;
4474 break;
4475 #endif
4476 case TOK_MODE:
4477 skip('(');
4478 switch(tok) {
4479 case TOK_MODE_DI:
4480 ad->attr_mode = VT_LLONG + 1;
4481 break;
4482 case TOK_MODE_QI:
4483 ad->attr_mode = VT_BYTE + 1;
4484 break;
4485 case TOK_MODE_HI:
4486 ad->attr_mode = VT_SHORT + 1;
4487 break;
4488 case TOK_MODE_SI:
4489 case TOK_MODE_word:
4490 ad->attr_mode = VT_INT + 1;
4491 break;
4492 default:
4493 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
4494 break;
4496 next();
4497 skip(')');
4498 break;
4499 case TOK_DLLEXPORT:
4500 ad->a.dllexport = 1;
4501 break;
4502 case TOK_NODECORATE:
4503 ad->a.nodecorate = 1;
4504 break;
4505 case TOK_DLLIMPORT:
4506 ad->a.dllimport = 1;
4507 break;
4508 default:
4509 tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
4510 /* skip parameters */
4511 if (tok == '(') {
4512 int parenthesis = 0;
4513 do {
4514 if (tok == '(')
4515 parenthesis++;
4516 else if (tok == ')')
4517 parenthesis--;
4518 next();
4519 } while (parenthesis && tok != -1);
4521 break;
4523 if (tok != ',')
4524 break;
4525 next();
4527 skip(')');
4528 skip(')');
4529 goto redo;
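/* Illustrative sketch: source forms of a few of the attributes handled by
   parse_attribute() above (cleanup, constructor, noreturn, packed). */
#include <stdio.h>

static void closefile(FILE **fp) { if (*fp) fclose(*fp); }

void attr_demo(void)
{
    /* cleanup: closefile(&f) runs automatically when f goes out of scope */
    __attribute__((cleanup(closefile))) FILE *f = fopen("/dev/null", "r");
    (void)f;
}

__attribute__((constructor)) static void before_main(void) { }
__attribute__((noreturn)) void die(const char *msg);
struct tight { char c; int i; } __attribute__((packed));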
4532 static Sym * find_field (CType *type, int v, int *cumofs)
4534 Sym *s = type->ref;
4535 v |= SYM_FIELD;
4536 while ((s = s->next) != NULL) {
4537 if ((s->v & SYM_FIELD) &&
4538 (s->type.t & VT_BTYPE) == VT_STRUCT &&
4539 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
4540 Sym *ret = find_field (&s->type, v, cumofs);
4541 if (ret) {
4542 *cumofs += s->c;
4543 return ret;
4546 if (s->v == v)
4547 break;
4549 return s;
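/* Illustrative sketch: find_field() recurses into anonymous members and
   accumulates their offsets in *cumofs, which is what makes 'y' resolve below
   (offsets assume 4-byte int and no extra padding). */
#include <stddef.h>
struct outer {
    int before;
    struct { int x, y; };        /* anonymous struct member */
};
_Static_assert(offsetof(struct outer, y) == 2 * sizeof(int),
               "offset of anonymous member plus offset of y inside it");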
4552 static void check_fields (CType *type, int check)
4554 Sym *s = type->ref;
4556 while ((s = s->next) != NULL) {
4557 int v = s->v & ~SYM_FIELD;
4558 if (v < SYM_FIRST_ANOM) {
4559 TokenSym *ts = table_ident[v - TOK_IDENT];
4560 if (check && (ts->tok & SYM_FIELD))
4561 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
4562 ts->tok ^= SYM_FIELD;
4563 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
4564 check_fields (&s->type, check);
4568 static void struct_layout(CType *type, AttributeDef *ad)
4570 int size, align, maxalign, offset, c, bit_pos, bit_size;
4571 int packed, a, bt, prevbt, prev_bit_size;
4572 int pcc = !tcc_state->ms_bitfields;
4573 int pragma_pack = *tcc_state->pack_stack_ptr;
4574 Sym *f;
4576 maxalign = 1;
4577 offset = 0;
4578 c = 0;
4579 bit_pos = 0;
4580 prevbt = VT_STRUCT; /* make it never match */
4581 prev_bit_size = 0;
4583 //#define BF_DEBUG
4585 for (f = type->ref->next; f; f = f->next) {
4586 if (f->type.t & VT_BITFIELD)
4587 bit_size = BIT_SIZE(f->type.t);
4588 else
4589 bit_size = -1;
4590 size = type_size(&f->type, &align);
4591 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4592 packed = 0;
4594 if (pcc && bit_size == 0) {
4595 /* in pcc mode, packing does not affect zero-width bitfields */
4597 } else {
4598 /* in pcc mode, attribute packed overrides if set. */
4599 if (pcc && (f->a.packed || ad->a.packed))
4600 align = packed = 1;
4602 /* pragma pack overrides align if lesser and packs bitfields always */
4603 if (pragma_pack) {
4604 packed = 1;
4605 if (pragma_pack < align)
4606 align = pragma_pack;
4607 /* in pcc mode pragma pack also overrides individual align */
4608 if (pcc && pragma_pack < a)
4609 a = 0;
4612 /* some individual align was specified */
4613 if (a)
4614 align = a;
4616 if (type->ref->type.t == VT_UNION) {
4617 if (pcc && bit_size >= 0)
4618 size = (bit_size + 7) >> 3;
4619 offset = 0;
4620 if (size > c)
4621 c = size;
4623 } else if (bit_size < 0) {
4624 if (pcc)
4625 c += (bit_pos + 7) >> 3;
4626 c = (c + align - 1) & -align;
4627 offset = c;
4628 if (size > 0)
4629 c += size;
4630 bit_pos = 0;
4631 prevbt = VT_STRUCT;
4632 prev_bit_size = 0;
4634 } else {
4635 /* A bit-field. Layout is more complicated. There are two
4636 options: PCC (GCC) compatible and MS compatible */
4637 if (pcc) {
4638 /* In PCC layout a bit-field is placed adjacent to the
4639 preceding bit-fields, except if:
4640 - it has zero-width
4641 - an individual alignment was given
4642 - it would overflow its base type container and
4643 there is no packing */
4644 if (bit_size == 0) {
4645 new_field:
4646 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4647 bit_pos = 0;
4648 } else if (f->a.aligned) {
4649 goto new_field;
4650 } else if (!packed) {
4651 int a8 = align * 8;
4652 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4653 if (ofs > size / align)
4654 goto new_field;
4657 /* in pcc mode, long long bitfields have type int if they fit */
4658 if (size == 8 && bit_size <= 32)
4659 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4661 while (bit_pos >= align * 8)
4662 c += align, bit_pos -= align * 8;
4663 offset = c;
4665 /* In PCC layout named bit-fields influence the alignment
4666 of the containing struct using the base type's alignment,
4667 except for packed fields (which here have correct align). */
4668 if (f->v & SYM_FIRST_ANOM
4669 // && bit_size // ??? gcc on ARM/rpi does that
4671 align = 1;
4673 } else {
4674 bt = f->type.t & VT_BTYPE;
4675 if ((bit_pos + bit_size > size * 8)
4676 || (bit_size > 0) == (bt != prevbt)
4678 c = (c + align - 1) & -align;
4679 offset = c;
4680 bit_pos = 0;
4681 /* In MS bitfield mode a bit-field run always uses
4682 at least as many bits as the underlying type.
4683 To start a new run it's also required that this
4684 or the last bit-field had non-zero width. */
4685 if (bit_size || prev_bit_size)
4686 c += size;
4688 /* In MS layout the record's alignment is normally
4689 influenced by the field, except for a zero-width
4690 field at the start of a run (but by further zero-width
4691 fields it is again). */
4692 if (bit_size == 0 && prevbt != bt)
4693 align = 1;
4694 prevbt = bt;
4695 prev_bit_size = bit_size;
4698 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4699 | (bit_pos << VT_STRUCT_SHIFT);
4700 bit_pos += bit_size;
4702 if (align > maxalign)
4703 maxalign = align;
4705 #ifdef BF_DEBUG
4706 printf("set field %s offset %-2d size %-2d align %-2d",
4707 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4708 if (f->type.t & VT_BITFIELD) {
4709 printf(" pos %-2d bits %-2d",
4710 BIT_POS(f->type.t),
4711 BIT_SIZE(f->type.t)
4714 printf("\n");
4715 #endif
4717 f->c = offset;
4718 f->r = 0;
4721 if (pcc)
4722 c += (bit_pos + 7) >> 3;
4724 /* store size and alignment */
4725 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4726 if (a < maxalign)
4727 a = maxalign;
4728 type->ref->r = a;
4729 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4730 /* can happen if individual align for some member was given. In
4731 this case MSVC ignores maxalign when aligning the size */
4732 a = pragma_pack;
4733 if (a < bt)
4734 a = bt;
4736 c = (c + a - 1) & -a;
4737 type->ref->c = c;
4739 #ifdef BF_DEBUG
4740 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4741 #endif
4743 /* check whether we can access bitfields by their type */
4744 for (f = type->ref->next; f; f = f->next) {
4745 int s, px, cx, c0;
4746 CType t;
4748 if (0 == (f->type.t & VT_BITFIELD))
4749 continue;
4750 f->type.ref = f;
4751 f->auxtype = -1;
4752 bit_size = BIT_SIZE(f->type.t);
4753 if (bit_size == 0)
4754 continue;
4755 bit_pos = BIT_POS(f->type.t);
4756 size = type_size(&f->type, &align);
4758 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4759 #ifdef TCC_TARGET_ARM
4760 && !(f->c & (align - 1))
4761 #endif
4763 continue;
4765 /* try to access the field using a different type */
4766 c0 = -1, s = align = 1;
4767 t.t = VT_BYTE;
4768 for (;;) {
4769 px = f->c * 8 + bit_pos;
4770 cx = (px >> 3) & -align;
4771 px = px - (cx << 3);
4772 if (c0 == cx)
4773 break;
4774 s = (px + bit_size + 7) >> 3;
4775 if (s > 4) {
4776 t.t = VT_LLONG;
4777 } else if (s > 2) {
4778 t.t = VT_INT;
4779 } else if (s > 1) {
4780 t.t = VT_SHORT;
4781 } else {
4782 t.t = VT_BYTE;
4784 s = type_size(&t, &align);
4785 c0 = cx;
4788 if (px + bit_size <= s * 8 && cx + s <= c
4789 #ifdef TCC_TARGET_ARM
4790 && !(cx & (align - 1))
4791 #endif
4793 /* update offset and bit position */
4794 f->c = cx;
4795 bit_pos = px;
4796 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4797 | (bit_pos << VT_STRUCT_SHIFT);
4798 if (s != size)
4799 f->auxtype = t.t;
4800 #ifdef BF_DEBUG
4801 printf("FIX field %s offset %-2d size %-2d align %-2d "
4802 "pos %-2d bits %-2d\n",
4803 get_tok_str(f->v & ~SYM_FIELD, NULL),
4804 cx, s, align, px, bit_size);
4805 #endif
4806 } else {
4807 /* fall back to load/store single-byte wise */
4808 f->auxtype = VT_STRUCT;
4809 #ifdef BF_DEBUG
4810 printf("FIX field %s : load byte-wise\n",
4811 get_tok_str(f->v & ~SYM_FIELD, NULL));
4812 #endif
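/* Illustrative sketch: what the PCC-compatible branch above typically yields,
   assuming 32-bit int, no #pragma pack and no ms_bitfields. */
struct bits {
    unsigned a : 3;     /* byte 0, bits 0..2            */
    unsigned b : 5;     /* byte 0, bits 3..7            */
    unsigned   : 0;     /* zero width: start a new unit */
    unsigned c : 1;     /* byte 4, bit 0                */
};
_Static_assert(sizeof(struct bits) == 8, "two int-sized units, align 4");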
4817 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4818 static void struct_decl(CType *type, int u)
4820 int v, c, size, align, flexible;
4821 int bit_size, bsize, bt;
4822 Sym *s, *ss, **ps;
4823 AttributeDef ad, ad1;
4824 CType type1, btype;
4826 memset(&ad, 0, sizeof ad);
4827 next();
4828 parse_attribute(&ad);
4829 if (tok != '{') {
4830 v = tok;
4831 next();
4832 /* struct already defined ? return it */
4833 if (v < TOK_IDENT)
4834 expect("struct/union/enum name");
4835 s = struct_find(v);
4836 if (s && (s->sym_scope == local_scope || tok != '{')) {
4837 if (u == s->type.t)
4838 goto do_decl;
4839 if (u == VT_ENUM && IS_ENUM(s->type.t))
4840 goto do_decl;
4841 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4843 } else {
4844 v = anon_sym++;
4846 /* Record the original enum/struct/union token. */
4847 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4848 type1.ref = NULL;
4849 /* we put an undefined size for struct/union */
4850 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4851 s->r = 0; /* default alignment is zero, as in gcc */
4852 do_decl:
4853 type->t = s->type.t;
4854 type->ref = s;
4856 if (tok == '{') {
4857 next();
4858 if (s->c != -1)
4859 tcc_error("struct/union/enum already defined");
4860 s->c = -2;
4861 /* cannot be empty */
4862 /* (empty enums are not allowed) */
4863 ps = &s->next;
4864 if (u == VT_ENUM) {
4865 long long ll = 0, pl = 0, nl = 0;
4866 CType t;
4867 t.ref = s;
4868 /* enum symbols have static storage */
4869 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4870 for(;;) {
4871 v = tok;
4872 if (v < TOK_UIDENT)
4873 expect("identifier");
4874 ss = sym_find(v);
4875 if (ss && !local_stack)
4876 tcc_error("redefinition of enumerator '%s'",
4877 get_tok_str(v, NULL));
4878 next();
4879 if (tok == '=') {
4880 next();
4881 ll = expr_const64();
4883 ss = sym_push(v, &t, VT_CONST, 0);
4884 ss->enum_val = ll;
4885 *ps = ss, ps = &ss->next;
4886 if (ll < nl)
4887 nl = ll;
4888 if (ll > pl)
4889 pl = ll;
4890 if (tok != ',')
4891 break;
4892 next();
4893 ll++;
4894 /* NOTE: we accept a trailing comma */
4895 if (tok == '}')
4896 break;
4898 skip('}');
4899 /* set integral type of the enum */
4900 t.t = VT_INT;
4901 if (nl >= 0) {
4902 if (pl != (unsigned)pl)
4903 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4904 t.t |= VT_UNSIGNED;
4905 } else if (pl != (int)pl || nl != (int)nl)
4906 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4907 s->type.t = type->t = t.t | VT_ENUM;
4908 s->c = 0;
4909 /* set type for enum members */
4910 for (ss = s->next; ss; ss = ss->next) {
4911 ll = ss->enum_val;
4912 if (ll == (int)ll) /* default is int if it fits */
4913 continue;
4914 if (t.t & VT_UNSIGNED) {
4915 ss->type.t |= VT_UNSIGNED;
4916 if (ll == (unsigned)ll)
4917 continue;
4919 ss->type.t = (ss->type.t & ~VT_BTYPE)
4920 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4922 } else {
4923 c = 0;
4924 flexible = 0;
4925 while (tok != '}') {
4926 if (!parse_btype(&btype, &ad1)) {
4927 skip(';');
4928 continue;
4930 while (1) {
4931 if (flexible)
4932 tcc_error("flexible array member '%s' not at the end of struct",
4933 get_tok_str(v, NULL));
4934 bit_size = -1;
4935 v = 0;
4936 type1 = btype;
4937 if (tok != ':') {
4938 if (tok != ';')
4939 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4940 if (v == 0) {
4941 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4942 expect("identifier");
4943 else {
4944 int v = btype.ref->v;
4945 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4946 if (tcc_state->ms_extensions == 0)
4947 expect("identifier");
4951 if (type_size(&type1, &align) < 0) {
4952 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4953 flexible = 1;
4954 else
4955 tcc_error("field '%s' has incomplete type",
4956 get_tok_str(v, NULL));
4958 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4959 (type1.t & VT_BTYPE) == VT_VOID ||
4960 (type1.t & VT_STORAGE))
4961 tcc_error("invalid type for '%s'",
4962 get_tok_str(v, NULL));
4964 if (tok == ':') {
4965 next();
4966 bit_size = expr_const();
4967 /* XXX: handle v = 0 case for messages */
4968 if (bit_size < 0)
4969 tcc_error("negative width in bit-field '%s'",
4970 get_tok_str(v, NULL));
4971 if (v && bit_size == 0)
4972 tcc_error("zero width for bit-field '%s'",
4973 get_tok_str(v, NULL));
4974 parse_attribute(&ad1);
4976 size = type_size(&type1, &align);
4977 if (bit_size >= 0) {
4978 bt = type1.t & VT_BTYPE;
4979 if (bt != VT_INT &&
4980 bt != VT_BYTE &&
4981 bt != VT_SHORT &&
4982 bt != VT_BOOL &&
4983 bt != VT_LLONG)
4984 tcc_error("bitfields must have scalar type");
4985 bsize = size * 8;
4986 if (bit_size > bsize) {
4987 tcc_error("width of '%s' exceeds its type",
4988 get_tok_str(v, NULL));
4989 } else if (bit_size == bsize
4990 && !ad.a.packed && !ad1.a.packed) {
4991 /* no need for bit fields */
4993 } else if (bit_size == 64) {
4994 tcc_error("field width 64 not implemented");
4995 } else {
4996 type1.t = (type1.t & ~VT_STRUCT_MASK)
4997 | VT_BITFIELD
4998 | (bit_size << (VT_STRUCT_SHIFT + 6));
5001 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
5002 /* Remember we've seen a real field to check
5003 for placement of flexible array member. */
5004 c = 1;
5006 /* If member is a struct or bit-field, enforce
5007 placing into the struct (as anonymous). */
5008 if (v == 0 &&
5009 ((type1.t & VT_BTYPE) == VT_STRUCT ||
5010 bit_size >= 0)) {
5011 v = anon_sym++;
5013 if (v) {
5014 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
5015 ss->a = ad1.a;
5016 *ps = ss;
5017 ps = &ss->next;
5019 if (tok == ';' || tok == TOK_EOF)
5020 break;
5021 skip(',');
5023 skip(';');
5025 skip('}');
5026 parse_attribute(&ad);
5027 if (ad.cleanup_func) {
5028 tcc_warning("attribute '__cleanup__' ignored on type");
5030 check_fields(type, 1);
5031 check_fields(type, 0);
5032 struct_layout(type, &ad);
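/* Illustrative sketch: the enum branch above picks the enum's integral type
   from the value range (out-of-int values are a GNU extension; sizes assume a
   typical LP64 target). */
enum fits { F_ONE = 1, F_TWO };
enum wide { W_BIG = 0x100000000LL };
_Static_assert(sizeof(enum fits) == sizeof(int), "plain int-sized enum");
_Static_assert(sizeof(enum wide) == 8, "widened to a 64-bit integer type");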
5037 static void sym_to_attr(AttributeDef *ad, Sym *s)
5039 merge_symattr(&ad->a, &s->a);
5040 merge_funcattr(&ad->f, &s->f);
5043 /* Add type qualifiers to a type. If the type is an array then the qualifiers
5044 are added to the element type, copied because it could be a typedef. */
5045 static void parse_btype_qualify(CType *type, int qualifiers)
5047 while (type->t & VT_ARRAY) {
5048 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
5049 type = &type->ref->type;
5051 type->t |= qualifiers;
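/* Illustrative sketch: qualifiers given to an array typedef are pushed down to
   the element type, as parse_btype_qualify() arranges. */
typedef int vec4[4];
const vec4 origin = {0, 0, 0, 0};    /* elements have type 'const int' */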
5054 /* return 0 if no type declaration. otherwise, return the basic type
5055 and skip it. */
5057 static int parse_btype(CType *type, AttributeDef *ad)
5059 int t, u, bt, st, type_found, typespec_found, g, n;
5060 Sym *s;
5061 CType type1;
5063 memset(ad, 0, sizeof(AttributeDef));
5064 type_found = 0;
5065 typespec_found = 0;
5066 t = VT_INT;
5067 bt = st = -1;
5068 type->ref = NULL;
5070 while(1) {
5071 switch(tok) {
5072 case TOK_EXTENSION:
5073 /* currently, we really ignore extension */
5074 next();
5075 continue;
5077 /* basic types */
5078 case TOK_CHAR:
5079 u = VT_BYTE;
5080 basic_type:
5081 next();
5082 basic_type1:
5083 if (u == VT_SHORT || u == VT_LONG) {
5084 if (st != -1 || (bt != -1 && bt != VT_INT))
5085 tmbt: tcc_error("too many basic types");
5086 st = u;
5087 } else {
5088 if (bt != -1 || (st != -1 && u != VT_INT))
5089 goto tmbt;
5090 bt = u;
5092 if (u != VT_INT)
5093 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5094 typespec_found = 1;
5095 break;
5096 case TOK_VOID:
5097 u = VT_VOID;
5098 goto basic_type;
5099 case TOK_SHORT:
5100 u = VT_SHORT;
5101 goto basic_type;
5102 case TOK_INT:
5103 u = VT_INT;
5104 goto basic_type;
5105 case TOK_ALIGNAS:
5106 { int n;
5107 AttributeDef ad1;
5108 next();
5109 skip('(');
5110 memset(&ad1, 0, sizeof(AttributeDef));
5111 if (parse_btype(&type1, &ad1)) {
5112 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
5113 if (ad1.a.aligned)
5114 n = 1 << (ad1.a.aligned - 1);
5115 else
5116 type_size(&type1, &n);
5117 } else {
5118 n = expr_const();
5119 if (n <= 0 || (n & (n - 1)) != 0)
5120 tcc_error("alignment must be a positive power of two");
5122 skip(')');
5123 ad->a.aligned = exact_log2p1(n);
5125 continue;
5126 case TOK_LONG:
5127 if ((t & VT_BTYPE) == VT_DOUBLE) {
5128 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
5129 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
5130 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
5131 } else {
5132 u = VT_LONG;
5133 goto basic_type;
5135 next();
5136 break;
5137 #ifdef TCC_TARGET_ARM64
5138 case TOK_UINT128:
5139 /* GCC's __uint128_t appears in some Linux header files. Make it a
5140 synonym for long double to get the size and alignment right. */
5141 u = VT_LDOUBLE;
5142 goto basic_type;
5143 #endif
5144 case TOK_BOOL:
5145 u = VT_BOOL;
5146 goto basic_type;
5147 case TOK_FLOAT:
5148 u = VT_FLOAT;
5149 goto basic_type;
5150 case TOK_DOUBLE:
5151 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
5152 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
5153 } else {
5154 u = VT_DOUBLE;
5155 goto basic_type;
5157 next();
5158 break;
5159 case TOK_ENUM:
5160 struct_decl(&type1, VT_ENUM);
5161 basic_type2:
5162 u = type1.t;
5163 type->ref = type1.ref;
5164 goto basic_type1;
5165 case TOK_STRUCT:
5166 struct_decl(&type1, VT_STRUCT);
5167 goto basic_type2;
5168 case TOK_UNION:
5169 struct_decl(&type1, VT_UNION);
5170 goto basic_type2;
5172 /* type modifiers */
5173 case TOK__Atomic:
5174 next();
5175 type->t = t;
5176 parse_btype_qualify(type, VT_ATOMIC);
5177 t = type->t;
5178 if (tok == '(') {
5179 parse_expr_type(&type1);
5180 /* remove all storage modifiers except typedef */
5181 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5182 if (type1.ref)
5183 sym_to_attr(ad, type1.ref);
5184 goto basic_type2;
5186 break;
5187 case TOK_CONST1:
5188 case TOK_CONST2:
5189 case TOK_CONST3:
5190 type->t = t;
5191 parse_btype_qualify(type, VT_CONSTANT);
5192 t = type->t;
5193 next();
5194 break;
5195 case TOK_VOLATILE1:
5196 case TOK_VOLATILE2:
5197 case TOK_VOLATILE3:
5198 type->t = t;
5199 parse_btype_qualify(type, VT_VOLATILE);
5200 t = type->t;
5201 next();
5202 break;
5203 case TOK_SIGNED1:
5204 case TOK_SIGNED2:
5205 case TOK_SIGNED3:
5206 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
5207 tcc_error("signed and unsigned modifier");
5208 t |= VT_DEFSIGN;
5209 next();
5210 typespec_found = 1;
5211 break;
5212 case TOK_REGISTER:
5213 case TOK_AUTO:
5214 case TOK_RESTRICT1:
5215 case TOK_RESTRICT2:
5216 case TOK_RESTRICT3:
5217 next();
5218 break;
5219 case TOK_UNSIGNED:
5220 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
5221 tcc_error("signed and unsigned modifier");
5222 t |= VT_DEFSIGN | VT_UNSIGNED;
5223 next();
5224 typespec_found = 1;
5225 break;
5227 /* storage */
5228 case TOK_EXTERN:
5229 g = VT_EXTERN;
5230 goto storage;
5231 case TOK_STATIC:
5232 g = VT_STATIC;
5233 goto storage;
5234 case TOK_TYPEDEF:
5235 g = VT_TYPEDEF;
5236 goto storage;
5237 storage:
5238 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
5239 tcc_error("multiple storage classes");
5240 t |= g;
5241 next();
5242 break;
5243 case TOK_INLINE1:
5244 case TOK_INLINE2:
5245 case TOK_INLINE3:
5246 t |= VT_INLINE;
5247 next();
5248 break;
5249 case TOK_NORETURN3:
5250 next();
5251 ad->f.func_noreturn = 1;
5252 break;
5253 /* GNUC attribute */
5254 case TOK_ATTRIBUTE1:
5255 case TOK_ATTRIBUTE2:
5256 parse_attribute(ad);
5257 if (ad->attr_mode) {
5258 u = ad->attr_mode -1;
5259 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5261 continue;
5262 /* GNUC typeof */
5263 case TOK_TYPEOF1:
5264 case TOK_TYPEOF2:
5265 case TOK_TYPEOF3:
5266 next();
5267 parse_expr_type(&type1);
5268 /* remove all storage modifiers except typedef */
5269 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5270 if (type1.ref)
5271 sym_to_attr(ad, type1.ref);
5272 goto basic_type2;
5273 default:
5274 if (typespec_found)
5275 goto the_end;
5276 s = sym_find(tok);
5277 if (!s || !(s->type.t & VT_TYPEDEF))
5278 goto the_end;
5280 n = tok, next();
5281 if (tok == ':' && !in_generic) {
5282 /* ignore if it's a label */
5283 unget_tok(n);
5284 goto the_end;
5287 t &= ~(VT_BTYPE|VT_LONG);
5288 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
5289 type->t = (s->type.t & ~VT_TYPEDEF) | u;
5290 type->ref = s->type.ref;
5291 if (t)
5292 parse_btype_qualify(type, t);
5293 t = type->t;
5294 /* get attributes from typedef */
5295 sym_to_attr(ad, s);
5296 typespec_found = 1;
5297 st = bt = -2;
5298 break;
5300 type_found = 1;
5302 the_end:
5303 if (tcc_state->char_is_unsigned) {
5304 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
5305 t |= VT_UNSIGNED;
5307 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5308 bt = t & (VT_BTYPE|VT_LONG);
5309 if (bt == VT_LONG)
5310 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
5311 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5312 if (bt == VT_LDOUBLE)
5313 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
5314 #endif
5315 type->t = t;
5316 return type_found;
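/* Illustrative sketch: multi-token basic types combined by the bt/st
   bookkeeping above (sizes assume a typical LP64 target). */
unsigned long long ull;     /* long + long + unsigned                   */
long double ld;             /* TOK_LONG seen after VT_DOUBLE            */
_Static_assert(sizeof(long long) == 8, "64-bit long long");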
5319 /* convert a function parameter type (array to pointer and function to
5320 function pointer) */
5321 static inline void convert_parameter_type(CType *pt)
5323 /* remove const and volatile qualifiers (XXX: const could be used
5324 to indicate a const function parameter) */
5325 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
5326 /* array must be transformed to pointer according to ANSI C */
5327 pt->t &= ~VT_ARRAY;
5328 if ((pt->t & VT_BTYPE) == VT_FUNC) {
5329 mk_pointer(pt);
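/* Illustrative sketch: convert_parameter_type() makes these two prototypes
   identical (array and function parameters decay to pointers). */
void callback_demo(int a[10], int handler(void));
void callback_demo(int *a, int (*handler)(void));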
5333 ST_FUNC void parse_asm_str(CString *astr)
5335 skip('(');
5336 parse_mult_str(astr, "string constant");
5339 /* Parse an asm label and return the token */
5340 static int asm_label_instr(void)
5342 int v;
5343 CString astr;
5345 next();
5346 parse_asm_str(&astr);
5347 skip(')');
5348 #ifdef ASM_DEBUG
5349 printf("asm_alias: \"%s\"\n", (char *)astr.data);
5350 #endif
5351 v = tok_alloc(astr.data, astr.size - 1)->tok;
5352 cstr_free(&astr);
5353 return v;
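/* Illustrative sketch: the asm-label syntax parsed above; the assembler names
   here are made up for illustration. The symbol is emitted under the given
   assembler name instead of the C name. */
extern int tick_count __asm__("hw_tick_count");
int get_ticks(void) __asm__("hw_get_ticks");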
5356 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
5358 int n, l, t1, arg_size, align, unused_align;
5359 Sym **plast, *s, *first;
5360 AttributeDef ad1;
5361 CType pt;
5363 if (tok == '(') {
5364 /* function type, or recursive declarator (return if so) */
5365 next();
5366 if (td && !(td & TYPE_ABSTRACT))
5367 return 0;
5368 if (tok == ')')
5369 l = 0;
5370 else if (parse_btype(&pt, &ad1))
5371 l = FUNC_NEW;
5372 else if (td) {
5373 merge_attr (ad, &ad1);
5374 return 0;
5375 } else
5376 l = FUNC_OLD;
5377 first = NULL;
5378 plast = &first;
5379 arg_size = 0;
5380 if (l) {
5381 for(;;) {
5382 /* read param name and compute offset */
5383 if (l != FUNC_OLD) {
5384 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
5385 break;
5386 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
5387 if ((pt.t & VT_BTYPE) == VT_VOID)
5388 tcc_error("parameter declared as void");
5389 } else {
5390 n = tok;
5391 if (n < TOK_UIDENT)
5392 expect("identifier");
5393 pt.t = VT_VOID; /* invalid type */
5394 pt.ref = NULL;
5395 next();
5397 convert_parameter_type(&pt);
5398 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
5399 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
5400 *plast = s;
5401 plast = &s->next;
5402 if (tok == ')')
5403 break;
5404 skip(',');
5405 if (l == FUNC_NEW && tok == TOK_DOTS) {
5406 l = FUNC_ELLIPSIS;
5407 next();
5408 break;
5410 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
5411 tcc_error("invalid type");
5413 } else
5414 /* if no parameters, then old type prototype */
5415 l = FUNC_OLD;
5416 skip(')');
5417 /* NOTE: const is ignored in returned type as it has a special
5418 meaning in gcc / C++ */
5419 type->t &= ~VT_CONSTANT;
5420 /* some ancient pre-K&R C allows a function to return an array
5421 and the array brackets to be put after the arguments, such
5422 that "int c()[]" means something like "int[] c()" */
5423 if (tok == '[') {
5424 next();
5425 skip(']'); /* only handle simple "[]" */
5426 mk_pointer(type);
5428 /* we push an anonymous symbol which will contain the function prototype */
5429 ad->f.func_args = arg_size;
5430 ad->f.func_type = l;
5431 s = sym_push(SYM_FIELD, type, 0, 0);
5432 s->a = ad->a;
5433 s->f = ad->f;
5434 s->next = first;
5435 type->t = VT_FUNC;
5436 type->ref = s;
5437 } else if (tok == '[') {
5438 int saved_nocode_wanted = nocode_wanted;
5439 /* array definition */
5440 next();
5441 while (1) {
5442 /* XXX The optional type-quals and static should only be accepted
5443 in parameter decls. The '*' as well, and then even only
5444 in prototypes (not function defs). */
5445 switch (tok) {
5446 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5447 case TOK_CONST1:
5448 case TOK_VOLATILE1:
5449 case TOK_STATIC:
5450 case '*':
5451 next();
5452 continue;
5453 default:
5454 break;
5456 break;
5458 n = -1;
5459 t1 = 0;
5460 if (tok != ']') {
5461 if (!local_stack || (storage & VT_STATIC))
5462 vpushi(expr_const());
5463 else {
5464 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5465 length must always be evaluated, even under nocode_wanted,
5466 so that its size slot is initialized (e.g. under sizeof
5467 or typeof). */
5468 nocode_wanted = 0;
5469 gexpr();
5471 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5472 n = vtop->c.i;
5473 if (n < 0)
5474 tcc_error("invalid array size");
5475 } else {
5476 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5477 tcc_error("size of variable length array should be an integer");
5478 n = 0;
5479 t1 = VT_VLA;
5482 skip(']');
5483 /* parse next post type */
5484 post_type(type, ad, storage, 0);
5486 if ((type->t & VT_BTYPE) == VT_FUNC)
5487 tcc_error("declaration of an array of functions");
5488 if ((type->t & VT_BTYPE) == VT_VOID
5489 || type_size(type, &unused_align) < 0)
5490 tcc_error("declaration of an array of incomplete type elements");
5492 t1 |= type->t & VT_VLA;
5494 if (t1 & VT_VLA) {
5495 if (n < 0)
5496 tcc_error("need explicit inner array size in VLAs");
5497 loc -= type_size(&int_type, &align);
5498 loc &= -align;
5499 n = loc;
5501 vla_runtime_type_size(type, &align);
5502 gen_op('*');
5503 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5504 vswap();
5505 vstore();
5507 if (n != -1)
5508 vpop();
5509 nocode_wanted = saved_nocode_wanted;
5511 /* we push an anonymous symbol which will contain the array
5512 element type */
5513 s = sym_push(SYM_FIELD, type, 0, n);
5514 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5515 type->ref = s;
5517 return 1;
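/* Illustrative sketch: a VLA's size expression is evaluated at the declaration
   and kept in a hidden local slot (the VT_VLA path above), so sizeof on it is
   a runtime read. */
#include <stddef.h>
size_t vla_bytes(int n)
{
    int a[n];           /* size n * sizeof(int) computed and stored here */
    return sizeof a;    /* reads the stored size at run time */
}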
5520 /* Parse a type declarator (except basic type), and return the type
5521 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5522 expected. 'type' should contain the basic type. 'ad' is the
5523 attribute definition of the basic type. It can be modified by
5524 type_decl(). If this (possibly abstract) declarator is a pointer chain
5525 it returns the innermost pointed to type (equals *type, but is a different
5526 pointer), otherwise returns type itself, that's used for recursive calls. */
5527 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5529 CType *post, *ret;
5530 int qualifiers, storage;
5532 /* recursive type, remove storage bits first, apply them later again */
5533 storage = type->t & VT_STORAGE;
5534 type->t &= ~VT_STORAGE;
5535 post = ret = type;
5537 while (tok == '*') {
5538 qualifiers = 0;
5539 redo:
5540 next();
5541 switch(tok) {
5542 case TOK__Atomic:
5543 qualifiers |= VT_ATOMIC;
5544 goto redo;
5545 case TOK_CONST1:
5546 case TOK_CONST2:
5547 case TOK_CONST3:
5548 qualifiers |= VT_CONSTANT;
5549 goto redo;
5550 case TOK_VOLATILE1:
5551 case TOK_VOLATILE2:
5552 case TOK_VOLATILE3:
5553 qualifiers |= VT_VOLATILE;
5554 goto redo;
5555 case TOK_RESTRICT1:
5556 case TOK_RESTRICT2:
5557 case TOK_RESTRICT3:
5558 goto redo;
5559 /* XXX: clarify attribute handling */
5560 case TOK_ATTRIBUTE1:
5561 case TOK_ATTRIBUTE2:
5562 parse_attribute(ad);
5563 break;
5565 mk_pointer(type);
5566 type->t |= qualifiers;
5567 if (ret == type)
5568 /* innermost pointed to type is the one for the first derivation */
5569 ret = pointed_type(type);
5572 if (tok == '(') {
5573 /* This is possibly a parameter type list for abstract declarators
5574 ('int ()'), use post_type for testing this. */
5575 if (!post_type(type, ad, 0, td)) {
5576 /* It's not, so it's a nested declarator, and the post operations
5577 apply to the innermost pointed to type (if any). */
5578 /* XXX: this is not correct to modify 'ad' at this point, but
5579 the syntax is not clear */
5580 parse_attribute(ad);
5581 post = type_decl(type, ad, v, td);
5582 skip(')');
5583 } else
5584 goto abstract;
5585 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5586 /* type identifier */
5587 *v = tok;
5588 next();
5589 } else {
5590 abstract:
5591 if (!(td & TYPE_ABSTRACT))
5592 expect("identifier");
5593 *v = 0;
5595 post_type(post, ad, storage, 0);
5596 parse_attribute(ad);
5597 type->t |= storage;
5598 return ret;
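/* Illustrative sketch: declarators as read by type_decl()/post_type() above. */
int *p;              /* pointer to int                     */
int (*fp)(void);     /* pointer to function returning int  */
int (*pa)[4];        /* pointer to array of 4 int          */
int *f1(void);       /* function returning pointer to int  */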
5601 /* indirection with full error checking and bound check */
5602 ST_FUNC void indir(void)
5604 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5605 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5606 return;
5607 expect("pointer");
5609 if (vtop->r & VT_LVAL)
5610 gv(RC_INT);
5611 vtop->type = *pointed_type(&vtop->type);
5612 /* Arrays and functions are never lvalues */
5613 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5614 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5615 vtop->r |= VT_LVAL;
5616 /* if bound checking, the referenced pointer must be checked */
5617 #ifdef CONFIG_TCC_BCHECK
5618 if (tcc_state->do_bounds_check)
5619 vtop->r |= VT_MUSTBOUND;
5620 #endif
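/* Illustrative sketch: indir() returns early for function types, so explicitly
   dereferencing a function designator is a no-op and these calls are equivalent. */
static int add2(int a, int b) { return a + b; }
static int use_add2(void) { return add2(1, 2) + (*add2)(3, 4) + (**add2)(5, 6); }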
5624 /* pass a parameter to a function and do type checking and casting */
5625 static void gfunc_param_typed(Sym *func, Sym *arg)
5627 int func_type;
5628 CType type;
5630 func_type = func->f.func_type;
5631 if (func_type == FUNC_OLD ||
5632 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5633 /* default casting : only need to convert float to double */
5634 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5635 gen_cast_s(VT_DOUBLE);
5636 } else if (vtop->type.t & VT_BITFIELD) {
5637 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5638 type.ref = vtop->type.ref;
5639 gen_cast(&type);
5640 } else if (vtop->r & VT_MUSTCAST) {
5641 force_charshort_cast();
5643 } else if (arg == NULL) {
5644 tcc_error("too many arguments to function");
5645 } else {
5646 type = arg->type;
5647 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5648 gen_assign_cast(&type);
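/* Illustrative sketch: for old-style and variadic parameters the code above
   applies the default argument promotions (float -> double, char/short -> int). */
#include <stdio.h>
static void promote_demo(void)
{
    float f = 1.5f;
    short s = 7;
    printf("%f %d\n", f, s);    /* f is passed as double, s as int */
}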
5652 /* parse an expression and return its type without any side effect. */
5653 static void expr_type(CType *type, void (*expr_fn)(void))
5655 nocode_wanted++;
5656 expr_fn();
5657 *type = vtop->type;
5658 vpop();
5659 nocode_wanted--;
5662 /* parse an expression of the form '(type)' or '(expr)' and return its
5663 type */
5664 static void parse_expr_type(CType *type)
5666 int n;
5667 AttributeDef ad;
5669 skip('(');
5670 if (parse_btype(type, &ad)) {
5671 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5672 } else {
5673 expr_type(type, gexpr);
5675 skip(')');
5678 static void parse_type(CType *type)
5680 AttributeDef ad;
5681 int n;
5683 if (!parse_btype(type, &ad)) {
5684 expect("type");
5686 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5689 static void parse_builtin_params(int nc, const char *args)
5691 char c, sep = '(';
5692 CType type;
5693 if (nc)
5694 nocode_wanted++;
5695 next();
5696 if (*args == 0)
5697 skip(sep);
5698 while ((c = *args++)) {
5699 skip(sep);
5700 sep = ',';
5701 if (c == 't') {
5702 parse_type(&type);
5703 vpush(&type);
5704 continue;
5706 expr_eq();
5707 type.ref = NULL;
5708 type.t = 0;
5709 switch (c) {
5710 case 'e':
5711 continue;
5712 case 'V':
5713 type.t = VT_CONSTANT;
5714 case 'v':
5715 type.t |= VT_VOID;
5716 mk_pointer (&type);
5717 break;
5718 case 'S':
5719 type.t = VT_CONSTANT;
5720 case 's':
5721 type.t |= char_type.t;
5722 mk_pointer (&type);
5723 break;
5724 case 'i':
5725 type.t = VT_INT;
5726 break;
5727 case 'l':
5728 type.t = VT_SIZE_T;
5729 break;
5730 default:
5731 break;
5733 gen_assign_cast(&type);
5735 skip(')');
5736 if (nc)
5737 nocode_wanted--;
5740 static void parse_atomic(int atok)
5742 int size, align, arg;
5743 CType *atom, *atom_ptr, ct = {0};
5744 char buf[40];
5745 static const char *const templates[] = {
5747 /* Each entry consists of callback and function template.
5748 * The template represents argument types and return type.
5750 * ? void (return-only)
5751 * b bool
5752 * a atomic
5753 * A read-only atomic
5754 * p pointer to memory
5755 * v value
5756 * m memory model */
5759 /* keep in order of appearance in tcctok.h: */
5760 /* __atomic_store */ "avm.?",
5761 /* __atomic_load */ "Am.v",
5762 /* __atomic_exchange */ "avm.v",
5763 /* __atomic_compare_exchange */ "apvbmm.b",
5764 /* __atomic_fetch_add */ "avm.v",
5765 /* __atomic_fetch_sub */ "avm.v",
5766 /* __atomic_fetch_or */ "avm.v",
5767 /* __atomic_fetch_xor */ "avm.v",
5768 /* __atomic_fetch_and */ "avm.v"
5770 const char *template = templates[(atok - TOK___atomic_store)];
5772 atom = atom_ptr = NULL;
5773 size = 0; /* pacify compiler */
5774 next();
5775 skip('(');
5776 for (arg = 0;;) {
5777 expr_eq();
5778 switch (template[arg]) {
5779 case 'a':
5780 case 'A':
5781 atom_ptr = &vtop->type;
5782 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5783 expect("pointer");
5784 atom = pointed_type(atom_ptr);
5785 size = type_size(atom, &align);
5786 if (size > 8
5787 || (size & (size - 1))
5788 || (atok > TOK___atomic_compare_exchange
5789 && (0 == btype_size(atom->t & VT_BTYPE)
5790 || (atom->t & VT_BTYPE) == VT_PTR)))
5791 expect("integral or integer-sized pointer target type");
5792 /* GCC does not care either: */
5793 /* if (!(atom->t & VT_ATOMIC))
5794 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5795 break;
5797 case 'p':
5798 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5799 || type_size(pointed_type(&vtop->type), &align) != size)
5800 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5801 gen_assign_cast(atom_ptr);
5802 break;
5803 case 'v':
5804 gen_assign_cast(atom);
5805 break;
5806 case 'm':
5807 gen_assign_cast(&int_type);
5808 break;
5809 case 'b':
5810 ct.t = VT_BOOL;
5811 gen_assign_cast(&ct);
5812 break;
5814 if ('.' == template[++arg])
5815 break;
5816 skip(',');
5818 skip(')');
5820 ct.t = VT_VOID;
5821 switch (template[arg + 1]) {
5822 case 'b':
5823 ct.t = VT_BOOL;
5824 break;
5825 case 'v':
5826 ct = *atom;
5827 break;
5830 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5831 vpush_helper_func(tok_alloc_const(buf));
5832 vrott(arg + 1);
5833 gfunc_call(arg);
5835 vpush(&ct);
5836 PUT_R_RET(vtop, ct.t);
5837 if (ct.t == VT_BOOL) {
5838 #ifdef PROMOTE_RET
5839 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5840 #else
5841 vtop->type.t = VT_INT;
5842 #endif
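/* Illustrative sketch: one of the generic atomic builtins dispatched above; for
   a 4-byte int the runtime helper called is __atomic_fetch_add_4 (memory-order
   macro as in GCC's model). */
static int counter_next(int *p)
{
    return __atomic_fetch_add(p, 1, __ATOMIC_SEQ_CST);
}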
5846 ST_FUNC void unary(void)
5848 int n, t, align, size, r, sizeof_caller;
5849 CType type;
5850 Sym *s;
5851 AttributeDef ad;
5853 /* generate line number info */
5854 if (debug_modes)
5855 tcc_debug_line(tcc_state), tcc_tcov_check_line (1);
5857 sizeof_caller = in_sizeof;
5858 in_sizeof = 0;
5859 type.ref = NULL;
5860 /* XXX: GCC 2.95.3 does not generate a table although it would be
5861 better here */
5862 tok_next:
5863 switch(tok) {
5864 case TOK_EXTENSION:
5865 next();
5866 goto tok_next;
5867 case TOK_LCHAR:
5868 #ifdef TCC_TARGET_PE
5869 t = VT_SHORT|VT_UNSIGNED;
5870 goto push_tokc;
5871 #endif
5872 case TOK_CINT:
5873 case TOK_CCHAR:
5874 t = VT_INT;
5875 push_tokc:
5876 type.t = t;
5877 vsetc(&type, VT_CONST, &tokc);
5878 next();
5879 break;
5880 case TOK_CUINT:
5881 t = VT_INT | VT_UNSIGNED;
5882 goto push_tokc;
5883 case TOK_CLLONG:
5884 t = VT_LLONG;
5885 goto push_tokc;
5886 case TOK_CULLONG:
5887 t = VT_LLONG | VT_UNSIGNED;
5888 goto push_tokc;
5889 case TOK_CFLOAT:
5890 t = VT_FLOAT;
5891 goto push_tokc;
5892 case TOK_CDOUBLE:
5893 t = VT_DOUBLE;
5894 goto push_tokc;
5895 case TOK_CLDOUBLE:
5896 t = VT_LDOUBLE;
5897 goto push_tokc;
5898 case TOK_CLONG:
5899 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5900 goto push_tokc;
5901 case TOK_CULONG:
5902 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5903 goto push_tokc;
5904 case TOK___FUNCTION__:
5905 if (!gnu_ext)
5906 goto tok_identifier;
5907 /* fall thru */
5908 case TOK___FUNC__:
5910 Section *sec;
5911 int len;
5912 /* special function name identifier */
5913 len = strlen(funcname) + 1;
5914 /* generate char[len] type */
5915 type.t = char_type.t;
5916 if (tcc_state->warn_write_strings & WARN_ON)
5917 type.t |= VT_CONSTANT;
5918 mk_pointer(&type);
5919 type.t |= VT_ARRAY;
5920 type.ref->c = len;
5921 sec = rodata_section;
5922 vpush_ref(&type, sec, sec->data_offset, len);
5923 if (!NODATA_WANTED)
5924 memcpy(section_ptr_add(sec, len), funcname, len);
5925 next();
5927 break;
5928 case TOK_LSTR:
5929 #ifdef TCC_TARGET_PE
5930 t = VT_SHORT | VT_UNSIGNED;
5931 #else
5932 t = VT_INT;
5933 #endif
5934 goto str_init;
5935 case TOK_STR:
5936 /* string parsing */
5937 t = char_type.t;
5938 str_init:
5939 if (tcc_state->warn_write_strings & WARN_ON)
5940 t |= VT_CONSTANT;
5941 type.t = t;
5942 mk_pointer(&type);
5943 type.t |= VT_ARRAY;
5944 memset(&ad, 0, sizeof(AttributeDef));
5945 ad.section = rodata_section;
5946 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5947 break;
5948 case '(':
5949 next();
5950 /* cast ? */
5951 if (parse_btype(&type, &ad)) {
5952 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5953 skip(')');
5954 /* check ISOC99 compound literal */
5955 if (tok == '{') {
5956 /* data is allocated locally by default */
5957 if (global_expr)
5958 r = VT_CONST;
5959 else
5960 r = VT_LOCAL;
5961 /* all except arrays are lvalues */
5962 if (!(type.t & VT_ARRAY))
5963 r |= VT_LVAL;
5964 memset(&ad, 0, sizeof(AttributeDef));
5965 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5966 } else {
5967 if (sizeof_caller) {
5968 vpush(&type);
5969 return;
5971 unary();
5972 gen_cast(&type);
5974 } else if (tok == '{') {
5975 int saved_nocode_wanted = nocode_wanted;
5976 if (const_wanted && !(nocode_wanted & unevalmask))
5977 expect("constant");
5978 if (0 == local_scope)
5979 tcc_error("statement expression outside of function");
5980 /* save all registers */
5981 save_regs(0);
5982 /* statement expression : we do not accept break/continue
5983 inside as GCC does. We do retain the nocode_wanted state,
5984 as statement expressions can't ever be entered from the
5985 outside, so any reactivation of code emission (from labels
5986 or loop heads) can be disabled again after the end of it. */
5987 block(1);
5988 nocode_wanted = saved_nocode_wanted;
5989 skip(')');
5990 } else {
5991 gexpr();
5992 skip(')');
5994 break;
5995 case '*':
5996 next();
5997 unary();
5998 indir();
5999 break;
6000 case '&':
6001 next();
6002 unary();
6003 /* function names must be treated as function pointers,
6004 except for unary '&' and sizeof. Since we consider that
6005 functions are not lvalues, we only have to handle it
6006 there and in function calls. */
6007 /* arrays can also be used although they are not lvalues */
6008 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
6009 !(vtop->type.t & VT_ARRAY))
6010 test_lvalue();
6011 if (vtop->sym)
6012 vtop->sym->a.addrtaken = 1;
6013 mk_pointer(&vtop->type);
6014 gaddrof();
6015 break;
6016 case '!':
6017 next();
6018 unary();
6019 gen_test_zero(TOK_EQ);
6020 break;
6021 case '~':
6022 next();
6023 unary();
6024 vpushi(-1);
6025 gen_op('^');
6026 break;
6027 case '+':
6028 next();
6029 unary();
6030 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
6031 tcc_error("pointer not accepted for unary plus");
6032 /* In order to force cast, we add zero, except for floating point
6033 where we really need a noop (otherwise -0.0 will be transformed
6034 into +0.0). */
6035 if (!is_float(vtop->type.t)) {
6036 vpushi(0);
6037 gen_op('+');
6039 break;
6040 case TOK_SIZEOF:
6041 case TOK_ALIGNOF1:
6042 case TOK_ALIGNOF2:
6043 case TOK_ALIGNOF3:
6044 t = tok;
6045 next();
6046 in_sizeof++;
6047 expr_type(&type, unary); /* performs in_sizeof = 0 */
6048 s = NULL;
6049 if (vtop[1].r & VT_SYM)
6050 s = vtop[1].sym; /* hack: accessing previous vtop */
6051 size = type_size(&type, &align);
6052 if (s && s->a.aligned)
6053 align = 1 << (s->a.aligned - 1);
6054 if (t == TOK_SIZEOF) {
6055 if (!(type.t & VT_VLA)) {
6056 if (size < 0)
6057 tcc_error("sizeof applied to an incomplete type");
6058 vpushs(size);
6059 } else {
6060 vla_runtime_type_size(&type, &align);
6062 } else {
6063 vpushs(align);
6065 vtop->type.t |= VT_UNSIGNED;
6066 break;
6068 case TOK_builtin_expect:
6069 /* __builtin_expect is a no-op for now */
6070 parse_builtin_params(0, "ee");
6071 vpop();
6072 break;
6073 case TOK_builtin_types_compatible_p:
6074 parse_builtin_params(0, "tt");
6075 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
6076 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
6077 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
6078 vtop -= 2;
6079 vpushi(n);
6080 break;
6081 case TOK_builtin_choose_expr:
6083 int64_t c;
6084 next();
6085 skip('(');
6086 c = expr_const64();
6087 skip(',');
6088 if (!c) {
6089 nocode_wanted++;
6091 expr_eq();
6092 if (!c) {
6093 vpop();
6094 nocode_wanted--;
6096 skip(',');
6097 if (c) {
6098 nocode_wanted++;
6100 expr_eq();
6101 if (c) {
6102 vpop();
6103 nocode_wanted--;
6105 skip(')');
6107 break;
6108 case TOK_builtin_constant_p:
6109 parse_builtin_params(1, "e");
6110 n = (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6111 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6112 vtop--;
6113 vpushi(n);
6114 break;
6115 case TOK_builtin_frame_address:
6116 case TOK_builtin_return_address:
6118 int tok1 = tok;
6119 int level;
6120 next();
6121 skip('(');
6122 if (tok != TOK_CINT) {
6123 tcc_error("%s only takes positive integers",
6124 tok1 == TOK_builtin_return_address ?
6125 "__builtin_return_address" :
6126 "__builtin_frame_address");
6128 level = (uint32_t)tokc.i;
6129 next();
6130 skip(')');
6131 type.t = VT_VOID;
6132 mk_pointer(&type);
6133 vset(&type, VT_LOCAL, 0); /* local frame */
6134 while (level--) {
6135 #ifdef TCC_TARGET_RISCV64
6136 vpushi(2*PTR_SIZE);
6137 gen_op('-');
6138 #endif
6139 mk_pointer(&vtop->type);
6140 indir(); /* -> parent frame */
6142 if (tok1 == TOK_builtin_return_address) {
6143 // assume return address is just above frame pointer on stack
6144 #ifdef TCC_TARGET_ARM
6145 vpushi(2*PTR_SIZE);
6146 gen_op('+');
6147 #elif defined TCC_TARGET_RISCV64
6148 vpushi(PTR_SIZE);
6149 gen_op('-');
6150 #else
6151 vpushi(PTR_SIZE);
6152 gen_op('+');
6153 #endif
6154 mk_pointer(&vtop->type);
6155 indir();
6158 break;
6159 #ifdef TCC_TARGET_RISCV64
6160 case TOK_builtin_va_start:
6161 parse_builtin_params(0, "ee");
6162 r = vtop->r & VT_VALMASK;
6163 if (r == VT_LLOCAL)
6164 r = VT_LOCAL;
6165 if (r != VT_LOCAL)
6166 tcc_error("__builtin_va_start expects a local variable");
6167 gen_va_start();
6168 vstore();
6169 break;
6170 #endif
6171 #ifdef TCC_TARGET_X86_64
6172 #ifdef TCC_TARGET_PE
6173 case TOK_builtin_va_start:
6174 parse_builtin_params(0, "ee");
6175 r = vtop->r & VT_VALMASK;
6176 if (r == VT_LLOCAL)
6177 r = VT_LOCAL;
6178 if (r != VT_LOCAL)
6179 tcc_error("__builtin_va_start expects a local variable");
6180 vtop->r = r;
6181 vtop->type = char_pointer_type;
6182 vtop->c.i += 8;
6183 vstore();
6184 break;
6185 #else
6186 case TOK_builtin_va_arg_types:
6187 parse_builtin_params(0, "t");
6188 vpushi(classify_x86_64_va_arg(&vtop->type));
6189 vswap();
6190 vpop();
6191 break;
6192 #endif
6193 #endif
6195 #ifdef TCC_TARGET_ARM64
6196 case TOK_builtin_va_start: {
6197 parse_builtin_params(0, "ee");
6198 //xx check types
6199 gen_va_start();
6200 vpushi(0);
6201 vtop->type.t = VT_VOID;
6202 break;
6204 case TOK_builtin_va_arg: {
6205 parse_builtin_params(0, "et");
6206 type = vtop->type;
6207 vpop();
6208 //xx check types
6209 gen_va_arg(&type);
6210 vtop->type = type;
6211 break;
6213 case TOK___arm64_clear_cache: {
6214 parse_builtin_params(0, "ee");
6215 gen_clear_cache();
6216 vpushi(0);
6217 vtop->type.t = VT_VOID;
6218 break;
6220 #endif
6222 /* atomic operations */
6223 case TOK___atomic_store:
6224 case TOK___atomic_load:
6225 case TOK___atomic_exchange:
6226 case TOK___atomic_compare_exchange:
6227 case TOK___atomic_fetch_add:
6228 case TOK___atomic_fetch_sub:
6229 case TOK___atomic_fetch_or:
6230 case TOK___atomic_fetch_xor:
6231 case TOK___atomic_fetch_and:
6232 parse_atomic(tok);
6233 break;
6235 /* pre operations */
6236 case TOK_INC:
6237 case TOK_DEC:
6238 t = tok;
6239 next();
6240 unary();
6241 inc(0, t);
6242 break;
6243 case '-':
6244 next();
6245 unary();
6246 if (is_float(vtop->type.t)) {
6247 gen_opif(TOK_NEG);
6248 } else {
6249 vpushi(0);
6250 vswap();
6251 gen_op('-');
6253 break;
6254 case TOK_LAND:
6255 if (!gnu_ext)
6256 goto tok_identifier;
6257 next();
6258 /* allow to take the address of a label */
6259 if (tok < TOK_UIDENT)
6260 expect("label identifier");
6261 s = label_find(tok);
6262 if (!s) {
6263 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6264 } else {
6265 if (s->r == LABEL_DECLARED)
6266 s->r = LABEL_FORWARD;
6268 if (!s->type.t) {
6269 s->type.t = VT_VOID;
6270 mk_pointer(&s->type);
6271 s->type.t |= VT_STATIC;
6273 vpushsym(&s->type, s);
6274 next();
6275 break;
6277 case TOK_GENERIC:
6279 CType controlling_type;
6280 int has_default = 0;
6281 int has_match = 0;
6282 int learn = 0;
6283 TokenString *str = NULL;
6284 int saved_const_wanted = const_wanted;
6286 next();
6287 skip('(');
6288 const_wanted = 0;
6289 expr_type(&controlling_type, expr_eq);
6290 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
6291 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
6292 mk_pointer(&controlling_type);
6293 const_wanted = saved_const_wanted;
6294 for (;;) {
6295 learn = 0;
6296 skip(',');
6297 if (tok == TOK_DEFAULT) {
6298 if (has_default)
6299 tcc_error("too many 'default'");
6300 has_default = 1;
6301 if (!has_match)
6302 learn = 1;
6303 next();
6304 } else {
6305 AttributeDef ad_tmp;
6306 int itmp;
6307 CType cur_type;
6309 in_generic++;
6310 parse_btype(&cur_type, &ad_tmp);
6311 in_generic--;
6313 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
6314 if (compare_types(&controlling_type, &cur_type, 0)) {
6315 if (has_match) {
6316 tcc_error("type match twice");
6318 has_match = 1;
6319 learn = 1;
6322 skip(':');
6323 if (learn) {
6324 if (str)
6325 tok_str_free(str);
6326 skip_or_save_block(&str);
6327 } else {
6328 skip_or_save_block(NULL);
6330 if (tok == ')')
6331 break;
6333 if (!str) {
6334 char buf[60];
6335 type_to_str(buf, sizeof buf, &controlling_type, NULL);
6336 tcc_error("type '%s' does not match any association", buf);
6338 begin_macro(str, 1);
6339 next();
6340 expr_eq();
6341 if (tok != TOK_EOF)
6342 expect(",");
6343 end_macro();
6344 next();
6345 break;
6347 // special qNaN, sNaN and infinity values
6348 case TOK___NAN__:
6349 n = 0x7fc00000;
6350 special_math_val:
6351 vpushi(n);
6352 vtop->type.t = VT_FLOAT;
6353 next();
6354 break;
6355 case TOK___SNAN__:
6356 n = 0x7f800001;
6357 goto special_math_val;
6358 case TOK___INF__:
6359 n = 0x7f800000;
6360 goto special_math_val;
6362 default:
6363 tok_identifier:
6364 t = tok;
6365 next();
6366 if (t < TOK_UIDENT)
6367 expect("identifier");
6368 s = sym_find(t);
6369 if (!s || IS_ASM_SYM(s)) {
6370 const char *name = get_tok_str(t, NULL);
6371 if (tok != '(')
6372 tcc_error("'%s' undeclared", name);
6373 /* for simple function calls, we tolerate undeclared
6374 external reference to int() function */
6375 tcc_warning_c(warn_implicit_function_declaration)(
6376 "implicit declaration of function '%s'", name);
6377 s = external_global_sym(t, &func_old_type);
6380 r = s->r;
6381 /* A symbol that has a register is a local register variable,
6382 which starts out as VT_LOCAL value. */
6383 if ((r & VT_VALMASK) < VT_CONST)
6384 r = (r & ~VT_VALMASK) | VT_LOCAL;
6386 vset(&s->type, r, s->c);
6387 /* Point to s as backpointer (even without r&VT_SYM).
6388 Will be used by at least the x86 inline asm parser for
6389 regvars. */
6390 vtop->sym = s;
6392 if (r & VT_SYM) {
6393 vtop->c.i = 0;
6394 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
6395 vtop->c.i = s->enum_val;
6397 break;
6400 /* post operations */
6401 while (1) {
6402 if (tok == TOK_INC || tok == TOK_DEC) {
6403 inc(1, tok);
6404 next();
6405 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
6406 int qualifiers, cumofs = 0;
6407 /* field */
6408 if (tok == TOK_ARROW)
6409 indir();
6410 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6411 test_lvalue();
6412 gaddrof();
6413 /* expect pointer on structure */
6414 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
6415 expect("struct or union");
6416 if (tok == TOK_CDOUBLE)
6417 expect("field name");
6418 next();
6419 if (tok == TOK_CINT || tok == TOK_CUINT)
6420 expect("field name");
6421 s = find_field(&vtop->type, tok, &cumofs);
6422 if (!s)
6423 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
6424 /* add field offset to pointer */
6425 vtop->type = char_pointer_type; /* change type to 'char *' */
6426 vpushi(cumofs + s->c);
6427 gen_op('+');
6428 /* change type to field type, and set to lvalue */
6429 vtop->type = s->type;
6430 vtop->type.t |= qualifiers;
6431 /* an array is never an lvalue */
6432 if (!(vtop->type.t & VT_ARRAY)) {
6433 vtop->r |= VT_LVAL;
6434 #ifdef CONFIG_TCC_BCHECK
6435 /* if bound checking, the referenced pointer must be checked */
6436 if (tcc_state->do_bounds_check)
6437 vtop->r |= VT_MUSTBOUND;
6438 #endif
6440 next();
6441 } else if (tok == '[') {
6442 next();
6443 gexpr();
6444 gen_op('+');
6445 indir();
6446 skip(']');
6447 } else if (tok == '(') {
6448 SValue ret;
6449 Sym *sa;
6450 int nb_args, ret_nregs, ret_align, regsize, variadic;
6452 /* function call */
6453 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6454 /* pointer test (no array accepted) */
6455 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6456 vtop->type = *pointed_type(&vtop->type);
6457 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6458 goto error_func;
6459 } else {
6460 error_func:
6461 expect("function pointer");
6463 } else {
6464 vtop->r &= ~VT_LVAL; /* no lvalue */
6466 /* get return type */
6467 s = vtop->type.ref;
6468 next();
6469 sa = s->next; /* first parameter */
6470 nb_args = regsize = 0;
6471 ret.r2 = VT_CONST;
6472 /* compute first implicit argument if a structure is returned */
6473 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6474 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6475 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6476 &ret_align, &regsize);
6477 if (ret_nregs <= 0) {
6478 /* get some space for the returned structure */
6479 size = type_size(&s->type, &align);
6480 #ifdef TCC_TARGET_ARM64
6481 /* On arm64, a small struct is returned in registers.
6482 It is much easier to write it to memory if we know
6483 that we are allowed to write some extra bytes, so
6484 round the allocated space up to a power of 2: */
6485 if (size < 16)
6486 while (size & (size - 1))
6487 size = (size | (size - 1)) + 1;
6488 #endif
6489 loc = (loc - size) & -align;
6490 ret.type = s->type;
6491 ret.r = VT_LOCAL | VT_LVAL;
6492 /* pass it as 'int' to avoid structure arg passing
6493 problems */
6494 vseti(VT_LOCAL, loc);
6495 #ifdef CONFIG_TCC_BCHECK
6496 if (tcc_state->do_bounds_check)
6497 --loc;
6498 #endif
6499 ret.c = vtop->c;
6500 if (ret_nregs < 0)
6501 vtop--;
6502 else
6503 nb_args++;
6505 } else {
6506 ret_nregs = 1;
6507 ret.type = s->type;
6510 if (ret_nregs > 0) {
6511 /* return in register */
6512 ret.c.i = 0;
6513 PUT_R_RET(&ret, ret.type.t);
6515 if (tok != ')') {
6516 for(;;) {
6517 expr_eq();
6518 gfunc_param_typed(s, sa);
6519 nb_args++;
6520 if (sa)
6521 sa = sa->next;
6522 if (tok == ')')
6523 break;
6524 skip(',');
6527 if (sa)
6528 tcc_error("too few arguments to function");
6529 skip(')');
6530 gfunc_call(nb_args);
6532 if (ret_nregs < 0) {
6533 vsetc(&ret.type, ret.r, &ret.c);
6534 #ifdef TCC_TARGET_RISCV64
6535 arch_transfer_ret_regs(1);
6536 #endif
6537 } else {
6538 /* return value */
6539 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6540 vsetc(&ret.type, r, &ret.c);
6541 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6544 /* handle packed struct return */
6545 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6546 int addr, offset;
6548 size = type_size(&s->type, &align);
6549 /* We're often writing whole regs, so make sure there's enough
6550 space. Assume the register size is a power of 2. */
6551 if (regsize > align)
6552 align = regsize;
6553 loc = (loc - size) & -align;
6554 addr = loc;
6555 offset = 0;
6556 for (;;) {
6557 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6558 vswap();
6559 vstore();
6560 vtop--;
6561 if (--ret_nregs == 0)
6562 break;
6563 offset += regsize;
6565 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6568 /* Promote char/short return values. This matters only
6569 for calling functions that were not compiled by TCC and
6570 only on some architectures. For those where it doesn't
6571 matter we expect things to be already promoted to int,
6572 but not larger. */
6573 t = s->type.t & VT_BTYPE;
6574 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6575 #ifdef PROMOTE_RET
6576 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6577 #else
6578 vtop->type.t = VT_INT;
6579 #endif
6582 if (s->f.func_noreturn) {
6583 if (debug_modes)
6584 tcc_tcov_block_end (tcov_data.line);
6585 CODE_OFF();
6587 } else {
6588 break;
6593 #ifndef precedence_parser /* original top-down parser */
6595 static void expr_prod(void)
6597 int t;
6599 unary();
6600 while ((t = tok) == '*' || t == '/' || t == '%') {
6601 next();
6602 unary();
6603 gen_op(t);
6607 static void expr_sum(void)
6609 int t;
6611 expr_prod();
6612 while ((t = tok) == '+' || t == '-') {
6613 next();
6614 expr_prod();
6615 gen_op(t);
6619 static void expr_shift(void)
6621 int t;
6623 expr_sum();
6624 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6625 next();
6626 expr_sum();
6627 gen_op(t);
6631 static void expr_cmp(void)
6633 int t;
6635 expr_shift();
6636 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6637 t == TOK_ULT || t == TOK_UGE) {
6638 next();
6639 expr_shift();
6640 gen_op(t);
6644 static void expr_cmpeq(void)
6646 int t;
6648 expr_cmp();
6649 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6650 next();
6651 expr_cmp();
6652 gen_op(t);
6656 static void expr_and(void)
6658 expr_cmpeq();
6659 while (tok == '&') {
6660 next();
6661 expr_cmpeq();
6662 gen_op('&');
6666 static void expr_xor(void)
6668 expr_and();
6669 while (tok == '^') {
6670 next();
6671 expr_and();
6672 gen_op('^');
6676 static void expr_or(void)
6678 expr_xor();
6679 while (tok == '|') {
6680 next();
6681 expr_xor();
6682 gen_op('|');
6686 static void expr_landor(int op);
6688 static void expr_land(void)
6690 expr_or();
6691 if (tok == TOK_LAND)
6692 expr_landor(tok);
6695 static void expr_lor(void)
6697 expr_land();
6698 if (tok == TOK_LOR)
6699 expr_landor(tok);
6702 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6703 #else /* defined precedence_parser */
6704 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6705 # define expr_lor() unary(), expr_infix(1)
6707 static int precedence(int tok)
6709 switch (tok) {
6710 case TOK_LOR: return 1;
6711 case TOK_LAND: return 2;
6712 case '|': return 3;
6713 case '^': return 4;
6714 case '&': return 5;
6715 case TOK_EQ: case TOK_NE: return 6;
6716 relat: case TOK_ULT: case TOK_UGE: return 7;
6717 case TOK_SHL: case TOK_SAR: return 8;
6718 case '+': case '-': return 9;
6719 case '*': case '/': case '%': return 10;
6720 default:
6721 if (tok >= TOK_ULE && tok <= TOK_GT)
6722 goto relat;
6723 return 0;
6726 static unsigned char prec[256];
6727 static void init_prec(void)
6729 int i;
6730 for (i = 0; i < 256; i++)
6731 prec[i] = precedence(i);
6733 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6735 static void expr_landor(int op);
6737 static void expr_infix(int p)
6739 int t = tok, p2;
6740 while ((p2 = precedence(t)) >= p) {
6741 if (t == TOK_LOR || t == TOK_LAND) {
6742 expr_landor(t);
6743 } else {
6744 next();
6745 unary();
6746 if (precedence(tok) > p2)
6747 expr_infix(p2 + 1);
6748 gen_op(t);
6750 t = tok;
6753 #endif
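/* A rough illustration of the precedence-climbing parser above, using a
   hypothetical input (the names are made up for the example):

       a + b * c | d

   expr_infix(1) parses 'a', sees '+' (precedence 9), calls unary() for
   'b', and because '*' binds tighter (10 > 9) recurses into
   expr_infix(10) so that 'b * c' is generated first; it then emits
   gen_op('+'), and '|' (precedence 3) is handled back at the outer
   level, giving (a + (b * c)) | d.  The #ifndef branch above encodes the
   same precedence levels as one hand-written function per level. */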
6755 /* Assuming vtop is a value used in a conditional context
6756 (i.e. compared with zero) return 0 if it's false, 1 if
6757 true and -1 if it can't be statically determined. */
6758 static int condition_3way(void)
6760 int c = -1;
6761 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6762 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6763 vdup();
6764 gen_cast_s(VT_BOOL);
6765 c = vtop->c.i;
6766 vpop();
6768 return c;
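/* Sketch of condition_3way() in use (assumed typical inputs): for a
   constant such as '2' or '0' on vtop it folds the test at compile time
   and returns 1 or 0, so a branch like "if (0) ..." produces no code;
   for a runtime value (register/lvalue) or a weak symbol it returns -1
   and the caller emits a real test with gvtst(). */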
6771 static void expr_landor(int op)
6773 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6774 for(;;) {
6775 c = f ? i : condition_3way();
6776 if (c < 0)
6777 save_regs(1), cc = 0;
6778 else if (c != i)
6779 nocode_wanted++, f = 1;
6780 if (tok != op)
6781 break;
6782 if (c < 0)
6783 t = gvtst(i, t);
6784 else
6785 vpop();
6786 next();
6787 expr_landor_next(op);
6789 if (cc || f) {
6790 vpop();
6791 vpushi(i ^ f);
6792 gsym(t);
6793 nocode_wanted -= f;
6794 } else {
6795 gvtst_set(i, t);
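/* Worked example (hypothetical input) for the '&&'/'||' generator above:
   for "0 && f()" with op == TOK_LAND, i == 1.  condition_3way() sees the
   constant 0, so c != i: nocode_wanted is raised and f becomes 1, the
   call to f() is parsed but generates no code, and the loop ends with
   cc still 1.  The pushed result is the constant i ^ f == 0.  With
   runtime operands instead, c < 0 forces save_regs() and gvtst() emits
   the short-circuit jumps collected in t. */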
6799 static int is_cond_bool(SValue *sv)
6801 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6802 && (sv->type.t & VT_BTYPE) == VT_INT)
6803 return (unsigned)sv->c.i < 2;
6804 if (sv->r == VT_CMP)
6805 return 1;
6806 return 0;
6809 static void expr_cond(void)
6811 int tt, u, r1, r2, rc, t1, t2, islv, c, g;
6812 SValue sv;
6813 CType type;
6814 int ncw_prev;
6816 expr_lor();
6817 if (tok == '?') {
6818 next();
6819 c = condition_3way();
6820 g = (tok == ':' && gnu_ext);
6821 tt = 0;
6822 if (!g) {
6823 if (c < 0) {
6824 save_regs(1);
6825 tt = gvtst(1, 0);
6826 } else {
6827 vpop();
6829 } else if (c < 0) {
6830 /* needed to avoid having different registers saved in
6831 each branch */
6832 save_regs(1);
6833 gv_dup();
6834 tt = gvtst(0, 0);
6837 ncw_prev = nocode_wanted;
6838 if (c == 0)
6839 nocode_wanted++;
6840 if (!g)
6841 gexpr();
6843 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6844 mk_pointer(&vtop->type);
6845 sv = *vtop; /* save value to handle it later */
6846 vtop--; /* no vpop so that FP stack is not flushed */
6848 if (g) {
6849 u = tt;
6850 } else if (c < 0) {
6851 u = gjmp(0);
6852 gsym(tt);
6853 } else
6854 u = 0;
6856 nocode_wanted = ncw_prev;
6857 if (c == 1)
6858 nocode_wanted++;
6859 skip(':');
6860 expr_cond();
6862 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
6863 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6864 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6865 this code jumps directly to the if's then/else branches. */
6866 t1 = gvtst(0, 0);
6867 t2 = gjmp(0);
6868 gsym(u);
6869 vpushv(&sv);
6870 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6871 gvtst_set(0, t1);
6872 gvtst_set(1, t2);
6873 nocode_wanted = ncw_prev;
6874 // tcc_warning("two conditions expr_cond");
6875 return;
6878 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6879 mk_pointer(&vtop->type);
6881 /* cast operands to correct type according to ISOC rules */
6882 if (!combine_types(&type, &sv, vtop, '?'))
6883 type_incompatibility_error(&sv.type, &vtop->type,
6884 "type mismatch in conditional expression (have '%s' and '%s')");
6885 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6886 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6887 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6889 /* now we convert second operand */
6890 if (c != 1) {
6891 gen_cast(&type);
6892 if (islv) {
6893 mk_pointer(&vtop->type);
6894 gaddrof();
6895 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6896 gaddrof();
6899 rc = RC_TYPE(type.t);
6900 /* for long longs, we use fixed registers to avoid having
6901 to handle a complicated move */
6902 if (USING_TWO_WORDS(type.t))
6903 rc = RC_RET(type.t);
6905 tt = r2 = 0;
6906 if (c < 0) {
6907 r2 = gv(rc);
6908 tt = gjmp(0);
6910 gsym(u);
6911 nocode_wanted = ncw_prev;
6913 /* this is horrible, but we must also convert first
6914 operand */
6915 if (c != 0) {
6916 *vtop = sv;
6917 gen_cast(&type);
6918 if (islv) {
6919 mk_pointer(&vtop->type);
6920 gaddrof();
6921 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6922 gaddrof();
6925 if (c < 0) {
6926 r1 = gv(rc);
6927 move_reg(r2, r1, islv ? VT_PTR : type.t);
6928 vtop->r = r2;
6929 gsym(tt);
6932 if (islv)
6933 indir();
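/* Behaviour sketch of expr_cond() on hypothetical inputs:
   - "c ? a : b" with a runtime condition: both branches are cast to the
     common type, loaded into the same register class rc, and joined
     through the tt/u jump labels with move_reg().
   - "1 ? a : b": condition_3way() returns 1, so the ':' branch is parsed
     under nocode_wanted and only 'a' generates code.
   - GNU "x ?: y": g is set, the first operand is duplicated with
     gv_dup() and reused as the result when it tests true. */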
6937 static void expr_eq(void)
6939 int t;
6941 expr_cond();
6942 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6943 test_lvalue();
6944 next();
6945 if (t == '=') {
6946 expr_eq();
6947 } else {
6948 vdup();
6949 expr_eq();
6950 gen_op(TOK_ASSIGN_OP(t));
6952 vstore();
6956 ST_FUNC void gexpr(void)
6958 while (1) {
6959 expr_eq();
6960 if (tok != ',')
6961 break;
6962 vpop();
6963 next();
6967 /* parse a constant expression and return value in vtop. */
6968 static void expr_const1(void)
6970 const_wanted++;
6971 nocode_wanted += unevalmask + 1;
6972 expr_cond();
6973 nocode_wanted -= unevalmask + 1;
6974 const_wanted--;
6977 /* parse an integer constant and return its value. */
6978 static inline int64_t expr_const64(void)
6980 int64_t c;
6981 expr_const1();
6982 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6983 expect("constant expression");
6984 c = vtop->c.i;
6985 vpop();
6986 return c;
6989 /* parse an integer constant and return its value.
6990 Complain if it doesn't fit 32bit (signed or unsigned). */
6991 ST_FUNC int expr_const(void)
6993 int c;
6994 int64_t wc = expr_const64();
6995 c = wc;
6996 if (c != wc && (unsigned)c != wc)
6997 tcc_error("constant exceeds 32 bit");
6998 return c;
7001 /* ------------------------------------------------------------------------- */
7002 /* return from function */
7004 #ifndef TCC_TARGET_ARM64
7005 static void gfunc_return(CType *func_type)
7007 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
7008 CType type, ret_type;
7009 int ret_align, ret_nregs, regsize;
7010 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
7011 &ret_align, &regsize);
7012 if (ret_nregs < 0) {
7013 #ifdef TCC_TARGET_RISCV64
7014 arch_transfer_ret_regs(0);
7015 #endif
7016 } else if (0 == ret_nregs) {
7017 /* if returning structure, must copy it to implicit
7018 first pointer arg location */
7019 type = *func_type;
7020 mk_pointer(&type);
7021 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
7022 indir();
7023 vswap();
7024 /* copy structure value to pointer */
7025 vstore();
7026 } else {
7027 /* returning structure packed into registers */
7028 int size, addr, align, rc;
7029 size = type_size(func_type,&align);
7030 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
7031 (vtop->c.i & (ret_align-1)))
7032 && (align & (ret_align-1))) {
7033 loc = (loc - size) & -ret_align;
7034 addr = loc;
7035 type = *func_type;
7036 vset(&type, VT_LOCAL | VT_LVAL, addr);
7037 vswap();
7038 vstore();
7039 vpop();
7040 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
7042 vtop->type = ret_type;
7043 rc = RC_RET(ret_type.t);
7044 if (ret_nregs == 1)
7045 gv(rc);
7046 else {
7047 for (;;) {
7048 vdup();
7049 gv(rc);
7050 vpop();
7051 if (--ret_nregs == 0)
7052 break;
7053 /* We assume that when a structure is returned in multiple
7054 registers, their classes are consecutive values of the
7055 sequence s(n) = 2^n */
7056 rc <<= 1;
7057 vtop->c.i += regsize;
7061 } else {
7062 gv(RC_RET(func_type->t));
7064 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
7066 #endif
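/* Rough picture of the return paths above (details are target specific):
   a hypothetical
       struct S { long a, b; } f(void) { struct S s = {1, 2}; return s; }
   may come back from gfunc_sret() with ret_nregs == 2 and be copied into
   a suitably aligned stack slot so whole registers can be loaded from
   it, while a large struct (ret_nregs == 0) is stored through the
   implicit pointer argument at func_vc instead. */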
7068 static void check_func_return(void)
7070 if ((func_vt.t & VT_BTYPE) == VT_VOID)
7071 return;
7072 if (!strcmp (funcname, "main")
7073 && (func_vt.t & VT_BTYPE) == VT_INT) {
7074 /* main returns 0 by default */
7075 vpushi(0);
7076 gen_assign_cast(&func_vt);
7077 gfunc_return(&func_vt);
7078 } else {
7079 tcc_warning("function might return no value: '%s'", funcname);
7083 /* ------------------------------------------------------------------------- */
7084 /* switch/case */
7086 static int case_cmpi(const void *pa, const void *pb)
7088 int64_t a = (*(struct case_t**) pa)->v1;
7089 int64_t b = (*(struct case_t**) pb)->v1;
7090 return a < b ? -1 : a > b;
7093 static int case_cmpu(const void *pa, const void *pb)
7095 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
7096 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
7097 return a < b ? -1 : a > b;
7100 static void gtst_addr(int t, int a)
7102 gsym_addr(gvtst(0, t), a);
7105 static void gcase(struct case_t **base, int len, int *bsym)
7107 struct case_t *p;
7108 int e;
7109 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
7110 while (len > 8) {
7111 /* binary search */
7112 p = base[len/2];
7113 vdup();
7114 if (ll)
7115 vpushll(p->v2);
7116 else
7117 vpushi(p->v2);
7118 gen_op(TOK_LE);
7119 e = gvtst(1, 0);
7120 vdup();
7121 if (ll)
7122 vpushll(p->v1);
7123 else
7124 vpushi(p->v1);
7125 gen_op(TOK_GE);
7126 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
7127 /* x < v1 */
7128 gcase(base, len/2, bsym);
7129 /* x > v2 */
7130 gsym(e);
7131 e = len/2 + 1;
7132 base += e; len -= e;
7134 /* linear scan */
7135 while (len--) {
7136 p = *base++;
7137 vdup();
7138 if (ll)
7139 vpushll(p->v2);
7140 else
7141 vpushi(p->v2);
7142 if (p->v1 == p->v2) {
7143 gen_op(TOK_EQ);
7144 gtst_addr(0, p->sym);
7145 } else {
7146 gen_op(TOK_LE);
7147 e = gvtst(1, 0);
7148 vdup();
7149 if (ll)
7150 vpushll(p->v1);
7151 else
7152 vpushi(p->v1);
7153 gen_op(TOK_GE);
7154 gtst_addr(0, p->sym);
7155 gsym(e);
7158 *bsym = gjmp(*bsym);
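/* Sketch of gcase() on a hypothetical switch with many cases: the sorted
   case array is split by comparing the switch value against the middle
   range (gen_op(TOK_LE)/TOK_GE), recursing on the lower half and falling
   through to the upper half, until 8 or fewer cases remain; those are
   then tested linearly, single-value cases with one TOK_EQ test and GNU
   "case 1 ... 5:" ranges with the LE/GE pair. */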
7161 /* ------------------------------------------------------------------------- */
7162 /* __attribute__((cleanup(fn))) */
7164 static void try_call_scope_cleanup(Sym *stop)
7166 Sym *cls = cur_scope->cl.s;
7168 for (; cls != stop; cls = cls->ncl) {
7169 Sym *fs = cls->next;
7170 Sym *vs = cls->prev_tok;
7172 vpushsym(&fs->type, fs);
7173 vset(&vs->type, vs->r, vs->c);
7174 vtop->sym = vs;
7175 mk_pointer(&vtop->type);
7176 gaddrof();
7177 gfunc_call(1);
7181 static void try_call_cleanup_goto(Sym *cleanupstate)
7183 Sym *oc, *cc;
7184 int ocd, ccd;
7186 if (!cur_scope->cl.s)
7187 return;
7189 /* search NCA of both cleanup chains given parents and initial depth */
7190 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
7191 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
7193 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
7195 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
7198 try_call_scope_cleanup(cc);
7201 /* call 'func' for each __attribute__((cleanup(func))) */
7202 static void block_cleanup(struct scope *o)
7204 int jmp = 0;
7205 Sym *g, **pg;
7206 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
7207 if (g->prev_tok->r & LABEL_FORWARD) {
7208 Sym *pcl = g->next;
7209 if (!jmp)
7210 jmp = gjmp(0);
7211 gsym(pcl->jnext);
7212 try_call_scope_cleanup(o->cl.s);
7213 pcl->jnext = gjmp(0);
7214 if (!o->cl.n)
7215 goto remove_pending;
7216 g->c = o->cl.n;
7217 pg = &g->prev;
7218 } else {
7219 remove_pending:
7220 *pg = g->prev;
7221 sym_free(g);
7224 gsym(jmp);
7225 try_call_scope_cleanup(o->cl.s);
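/* Illustration (hypothetical source) of the cleanup machinery above:

       void put(int *p);
       ...
       { __attribute__((cleanup(put))) int x = 0;
         if (e) goto out;
       }
       out: ;

   Leaving the scope normally calls put(&x) via try_call_scope_cleanup();
   the forward goto is recorded on pending_gotos so that block_cleanup()
   can emit the same call on the jump path before control leaves the
   block. */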
7228 /* ------------------------------------------------------------------------- */
7229 /* VLA */
7231 static void vla_restore(int loc)
7233 if (loc)
7234 gen_vla_sp_restore(loc);
7237 static void vla_leave(struct scope *o)
7239 struct scope *c = cur_scope, *v = NULL;
7240 for (; c != o && c; c = c->prev)
7241 if (c->vla.num)
7242 v = c;
7243 if (v)
7244 vla_restore(v->vla.locorig);
7247 /* ------------------------------------------------------------------------- */
7248 /* local scopes */
7250 void new_scope(struct scope *o)
7252 /* copy and link previous scope */
7253 *o = *cur_scope;
7254 o->prev = cur_scope;
7255 cur_scope = o;
7256 cur_scope->vla.num = 0;
7258 /* record local declaration stack position */
7259 o->lstk = local_stack;
7260 o->llstk = local_label_stack;
7261 ++local_scope;
7263 if (debug_modes)
7264 tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
7267 void prev_scope(struct scope *o, int is_expr)
7269 vla_leave(o->prev);
7271 if (o->cl.s != o->prev->cl.s)
7272 block_cleanup(o->prev);
7274 /* pop locally defined labels */
7275 label_pop(&local_label_stack, o->llstk, is_expr);
7277 /* In the is_expr case (a statement expression is finished here),
7278 vtop might refer to symbols on the local_stack. Either via the
7279 type or via vtop->sym. We can't pop those nor any that in turn
7280 might be referred to. To make it easier we don't roll back
7281 any symbols in that case; some upper level call to block() will
7282 do that. We do have to remove such symbols from the lookup
7283 tables, though. sym_pop will do that. */
7285 /* pop locally defined symbols */
7286 pop_local_syms(o->lstk, is_expr);
7287 cur_scope = o->prev;
7288 --local_scope;
7290 if (debug_modes)
7291 tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
7294 /* leave a scope via break/continue(/goto) */
7295 void leave_scope(struct scope *o)
7297 if (!o)
7298 return;
7299 try_call_scope_cleanup(o->cl.s);
7300 vla_leave(o);
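/* Example of the is_expr case handled in prev_scope() (GNU statement
   expression, hypothetical code):
       int y = ({ struct T t = make(); t.v; });
   When the inner block ends, vtop may still refer to 't' or its type, so
   the local symbols are only removed from the lookup tables here and the
   actual stack rollback is left to an enclosing block(). */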
7303 /* ------------------------------------------------------------------------- */
7304 /* call block from 'for do while' loops */
7306 static void lblock(int *bsym, int *csym)
7308 struct scope *lo = loop_scope, *co = cur_scope;
7309 int *b = co->bsym, *c = co->csym;
7310 if (csym) {
7311 co->csym = csym;
7312 loop_scope = co;
7314 co->bsym = bsym;
7315 block(0);
7316 co->bsym = b;
7317 if (csym) {
7318 co->csym = c;
7319 loop_scope = lo;
7323 static void block(int is_expr)
7325 int a, b, c, d, e, t;
7326 struct scope o;
7327 Sym *s;
7329 if (is_expr) {
7330 /* default return value is (void) */
7331 vpushi(0);
7332 vtop->type.t = VT_VOID;
7335 again:
7336 t = tok;
7337 /* If the token carries a value, next() might destroy it. Only with
7338 invalid code such as f(){"123"4;} */
7339 if (TOK_HAS_VALUE(t))
7340 goto expr;
7341 next();
7343 if (debug_modes)
7344 tcc_tcov_check_line (0), tcc_tcov_block_begin ();
7346 if (t == TOK_IF) {
7347 skip('(');
7348 gexpr();
7349 skip(')');
7350 a = gvtst(1, 0);
7351 block(0);
7352 if (tok == TOK_ELSE) {
7353 d = gjmp(0);
7354 gsym(a);
7355 next();
7356 block(0);
7357 gsym(d); /* patch else jmp */
7358 } else {
7359 gsym(a);
7362 } else if (t == TOK_WHILE) {
7363 d = gind();
7364 skip('(');
7365 gexpr();
7366 skip(')');
7367 a = gvtst(1, 0);
7368 b = 0;
7369 lblock(&a, &b);
7370 gjmp_addr(d);
7371 gsym_addr(b, d);
7372 gsym(a);
7374 } else if (t == '{') {
7375 new_scope(&o);
7377 /* handle local labels declarations */
7378 while (tok == TOK_LABEL) {
7379 do {
7380 next();
7381 if (tok < TOK_UIDENT)
7382 expect("label identifier");
7383 label_push(&local_label_stack, tok, LABEL_DECLARED);
7384 next();
7385 } while (tok == ',');
7386 skip(';');
7389 while (tok != '}') {
7390 decl(VT_LOCAL);
7391 if (tok != '}') {
7392 if (is_expr)
7393 vpop();
7394 block(is_expr);
7398 prev_scope(&o, is_expr);
7399 if (local_scope)
7400 next();
7401 else if (!nocode_wanted)
7402 check_func_return();
7404 } else if (t == TOK_RETURN) {
7405 b = (func_vt.t & VT_BTYPE) != VT_VOID;
7406 if (tok != ';') {
7407 gexpr();
7408 if (b) {
7409 gen_assign_cast(&func_vt);
7410 } else {
7411 if (vtop->type.t != VT_VOID)
7412 tcc_warning("void function returns a value");
7413 vtop--;
7415 } else if (b) {
7416 tcc_warning("'return' with no value");
7417 b = 0;
7419 leave_scope(root_scope);
7420 if (b)
7421 gfunc_return(&func_vt);
7422 skip(';');
7423 /* jump unless last stmt in top-level block */
7424 if (tok != '}' || local_scope != 1)
7425 rsym = gjmp(rsym);
7426 if (debug_modes)
7427 tcc_tcov_block_end (tcov_data.line);
7428 CODE_OFF();
7430 } else if (t == TOK_BREAK) {
7431 /* compute jump */
7432 if (!cur_scope->bsym)
7433 tcc_error("cannot break");
7434 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
7435 leave_scope(cur_switch->scope);
7436 else
7437 leave_scope(loop_scope);
7438 *cur_scope->bsym = gjmp(*cur_scope->bsym);
7439 skip(';');
7441 } else if (t == TOK_CONTINUE) {
7442 /* compute jump */
7443 if (!cur_scope->csym)
7444 tcc_error("cannot continue");
7445 leave_scope(loop_scope);
7446 *cur_scope->csym = gjmp(*cur_scope->csym);
7447 skip(';');
7449 } else if (t == TOK_FOR) {
7450 new_scope(&o);
7452 skip('(');
7453 if (tok != ';') {
7454 /* c99 for-loop init decl? */
7455 if (!decl0(VT_LOCAL, 1, NULL)) {
7456 /* no, regular for-loop init expr */
7457 gexpr();
7458 vpop();
7461 skip(';');
7462 a = b = 0;
7463 c = d = gind();
7464 if (tok != ';') {
7465 gexpr();
7466 a = gvtst(1, 0);
7468 skip(';');
7469 if (tok != ')') {
7470 e = gjmp(0);
7471 d = gind();
7472 gexpr();
7473 vpop();
7474 gjmp_addr(c);
7475 gsym(e);
7477 skip(')');
7478 lblock(&a, &b);
7479 gjmp_addr(d);
7480 gsym_addr(b, d);
7481 gsym(a);
7482 prev_scope(&o, 0);
7484 } else if (t == TOK_DO) {
7485 a = b = 0;
7486 d = gind();
7487 lblock(&a, &b);
7488 gsym(b);
7489 skip(TOK_WHILE);
7490 skip('(');
7491 gexpr();
7492 skip(')');
7493 skip(';');
7494 c = gvtst(0, 0);
7495 gsym_addr(c, d);
7496 gsym(a);
7498 } else if (t == TOK_SWITCH) {
7499 struct switch_t *sw;
7501 sw = tcc_mallocz(sizeof *sw);
7502 sw->bsym = &a;
7503 sw->scope = cur_scope;
7504 sw->prev = cur_switch;
7505 cur_switch = sw;
7507 skip('(');
7508 gexpr();
7509 skip(')');
7510 sw->sv = *vtop--; /* save switch value */
7512 a = 0;
7513 b = gjmp(0); /* jump to first case */
7514 lblock(&a, NULL);
7515 a = gjmp(a); /* add implicit break */
7516 /* case lookup */
7517 gsym(b);
7519 if (sw->sv.type.t & VT_UNSIGNED)
7520 qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
7521 else
7522 qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
7524 for (b = 1; b < sw->n; b++)
7525 if (sw->sv.type.t & VT_UNSIGNED
7526 ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
7527 : sw->p[b - 1]->v2 >= sw->p[b]->v1)
7528 tcc_error("duplicate case value");
7530 vpushv(&sw->sv);
7531 gv(RC_INT);
7532 d = 0, gcase(sw->p, sw->n, &d);
7533 vpop();
7534 if (sw->def_sym)
7535 gsym_addr(d, sw->def_sym);
7536 else
7537 gsym(d);
7538 /* break label */
7539 gsym(a);
7541 dynarray_reset(&sw->p, &sw->n);
7542 cur_switch = sw->prev;
7543 tcc_free(sw);
7545 } else if (t == TOK_CASE) {
7546 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
7547 if (!cur_switch)
7548 expect("switch");
7549 cr->v1 = cr->v2 = expr_const64();
7550 if (gnu_ext && tok == TOK_DOTS) {
7551 next();
7552 cr->v2 = expr_const64();
7553 if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
7554 || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
7555 tcc_warning("empty case range");
7557 tcov_data.ind = 0;
7558 cr->sym = gind();
7559 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7560 skip(':');
7561 is_expr = 0;
7562 goto block_after_label;
7564 } else if (t == TOK_DEFAULT) {
7565 if (!cur_switch)
7566 expect("switch");
7567 if (cur_switch->def_sym)
7568 tcc_error("too many 'default'");
7569 tcov_data.ind = 0;
7570 cur_switch->def_sym = gind();
7571 skip(':');
7572 is_expr = 0;
7573 goto block_after_label;
7575 } else if (t == TOK_GOTO) {
7576 if (cur_scope->vla.num)
7577 vla_restore(cur_scope->vla.locorig);
7578 if (tok == '*' && gnu_ext) {
7579 /* computed goto */
7580 next();
7581 gexpr();
7582 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7583 expect("pointer");
7584 ggoto();
7586 } else if (tok >= TOK_UIDENT) {
7587 s = label_find(tok);
7588 /* put forward definition if needed */
7589 if (!s)
7590 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7591 else if (s->r == LABEL_DECLARED)
7592 s->r = LABEL_FORWARD;
7594 if (s->r & LABEL_FORWARD) {
7595 /* start new goto chain for cleanups, linked via label->next */
7596 if (cur_scope->cl.s && !nocode_wanted) {
7597 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7598 pending_gotos->prev_tok = s;
7599 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7600 pending_gotos->next = s;
7602 s->jnext = gjmp(s->jnext);
7603 } else {
7604 try_call_cleanup_goto(s->cleanupstate);
7605 gjmp_addr(s->jnext);
7607 next();
7609 } else {
7610 expect("label identifier");
7612 skip(';');
7614 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7615 asm_instr();
7617 } else {
7618 if (tok == ':' && t >= TOK_UIDENT) {
7619 /* label case */
7620 next();
7621 s = label_find(t);
7622 if (s) {
7623 if (s->r == LABEL_DEFINED)
7624 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7625 s->r = LABEL_DEFINED;
7626 if (s->next) {
7627 Sym *pcl; /* pending cleanup goto */
7628 for (pcl = s->next; pcl; pcl = pcl->prev)
7629 gsym(pcl->jnext);
7630 sym_pop(&s->next, NULL, 0);
7631 } else
7632 gsym(s->jnext);
7633 } else {
7634 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7636 s->jnext = gind();
7637 s->cleanupstate = cur_scope->cl.s;
7639 block_after_label:
7640 vla_restore(cur_scope->vla.loc);
7641 if (tok != '}')
7642 goto again;
7643 /* we accept this, but it is a mistake */
7644 tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");
7646 } else {
7647 /* expression case */
7648 if (t != ';') {
7649 unget_tok(t);
7650 expr:
7651 if (is_expr) {
7652 vpop();
7653 gexpr();
7654 } else {
7655 gexpr();
7656 vpop();
7658 skip(';');
7663 if (debug_modes)
7664 tcc_tcov_check_line (0), tcc_tcov_block_end (0);
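/* A condensed walk-through of block() on a hypothetical statement:

       while (i < n) { if (p[i]) break; i++; }

   TOK_WHILE records the loop head with gind(), emits the test with
   gvtst(), and hands the body to lblock() with &a as the break chain and
   &b as the continue chain; 'break' jumps through *cur_scope->bsym after
   leave_scope(), and the final gsym_addr()/gsym() calls patch the
   continue and exit chains. */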
7667 /* This skips over a stream of tokens containing balanced {} and ()
7668 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7669 with a '{'). If STR is non-NULL, allocate a token string and store the
7670 skipped tokens in *STR. This doesn't check if () and {} are nested correctly,
7671 i.e. "({)}" is accepted. */
7672 static void skip_or_save_block(TokenString **str)
7674 int braces = tok == '{';
7675 int level = 0;
7676 if (str)
7677 *str = tok_str_alloc();
7679 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
7680 int t;
7681 if (tok == TOK_EOF) {
7682 if (str || level > 0)
7683 tcc_error("unexpected end of file");
7684 else
7685 break;
7687 if (str)
7688 tok_str_add_tok(*str);
7689 t = tok;
7690 next();
7691 if (t == '{' || t == '(') {
7692 level++;
7693 } else if (t == '}' || t == ')') {
7694 level--;
7695 if (level == 0 && braces && t == '}')
7696 break;
7699 if (str) {
7700 tok_str_add(*str, -1);
7701 tok_str_add(*str, 0);
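/* Usage sketch: skip_or_save_block(NULL) is how DIF_SIZE_ONLY skips an
   initializer expression it doesn't need, while skip_or_save_block(&str)
   is how inline function bodies and initializers of not-yet-sized
   objects are buffered as token strings to be replayed later with
   begin_macro(). */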
7705 #define EXPR_CONST 1
7706 #define EXPR_ANY 2
7708 static void parse_init_elem(int expr_type)
7710 int saved_global_expr;
7711 switch(expr_type) {
7712 case EXPR_CONST:
7713 /* compound literals must be allocated globally in this case */
7714 saved_global_expr = global_expr;
7715 global_expr = 1;
7716 expr_const1();
7717 global_expr = saved_global_expr;
7718 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7719 (compound literals). */
7720 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7721 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7722 || vtop->sym->v < SYM_FIRST_ANOM))
7723 #ifdef TCC_TARGET_PE
7724 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7725 #endif
7727 tcc_error("initializer element is not constant");
7728 break;
7729 case EXPR_ANY:
7730 expr_eq();
7731 break;
7735 #if 1
7736 static void init_assert(init_params *p, int offset)
7738 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7739 : !nocode_wanted && offset > p->local_offset)
7740 tcc_internal_error("initializer overflow");
7742 #else
7743 #define init_assert(sec, offset)
7744 #endif
7746 /* put zeros for variable based init */
7747 static void init_putz(init_params *p, unsigned long c, int size)
7749 init_assert(p, c + size);
7750 if (p->sec) {
7751 /* nothing to do because globals are already set to zero */
7752 } else {
7753 vpush_helper_func(TOK_memset);
7754 vseti(VT_LOCAL, c);
7755 #ifdef TCC_TARGET_ARM
7756 vpushs(size);
7757 vpushi(0);
7758 #else
7759 vpushi(0);
7760 vpushs(size);
7761 #endif
7762 gfunc_call(3);
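/* i.e. for a local object the zero-fill is emitted as a call roughly
   equivalent to memset(frame_base + c, 0, size); globals need nothing
   because their section data is already zero-initialized. */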
7766 #define DIF_FIRST 1
7767 #define DIF_SIZE_ONLY 2
7768 #define DIF_HAVE_ELEM 4
7769 #define DIF_CLEAR 8
7771 /* delete relocations for the specified range c ... c + size. Unfortunately,
7772 in very special cases relocations may occur unordered */
7773 static void decl_design_delrels(Section *sec, int c, int size)
7775 ElfW_Rel *rel, *rel2, *rel_end;
7776 if (!sec || !sec->reloc)
7777 return;
7778 rel = rel2 = (ElfW_Rel*)sec->reloc->data;
7779 rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
7780 while (rel < rel_end) {
7781 if (rel->r_offset >= c && rel->r_offset < c + size) {
7782 sec->reloc->data_offset -= sizeof *rel;
7783 } else {
7784 if (rel2 != rel)
7785 memcpy(rel2, rel, sizeof *rel);
7786 ++rel2;
7788 ++rel;
7792 static void decl_design_flex(init_params *p, Sym *ref, int index)
7794 if (ref == p->flex_array_ref) {
7795 if (index >= ref->c)
7796 ref->c = index + 1;
7797 } else if (ref->c < 0)
7798 tcc_error("flexible array has zero size in this context");
7801 /* t is the array or struct type. c is the array or struct
7802 address. cur_field is the pointer to the current
7803 field, for arrays the 'c' member contains the current start
7804 index. 'flags' is as in decl_initializer.
7805 'al' contains the already initialized length of the
7806 current container (starting at c). This returns the new length of that. */
7807 static int decl_designator(init_params *p, CType *type, unsigned long c,
7808 Sym **cur_field, int flags, int al)
7810 Sym *s, *f;
7811 int index, index_last, align, l, nb_elems, elem_size;
7812 unsigned long corig = c;
7814 elem_size = 0;
7815 nb_elems = 1;
7817 if (flags & DIF_HAVE_ELEM)
7818 goto no_designator;
7820 if (gnu_ext && tok >= TOK_UIDENT) {
7821 l = tok, next();
7822 if (tok == ':')
7823 goto struct_field;
7824 unget_tok(l);
7827 /* NOTE: we only support ranges for last designator */
7828 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7829 if (tok == '[') {
7830 if (!(type->t & VT_ARRAY))
7831 expect("array type");
7832 next();
7833 index = index_last = expr_const();
7834 if (tok == TOK_DOTS && gnu_ext) {
7835 next();
7836 index_last = expr_const();
7838 skip(']');
7839 s = type->ref;
7840 decl_design_flex(p, s, index_last);
7841 if (index < 0 || index_last >= s->c || index_last < index)
7842 tcc_error("index exceeds array bounds or range is empty");
7843 if (cur_field)
7844 (*cur_field)->c = index_last;
7845 type = pointed_type(type);
7846 elem_size = type_size(type, &align);
7847 c += index * elem_size;
7848 nb_elems = index_last - index + 1;
7849 } else {
7850 int cumofs;
7851 next();
7852 l = tok;
7853 struct_field:
7854 next();
7855 if ((type->t & VT_BTYPE) != VT_STRUCT)
7856 expect("struct/union type");
7857 cumofs = 0;
7858 f = find_field(type, l, &cumofs);
7859 if (!f)
7860 expect("field");
7861 if (cur_field)
7862 *cur_field = f;
7863 type = &f->type;
7864 c += cumofs + f->c;
7866 cur_field = NULL;
7868 if (!cur_field) {
7869 if (tok == '=') {
7870 next();
7871 } else if (!gnu_ext) {
7872 expect("=");
7874 } else {
7875 no_designator:
7876 if (type->t & VT_ARRAY) {
7877 index = (*cur_field)->c;
7878 s = type->ref;
7879 decl_design_flex(p, s, index);
7880 if (index >= s->c)
7881 tcc_error("too many initializers");
7882 type = pointed_type(type);
7883 elem_size = type_size(type, &align);
7884 c += index * elem_size;
7885 } else {
7886 f = *cur_field;
7887 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
7888 *cur_field = f = f->next;
7889 if (!f)
7890 tcc_error("too many initializers");
7891 type = &f->type;
7892 c += f->c;
7896 if (!elem_size) /* for structs */
7897 elem_size = type_size(type, &align);
7899 /* Using designators, the same element can be initialized more
7900 than once. In that case we need to delete possibly already
7901 existing relocations. */
7902 if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
7903 decl_design_delrels(p->sec, c, elem_size * nb_elems);
7904 flags &= ~DIF_CLEAR; /* mark stack dirty too */
7907 decl_initializer(p, type, c, flags & ~DIF_FIRST);
7909 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7910 Sym aref = {0};
7911 CType t1;
7912 int i;
7913 if (p->sec || (type->t & VT_ARRAY)) {
7914 /* make init_putv/vstore believe it were a struct */
7915 aref.c = elem_size;
7916 t1.t = VT_STRUCT, t1.ref = &aref;
7917 type = &t1;
7919 if (p->sec)
7920 vpush_ref(type, p->sec, c, elem_size);
7921 else
7922 vset(type, VT_LOCAL|VT_LVAL, c);
7923 for (i = 1; i < nb_elems; i++) {
7924 vdup();
7925 init_putv(p, type, c + elem_size * i);
7927 vpop();
7930 c += nb_elems * elem_size;
7931 if (c - corig > al)
7932 al = c - corig;
7933 return al;
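/* Designator examples (hypothetical, GNU range syntax included):
       int a[8]   = { [2] = 1, [4 ... 6] = 7 };
       struct P p = { .y = 3, .x = 1 };
   The range form is handled by nb_elems > 1: the first element is fully
   initialized and then copied to the remaining slots with init_putv(),
   after stale relocations for re-initialized bytes have been removed by
   decl_design_delrels(). */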
7936 /* store a value or an expression directly in global data or in local array */
7937 static void init_putv(init_params *p, CType *type, unsigned long c)
7939 int bt;
7940 void *ptr;
7941 CType dtype;
7942 int size, align;
7943 Section *sec = p->sec;
7944 uint64_t val;
7946 dtype = *type;
7947 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7949 size = type_size(type, &align);
7950 if (type->t & VT_BITFIELD)
7951 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7952 init_assert(p, c + size);
7954 if (sec) {
7955 /* XXX: not portable */
7956 /* XXX: generate error if incorrect relocation */
7957 gen_assign_cast(&dtype);
7958 bt = type->t & VT_BTYPE;
7960 if ((vtop->r & VT_SYM)
7961 && bt != VT_PTR
7962 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7963 || (type->t & VT_BITFIELD))
7964 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7966 tcc_error("initializer element is not computable at load time");
7968 if (NODATA_WANTED) {
7969 vtop--;
7970 return;
7973 ptr = sec->data + c;
7974 val = vtop->c.i;
7976 /* XXX: make code faster ? */
7977 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7978 vtop->sym->v >= SYM_FIRST_ANOM &&
7979 /* XXX This rejects compound literals like
7980 '(void *){ptr}'. The problem is that '&sym' is
7981 represented the same way, which would be ruled out
7982 by the SYM_FIRST_ANOM check above, but also '"string"'
7983 in 'char *p = "string"' is represented the same
7984 with the type being VT_PTR and the symbol being an
7985 anonymous one. That is, there's no difference in vtop
7986 between '(void *){x}' and '&(void *){x}'. Ignore
7987 pointer typed entities here. Hopefully no real code
7988 will ever use compound literals with scalar type. */
7989 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7990 /* These come from compound literals, memcpy stuff over. */
7991 Section *ssec;
7992 ElfSym *esym;
7993 ElfW_Rel *rel;
7994 esym = elfsym(vtop->sym);
7995 ssec = tcc_state->sections[esym->st_shndx];
7996 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7997 if (ssec->reloc) {
7998 /* We need to copy over all memory contents, and that
7999 includes relocations. Use the fact that relocs are
8000 created in order, so look from the end of relocs
8001 until we hit one before the copied region. */
8002 unsigned long relofs = ssec->reloc->data_offset;
8003 while (relofs >= sizeof(*rel)) {
8004 relofs -= sizeof(*rel);
8005 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
8006 if (rel->r_offset >= esym->st_value + size)
8007 continue;
8008 if (rel->r_offset < esym->st_value)
8009 break;
8010 put_elf_reloca(symtab_section, sec,
8011 c + rel->r_offset - esym->st_value,
8012 ELFW(R_TYPE)(rel->r_info),
8013 ELFW(R_SYM)(rel->r_info),
8014 #if PTR_SIZE == 8
8015 rel->r_addend
8016 #else
8017 0
8018 #endif
8019 );
8022 } else {
8023 if (type->t & VT_BITFIELD) {
8024 int bit_pos, bit_size, bits, n;
8025 unsigned char *p, v, m;
8026 bit_pos = BIT_POS(vtop->type.t);
8027 bit_size = BIT_SIZE(vtop->type.t);
8028 p = (unsigned char*)ptr + (bit_pos >> 3);
8029 bit_pos &= 7, bits = 0;
8030 while (bit_size) {
8031 n = 8 - bit_pos;
8032 if (n > bit_size)
8033 n = bit_size;
8034 v = val >> bits << bit_pos;
8035 m = ((1 << n) - 1) << bit_pos;
8036 *p = (*p & ~m) | (v & m);
8037 bits += n, bit_size -= n, bit_pos = 0, ++p;
8039 } else
8040 switch(bt) {
8041 case VT_BOOL:
8042 *(char *)ptr = val != 0;
8043 break;
8044 case VT_BYTE:
8045 *(char *)ptr = val;
8046 break;
8047 case VT_SHORT:
8048 write16le(ptr, val);
8049 break;
8050 case VT_FLOAT:
8051 write32le(ptr, val);
8052 break;
8053 case VT_DOUBLE:
8054 write64le(ptr, val);
8055 break;
8056 case VT_LDOUBLE:
8057 #if defined TCC_IS_NATIVE_387
8058 /* Host and target platform may be different but both have x87.
8059 On windows, tcc does not use VT_LDOUBLE, except when it is a
8060 cross compiler. In this case a mingw gcc as host compiler
8061 comes here with 10-byte long doubles, while msvc or tcc won't.
8062 tcc itself can still translate by asm.
8063 In any case we avoid possibly random bytes 11 and 12.
8065 if (sizeof (long double) >= 10)
8066 memcpy(ptr, &vtop->c.ld, 10);
8067 #ifdef __TINYC__
8068 else if (sizeof (long double) == sizeof (double))
8069 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
8070 #endif
8071 else if (vtop->c.ld == 0.0)
8072 ;
8073 else
8074 #endif
8075 /* For other platforms it should work natively, but may not work
8076 for cross compilers */
8077 if (sizeof(long double) == LDOUBLE_SIZE)
8078 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
8079 else if (sizeof(double) == LDOUBLE_SIZE)
8080 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
8081 #ifndef TCC_CROSS_TEST
8082 else
8083 tcc_error("can't cross compile long double constants");
8084 #endif
8085 break;
8087 #if PTR_SIZE == 8
8088 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
8089 case VT_LLONG:
8090 case VT_PTR:
8091 if (vtop->r & VT_SYM)
8092 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
8093 else
8094 write64le(ptr, val);
8095 break;
8096 case VT_INT:
8097 write32le(ptr, val);
8098 break;
8099 #else
8100 case VT_LLONG:
8101 write64le(ptr, val);
8102 break;
8103 case VT_PTR:
8104 case VT_INT:
8105 if (vtop->r & VT_SYM)
8106 greloc(sec, vtop->sym, c, R_DATA_PTR);
8107 write32le(ptr, val);
8108 break;
8109 #endif
8110 default:
8111 //tcc_internal_error("unexpected type");
8112 break;
8115 vtop--;
8116 } else {
8117 vset(&dtype, VT_LOCAL|VT_LVAL, c);
8118 vswap();
8119 vstore();
8120 vpop();
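/* The bitfield branch above packs the constant into the data section one
   byte at a time; e.g. for a hypothetical
       struct { int a:3, b:5; } x = {5, 9};
   both stores land in the same byte, each masked with
   m = ((1 << n) - 1) << bit_pos so neighbouring fields survive. */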
8124 /* 't' contains the type and storage info. 'c' is the offset of the
8125 object in section 'sec'. If 'sec' is NULL, it means stack based
8126 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
8127 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
8128 size only evaluation is wanted (only for arrays). */
8129 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
8131 int len, n, no_oblock, i;
8132 int size1, align1;
8133 Sym *s, *f;
8134 Sym indexsym;
8135 CType *t1;
8137 /* generate line number info */
8138 if (debug_modes && !p->sec)
8139 tcc_debug_line(tcc_state), tcc_tcov_check_line (1);
8141 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
8142 /* In case of strings we have special handling for arrays, so
8143 don't consume them as initializer value (which would commit them
8144 to some anonymous symbol). */
8145 tok != TOK_LSTR && tok != TOK_STR &&
8146 !(flags & DIF_SIZE_ONLY)) {
8147 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8148 flags |= DIF_HAVE_ELEM;
8151 if ((flags & DIF_HAVE_ELEM) &&
8152 !(type->t & VT_ARRAY) &&
8153 /* Use i_c_parameter_t, to strip toplevel qualifiers.
8154 The source type might have VT_CONSTANT set, which is
8155 of course assignable to non-const elements. */
8156 is_compatible_unqualified_types(type, &vtop->type)) {
8157 goto init_putv;
8159 } else if (type->t & VT_ARRAY) {
8160 no_oblock = 1;
8161 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
8162 tok == '{') {
8163 skip('{');
8164 no_oblock = 0;
8167 s = type->ref;
8168 n = s->c;
8169 t1 = pointed_type(type);
8170 size1 = type_size(t1, &align1);
8172 /* only parse strings here if correct type (otherwise: handle
8173 them as ((w)char *) expressions) */
8174 if ((tok == TOK_LSTR &&
8175 #ifdef TCC_TARGET_PE
8176 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
8177 #else
8178 (t1->t & VT_BTYPE) == VT_INT
8179 #endif
8180 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
8181 len = 0;
8182 cstr_reset(&initstr);
8183 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
8184 tcc_error("unhandled string literal merging");
8185 while (tok == TOK_STR || tok == TOK_LSTR) {
8186 if (initstr.size)
8187 initstr.size -= size1;
8188 if (tok == TOK_STR)
8189 len += tokc.str.size;
8190 else
8191 len += tokc.str.size / sizeof(nwchar_t);
8192 len--;
8193 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
8194 next();
8196 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
8197 && tok != TOK_EOF) {
8198 /* Not a lone literal but part of a bigger expression. */
8199 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
8200 tokc.str.size = initstr.size;
8201 tokc.str.data = initstr.data;
8202 goto do_init_array;
8205 decl_design_flex(p, s, len);
8206 if (!(flags & DIF_SIZE_ONLY)) {
8207 int nb = n, ch;
8208 if (len < nb)
8209 nb = len;
8210 if (len > nb)
8211 tcc_warning("initializer-string for array is too long");
8212 /* in order to go faster for the common case (char
8213 string in a global variable), we handle it
8214 specifically */
8215 if (p->sec && size1 == 1) {
8216 init_assert(p, c + nb);
8217 if (!NODATA_WANTED)
8218 memcpy(p->sec->data + c, initstr.data, nb);
8219 } else {
8220 for(i=0;i<n;i++) {
8221 if (i >= nb) {
8222 /* only add trailing zero if enough storage (no
8223 warning in this case since it is standard) */
8224 if (flags & DIF_CLEAR)
8225 break;
8226 if (n - i >= 4) {
8227 init_putz(p, c + i * size1, (n - i) * size1);
8228 break;
8230 ch = 0;
8231 } else if (size1 == 1)
8232 ch = ((unsigned char *)initstr.data)[i];
8233 else
8234 ch = ((nwchar_t *)initstr.data)[i];
8235 vpushi(ch);
8236 init_putv(p, t1, c + i * size1);
8240 } else {
8242 do_init_array:
8243 indexsym.c = 0;
8244 f = &indexsym;
8246 do_init_list:
8247 /* zero memory once in advance */
8248 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
8249 init_putz(p, c, n*size1);
8250 flags |= DIF_CLEAR;
8253 len = 0;
8254 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
8255 len = decl_designator(p, type, c, &f, flags, len);
8256 flags &= ~DIF_HAVE_ELEM;
8257 if (type->t & VT_ARRAY) {
8258 ++indexsym.c;
8259 /* special test for multi dimensional arrays (may not
8260 be strictly correct if designators are used at the
8261 same time) */
8262 if (no_oblock && len >= n*size1)
8263 break;
8264 } else {
8265 if (s->type.t == VT_UNION)
8266 f = NULL;
8267 else
8268 f = f->next;
8269 if (no_oblock && f == NULL)
8270 break;
8273 if (tok == '}')
8274 break;
8275 skip(',');
8278 if (!no_oblock)
8279 skip('}');
8280 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
8281 no_oblock = 1;
8282 if ((flags & DIF_FIRST) || tok == '{') {
8283 skip('{');
8284 no_oblock = 0;
8286 s = type->ref;
8287 f = s->next;
8288 n = s->c;
8289 size1 = 1;
8290 goto do_init_list;
8291 } else if (tok == '{') {
8292 if (flags & DIF_HAVE_ELEM)
8293 skip(';');
8294 next();
8295 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
8296 skip('}');
8297 } else if ((flags & DIF_SIZE_ONLY)) {
8298 /* If we supported only ISO C we wouldn't have to accept calling
8299 this on anything than an array if DIF_SIZE_ONLY (and even then
8300 only on the outermost level, so no recursion would be needed),
8301 because initializing a flex array member isn't supported.
8302 But GNU C supports it, so we need to recurse even into
8303 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8304 /* just skip expression */
8305 skip_or_save_block(NULL);
8306 } else {
8307 if (!(flags & DIF_HAVE_ELEM)) {
8308 /* This should happen only when we haven't parsed
8309 the init element above for fear of committing a
8310 string constant to memory too early. */
8311 if (tok != TOK_STR && tok != TOK_LSTR)
8312 expect("string constant");
8313 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8315 init_putv:
8316 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
8317 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
8318 && vtop->c.i == 0
8319 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
8321 vpop();
8322 else
8323 init_putv(p, type, c);
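/* String-initializer sketch for the array branch above (hypothetical):
       char s[8] = "hi";            or          wchar_t w[] = L"ab";
   Adjacent literals are concatenated into 'initstr', the implicit or
   flexible length is fixed up via decl_design_flex(), the bytes are
   memcpy'd directly for global char arrays, and remaining elements are
   zeroed (or skipped when DIF_CLEAR says the container is already
   zero). */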
8327 /* parse an initializer for type 't' if 'has_init' is non zero, and
8328 allocate space in local or global data space ('r' is either
8329 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8330 variable 'v' of scope 'scope' is declared before initializers
8331 are parsed. If 'v' is zero, then a reference to the new object
8332 is put in the value stack. If 'has_init' is 2, a special parsing
8333 is done to handle string constants. */
8334 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
8335 int has_init, int v, int scope)
8337 int size, align, addr;
8338 TokenString *init_str = NULL;
8340 Section *sec;
8341 Sym *flexible_array;
8342 Sym *sym;
8343 int saved_nocode_wanted = nocode_wanted;
8344 #ifdef CONFIG_TCC_BCHECK
8345 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
8346 #endif
8347 init_params p = {0};
8349 /* Always allocate static or global variables */
8350 if (v && (r & VT_VALMASK) == VT_CONST)
8351 nocode_wanted |= 0x80000000;
8353 flexible_array = NULL;
8354 size = type_size(type, &align);
8356 /* exactly one flexible array may be initialized, either the
8357 toplevel array or the last member of the toplevel struct */
8359 if (size < 0) {
8360 /* If the base type itself was an array type of unspecified size
8361 (like in 'typedef int arr[]; arr x = {1};') then we will
8362 overwrite the unknown size by the real one for this decl.
8363 We need to unshare the ref symbol holding that size. */
8364 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8365 p.flex_array_ref = type->ref;
8367 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
8368 Sym *field = type->ref->next;
8369 if (field) {
8370 while (field->next)
8371 field = field->next;
8372 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
8373 flexible_array = field;
8374 p.flex_array_ref = field->type.ref;
8375 size = -1;
8380 if (size < 0) {
8381 /* If unknown size, do a dry-run 1st pass */
8382 if (!has_init)
8383 tcc_error("unknown type size");
8384 if (has_init == 2) {
8385 /* only get strings */
8386 init_str = tok_str_alloc();
8387 while (tok == TOK_STR || tok == TOK_LSTR) {
8388 tok_str_add_tok(init_str);
8389 next();
8391 tok_str_add(init_str, -1);
8392 tok_str_add(init_str, 0);
8393 } else
8394 skip_or_save_block(&init_str);
8395 unget_tok(0);
8397 /* compute size */
8398 begin_macro(init_str, 1);
8399 next();
8400 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8401 /* prepare second initializer parsing */
8402 macro_ptr = init_str->str;
8403 next();
8405 /* if still unknown size, error */
8406 size = type_size(type, &align);
8407 if (size < 0)
8408 tcc_error("unknown type size");
8410 /* If there's a flex member and it was used in the initializer
8411 adjust size. */
8412 if (flexible_array && flexible_array->type.ref->c > 0)
8413 size += flexible_array->type.ref->c
8414 * pointed_size(&flexible_array->type);
8417 /* take into account specified alignment if bigger */
8418 if (ad->a.aligned) {
8419 int speca = 1 << (ad->a.aligned - 1);
8420 if (speca > align)
8421 align = speca;
8422 } else if (ad->a.packed) {
8423 align = 1;
8426 if (!v && NODATA_WANTED)
8427 size = 0, align = 1;
8429 if ((r & VT_VALMASK) == VT_LOCAL) {
8430 sec = NULL;
8431 #ifdef CONFIG_TCC_BCHECK
8432 if (bcheck && v) {
8433 /* add padding between stack variables for bound checking */
8434 loc -= align;
8436 #endif
8437 loc = (loc - size) & -align;
8438 addr = loc;
8439 p.local_offset = addr + size;
8440 #ifdef CONFIG_TCC_BCHECK
8441 if (bcheck && v) {
8442 /* add padding between stack variables for bound checking */
8443 loc -= align;
8445 #endif
8446 if (v) {
8447 /* local variable */
8448 #ifdef CONFIG_TCC_ASM
8449 if (ad->asm_label) {
8450 int reg = asm_parse_regvar(ad->asm_label);
8451 if (reg >= 0)
8452 r = (r & ~VT_VALMASK) | reg;
8454 #endif
8455 sym = sym_push(v, type, r, addr);
8456 if (ad->cleanup_func) {
8457 Sym *cls = sym_push2(&all_cleanups,
8458 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8459 cls->prev_tok = sym;
8460 cls->next = ad->cleanup_func;
8461 cls->ncl = cur_scope->cl.s;
8462 cur_scope->cl.s = cls;
8465 sym->a = ad->a;
8466 } else {
8467 /* push local reference */
8468 vset(type, r, addr);
8470 } else {
8471 sym = NULL;
8472 if (v && scope == VT_CONST) {
8473 /* see if the symbol was already defined */
8474 sym = sym_find(v);
8475 if (sym) {
8476 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8477 && sym->type.ref->c > type->ref->c) {
8478 /* flex array was already declared with explicit size
8479 extern int arr[10];
8480 int arr[] = { 1,2,3 }; */
8481 type->ref->c = sym->type.ref->c;
8482 size = type_size(type, &align);
8484 patch_storage(sym, ad, type);
8485 /* we accept several definitions of the same global variable. */
8486 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8487 goto no_alloc;
8491 /* allocate symbol in corresponding section */
8492 sec = ad->section;
8493 if (!sec) {
8494 CType *tp = type;
8495 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8496 tp = &tp->ref->type;
8497 if (tp->t & VT_CONSTANT) {
8498 sec = rodata_section;
8499 } else if (has_init) {
8500 sec = data_section;
8501 /*if (tcc_state->g_debug & 4)
8502 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8503 } else if (tcc_state->nocommon)
8504 sec = bss_section;
8507 if (sec) {
8508 addr = section_add(sec, size, align);
8509 #ifdef CONFIG_TCC_BCHECK
8510 /* add padding if bound check */
8511 if (bcheck)
8512 section_add(sec, 1, 1);
8513 #endif
8514 } else {
8515 addr = align; /* SHN_COMMON is special, symbol value is align */
8516 sec = common_section;
8519 if (v) {
8520 if (!sym) {
8521 sym = sym_push(v, type, r | VT_SYM, 0);
8522 patch_storage(sym, ad, NULL);
8524 /* update symbol definition */
8525 put_extern_sym(sym, sec, addr, size);
8526 } else {
8527 /* push global reference */
8528 vpush_ref(type, sec, addr, size);
8529 sym = vtop->sym;
8530 vtop->r |= r;
8533 #ifdef CONFIG_TCC_BCHECK
8534 /* handle bounds now because the symbol must be defined
8535 beforehand for the relocation */
8536 if (bcheck) {
8537 addr_t *bounds_ptr;
8539 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8540 /* then add global bound info */
8541 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8542 bounds_ptr[0] = 0; /* relocated */
8543 bounds_ptr[1] = size;
8545 #endif
8548 if (type->t & VT_VLA) {
8549 int a;
8551 if (NODATA_WANTED)
8552 goto no_alloc;
8554 /* save before-VLA stack pointer if needed */
8555 if (cur_scope->vla.num == 0) {
8556 if (cur_scope->prev && cur_scope->prev->vla.num) {
8557 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8558 } else {
8559 gen_vla_sp_save(loc -= PTR_SIZE);
8560 cur_scope->vla.locorig = loc;
8564 vla_runtime_type_size(type, &a);
8565 gen_vla_alloc(type, a);
8566 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8567 /* on _WIN64, because of the function args scratch area, the
8568 result of alloca differs from RSP and is returned in RAX. */
8569 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8570 #endif
8571 gen_vla_sp_save(addr);
8572 cur_scope->vla.loc = addr;
8573 cur_scope->vla.num++;
8574 } else if (has_init) {
8575 p.sec = sec;
8576 decl_initializer(&p, type, addr, DIF_FIRST);
8577 /* patch flexible array member size back to -1, */
8578 /* for possible subsequent similar declarations */
8579 if (flexible_array)
8580 flexible_array->type.ref->c = -1;
8583 no_alloc:
8584 /* restore parse state if needed */
8585 if (init_str) {
8586 end_macro();
8587 next();
8590 nocode_wanted = saved_nocode_wanted;
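/* Two-pass sketch for the "unknown size" path above (hypothetical):
       int arr[] = { 1, 2, 3 };
   The initializer tokens are saved by skip_or_save_block(), replayed
   once with DIF_FIRST|DIF_SIZE_ONLY just to settle type->ref->c (here
   3), and then replayed a second time to actually store the values once
   the storage has been allocated. */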
8593 /* parse a function defined by symbol 'sym' and generate its code in
8594 'cur_text_section' */
8595 static void gen_function(Sym *sym)
8597 struct scope f = { 0 };
8598 cur_scope = root_scope = &f;
8599 nocode_wanted = 0;
8600 ind = cur_text_section->data_offset;
8601 if (sym->a.aligned) {
8602 size_t newoff = section_add(cur_text_section, 0,
8603 1 << (sym->a.aligned - 1));
8604 gen_fill_nops(newoff - ind);
8606 /* NOTE: we patch the symbol size later */
8607 put_extern_sym(sym, cur_text_section, ind, 0);
8608 if (sym->type.ref->f.func_ctor)
8609 add_array (tcc_state, ".init_array", sym->c);
8610 if (sym->type.ref->f.func_dtor)
8611 add_array (tcc_state, ".fini_array", sym->c);
8613 funcname = get_tok_str(sym->v, NULL);
8614 func_ind = ind;
8615 func_vt = sym->type.ref->type;
8616 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8618 /* put debug symbol */
8619 tcc_debug_funcstart(tcc_state, sym);
8620 /* push a dummy symbol to enable local sym storage */
8621 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8622 local_scope = 1; /* for function parameters */
8623 gfunc_prolog(sym);
8624 local_scope = 0;
8625 rsym = 0;
8626 clear_temp_local_var_list();
8627 block(0);
8628 gsym(rsym);
8629 nocode_wanted = 0;
8630 /* reset local stack */
8631 pop_local_syms(NULL, 0);
8632 gfunc_epilog();
8633 cur_text_section->data_offset = ind;
8634 local_scope = 0;
8635 label_pop(&global_label_stack, NULL, 0);
8636 sym_pop(&all_cleanups, NULL, 0);
8637 /* patch symbol size */
8638 elfsym(sym)->st_size = ind - func_ind;
8639 /* end of function */
8640 tcc_debug_funcend(tcc_state, ind - func_ind);
8641 /* It's better to crash than to generate wrong code */
8642 cur_text_section = NULL;
8643 funcname = ""; /* for safety */
8644 func_vt.t = VT_VOID; /* for safety */
8645 func_var = 0; /* for safety */
8646 ind = 0; /* for safety */
8647 nocode_wanted = 0x80000000;
8648 check_vstack();
8649 /* do this after funcend debug info */
8650 next();
8653 static void gen_inline_functions(TCCState *s)
8655 Sym *sym;
8656 int inline_generated, i;
8657 struct InlineFunc *fn;
8659 tcc_open_bf(s, ":inline:", 0);
8660 /* iterate while inline functions are referenced */
8661 do {
8662 inline_generated = 0;
8663 for (i = 0; i < s->nb_inline_fns; ++i) {
8664 fn = s->inline_fns[i];
8665 sym = fn->sym;
8666 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8667 /* the function was used or forced (and then not internal):
8668 generate its code and convert it to a normal function */
8669 fn->sym = NULL;
8670 tcc_debug_putfile(s, fn->filename);
8671 begin_macro(fn->func_str, 1);
8672 next();
8673 cur_text_section = text_section;
8674 gen_function(sym);
8675 end_macro();
8677 inline_generated = 1;
8680 } while (inline_generated);
8681 tcc_close();
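/* e.g. a hypothetical "static inline int sq(int x) { return x * x; }" is
   only tokenized at its definition; if later code calls sq(), sym->c
   becomes non-zero and the loop above replays the saved tokens and
   compiles it into text_section, repeating until no new inline bodies
   are pulled in. */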
8684 static void free_inline_functions(TCCState *s)
8686 int i;
8687 /* free tokens of unused inline functions */
8688 for (i = 0; i < s->nb_inline_fns; ++i) {
8689 struct InlineFunc *fn = s->inline_fns[i];
8690 if (fn->sym)
8691 tok_str_free(fn->func_str);
8693 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8696 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8697 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8698 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
8700 int v, has_init, r, oldint;
8701 CType type, btype;
8702 Sym *sym;
8703 AttributeDef ad, adbase;
8705 while (1) {
8706 if (tok == TOK_STATIC_ASSERT) {
8707 CString error_str;
8708 int c;
8710 next();
8711 skip('(');
8712 c = expr_const();
8714 if (tok == ')') {
8715 if (!c)
8716 tcc_error("_Static_assert fail");
8717 next();
8718 goto static_assert_out;
8721 skip(',');
8722 parse_mult_str(&error_str, "string constant");
8723 if (c == 0)
8724 tcc_error("%s", (char *)error_str.data);
8725 cstr_free(&error_str);
8726 skip(')');
8727 static_assert_out:
8728 skip(';');
8729 continue;
8732 oldint = 0;
8733 if (!parse_btype(&btype, &adbase)) {
8734 if (is_for_loop_init)
8735 return 0;
8736 /* skip redundant ';' if not in old parameter decl scope */
8737 if (tok == ';' && l != VT_CMP) {
8738 next();
8739 continue;
8741 if (l != VT_CONST)
8742 break;
8743 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8744 /* global asm block */
8745 asm_global_instr();
8746 continue;
8748 if (tok >= TOK_UIDENT) {
8749 /* special test for old K&R protos without explicit int
8750 type. Only accepted when defining global data */
8751 btype.t = VT_INT;
8752 oldint = 1;
8753 } else {
8754 if (tok != TOK_EOF)
8755 expect("declaration");
8756 break;
8760 if (tok == ';') {
8761 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8762 v = btype.ref->v;
8763 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8764 tcc_warning("unnamed struct/union that defines no instances");
8765 next();
8766 continue;
8768 if (IS_ENUM(btype.t)) {
8769 next();
8770 continue;
8774 while (1) { /* iterate thru each declaration */
8775 type = btype;
8776 ad = adbase;
8777 type_decl(&type, &ad, &v, TYPE_DIRECT);
8778 #if 0
8780 char buf[500];
8781 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8782 printf("type = '%s'\n", buf);
8784 #endif
8785 if ((type.t & VT_BTYPE) == VT_FUNC) {
8786 if ((type.t & VT_STATIC) && (l == VT_LOCAL))
8787 tcc_error("function without file scope cannot be static");
8788 /* if old style function prototype, we accept a
8789 declaration list */
8790 sym = type.ref;
8791 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
8792 decl0(VT_CMP, 0, sym);
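/* Editorial example: an old-style definition such as
       int add(a, b)
           int a;
           int b;
       { return a + b; }
   reaches the recursive decl0(VT_CMP, 0, sym) call above, which parses the
   'int a; int b;' list and patches the parameter symbols of 'add'. */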
8793 #ifdef TCC_TARGET_MACHO
8794 if (sym->f.func_alwinl
8795 && ((type.t & (VT_EXTERN | VT_INLINE))
8796 == (VT_EXTERN | VT_INLINE))) {
8797 /* always_inline functions must be handled as if they
8798 don't generate multiple global defs, even if extern
8799 inline, i.e. GNU inline semantics for those. Rewrite
8800 them into static inline. */
8801 type.t &= ~VT_EXTERN;
8802 type.t |= VT_STATIC;
8803 }
8804 #endif
8805 /* always compile 'extern inline' */
8806 if (type.t & VT_EXTERN)
8807 type.t &= ~VT_INLINE;
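/* Editorial note: as a consequence of the two lines above, a definition
   declared 'extern inline' (e.g. 'extern inline int sq(int x) { ... }')
   loses VT_INLINE and is compiled as a normal external function instead of
   being deferred like 'static inline'. */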
8809 } else if (oldint) {
8810 tcc_warning("type defaults to int");
8811 }
8813 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8814 ad.asm_label = asm_label_instr();
8815 /* parse one last attribute list, after asm label */
8816 parse_attribute(&ad);
8817 #if 0
8818 /* gcc does not allow __asm__("label") with function definition,
8819 but why not ... */
8820 if (tok == '{')
8821 expect(";");
8822 #endif
8823 }
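/* Editorial example (GNU extension; identifier is illustrative): a renaming
   declaration such as
       int counter __asm__("hw_counter");
   is what feeds ad.asm_label here; attributes may still follow the label,
   hence the extra parse_attribute() call. */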
8825 #ifdef TCC_TARGET_PE
8826 if (ad.a.dllimport || ad.a.dllexport) {
8827 if (type.t & VT_STATIC)
8828 tcc_error("cannot have dll linkage with static");
8829 if (type.t & VT_TYPEDEF) {
8830 tcc_warning("'%s' attribute ignored for typedef",
8831 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8832 (ad.a.dllexport = 0, "dllexport"));
8833 } else if (ad.a.dllimport) {
8834 if ((type.t & VT_BTYPE) == VT_FUNC)
8835 ad.a.dllimport = 0;
8836 else
8837 type.t |= VT_EXTERN;
8838 }
8839 }
8840 #endif
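/* Editorial example (PE targets only; identifiers are illustrative):
       __declspec(dllimport) int shared_value;      forced extern above
       __declspec(dllimport) int get_value(void);   flag dropped for functions
       static __declspec(dllexport) int hidden;     "cannot have dll linkage
                                                     with static"
*/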
8841 if (tok == '{') {
8842 if (l != VT_CONST)
8843 tcc_error("cannot use local functions");
8844 if ((type.t & VT_BTYPE) != VT_FUNC)
8845 expect("function definition");
8847 /* reject abstract declarators in function definition
8848 make old style params without decl have int type */
8849 sym = type.ref;
8850 while ((sym = sym->next) != NULL) {
8851 if (!(sym->v & ~SYM_FIELD))
8852 expect("identifier");
8853 if (sym->type.t == VT_VOID)
8854 sym->type = int_type;
8855 }
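/* Editorial example (identifiers are illustrative): for definitions the
   fixup loop above means
       int f(a) { return a + 1; }     parameter 'a' defaults to int
       int g(int) { return 0; }       rejected, an identifier is expected
*/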
8857 /* apply post-declaration attributes */
8858 merge_funcattr(&type.ref->f, &ad.f);
8860 /* put function symbol */
8861 type.t &= ~VT_EXTERN;
8862 sym = external_sym(v, &type, 0, &ad);
8864 /* static inline functions are just recorded as a kind
8865 of macro. Their code will be emitted at the end of
8866 the compilation unit only if they are used */
8867 if (sym->type.t & VT_INLINE) {
8868 struct InlineFunc *fn;
8869 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8870 strcpy(fn->filename, file->filename);
8871 fn->sym = sym;
8872 skip_or_save_block(&fn->func_str);
8873 dynarray_add(&tcc_state->inline_fns,
8874 &tcc_state->nb_inline_fns, fn);
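/* Editorial note: only the raw token stream of the body is stored here
   (skip_or_save_block); no code is generated yet.  gen_inline_functions()
   earlier in this file replays it with begin_macro()/next() once the
   symbol turns out to be referenced. */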
8875 } else {
8876 /* compute text section */
8877 cur_text_section = ad.section;
8878 if (!cur_text_section)
8879 cur_text_section = text_section;
8880 gen_function(sym);
8881 }
8882 break;
8883 } else {
8884 if (l == VT_CMP) {
8885 /* find parameter in function parameter list */
8886 for (sym = func_sym->next; sym; sym = sym->next)
8887 if ((sym->v & ~SYM_FIELD) == v)
8888 goto found;
8889 tcc_error("declaration for parameter '%s' but no such parameter",
8890 get_tok_str(v, NULL));
8891 found:
8892 if (type.t & VT_STORAGE) /* 'register' is okay */
8893 tcc_error("storage class specified for '%s'",
8894 get_tok_str(v, NULL));
8895 if (sym->type.t != VT_VOID)
8896 tcc_error("redefinition of parameter '%s'",
8897 get_tok_str(v, NULL));
8898 convert_parameter_type(&type);
8899 sym->type = type;
8900 } else if (type.t & VT_TYPEDEF) {
8901 /* save typedefed type */
8902 /* XXX: test storage specifiers ? */
8903 sym = sym_find(v);
8904 if (sym && sym->sym_scope == local_scope) {
8905 if (!is_compatible_types(&sym->type, &type)
8906 || !(sym->type.t & VT_TYPEDEF))
8907 tcc_error("incompatible redefinition of '%s'",
8908 get_tok_str(v, NULL));
8909 sym->type = type;
8910 } else {
8911 sym = sym_push(v, &type, 0, 0);
8912 }
8913 sym->a = ad.a;
8914 sym->f = ad.f;
8915 if (debug_modes)
8916 tcc_debug_typedef (tcc_state, sym);
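/* Editorial example:
       typedef unsigned long word;
       typedef unsigned long word;    accepted, compatible redefinition
       typedef int word;              "incompatible redefinition of 'word'"
   (C11 allows the repeated compatible typedef in the same scope). */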
8917 } else if ((type.t & VT_BTYPE) == VT_VOID
8918 && !(type.t & VT_EXTERN)) {
8919 tcc_error("declaration of void object");
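/* Editorial example: 'void x;' is rejected here, while 'extern void x;'
   passes the VT_EXTERN test above (a GNU-style relaxation for objects of
   incomplete type). */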
8920 } else {
8921 r = 0;
8922 if ((type.t & VT_BTYPE) == VT_FUNC) {
8923 /* external function definition */
8924 /* specific case for func_call attribute */
8925 type.ref->f = ad.f;
8926 } else if (!(type.t & VT_ARRAY)) {
8927 /* not lvalue if array */
8928 r |= VT_LVAL;
8929 }
8930 has_init = (tok == '=');
8931 if (has_init && (type.t & VT_VLA))
8932 tcc_error("variable length array cannot be initialized");
8933 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8934 || (type.t & VT_BTYPE) == VT_FUNC
8935 /* as with GCC, uninitialized global arrays with no size
8936 are considered extern: */
8937 || ((type.t & VT_ARRAY) && !has_init
8938 && l == VT_CONST && type.ref->c < 0)
8939 ) {
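/* Editorial example: declarations taking this branch include
       extern int nfiles;           explicit extern, no initializer
       int limits[];                unsized, uninitialized global array
       int parse(const char *s);    function declaration
   none of which allocate storage at this point. */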
8940 /* external variable or function */
8941 type.t |= VT_EXTERN;
8942 sym = external_sym(v, &type, r, &ad);
8943 if (ad.alias_target) {
8944 /* Aliases need to be emitted when their target
8945 symbol is emitted, even if perhaps unreferenced.
8946 We only support the case where the base is
8947 already defined, otherwise we would need
8948 deferring to emit the aliases until the end of
8949 the compile unit. */
8950 Sym *alias_target = sym_find(ad.alias_target);
8951 ElfSym *esym = elfsym(alias_target);
8952 if (!esym)
8953 tcc_error("unsupported forward __alias__ attribute");
8954 put_extern_sym2(sym, esym->st_shndx,
8955 esym->st_value, esym->st_size, 1);
8956 }
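/* Editorial example (identifiers are illustrative):
       int real_impl(void) { return 1; }
       int shim(void) __attribute__((alias("real_impl")));
   works because 'real_impl' is defined before the alias is seen; with the
   two lines swapped the "unsupported forward __alias__" error above
   triggers. */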
8957 } else {
8958 if (type.t & VT_STATIC)
8959 r |= VT_CONST;
8960 else
8961 r |= l;
8962 if (has_init)
8963 next();
8964 else if (l == VT_CONST)
8965 /* uninitialized global variables may be overridden */
8966 type.t |= VT_EXTERN;
8967 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
8968 }
8969 }
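/* Editorial example: the VT_EXTERN flag set just above implements the
   usual tentative-definition behaviour, e.g.
       int bufsize;            may be overridden by ...
       int bufsize = 512;      ... an initialized definition seen later
*/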
8970 if (tok != ',') {
8971 if (is_for_loop_init)
8972 return 1;
8973 skip(';');
8974 break;
8975 }
8976 next();
8977 }
8978 }
8979 }
8980 return 0;
8981 }
8983 static void decl(int l)
8984 {
8985 decl0(l, 0, NULL);
8986 }
8988 /* ------------------------------------------------------------------------- */
8989 #undef gjmp_addr
8990 #undef gjmp
8991 /* ------------------------------------------------------------------------- */