/* Source: tinycc.git / tccgen.c
   (gitweb capture; page header: commit "tccelf.c: write section headers
   before sections", blob 1cf9330448d43e2fb109123393b2414ba4b08b70) */
/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 ST_DATA char debug_modes;
48 ST_DATA SValue *vtop;
49 static SValue _vstack[1 + VSTACK_SIZE];
50 #define vstack (_vstack + 1)
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
56 /* no code output after unconditional jumps such as with if (0) ... */
57 #define CODE_OFF_BIT 0x20000000
58 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
59 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
61 /* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
62 #define NOEVAL_MASK 0x0000FFFF
63 #define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)
65 /* no code output when parsing constant expressions */
66 #define CONST_WANTED_BIT 0x00010000
67 #define CONST_WANTED_MASK 0x0FFF0000
68 #define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)
70 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
71 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
72 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
73 ST_DATA int func_vc;
74 ST_DATA int func_ind;
75 ST_DATA const char *funcname;
76 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
77 static CString initstr;
79 #if PTR_SIZE == 4
80 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
81 #define VT_PTRDIFF_T VT_INT
82 #elif LONG_SIZE == 4
83 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
84 #define VT_PTRDIFF_T VT_LLONG
85 #else
86 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
87 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
88 #endif
90 static struct switch_t {
91 struct case_t {
92 int64_t v1, v2;
93 int sym;
94 } **p; int n; /* list of case ranges */
95 int def_sym; /* default symbol */
96 int nocode_wanted;
97 int *bsym;
98 struct scope *scope;
99 struct switch_t *prev;
100 SValue sv;
101 } *cur_switch; /* current switch */
103 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
104 /*list of temporary local variables on the stack in current function. */
105 static struct temp_local_variable {
106 int location; //offset on stack. Svalue.c.i
107 short size;
108 short align;
109 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
110 static int nb_temp_local_vars;
112 static struct scope {
113 struct scope *prev;
114 struct { int loc, locorig, num; } vla;
115 struct { Sym *s; int n; } cl;
116 int *bsym, *csym;
117 Sym *lstk, *llstk;
118 } *cur_scope, *loop_scope, *root_scope;
120 typedef struct {
121 Section *sec;
122 int local_offset;
123 Sym *flex_array_ref;
124 } init_params;
126 #if 1
127 #define precedence_parser
128 static void init_prec(void);
129 #endif
131 static void block(int flags);
132 #define STMT_EXPR 1
133 #define STMT_COMPOUND 2
135 static void gen_cast(CType *type);
136 static void gen_cast_s(int t);
137 static inline CType *pointed_type(CType *type);
138 static int is_compatible_types(CType *type1, CType *type2);
139 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
140 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
141 static void parse_expr_type(CType *type);
142 static void init_putv(init_params *p, CType *type, unsigned long c);
143 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
144 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
145 static int decl(int l);
146 static void expr_eq(void);
147 static void vpush_type_size(CType *type, int *a);
148 static int is_compatible_unqualified_types(CType *type1, CType *type2);
149 static inline int64_t expr_const64(void);
150 static void vpush64(int ty, unsigned long long v);
151 static void vpush(CType *type);
152 static int gvtst(int inv, int t);
153 static void gen_inline_functions(TCCState *s);
154 static void free_inline_functions(TCCState *s);
155 static void skip_or_save_block(TokenString **str);
156 static void gv_dup(void);
157 static int get_temp_local_var(int size,int align);
158 static void clear_temp_local_var_list();
159 static void cast_error(CType *st, CType *dt);
160 static void end_switch(void);
162 /* ------------------------------------------------------------------------- */
163 /* Automagical code suppression */
165 /* Clear 'nocode_wanted' at forward label if it was used */
166 ST_FUNC void gsym(int t)
168 if (t) {
169 gsym_addr(t, ind);
170 CODE_ON();
174 /* Clear 'nocode_wanted' if current pc is a label */
175 static int gind()
177 int t = ind;
178 CODE_ON();
179 if (debug_modes)
180 tcc_tcov_block_begin(tcc_state);
181 return t;
/* Set 'nocode_wanted' after unconditional (backwards) jump */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Set 'nocode_wanted' after unconditional (forwards) jump */
static int gjmp_acs(int t)
{
    t = gjmp(t);
    CODE_OFF();
    return t;
}

/* These are #undef'd at the end of this file */
#define gjmp_addr gjmp_addr_acs
#define gjmp gjmp_acs
202 /* ------------------------------------------------------------------------- */
204 ST_INLN int is_float(int t)
206 int bt = t & VT_BTYPE;
207 return bt == VT_LDOUBLE
208 || bt == VT_DOUBLE
209 || bt == VT_FLOAT
210 || bt == VT_QFLOAT;
213 static inline int is_integer_btype(int bt)
215 return bt == VT_BYTE
216 || bt == VT_BOOL
217 || bt == VT_SHORT
218 || bt == VT_INT
219 || bt == VT_LLONG;
222 static int btype_size(int bt)
224 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
225 bt == VT_SHORT ? 2 :
226 bt == VT_INT ? 4 :
227 bt == VT_LLONG ? 8 :
228 bt == VT_PTR ? PTR_SIZE : 0;
231 /* returns function return register from type */
232 static int R_RET(int t)
234 if (!is_float(t))
235 return REG_IRET;
236 #ifdef TCC_TARGET_X86_64
237 if ((t & VT_BTYPE) == VT_LDOUBLE)
238 return TREG_ST0;
239 #elif defined TCC_TARGET_RISCV64
240 if ((t & VT_BTYPE) == VT_LDOUBLE)
241 return REG_IRET;
242 #endif
243 return REG_FRET;
246 /* returns 2nd function return register, if any */
247 static int R2_RET(int t)
249 t &= VT_BTYPE;
250 #if PTR_SIZE == 4
251 if (t == VT_LLONG)
252 return REG_IRE2;
253 #elif defined TCC_TARGET_X86_64
254 if (t == VT_QLONG)
255 return REG_IRE2;
256 if (t == VT_QFLOAT)
257 return REG_FRE2;
258 #elif defined TCC_TARGET_RISCV64
259 if (t == VT_LDOUBLE)
260 return REG_IRE2;
261 #endif
262 return VT_CONST;
265 /* returns true for two-word types */
266 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
268 /* put function return registers to stack value */
269 static void PUT_R_RET(SValue *sv, int t)
271 sv->r = R_RET(t), sv->r2 = R2_RET(t);
274 /* returns function return register class for type t */
275 static int RC_RET(int t)
277 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
280 /* returns generic register class for type t */
281 static int RC_TYPE(int t)
283 if (!is_float(t))
284 return RC_INT;
285 #ifdef TCC_TARGET_X86_64
286 if ((t & VT_BTYPE) == VT_LDOUBLE)
287 return RC_ST0;
288 if ((t & VT_BTYPE) == VT_QFLOAT)
289 return RC_FRET;
290 #elif defined TCC_TARGET_RISCV64
291 if ((t & VT_BTYPE) == VT_LDOUBLE)
292 return RC_INT;
293 #endif
294 return RC_FLOAT;
297 /* returns 2nd register class corresponding to t and rc */
298 static int RC2_TYPE(int t, int rc)
300 if (!USING_TWO_WORDS(t))
301 return 0;
302 #ifdef RC_IRE2
303 if (rc == RC_IRET)
304 return RC_IRE2;
305 #endif
306 #ifdef RC_FRE2
307 if (rc == RC_FRET)
308 return RC_FRE2;
309 #endif
310 if (rc & RC_FLOAT)
311 return RC_FLOAT;
312 return RC_INT;
315 /* we use our own 'finite' function to avoid potential problems with
316 non standard math libs */
317 /* XXX: endianness dependent */
318 ST_FUNC int ieee_finite(double d)
320 int p[4];
321 memcpy(p, &d, sizeof(double));
322 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
325 /* compiling intel long double natively */
326 #if (defined __i386__ || defined __x86_64__) \
327 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
328 # define TCC_IS_NATIVE_387
329 #endif
331 ST_FUNC void test_lvalue(void)
333 if (!(vtop->r & VT_LVAL))
334 expect("lvalue");
337 ST_FUNC void check_vstack(void)
339 if (vtop != vstack - 1)
340 tcc_error("internal compiler error: vstack leak (%d)",
341 (int)(vtop - vstack + 1));
/* vstack debugging aid */
#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x  r:%04x  r2:%04x  c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
357 /* ------------------------------------------------------------------------- */
358 /* initialize vstack and types. This must be done also for tcc -E */
359 ST_FUNC void tccgen_init(TCCState *s1)
361 vtop = vstack - 1;
362 memset(vtop, 0, sizeof *vtop);
364 /* define some often used types */
365 int_type.t = VT_INT;
367 char_type.t = VT_BYTE;
368 if (s1->char_is_unsigned)
369 char_type.t |= VT_UNSIGNED;
370 char_pointer_type = char_type;
371 mk_pointer(&char_pointer_type);
373 func_old_type.t = VT_FUNC;
374 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
375 func_old_type.ref->f.func_call = FUNC_CDECL;
376 func_old_type.ref->f.func_type = FUNC_OLD;
377 #ifdef precedence_parser
378 init_prec();
379 #endif
380 cstr_new(&initstr);
383 ST_FUNC int tccgen_compile(TCCState *s1)
385 funcname = "";
386 func_ind = -1;
387 anon_sym = SYM_FIRST_ANOM;
388 nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
389 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
391 tcc_debug_start(s1);
392 tcc_tcov_start (s1);
393 #ifdef TCC_TARGET_ARM
394 arm_init(s1);
395 #endif
396 #ifdef INC_DEBUG
397 printf("%s: **** new file\n", file->filename);
398 #endif
399 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
400 next();
401 decl(VT_CONST);
402 gen_inline_functions(s1);
403 check_vstack();
404 /* end of translation unit info */
405 tcc_debug_end(s1);
406 tcc_tcov_end(s1);
407 return 0;
410 ST_FUNC void tccgen_finish(TCCState *s1)
412 tcc_debug_end(s1); /* just in case of errors: free memory */
413 free_inline_functions(s1);
414 sym_pop(&global_stack, NULL, 0);
415 sym_pop(&local_stack, NULL, 0);
416 /* free preprocessor macros */
417 free_defines(NULL);
418 /* free sym_pools */
419 dynarray_reset(&sym_pools, &nb_sym_pools);
420 cstr_free(&initstr);
421 dynarray_reset(&stk_data, &nb_stk_data);
422 while (cur_switch)
423 end_switch();
424 local_scope = 0;
425 loop_scope = NULL;
426 all_cleanups = NULL;
427 pending_gotos = NULL;
428 nb_temp_local_vars = 0;
429 global_label_stack = NULL;
430 local_label_stack = NULL;
431 cur_text_section = NULL;
432 sym_free_first = NULL;
435 /* ------------------------------------------------------------------------- */
436 ST_FUNC ElfSym *elfsym(Sym *s)
438 if (!s || !s->c)
439 return NULL;
440 return &((ElfSym *)symtab_section->data)[s->c];
443 /* apply storage attributes to Elf symbol */
444 ST_FUNC void update_storage(Sym *sym)
446 ElfSym *esym;
447 int sym_bind, old_sym_bind;
449 esym = elfsym(sym);
450 if (!esym)
451 return;
453 if (sym->a.visibility)
454 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
455 | sym->a.visibility;
457 if (sym->type.t & (VT_STATIC | VT_INLINE))
458 sym_bind = STB_LOCAL;
459 else if (sym->a.weak)
460 sym_bind = STB_WEAK;
461 else
462 sym_bind = STB_GLOBAL;
463 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
464 if (sym_bind != old_sym_bind) {
465 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
468 #ifdef TCC_TARGET_PE
469 if (sym->a.dllimport)
470 esym->st_other |= ST_PE_IMPORT;
471 if (sym->a.dllexport)
472 esym->st_other |= ST_PE_EXPORT;
473 #endif
475 #if 0
476 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
477 get_tok_str(sym->v, NULL),
478 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
479 sym->a.visibility,
480 sym->a.dllexport,
481 sym->a.dllimport
483 #endif
486 /* ------------------------------------------------------------------------- */
487 /* update sym->c so that it points to an external symbol in section
488 'section' with value 'value' */
490 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
491 addr_t value, unsigned long size,
492 int can_add_underscore)
494 int sym_type, sym_bind, info, other, t;
495 ElfSym *esym;
496 const char *name;
497 char buf1[256];
499 if (!sym->c) {
500 name = get_tok_str(sym->v, NULL);
501 t = sym->type.t;
502 if ((t & VT_BTYPE) == VT_FUNC) {
503 sym_type = STT_FUNC;
504 } else if ((t & VT_BTYPE) == VT_VOID) {
505 sym_type = STT_NOTYPE;
506 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
507 sym_type = STT_FUNC;
508 } else {
509 sym_type = STT_OBJECT;
511 if (t & (VT_STATIC | VT_INLINE))
512 sym_bind = STB_LOCAL;
513 else
514 sym_bind = STB_GLOBAL;
515 other = 0;
517 #ifdef TCC_TARGET_PE
518 if (sym_type == STT_FUNC && sym->type.ref) {
519 Sym *ref = sym->type.ref;
520 if (ref->a.nodecorate) {
521 can_add_underscore = 0;
523 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
524 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
525 name = buf1;
526 other |= ST_PE_STDCALL;
527 can_add_underscore = 0;
530 #endif
532 if (sym->asm_label) {
533 name = get_tok_str(sym->asm_label, NULL);
534 can_add_underscore = 0;
537 if (tcc_state->leading_underscore && can_add_underscore) {
538 buf1[0] = '_';
539 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
540 name = buf1;
543 info = ELFW(ST_INFO)(sym_bind, sym_type);
544 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
546 if (debug_modes)
547 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
549 } else {
550 esym = elfsym(sym);
551 esym->st_value = value;
552 esym->st_size = size;
553 esym->st_shndx = sh_num;
555 update_storage(sym);
558 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
560 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
561 return;
562 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
565 /* add a new relocation entry to symbol 'sym' in section 's' */
566 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
567 addr_t addend)
569 int c = 0;
571 if (nocode_wanted && s == cur_text_section)
572 return;
574 if (sym) {
575 if (0 == sym->c)
576 put_extern_sym(sym, NULL, 0, 0);
577 c = sym->c;
580 /* now we can add ELF relocation info */
581 put_elf_reloca(symtab_section, s, offset, type, c, addend);
#if PTR_SIZE == 4
/* 32-bit convenience wrapper: relocation with zero addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
591 /* ------------------------------------------------------------------------- */
592 /* symbol allocator */
593 static Sym *__sym_malloc(void)
595 Sym *sym_pool, *sym, *last_sym;
596 int i;
598 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
599 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
601 last_sym = sym_free_first;
602 sym = sym_pool;
603 for(i = 0; i < SYM_POOL_NB; i++) {
604 sym->next = last_sym;
605 last_sym = sym;
606 sym++;
608 sym_free_first = last_sym;
609 return last_sym;
612 static inline Sym *sym_malloc(void)
614 Sym *sym;
615 #ifndef SYM_DEBUG
616 sym = sym_free_first;
617 if (!sym)
618 sym = __sym_malloc();
619 sym_free_first = sym->next;
620 return sym;
621 #else
622 sym = tcc_malloc(sizeof(Sym));
623 return sym;
624 #endif
627 ST_INLN void sym_free(Sym *sym)
629 #ifndef SYM_DEBUG
630 sym->next = sym_free_first;
631 sym_free_first = sym;
632 #else
633 tcc_free(sym);
634 #endif
637 /* push, without hashing */
638 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
640 Sym *s;
642 s = sym_malloc();
643 memset(s, 0, sizeof *s);
644 s->v = v;
645 s->type.t = t;
646 s->c = c;
647 /* add in stack */
648 s->prev = *ps;
649 *ps = s;
650 return s;
653 /* find a symbol and return its associated structure. 's' is the top
654 of the symbol stack */
655 ST_FUNC Sym *sym_find2(Sym *s, int v)
657 while (s) {
658 if (s->v == v)
659 return s;
660 s = s->prev;
662 return NULL;
665 /* structure lookup */
666 ST_INLN Sym *struct_find(int v)
668 v -= TOK_IDENT;
669 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
670 return NULL;
671 return table_ident[v]->sym_struct;
674 /* find an identifier */
675 ST_INLN Sym *sym_find(int v)
677 v -= TOK_IDENT;
678 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
679 return NULL;
680 return table_ident[v]->sym_identifier;
683 static int sym_scope(Sym *s)
685 if (IS_ENUM_VAL (s->type.t))
686 return s->type.ref->sym_scope;
687 else
688 return s->sym_scope;
691 /* push a given symbol on the symbol stack */
692 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
694 Sym *s, **ps;
695 TokenSym *ts;
697 if (local_stack)
698 ps = &local_stack;
699 else
700 ps = &global_stack;
701 s = sym_push2(ps, v, type->t, c);
702 s->type.ref = type->ref;
703 s->r = r;
704 /* don't record fields or anonymous symbols */
705 /* XXX: simplify */
706 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
707 /* record symbol in token array */
708 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
709 if (v & SYM_STRUCT)
710 ps = &ts->sym_struct;
711 else
712 ps = &ts->sym_identifier;
713 s->prev_tok = *ps;
714 *ps = s;
715 s->sym_scope = local_scope;
716 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
717 tcc_error("redeclaration of '%s'",
718 get_tok_str(v & ~SYM_STRUCT, NULL));
720 return s;
723 /* push a global identifier */
724 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
726 Sym *s, **ps;
727 s = sym_push2(&global_stack, v, t, c);
728 s->r = VT_CONST | VT_SYM;
729 /* don't record anonymous symbol */
730 if (v < SYM_FIRST_ANOM) {
731 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
732 /* modify the top most local identifier, so that sym_identifier will
733 point to 's' when popped; happens when called from inline asm */
734 while (*ps != NULL && (*ps)->sym_scope)
735 ps = &(*ps)->prev_tok;
736 s->prev_tok = *ps;
737 *ps = s;
739 return s;
742 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
743 pop them yet from the list, but do remove them from the token array. */
744 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
746 Sym *s, *ss, **ps;
747 TokenSym *ts;
748 int v;
750 s = *ptop;
751 while(s != b) {
752 ss = s->prev;
753 v = s->v;
754 /* remove symbol in token array */
755 /* XXX: simplify */
756 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
757 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
758 if (v & SYM_STRUCT)
759 ps = &ts->sym_struct;
760 else
761 ps = &ts->sym_identifier;
762 *ps = s->prev_tok;
764 if (!keep)
765 sym_free(s);
766 s = ss;
768 if (!keep)
769 *ptop = b;
772 /* label lookup */
773 ST_FUNC Sym *label_find(int v)
775 v -= TOK_IDENT;
776 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
777 return NULL;
778 return table_ident[v]->sym_label;
781 ST_FUNC Sym *label_push(Sym **ptop, int v, int flags)
783 Sym *s, **ps;
784 s = sym_push2(ptop, v, VT_STATIC, 0);
785 s->r = flags;
786 ps = &table_ident[v - TOK_IDENT]->sym_label;
787 if (ptop == &global_label_stack) {
788 /* modify the top most local identifier, so that
789 sym_identifier will point to 's' when popped */
790 while (*ps != NULL)
791 ps = &(*ps)->prev_tok;
793 s->prev_tok = *ps;
794 *ps = s;
795 return s;
798 /* pop labels until element last is reached. Look if any labels are
799 undefined. Define symbols if '&&label' was used. */
800 ST_FUNC void label_pop(Sym **ptop, Sym *slast, int keep)
802 Sym *s, *s1;
803 for(s = *ptop; s != slast; s = s1) {
804 s1 = s->prev;
805 if (s->r == LABEL_DECLARED) {
806 tcc_warning_c(warn_all)("label '%s' declared but not used", get_tok_str(s->v, NULL));
807 } else if (s->r == LABEL_FORWARD) {
808 tcc_error("label '%s' used but not defined",
809 get_tok_str(s->v, NULL));
810 } else {
811 if (s->c) {
812 /* define corresponding symbol. A size of
813 1 is put. */
814 put_extern_sym(s, cur_text_section, s->jnext, 1);
817 /* remove label */
818 if (s->r != LABEL_GONE)
819 table_ident[s->v - TOK_IDENT]->sym_label = s->prev_tok;
820 if (!keep)
821 sym_free(s);
822 else
823 s->r = LABEL_GONE;
825 if (!keep)
826 *ptop = slast;
829 /* ------------------------------------------------------------------------- */
830 static void vcheck_cmp(void)
832 /* cannot let cpu flags if other instruction are generated. Also
833 avoid leaving VT_JMP anywhere except on the top of the stack
834 because it would complicate the code generator.
836 Don't do this when nocode_wanted. vtop might come from
837 !nocode_wanted regions (see 88_codeopt.c) and transforming
838 it to a register without actually generating code is wrong
839 as their value might still be used for real. All values
840 we push under nocode_wanted will eventually be popped
841 again, so that the VT_CMP/VT_JMP value will be in vtop
842 when code is unsuppressed again. */
844 /* However if it's just automatic suppression via CODE_OFF/ON()
845 then it seems that we better let things work undisturbed.
846 How can it work at all under nocode_wanted? Well, gv() will
847 actually clear it at the gsym() in load()/VT_JMP in the
848 generator backends */
850 if (vtop->r == VT_CMP && 0 == (nocode_wanted & ~CODE_OFF_BIT))
851 gv(RC_INT);
854 static void vsetc(CType *type, int r, CValue *vc)
856 if (vtop >= vstack + (VSTACK_SIZE - 1))
857 tcc_error("memory full (vstack)");
858 vcheck_cmp();
859 vtop++;
860 vtop->type = *type;
861 vtop->r = r;
862 vtop->r2 = VT_CONST;
863 vtop->c = *vc;
864 vtop->sym = NULL;
867 ST_FUNC void vswap(void)
869 SValue tmp;
871 vcheck_cmp();
872 tmp = vtop[0];
873 vtop[0] = vtop[-1];
874 vtop[-1] = tmp;
877 /* pop stack value */
878 ST_FUNC void vpop(void)
880 int v;
881 v = vtop->r & VT_VALMASK;
882 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
883 /* for x86, we need to pop the FP stack */
884 if (v == TREG_ST0) {
885 o(0xd8dd); /* fstp %st(0) */
886 } else
887 #endif
888 if (v == VT_CMP) {
889 /* need to put correct jump if && or || without test */
890 gsym(vtop->jtrue);
891 gsym(vtop->jfalse);
893 vtop--;
896 /* push constant of type "type" with useless value */
897 static void vpush(CType *type)
899 vset(type, VT_CONST, 0);
902 /* push arbitrary 64bit constant */
903 static void vpush64(int ty, unsigned long long v)
905 CValue cval;
906 CType ctype;
907 ctype.t = ty;
908 ctype.ref = NULL;
909 cval.i = v;
910 vsetc(&ctype, VT_CONST, &cval);
913 /* push integer constant */
914 ST_FUNC void vpushi(int v)
916 vpush64(VT_INT, v);
919 /* push a pointer sized constant */
920 static void vpushs(addr_t v)
922 vpush64(VT_SIZE_T, v);
925 /* push long long constant */
926 static inline void vpushll(long long v)
928 vpush64(VT_LLONG, v);
931 ST_FUNC void vset(CType *type, int r, int v)
933 CValue cval;
934 cval.i = v;
935 vsetc(type, r, &cval);
938 static void vseti(int r, int v)
940 CType type;
941 type.t = VT_INT;
942 type.ref = NULL;
943 vset(&type, r, v);
946 ST_FUNC void vpushv(SValue *v)
948 if (vtop >= vstack + (VSTACK_SIZE - 1))
949 tcc_error("memory full (vstack)");
950 vtop++;
951 *vtop = *v;
954 static void vdup(void)
956 vpushv(vtop);
959 /* rotate n first stack elements to the bottom
960 I1 ... In -> I2 ... In I1 [top is right]
962 ST_FUNC void vrotb(int n)
964 int i;
965 SValue tmp;
967 vcheck_cmp();
968 tmp = vtop[-n + 1];
969 for(i=-n+1;i!=0;i++)
970 vtop[i] = vtop[i+1];
971 vtop[0] = tmp;
974 /* rotate the n elements before entry e towards the top
975 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
977 ST_FUNC void vrote(SValue *e, int n)
979 int i;
980 SValue tmp;
982 vcheck_cmp();
983 tmp = *e;
984 for(i = 0;i < n - 1; i++)
985 e[-i] = e[-i - 1];
986 e[-n + 1] = tmp;
989 /* rotate n first stack elements to the top
990 I1 ... In -> In I1 ... I(n-1) [top is right]
992 ST_FUNC void vrott(int n)
994 vrote(vtop, n);
997 /* ------------------------------------------------------------------------- */
998 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1000 /* called from generators to set the result from relational ops */
1001 ST_FUNC void vset_VT_CMP(int op)
1003 vtop->r = VT_CMP;
1004 vtop->cmp_op = op;
1005 vtop->jfalse = 0;
1006 vtop->jtrue = 0;
1009 /* called once before asking generators to load VT_CMP to a register */
1010 static void vset_VT_JMP(void)
1012 int op = vtop->cmp_op;
1014 if (vtop->jtrue || vtop->jfalse) {
1015 int origt = vtop->type.t;
1016 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1017 int inv = op & (op < 2); /* small optimization */
1018 vseti(VT_JMP+inv, gvtst(inv, 0));
1019 vtop->type.t |= origt & (VT_UNSIGNED | VT_DEFSIGN);
1020 } else {
1021 /* otherwise convert flags (rsp. 0/1) to register */
1022 vtop->c.i = op;
1023 if (op < 2) /* doesn't seem to happen */
1024 vtop->r = VT_CONST;
1028 /* Set CPU Flags, doesn't yet jump */
1029 static void gvtst_set(int inv, int t)
1031 int *p;
1033 if (vtop->r != VT_CMP) {
1034 vpushi(0);
1035 gen_op(TOK_NE);
1036 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1037 vset_VT_CMP(vtop->c.i != 0);
1040 p = inv ? &vtop->jfalse : &vtop->jtrue;
1041 *p = gjmp_append(*p, t);
1044 /* Generate value test
1046 * Generate a test for any value (jump, comparison and integers) */
1047 static int gvtst(int inv, int t)
1049 int op, x, u;
1051 gvtst_set(inv, t);
1052 t = vtop->jtrue, u = vtop->jfalse;
1053 if (inv)
1054 x = u, u = t, t = x;
1055 op = vtop->cmp_op;
1057 /* jump to the wanted target */
1058 if (op > 1)
1059 t = gjmp_cond(op ^ inv, t);
1060 else if (op != inv)
1061 t = gjmp(t);
1062 /* resolve complementary jumps to here */
1063 gsym(u);
1065 vtop--;
1066 return t;
1069 /* generate a zero or nozero test */
1070 static void gen_test_zero(int op)
1072 if (vtop->r == VT_CMP) {
1073 int j;
1074 if (op == TOK_EQ) {
1075 j = vtop->jfalse;
1076 vtop->jfalse = vtop->jtrue;
1077 vtop->jtrue = j;
1078 vtop->cmp_op ^= 1;
1080 } else {
1081 vpushi(0);
1082 gen_op(op);
1086 /* ------------------------------------------------------------------------- */
1087 /* push a symbol value of TYPE */
1088 ST_FUNC void vpushsym(CType *type, Sym *sym)
1090 CValue cval;
1091 cval.i = 0;
1092 vsetc(type, VT_CONST | VT_SYM, &cval);
1093 vtop->sym = sym;
1096 /* Return a static symbol pointing to a section */
1097 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1099 int v;
1100 Sym *sym;
1102 v = anon_sym++;
1103 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1104 sym->type.t |= VT_STATIC;
1105 put_extern_sym(sym, sec, offset, size);
1106 return sym;
1109 /* push a reference to a section offset by adding a dummy symbol */
1110 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1112 vpushsym(type, get_sym_ref(type, sec, offset, size));
1115 /* define a new external reference to a symbol 'v' of type 'u' */
1116 ST_FUNC Sym *external_global_sym(int v, CType *type)
1118 Sym *s;
1120 s = sym_find(v);
1121 if (!s) {
1122 /* push forward reference */
1123 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1124 s->type.ref = type->ref;
1125 } else if (IS_ASM_SYM(s)) {
1126 s->type.t = type->t | (s->type.t & VT_EXTERN);
1127 s->type.ref = type->ref;
1128 update_storage(s);
1130 return s;
1133 /* create an external reference with no specific type similar to asm labels.
1134 This avoids type conflicts if the symbol is used from C too */
1135 ST_FUNC Sym *external_helper_sym(int v)
1137 CType ct = { VT_ASM_FUNC, NULL };
1138 return external_global_sym(v, &ct);
1141 /* push a reference to an helper function (such as memmove) */
1142 ST_FUNC void vpush_helper_func(int v)
1144 vpushsym(&func_old_type, external_helper_sym(v));
1147 /* Merge symbol attributes. */
1148 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1150 if (sa1->aligned && !sa->aligned)
1151 sa->aligned = sa1->aligned;
1152 sa->packed |= sa1->packed;
1153 sa->weak |= sa1->weak;
1154 sa->nodebug |= sa1->nodebug;
1155 if (sa1->visibility != STV_DEFAULT) {
1156 int vis = sa->visibility;
1157 if (vis == STV_DEFAULT
1158 || vis > sa1->visibility)
1159 vis = sa1->visibility;
1160 sa->visibility = vis;
1162 sa->dllexport |= sa1->dllexport;
1163 sa->nodecorate |= sa1->nodecorate;
1164 sa->dllimport |= sa1->dllimport;
1167 /* Merge function attributes. */
1168 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1170 if (fa1->func_call && !fa->func_call)
1171 fa->func_call = fa1->func_call;
1172 if (fa1->func_type && !fa->func_type)
1173 fa->func_type = fa1->func_type;
1174 if (fa1->func_args && !fa->func_args)
1175 fa->func_args = fa1->func_args;
1176 if (fa1->func_noreturn)
1177 fa->func_noreturn = 1;
1178 if (fa1->func_ctor)
1179 fa->func_ctor = 1;
1180 if (fa1->func_dtor)
1181 fa->func_dtor = 1;
1184 /* Merge attributes. */
1185 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1187 merge_symattr(&ad->a, &ad1->a);
1188 merge_funcattr(&ad->f, &ad1->f);
1190 if (ad1->section)
1191 ad->section = ad1->section;
1192 if (ad1->alias_target)
1193 ad->alias_target = ad1->alias_target;
1194 if (ad1->asm_label)
1195 ad->asm_label = ad1->asm_label;
1196 if (ad1->attr_mode)
1197 ad->attr_mode = ad1->attr_mode;
/* Merge some type attributes: reconcile the previously declared symbol
   'sym' with a new declaration of type 'type'.  Diagnoses plain
   redefinitions and incompatible redeclarations, and merges
   static/inline/extern storage for functions as well as array sizes
   completed by a later declaration. */
static void patch_type(Sym *sym, CType *type)
{
    /* a non-extern redeclaration completes the symbol; a second
       non-extern definition is an error (enum values excepted) */
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
        if ((type->t & VT_BTYPE) != VT_FUNC && !(type->t & VT_ARRAY))
            sym->r |= VT_LVAL;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here.  Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds. */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
             || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            struct FuncAttr f = sym->type.ref->f;
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
            merge_funcattr(&sym->type.ref->f, &f);
        } else {
            sym->type.t &= ~VT_INLINE | static_proto;
        }

        /* a prototype with named parameters supersedes an old-style one */
        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                get_tok_str(sym->v, NULL));
    }
}
/* Merge some storage attributes: apply a redeclaration (attributes 'ad',
   optional new type 'type') onto the already known symbol 'sym', then
   push the merged storage info to the object file backend. */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    /* dllimport must be consistent across all declarations of a symbol */
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
            get_tok_str(sym->v, NULL));
#endif
    merge_symattr(&sym->a, &ad->a);
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);
}
/* copy sym to other stack: duplicate 's0' and push the copy onto the
   stack '*ps'.  For named (non-anonymous) symbols the copy is also
   linked into the identifier's token chain so sym_find() sees it. */
static Sym *sym_copy(Sym *s0, Sym **ps)
{
    Sym *s;
    s = sym_malloc(), *s = *s0;
    s->prev = *ps, *ps = s;
    if (s->v < SYM_FIRST_ANOM) {
        ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
        s->prev_tok = *ps, *ps = s;
    }
    return s;
}
/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR (and local
   struct types).  Recursively deep-copies the whole chain of type
   reference symbols so the type survives the originating scope. */
static void sym_copy_ref(Sym *s, Sym **ps)
{
    int bt = s->type.t & VT_BTYPE;
    if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
        Sym **sp = &s->type.ref;
        /* rebuild the ->next chain out of fresh copies on 'ps' */
        for (s = *sp, *sp = NULL; s; s = s->next) {
            Sym *s2 = sym_copy(s, ps);
            sp = &(*sp = s2)->next;
            sym_copy_ref(s2, ps);
        }
    }
}
/* define a new external reference to a symbol 'v'.  If the symbol is
   unknown at file scope a forward reference is pushed on the global
   stack; otherwise the new declaration is merged into the existing one.
   Inside a function, non-function symbols additionally get a copy on
   the local stack so block-scope lookup works. */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    /* look for global symbol (skip any block-scope shadows) */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        /* copy type to the global stack */
        if (local_stack)
            sym_copy_ref(s, &global_stack);
    } else {
        patch_storage(s, ad, type);
    }
    /* push variables on local_stack if any */
    if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
1338 /* save registers up to (vtop - n) stack entry */
1339 ST_FUNC void save_regs(int n)
1341 SValue *p, *p1;
1342 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1343 save_reg(p->r);
/* save r to the memory stack, and mark it as being free.
   Convenience wrapper: spills 'r' wherever it is referenced on the
   whole value stack (n == 0). */
ST_FUNC void save_reg(int r)
{
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry.  The register contents are
   stored once into a temporary stack slot; every value-stack entry
   that referenced the register is then redirected to that slot. */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, size, align, bt;
    SValue *p, *p1, sv;

    /* only real registers can be spilled */
    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || p->r2 == r) {
            /* must save value on stack if not already done */
            if (!l) {
                bt = p->type.t & VT_BTYPE;
                if (bt == VT_VOID)
                    continue;
                /* lvalues and functions hold an address, store as pointer */
                if ((p->r & VT_LVAL) || bt == VT_FUNC)
                    bt = VT_PTR;
                sv.type.t = bt;
                size = type_size(&sv.type, &align);
                l = get_temp_local_var(size,align);
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(p->r & VT_VALMASK, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = VT_LVAL | VT_LOCAL;
                p->type.t &= ~VT_ARRAY; /* cannot combine VT_LVAL with VT_ARRAY */
            }
            p->sym = NULL;
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            /* count value-stack references to register r (either word) */
            n=0;
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    /* nothing lightly-used in rc2: fall back to the generic allocator */
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register to
   the stack (spilling it) and return it. */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* when no code is generated, any register will do */
            if (nocode_wanted)
                return r;
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
/* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
static int get_temp_local_var(int size,int align){
    int i;
    struct temp_local_variable *temp_var;
    int found_var;
    SValue *p;
    int r;
    char free;
    char found;
    found=0;
    /* scan the pool of previously created temporaries for one that is
       large enough, has the requested alignment and is not referenced
       by any live value-stack entry */
    for(i=0;i<nb_temp_local_vars;i++){
        temp_var=&arr_temp_local_vars[i];
        if(temp_var->size<size||align!=temp_var->align){
            continue;
        }
        /*check if temp_var is free*/
        free=1;
        for(p=vstack;p<=vtop;p++) {
            r=p->r&VT_VALMASK;
            if(r==VT_LOCAL||r==VT_LLOCAL){
                if(p->c.i==temp_var->location){
                    free=0;
                    break;
                }
            }
        }
        if(free){
            found_var=temp_var->location;
            found=1;
            break;
        }
    }
    if(!found){
        /* allocate a fresh slot below the current frame offset */
        loc = (loc - size) & -align;
        if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
            /* i == nb_temp_local_vars here: record the slot for reuse */
            temp_var=&arr_temp_local_vars[i];
            temp_var->location=loc;
            temp_var->size=size;
            temp_var->align=align;
            nb_temp_local_vars++;
        }
        found_var=loc;
    }
    return found_var;
}
/* forget all recorded temporary local variables (called when the pool
   of spill slots must be reset, e.g. between statements) */
static void clear_temp_local_var_list(){
    nb_temp_local_vars=0;
}
1523 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1524 if needed */
1525 static void move_reg(int r, int s, int t)
1527 SValue sv;
1529 if (r != s) {
1530 save_reg(r);
1531 sv.type.t = t;
1532 sv.type.ref = NULL;
1533 sv.r = s;
1534 sv.c.i = 0;
1535 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate a bounded pointer addition: replace "ptr + ofs" on the value
   stack by a call to __bound_ptr_add(ptr, ofs).  The call's relocation
   offset is remembered in vtop->c.i so gen_bounded_ptr_deref() can later
   retarget it to a size-specific checking helper. */
static void gen_bounded_ptr_add(void)
{
    int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
    if (save) {
        /* keep a copy of the local-based pointer below the call args */
        vpushv(&vtop[-1]);
        vrott(3);
    }
    vpush_helper_func(TOK___bound_ptr_add);
    vrott(3);
    gfunc_call(2);
    vtop -= save;
    vpushi(0);
    /* returned pointer is in REG_IRET */
    vtop->r = REG_IRET | VT_BOUNDED;
    if (nocode_wanted)
        return;
    /* relocation offset of the bounding function call point */
    vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
}
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested: rewrite the relocation recorded by gen_bounded_ptr_add()
   to point at the __bound_ptr_indirN helper matching the access size. */
static void gen_bounded_ptr_deref(void)
{
    addr_t func;
    int size, align;
    ElfW_Rel *rel;
    Sym *sym;

    if (nocode_wanted)
        return;

    size = type_size(&vtop->type, &align);
    switch(size) {
    case  1: func = TOK___bound_ptr_indir1; break;
    case  2: func = TOK___bound_ptr_indir2; break;
    case  4: func = TOK___bound_ptr_indir4; break;
    case  8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        /* may happen with struct member access */
        return;
    }
    sym = external_helper_sym(func);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0, 0);
    /* patch relocation */
    /* XXX: find a better solution ? */
    rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
    rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
}
/* generate lvalue bound code: make sure vtop carries a bounds-checked
   pointer before it is dereferenced. */
static void gbound(void)
{
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= VT_LVAL;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
/* we need to call __bound_ptr_add before we start to load function
   args into registers.  Also instruments calls to setjmp-family
   functions (and alloca / longjmp on some targets) for the bounds
   checker. */
ST_FUNC void gbound_args(int nb_args)
{
    int i, v;
    SValue *sv;

    /* bound-check every argument that still needs it */
    for (i = 1; i <= nb_args; ++i)
        if (vtop[1 - i].r & VT_MUSTBOUND) {
            vrotb(i);
            gbound();
            vrott(i);
        }

    sv = vtop - nb_args; /* the called function itself */
    if (sv->r & VT_SYM) {
        v = sv->sym->v;
        if (v == TOK_setjmp
          || v == TOK__setjmp
#ifndef TCC_TARGET_PE
          || v == TOK_sigsetjmp
          || v == TOK___sigsetjmp
#endif
            ) {
            /* register the jmp_buf with the bounds checker */
            vpush_helper_func(TOK___bound_setjmp);
            vpushv(sv + 1);
            gfunc_call(1);
            func_bound_add_epilog = 1;
        }
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
        if (v == TOK_alloca)
            func_bound_add_epilog = 1;
#endif
#if TARGETOS_NetBSD
        if (v == TOK_longjmp) /* undo rename to __longjmp14 */
            sv->sym->asm_label = TOK___bound_longjmp;
#endif
    }
}
/* Add bounds for local symbols from S to E (via ->prev).  Emits a
   (frame offset, size) pair into lbounds_section for every stack
   variable whose address may escape. */
static void add_local_bounds(Sym *s, Sym *e)
{
    for (; s != e; s = s->prev) {
        if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
            continue;
        /* Add arrays/structs/unions because we always take address */
        if ((s->type.t & VT_ARRAY)
            || (s->type.t & VT_BTYPE) == VT_STRUCT
            || s->a.addrtaken) {
            /* add local bound info */
            int align, size = type_size(&s->type, &align);
            addr_t *bounds_ptr = section_ptr_add(lbounds_section,
                                                 2 * sizeof(addr_t));
            bounds_ptr[0] = s->c;   /* frame offset */
            bounds_ptr[1] = size;
        }
    }
}
#endif
/* Wrapper around sym_pop, that potentially also registers local bounds
   (when bounds checking is enabled) and emits debug info for the
   symbols leaving scope. */
static void pop_local_syms(Sym *b, int keep)
{
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
        add_local_bounds(local_stack, b);
#endif
    if (debug_modes)
        tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
    sym_pop(&local_stack, b, keep);
}
/* increment an lvalue pointer by 'offset' bytes, keeping it an lvalue
   of the same type */
static void incr_offset(int offset)
{
    int t = vtop->type.t;
    gaddrof(); /* remove VT_LVAL */
    vtop->type.t = VT_PTRDIFF_T; /* set scalar type */
    vpushs(offset);
    gen_op('+');
    vtop->r |= VT_LVAL;
    vtop->type.t = t;
}
/* advance the bitfield access address on vtop by 'o' bytes and view it
   as an unsigned byte lvalue (used by the packed-bitfield helpers) */
static void incr_bf_adr(int o)
{
    vtop->type.t = VT_BYTE | VT_UNSIGNED;
    incr_offset(o);
}
/* single-byte load mode for packed or otherwise unaligned bitfields:
   assemble the field value byte by byte, then sign-extend if the field
   type is signed.  The stack comments track (B = byte address,
   X = accumulated result, Y = current byte fragment). */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* sign extend: shift the field to the top then arithmetic
           shift it back down */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields:
   write the value byte by byte, merging each fragment with the bits of
   the destination byte that lie outside the field.  Stack comments
   track (B = byte address, X/V = value being stored). */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;
    /* constants can be re-pushed cheaply; others need a register dup */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
/* Adjust the value on vtop for bitfield access: if the field's type
   reference carries an auxiliary access type, switch to it.  Returns
   the auxtype (-1 when none, VT_STRUCT when single-byte packed access
   is required, 0 when there is no type reference at all). */
static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
{
    int t;
    if (0 == sv->type.ref)
        return 0;
    t = sv->type.ref->auxtype;
    if (t != -1 && t != VT_STRUCT) {
        sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
        sv->r |= VT_LVAL;
    }
    return t;
}
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures).  Returns the register that now
   holds the (first word of the) value. */
ST_FUNC int gv(int rc)
{
    int r, r2, r_ok, r2_ok, rc2, bt;
    int bit_pos, bit_size, size, align;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* unaligned/packed field: load byte-wise */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            init_params p = { rodata_section };
            unsigned long offset;
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(p.sec, size, align);
            vpush_ref(&vtop->type, p.sec, offset, size);
            vswap();
            init_putv(&p, &vtop->type, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        bt = vtop->type.t & VT_BTYPE;

#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if (bt == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif
        /* register class for the second word of a two-word value
           (0 when the type fits in a single register) */
        rc2 = RC2_TYPE(bt, rc);

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        r = vtop->r & VT_VALMASK;
        r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
        r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));

        if (!r_ok || !r2_ok) {

            if (!r_ok) {
                if (1 /* we can 'mov (r),r' in cases */
                    && r < VT_CONST
                    && (reg_classes[r] & rc)
                    && !rc2
                   )
                    save_reg_upstack(r, 1);
                else
                    r = get_reg(rc);
            }

            if (rc2) {
                int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
                int original_type = vtop->type.t;

                /* two register type load :
                   expand to two words temporarily */
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    unsigned long long ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long pointer here.
                       So we save any other instances down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    incr_offset(PTR_SIZE);
                } else {
                    /* move registers */
                    if (!r_ok)
                        load(r, vtop);
                    if (r2_ok && vtop->r2 < VT_CONST)
                        goto done;
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
            done:
                vtop->type.t = original_type;
            } else {
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if (bt == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints: after the call, vtop[-1] holds
   the low word and vtop[0] the high word, both typed VT_INT (with the
   original signedness preserved). */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: the high word is just the upper 32 bits */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* memory operand: the high word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* register pair: split r/r2 into two single-register values */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints: combine vtop[-1] (low word) and
   vtop[0] (high word) into a single two-register value of type 't' */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int t, rc, r;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* materialize the bitfield first so we duplicate the value,
               not the field description */
            gv(RC_INT);
            t = vtop->type.t;
        }
        /* duplicate word by word, then rebuild both long longs */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
        return;
    }
#endif
    /* duplicate value */
    rc = RC_TYPE(t);
    gv(rc);
    r = get_reg(rc);
    vdup();
    load(r, vtop);
    vtop->r = r;
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations on 32-bit
   targets.  Division/modulo and variable shifts are delegated to
   libgcc-style runtime helpers; the remaining operations are expanded
   into 32-bit word operations on the value stack. */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_ldivmod helpers return the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply: full unsigned product of the low words,
               plus the two cross products added into the high word */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops act on the words independently */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* shift moves entirely across the word boundary */
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic shift fills with the sign word */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* Signed 64-bit division performed on unsigned representations:
   divide the magnitudes, then negate the quotient when the operands
   have different signs. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = a, ub = b, q;

    if (ua >> 63)
        ua = -ua;
    if (ub >> 63)
        ub = -ub;
    q = ua / ub;
    if ((a ^ b) >> 63)
        q = -q;
    return q;
}
/* Signed "a < b" on values carried in uint64_t: flipping the sign bit
   maps signed order onto unsigned order, so a plain unsigned compare
   gives the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
/* handle integer constant optimizations and various machine
   independent opt: fold operations on two constants, apply algebraic
   simplifications (x*1, x+0, x&-1, strength-reduce mul/div by powers
   of two, symbol+constant), and otherwise fall through to the target
   code generators gen_opi()/gen_opl(). */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;
    int r;

    /* normalize 32-bit operands: truncate and sign-extend as needed */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both operands constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (CONST_WANTED && !NOEVAL_WANTED)
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic right shift without relying on implementation-
               defined signed shift */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
            /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
            /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        /* re-normalize the 32-bit result */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        v1->r |= v2->r & VT_NONCONST;
        vtop--;
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vpop();
        } else if (c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                          op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (r = vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM),
                    r == (VT_CONST | VT_SYM) || r == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31.  Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
        if (vtop->r == VT_CONST)
            vtop->r |= VT_NONCONST; /* is const, but only by optimization */
    }
}
/* Floating point negation.  On x86 targets the backend's gen_opf()
   already handles TOK_NEG; other targets get a portable fallback. */
#if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
# define gen_negf gen_opf
#elif defined TCC_TARGET_ARM
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
#else
/* XXX: implement in gen_opf() for other backends too */
void gen_negf(int op)
{
    /* In IEEE negate(x) isn't subtract(0,x).  Without NaNs it's
       subtract(-0, x), but with them it's really a sign flip
       operation.  We implement this with bit manipulation and have
       to do some type reinterpretation for this, which TCC can do
       only via memory. */

    int align, size, bt;

    size = type_size(&vtop->type, &align);
    bt = vtop->type.t & VT_BTYPE;
    /* force the value into memory, then XOR the sign bit of its
       highest byte in place */
    save_reg(gv(RC_TYPE(bt)));
    vdup();
    incr_bf_adr(size - 1);
    vdup();
    vpushi(0x80); /* flip sign */
    gen_op('^');
    vstore();
    vpop();
}
#endif
2484 /* generate a floating point operation with constant propagation */
2485 static void gen_opif(int op)
2487 int c1, c2, i, bt;
2488 SValue *v1, *v2;
2489 #if defined _MSC_VER && defined __x86_64__
2490 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2491 volatile
2492 #endif
2493 long double f1, f2;
2495 v1 = vtop - 1;
2496 v2 = vtop;
2497 if (op == TOK_NEG)
2498 v1 = v2;
2499 bt = v1->type.t & VT_BTYPE;
2501 /* currently, we cannot do computations with forward symbols */
2502 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2503 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2504 if (c1 && c2) {
2505 if (bt == VT_FLOAT) {
2506 f1 = v1->c.f;
2507 f2 = v2->c.f;
2508 } else if (bt == VT_DOUBLE) {
2509 f1 = v1->c.d;
2510 f2 = v2->c.d;
2511 } else {
2512 f1 = v1->c.ld;
2513 f2 = v2->c.ld;
2515 /* NOTE: we only do constant propagation if finite number (not
2516 NaN or infinity) (ANSI spec) */
2517 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !CONST_WANTED)
2518 goto general_case;
2519 switch(op) {
2520 case '+': f1 += f2; break;
2521 case '-': f1 -= f2; break;
2522 case '*': f1 *= f2; break;
2523 case '/':
2524 if (f2 == 0.0) {
2525 union { float f; unsigned u; } x1, x2, y;
2526 /* If not in initializer we need to potentially generate
2527 FP exceptions at runtime, otherwise we want to fold. */
2528 if (!CONST_WANTED)
2529 goto general_case;
2530 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2531 when used to compile the f1 /= f2 below, would be -nan */
2532 x1.f = f1, x2.f = f2;
2533 if (f1 == 0.0)
2534 y.u = 0x7fc00000; /* nan */
2535 else
2536 y.u = 0x7f800000; /* infinity */
2537 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
2538 f1 = y.f;
2539 break;
2541 f1 /= f2;
2542 break;
2543 case TOK_NEG:
2544 f1 = -f1;
2545 goto unary_result;
2546 case TOK_EQ:
2547 i = f1 == f2;
2548 make_int:
2549 vtop -= 2;
2550 vpushi(i);
2551 return;
2552 case TOK_NE:
2553 i = f1 != f2;
2554 goto make_int;
2555 case TOK_LT:
2556 i = f1 < f2;
2557 goto make_int;
2558 case TOK_GE:
2559 i = f1 >= f2;
2560 goto make_int;
2561 case TOK_LE:
2562 i = f1 <= f2;
2563 goto make_int;
2564 case TOK_GT:
2565 i = f1 > f2;
2566 goto make_int;
2567 default:
2568 goto general_case;
2570 vtop--;
2571 unary_result:
2572 /* XXX: overflow test ? */
2573 if (bt == VT_FLOAT) {
2574 v1->c.f = f1;
2575 } else if (bt == VT_DOUBLE) {
2576 v1->c.d = f1;
2577 } else {
2578 v1->c.ld = f1;
2580 } else {
2581 general_case:
2582 if (op == TOK_NEG) {
2583 gen_negf(op);
2584 } else {
2585 gen_opf(op);
2590 /* print a type. If 'varstr' is not NULL, then the variable is also
2591 printed in the type */
2592 /* XXX: union */
2593 /* XXX: add array and function pointers */
2594 static void type_to_str(char *buf, int buf_size,
2595 CType *type, const char *varstr)
2597 int bt, v, t;
2598 Sym *s, *sa;
2599 char buf1[256];
2600 const char *tstr;
2602 t = type->t;
2603 bt = t & VT_BTYPE;
2604 buf[0] = '\0';
2606 if (t & VT_EXTERN)
2607 pstrcat(buf, buf_size, "extern ");
2608 if (t & VT_STATIC)
2609 pstrcat(buf, buf_size, "static ");
2610 if (t & VT_TYPEDEF)
2611 pstrcat(buf, buf_size, "typedef ");
2612 if (t & VT_INLINE)
2613 pstrcat(buf, buf_size, "inline ");
2614 if (bt != VT_PTR) {
2615 if (t & VT_VOLATILE)
2616 pstrcat(buf, buf_size, "volatile ");
2617 if (t & VT_CONSTANT)
2618 pstrcat(buf, buf_size, "const ");
2620 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2621 || ((t & VT_UNSIGNED)
2622 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2623 && !IS_ENUM(t)
2625 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2627 buf_size -= strlen(buf);
2628 buf += strlen(buf);
2630 switch(bt) {
2631 case VT_VOID:
2632 tstr = "void";
2633 goto add_tstr;
2634 case VT_BOOL:
2635 tstr = "_Bool";
2636 goto add_tstr;
2637 case VT_BYTE:
2638 tstr = "char";
2639 goto add_tstr;
2640 case VT_SHORT:
2641 tstr = "short";
2642 goto add_tstr;
2643 case VT_INT:
2644 tstr = "int";
2645 goto maybe_long;
2646 case VT_LLONG:
2647 tstr = "long long";
2648 maybe_long:
2649 if (t & VT_LONG)
2650 tstr = "long";
2651 if (!IS_ENUM(t))
2652 goto add_tstr;
2653 tstr = "enum ";
2654 goto tstruct;
2655 case VT_FLOAT:
2656 tstr = "float";
2657 goto add_tstr;
2658 case VT_DOUBLE:
2659 tstr = "double";
2660 if (!(t & VT_LONG))
2661 goto add_tstr;
2662 case VT_LDOUBLE:
2663 tstr = "long double";
2664 add_tstr:
2665 pstrcat(buf, buf_size, tstr);
2666 break;
2667 case VT_STRUCT:
2668 tstr = "struct ";
2669 if (IS_UNION(t))
2670 tstr = "union ";
2671 tstruct:
2672 pstrcat(buf, buf_size, tstr);
2673 v = type->ref->v & ~SYM_STRUCT;
2674 if (v >= SYM_FIRST_ANOM)
2675 pstrcat(buf, buf_size, "<anonymous>");
2676 else
2677 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2678 break;
2679 case VT_FUNC:
2680 s = type->ref;
2681 buf1[0]=0;
2682 if (varstr && '*' == *varstr) {
2683 pstrcat(buf1, sizeof(buf1), "(");
2684 pstrcat(buf1, sizeof(buf1), varstr);
2685 pstrcat(buf1, sizeof(buf1), ")");
2687 pstrcat(buf1, buf_size, "(");
2688 sa = s->next;
2689 while (sa != NULL) {
2690 char buf2[256];
2691 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2692 pstrcat(buf1, sizeof(buf1), buf2);
2693 sa = sa->next;
2694 if (sa)
2695 pstrcat(buf1, sizeof(buf1), ", ");
2697 if (s->f.func_type == FUNC_ELLIPSIS)
2698 pstrcat(buf1, sizeof(buf1), ", ...");
2699 pstrcat(buf1, sizeof(buf1), ")");
2700 type_to_str(buf, buf_size, &s->type, buf1);
2701 goto no_var;
2702 case VT_PTR:
2703 s = type->ref;
2704 if (t & (VT_ARRAY|VT_VLA)) {
2705 if (varstr && '*' == *varstr)
2706 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2707 else
2708 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2709 type_to_str(buf, buf_size, &s->type, buf1);
2710 goto no_var;
2712 pstrcpy(buf1, sizeof(buf1), "*");
2713 if (t & VT_CONSTANT)
2714 pstrcat(buf1, buf_size, "const ");
2715 if (t & VT_VOLATILE)
2716 pstrcat(buf1, buf_size, "volatile ");
2717 if (varstr)
2718 pstrcat(buf1, sizeof(buf1), varstr);
2719 type_to_str(buf, buf_size, &s->type, buf1);
2720 goto no_var;
2722 if (varstr) {
2723 pstrcat(buf, buf_size, " ");
2724 pstrcat(buf, buf_size, varstr);
2726 no_var: ;
2729 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2731 char buf1[256], buf2[256];
2732 type_to_str(buf1, sizeof(buf1), st, NULL);
2733 type_to_str(buf2, sizeof(buf2), dt, NULL);
2734 tcc_error(fmt, buf1, buf2);
2737 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2739 char buf1[256], buf2[256];
2740 type_to_str(buf1, sizeof(buf1), st, NULL);
2741 type_to_str(buf2, sizeof(buf2), dt, NULL);
2742 tcc_warning(fmt, buf1, buf2);
2745 static int pointed_size(CType *type)
2747 int align;
2748 return type_size(pointed_type(type), &align);
2751 static inline int is_null_pointer(SValue *p)
2753 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2754 return 0;
2755 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2756 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2757 ((p->type.t & VT_BTYPE) == VT_PTR &&
2758 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2759 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2760 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2764 /* compare function types. OLD functions match any new functions */
2765 static int is_compatible_func(CType *type1, CType *type2)
2767 Sym *s1, *s2;
2769 s1 = type1->ref;
2770 s2 = type2->ref;
2771 if (s1->f.func_call != s2->f.func_call)
2772 return 0;
2773 if (s1->f.func_type != s2->f.func_type
2774 && s1->f.func_type != FUNC_OLD
2775 && s2->f.func_type != FUNC_OLD)
2776 return 0;
2777 for (;;) {
2778 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2779 return 0;
2780 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2781 return 1;
2782 s1 = s1->next;
2783 s2 = s2->next;
2784 if (!s1)
2785 return !s2;
2786 if (!s2)
2787 return 0;
2791 /* return true if type1 and type2 are the same. If unqualified is
2792 true, qualifiers on the types are ignored.
2794 static int compare_types(CType *type1, CType *type2, int unqualified)
2796 int bt1, t1, t2;
2798 t1 = type1->t & VT_TYPE;
2799 t2 = type2->t & VT_TYPE;
2800 if (unqualified) {
2801 /* strip qualifiers before comparing */
2802 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2803 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2806 /* Default Vs explicit signedness only matters for char */
2807 if ((t1 & VT_BTYPE) != VT_BYTE) {
2808 t1 &= ~VT_DEFSIGN;
2809 t2 &= ~VT_DEFSIGN;
2811 /* XXX: bitfields ? */
2812 if (t1 != t2)
2813 return 0;
2815 if ((t1 & VT_ARRAY)
2816 && !(type1->ref->c < 0
2817 || type2->ref->c < 0
2818 || type1->ref->c == type2->ref->c))
2819 return 0;
2821 /* test more complicated cases */
2822 bt1 = t1 & VT_BTYPE;
2823 if (bt1 == VT_PTR) {
2824 type1 = pointed_type(type1);
2825 type2 = pointed_type(type2);
2826 return is_compatible_types(type1, type2);
2827 } else if (bt1 == VT_STRUCT) {
2828 return (type1->ref == type2->ref);
2829 } else if (bt1 == VT_FUNC) {
2830 return is_compatible_func(type1, type2);
2831 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
2832 /* If both are enums then they must be the same, if only one is then
2833 t1 and t2 must be equal, which was checked above already. */
2834 return type1->ref == type2->ref;
2835 } else {
2836 return 1;
2840 #define CMP_OP 'C'
2841 #define SHIFT_OP 'S'
2843 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2844 type is stored in DEST if non-null (except for pointer plus/minus) . */
2845 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
2847 CType *type1, *type2, type;
2848 int t1, t2, bt1, bt2;
2849 int ret = 1;
2851 /* for shifts, 'combine' only left operand */
2852 if (op == SHIFT_OP)
2853 op2 = op1;
2855 type1 = &op1->type, type2 = &op2->type;
2856 t1 = type1->t, t2 = type2->t;
2857 bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
2859 type.t = VT_VOID;
2860 type.ref = NULL;
2862 if (bt1 == VT_VOID || bt2 == VT_VOID) {
2863 ret = op == '?' ? 1 : 0;
2864 /* NOTE: as an extension, we accept void on only one side */
2865 type.t = VT_VOID;
2866 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2867 if (op == '+') {
2868 if (!is_integer_btype(bt1 == VT_PTR ? bt2 : bt1))
2869 ret = 0;
2871 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2872 /* If one is a null ptr constant the result type is the other. */
2873 else if (is_null_pointer (op2)) type = *type1;
2874 else if (is_null_pointer (op1)) type = *type2;
2875 else if (bt1 != bt2) {
2876 /* accept comparison or cond-expr between pointer and integer
2877 with a warning */
2878 if ((op == '?' || op == CMP_OP)
2879 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
2880 tcc_warning("pointer/integer mismatch in %s",
2881 op == '?' ? "conditional expression" : "comparison");
2882 else if (op != '-' || !is_integer_btype(bt2))
2883 ret = 0;
2884 type = *(bt1 == VT_PTR ? type1 : type2);
2885 } else {
2886 CType *pt1 = pointed_type(type1);
2887 CType *pt2 = pointed_type(type2);
2888 int pbt1 = pt1->t & VT_BTYPE;
2889 int pbt2 = pt2->t & VT_BTYPE;
2890 int newquals, copied = 0;
2891 if (pbt1 != VT_VOID && pbt2 != VT_VOID
2892 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
2893 if (op != '?' && op != CMP_OP)
2894 ret = 0;
2895 else
2896 type_incompatibility_warning(type1, type2,
2897 op == '?'
2898 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2899 : "pointer type mismatch in comparison('%s' and '%s')");
2901 if (op == '?') {
2902 /* pointers to void get preferred, otherwise the
2903 pointed to types minus qualifs should be compatible */
2904 type = *((pbt1 == VT_VOID) ? type1 : type2);
2905 /* combine qualifs */
2906 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
2907 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
2908 & newquals)
2910 /* copy the pointer target symbol */
2911 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2912 0, type.ref->c);
2913 copied = 1;
2914 pointed_type(&type)->t |= newquals;
2916 /* pointers to incomplete arrays get converted to
2917 pointers to completed ones if possible */
2918 if (pt1->t & VT_ARRAY
2919 && pt2->t & VT_ARRAY
2920 && pointed_type(&type)->ref->c < 0
2921 && (pt1->ref->c > 0 || pt2->ref->c > 0))
2923 if (!copied)
2924 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2925 0, type.ref->c);
2926 pointed_type(&type)->ref =
2927 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
2928 0, pointed_type(&type)->ref->c);
2929 pointed_type(&type)->ref->c =
2930 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
2934 if (op == CMP_OP)
2935 type.t = VT_SIZE_T;
2936 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2937 if (op != '?' || !compare_types(type1, type2, 1))
2938 ret = 0;
2939 type = *type1;
2940 } else if (is_float(bt1) || is_float(bt2)) {
2941 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2942 type.t = VT_LDOUBLE;
2943 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2944 type.t = VT_DOUBLE;
2945 } else {
2946 type.t = VT_FLOAT;
2948 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2949 /* cast to biggest op */
2950 type.t = VT_LLONG | VT_LONG;
2951 if (bt1 == VT_LLONG)
2952 type.t &= t1;
2953 if (bt2 == VT_LLONG)
2954 type.t &= t2;
2955 /* convert to unsigned if it does not fit in a long long */
2956 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2957 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2958 type.t |= VT_UNSIGNED;
2959 } else {
2960 /* integer operations */
2961 type.t = VT_INT | (VT_LONG & (t1 | t2));
2962 /* convert to unsigned if it does not fit in an integer */
2963 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2964 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2965 type.t |= VT_UNSIGNED;
2967 if (dest)
2968 *dest = type;
2969 return ret;
2972 /* generic gen_op: handles types problems */
2973 ST_FUNC void gen_op(int op)
2975 int t1, t2, bt1, bt2, t;
2976 CType type1, combtype;
2977 int op_class = op;
2979 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2980 op_class = SHIFT_OP;
2981 else if (TOK_ISCOND(op)) /* == != > ... */
2982 op_class = CMP_OP;
2984 redo:
2985 t1 = vtop[-1].type.t;
2986 t2 = vtop[0].type.t;
2987 bt1 = t1 & VT_BTYPE;
2988 bt2 = t2 & VT_BTYPE;
2990 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2991 if (bt2 == VT_FUNC) {
2992 mk_pointer(&vtop->type);
2993 gaddrof();
2995 if (bt1 == VT_FUNC) {
2996 vswap();
2997 mk_pointer(&vtop->type);
2998 gaddrof();
2999 vswap();
3001 goto redo;
3002 } else if (!combine_types(&combtype, vtop - 1, vtop, op_class)) {
3003 op_err:
3004 tcc_error("invalid operand types for binary operation");
3005 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3006 /* at least one operand is a pointer */
3007 /* relational op: must be both pointers */
3008 int align;
3009 if (op_class == CMP_OP)
3010 goto std_op;
3011 /* if both pointers, then it must be the '-' op */
3012 if (bt1 == VT_PTR && bt2 == VT_PTR) {
3013 if (op != '-')
3014 goto op_err;
3015 vpush_type_size(pointed_type(&vtop[-1].type), &align);
3016 vtop->type.t &= ~VT_UNSIGNED;
3017 vrott(3);
3018 gen_opic(op);
3019 vtop->type.t = VT_PTRDIFF_T;
3020 vswap();
3021 gen_op(TOK_PDIV);
3022 } else {
3023 /* exactly one pointer : must be '+' or '-'. */
3024 if (op != '-' && op != '+')
3025 goto op_err;
3026 /* Put pointer as first operand */
3027 if (bt2 == VT_PTR) {
3028 vswap();
3029 t = t1, t1 = t2, t2 = t;
3030 bt2 = bt1;
3032 #if PTR_SIZE == 4
3033 if (bt2 == VT_LLONG)
3034 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3035 gen_cast_s(VT_INT);
3036 #endif
3037 type1 = vtop[-1].type;
3038 vpush_type_size(pointed_type(&vtop[-1].type), &align);
3039 gen_op('*');
3040 #ifdef CONFIG_TCC_BCHECK
3041 if (tcc_state->do_bounds_check && !CONST_WANTED) {
3042 /* if bounded pointers, we generate a special code to
3043 test bounds */
3044 if (op == '-') {
3045 vpushi(0);
3046 vswap();
3047 gen_op('-');
3049 gen_bounded_ptr_add();
3050 } else
3051 #endif
3053 gen_opic(op);
3055 type1.t &= ~(VT_ARRAY|VT_VLA);
3056 /* put again type if gen_opic() swaped operands */
3057 vtop->type = type1;
3059 } else {
3060 /* floats can only be used for a few operations */
3061 if (is_float(combtype.t)
3062 && op != '+' && op != '-' && op != '*' && op != '/'
3063 && op_class != CMP_OP) {
3064 goto op_err;
3066 std_op:
3067 t = t2 = combtype.t;
3068 /* special case for shifts and long long: we keep the shift as
3069 an integer */
3070 if (op_class == SHIFT_OP)
3071 t2 = VT_INT;
3072 /* XXX: currently, some unsigned operations are explicit, so
3073 we modify them here */
3074 if (t & VT_UNSIGNED) {
3075 if (op == TOK_SAR)
3076 op = TOK_SHR;
3077 else if (op == '/')
3078 op = TOK_UDIV;
3079 else if (op == '%')
3080 op = TOK_UMOD;
3081 else if (op == TOK_LT)
3082 op = TOK_ULT;
3083 else if (op == TOK_GT)
3084 op = TOK_UGT;
3085 else if (op == TOK_LE)
3086 op = TOK_ULE;
3087 else if (op == TOK_GE)
3088 op = TOK_UGE;
3090 vswap();
3091 gen_cast_s(t);
3092 vswap();
3093 gen_cast_s(t2);
3094 if (is_float(t))
3095 gen_opif(op);
3096 else
3097 gen_opic(op);
3098 if (op_class == CMP_OP) {
3099 /* relational op: the result is an int */
3100 vtop->type.t = VT_INT;
3101 } else {
3102 vtop->type.t = t;
3105 // Make sure that we have converted to an rvalue:
3106 if (vtop->r & VT_LVAL)
3107 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
3110 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3111 #define gen_cvt_itof1 gen_cvt_itof
3112 #else
3113 /* generic itof for unsigned long long case */
3114 static void gen_cvt_itof1(int t)
3116 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3117 (VT_LLONG | VT_UNSIGNED)) {
3119 if (t == VT_FLOAT)
3120 vpush_helper_func(TOK___floatundisf);
3121 #if LDOUBLE_SIZE != 8
3122 else if (t == VT_LDOUBLE)
3123 vpush_helper_func(TOK___floatundixf);
3124 #endif
3125 else
3126 vpush_helper_func(TOK___floatundidf);
3127 vrott(2);
3128 gfunc_call(1);
3129 vpushi(0);
3130 PUT_R_RET(vtop, t);
3131 } else {
3132 gen_cvt_itof(t);
3135 #endif
3137 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3138 #define gen_cvt_ftoi1 gen_cvt_ftoi
3139 #else
3140 /* generic ftoi for unsigned long long case */
3141 static void gen_cvt_ftoi1(int t)
3143 int st;
3144 if (t == (VT_LLONG | VT_UNSIGNED)) {
3145 /* not handled natively */
3146 st = vtop->type.t & VT_BTYPE;
3147 if (st == VT_FLOAT)
3148 vpush_helper_func(TOK___fixunssfdi);
3149 #if LDOUBLE_SIZE != 8
3150 else if (st == VT_LDOUBLE)
3151 vpush_helper_func(TOK___fixunsxfdi);
3152 #endif
3153 else
3154 vpush_helper_func(TOK___fixunsdfdi);
3155 vrott(2);
3156 gfunc_call(1);
3157 vpushi(0);
3158 PUT_R_RET(vtop, t);
3159 } else {
3160 gen_cvt_ftoi(t);
3163 #endif
3165 /* special delayed cast for char/short */
3166 static void force_charshort_cast(void)
3168 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3169 int dbt = vtop->type.t;
3170 vtop->r &= ~VT_MUSTCAST;
3171 vtop->type.t = sbt;
3172 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3173 vtop->type.t = dbt;
3176 static void gen_cast_s(int t)
3178 CType type;
3179 type.t = t;
3180 type.ref = NULL;
3181 gen_cast(&type);
3184 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3185 static void gen_cast(CType *type)
3187 int sbt, dbt, sf, df, c;
3188 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3190 /* special delayed cast for char/short */
3191 if (vtop->r & VT_MUSTCAST)
3192 force_charshort_cast();
3194 /* bitfields first get cast to ints */
3195 if (vtop->type.t & VT_BITFIELD)
3196 gv(RC_INT);
3198 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3199 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3200 if (sbt == VT_FUNC)
3201 sbt = VT_PTR;
3203 again:
3204 if (sbt != dbt) {
3205 sf = is_float(sbt);
3206 df = is_float(dbt);
3207 dbt_bt = dbt & VT_BTYPE;
3208 sbt_bt = sbt & VT_BTYPE;
3209 if (dbt_bt == VT_VOID)
3210 goto done;
3211 if (sbt_bt == VT_VOID) {
3212 error:
3213 cast_error(&vtop->type, type);
3216 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3217 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3218 /* don't try to convert to ldouble when cross-compiling
3219 (except when it's '0' which is needed for arm:gen_negf()) */
3220 if (dbt_bt == VT_LDOUBLE && !nocode_wanted && (sf || vtop->c.i != 0))
3221 c = 0;
3222 #endif
3223 if (c) {
3224 /* constant case: we can do it now */
3225 /* XXX: in ISOC, cannot do it if error in convert */
3226 if (sbt == VT_FLOAT)
3227 vtop->c.ld = vtop->c.f;
3228 else if (sbt == VT_DOUBLE)
3229 vtop->c.ld = vtop->c.d;
3231 if (df) {
3232 if (sbt_bt == VT_LLONG) {
3233 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3234 vtop->c.ld = vtop->c.i;
3235 else
3236 vtop->c.ld = -(long double)-vtop->c.i;
3237 } else if(!sf) {
3238 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3239 vtop->c.ld = (uint32_t)vtop->c.i;
3240 else
3241 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3244 if (dbt == VT_FLOAT)
3245 vtop->c.f = (float)vtop->c.ld;
3246 else if (dbt == VT_DOUBLE)
3247 vtop->c.d = (double)vtop->c.ld;
3248 } else if (sf && dbt == VT_BOOL) {
3249 vtop->c.i = (vtop->c.ld != 0);
3250 } else {
3251 if(sf)
3252 /* the range of [int64_t] is enough to hold the integer part of any float value.
3253 Meanwhile, converting negative double to unsigned integer is UB.
3254 So first convert to [int64_t] here. */
3255 vtop->c.i = (int64_t)vtop->c.ld;
3256 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3258 else if (sbt & VT_UNSIGNED)
3259 vtop->c.i = (uint32_t)vtop->c.i;
3260 else
3261 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3263 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3265 else if (dbt == VT_BOOL)
3266 vtop->c.i = (vtop->c.i != 0);
3267 else {
3268 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3269 dbt_bt == VT_SHORT ? 0xffff :
3270 0xffffffff;
3271 vtop->c.i &= m;
3272 if (!(dbt & VT_UNSIGNED))
3273 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3276 goto done;
3278 } else if (dbt == VT_BOOL
3279 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3280 == (VT_CONST | VT_SYM)) {
3281 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3282 vtop->r = VT_CONST;
3283 vtop->c.i = 1;
3284 goto done;
3287 /* cannot generate code for global or static initializers */
3288 if (nocode_wanted & DATA_ONLY_WANTED)
3289 goto done;
3291 /* non constant case: generate code */
3292 if (dbt == VT_BOOL) {
3293 gen_test_zero(TOK_NE);
3294 goto done;
3297 if (sf || df) {
3298 if (sf && df) {
3299 /* convert from fp to fp */
3300 gen_cvt_ftof(dbt);
3301 } else if (df) {
3302 /* convert int to fp */
3303 gen_cvt_itof1(dbt);
3304 } else {
3305 /* convert fp to int */
3306 sbt = dbt;
3307 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3308 sbt = VT_INT;
3309 gen_cvt_ftoi1(sbt);
3310 goto again; /* may need char/short cast */
3312 goto done;
3315 ds = btype_size(dbt_bt);
3316 ss = btype_size(sbt_bt);
3317 if (ds == 0 || ss == 0)
3318 goto error;
3320 if (IS_ENUM(type->t) && type->ref->c < 0)
3321 tcc_error("cast to incomplete type");
3323 /* same size and no sign conversion needed */
3324 if (ds == ss && ds >= 4)
3325 goto done;
3326 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3327 tcc_warning("cast between pointer and integer of different size");
3328 if (sbt_bt == VT_PTR) {
3329 /* put integer type to allow logical operations below */
3330 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3334 /* processor allows { int a = 0, b = *(char*)&a; }
3335 That means that if we cast to less width, we can just
3336 change the type and read it still later. */
3337 #define ALLOW_SUBTYPE_ACCESS 1
3339 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3340 /* value still in memory */
3341 if (ds <= ss)
3342 goto done;
3343 /* ss <= 4 here */
3344 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3345 gv(RC_INT);
3346 goto done; /* no 64bit envolved */
3349 gv(RC_INT);
3351 trunc = 0;
3352 #if PTR_SIZE == 4
3353 if (ds == 8) {
3354 /* generate high word */
3355 if (sbt & VT_UNSIGNED) {
3356 vpushi(0);
3357 gv(RC_INT);
3358 } else {
3359 gv_dup();
3360 vpushi(31);
3361 gen_op(TOK_SAR);
3363 lbuild(dbt);
3364 } else if (ss == 8) {
3365 /* from long long: just take low order word */
3366 lexpand();
3367 vpop();
3369 ss = 4;
3371 #elif PTR_SIZE == 8
3372 if (ds == 8) {
3373 /* need to convert from 32bit to 64bit */
3374 if (sbt & VT_UNSIGNED) {
3375 #if defined(TCC_TARGET_RISCV64)
3376 /* RISC-V keeps 32bit vals in registers sign-extended.
3377 So here we need a zero-extension. */
3378 trunc = 32;
3379 #else
3380 goto done;
3381 #endif
3382 } else {
3383 gen_cvt_sxtw();
3384 goto done;
3386 ss = ds, ds = 4, dbt = sbt;
3387 } else if (ss == 8) {
3388 /* RISC-V keeps 32bit vals in registers sign-extended.
3389 So here we need a sign-extension for signed types and
3390 zero-extension. for unsigned types. */
3391 #if !defined(TCC_TARGET_RISCV64)
3392 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3393 #endif
3394 } else {
3395 ss = 4;
3397 #endif
3399 if (ds >= ss)
3400 goto done;
3401 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3402 if (ss == 4) {
3403 gen_cvt_csti(dbt);
3404 goto done;
3406 #endif
3407 bits = (ss - ds) * 8;
3408 /* for unsigned, gen_op will convert SAR to SHR */
3409 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3410 vpushi(bits);
3411 gen_op(TOK_SHL);
3412 vpushi(bits - trunc);
3413 gen_op(TOK_SAR);
3414 vpushi(trunc);
3415 gen_op(TOK_SHR);
3417 done:
3418 vtop->type = *type;
3419 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
3422 /* return type size as known at compile time. Put alignment at 'a' */
3423 ST_FUNC int type_size(CType *type, int *a)
3425 Sym *s;
3426 int bt;
3428 bt = type->t & VT_BTYPE;
3429 if (bt == VT_STRUCT) {
3430 /* struct/union */
3431 s = type->ref;
3432 *a = s->r;
3433 return s->c;
3434 } else if (bt == VT_PTR) {
3435 if (type->t & VT_ARRAY) {
3436 int ts;
3437 s = type->ref;
3438 ts = type_size(&s->type, a);
3439 if (ts < 0 && s->c < 0)
3440 ts = -ts;
3441 return ts * s->c;
3442 } else {
3443 *a = PTR_SIZE;
3444 return PTR_SIZE;
3446 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3447 *a = 0;
3448 return -1; /* incomplete enum */
3449 } else if (bt == VT_LDOUBLE) {
3450 *a = LDOUBLE_ALIGN;
3451 return LDOUBLE_SIZE;
3452 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3453 #if (defined TCC_TARGET_I386 && !defined TCC_TARGET_PE) \
3454 || (defined TCC_TARGET_ARM && !defined TCC_ARM_EABI)
3455 *a = 4;
3456 #else
3457 *a = 8;
3458 #endif
3459 return 8;
3460 } else if (bt == VT_INT || bt == VT_FLOAT) {
3461 *a = 4;
3462 return 4;
3463 } else if (bt == VT_SHORT) {
3464 *a = 2;
3465 return 2;
3466 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3467 *a = 8;
3468 return 16;
3469 } else {
3470 /* char, void, function, _Bool */
3471 *a = 1;
3472 return 1;
3476 /* push type size as known at runtime time on top of value stack. Put
3477 alignment at 'a' */
3478 static void vpush_type_size(CType *type, int *a)
3480 if (type->t & VT_VLA) {
3481 type_size(&type->ref->type, a);
3482 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3483 } else {
3484 int size = type_size(type, a);
3485 if (size < 0)
3486 tcc_error("unknown type size");
3487 vpushs(size);
3491 /* return the pointed type of t */
3492 static inline CType *pointed_type(CType *type)
3494 return &type->ref->type;
3497 /* modify type so that its it is a pointer to type. */
3498 ST_FUNC void mk_pointer(CType *type)
3500 Sym *s;
3501 s = sym_push(SYM_FIELD, type, 0, -1);
3502 type->t = VT_PTR | (type->t & VT_STORAGE);
3503 type->ref = s;
3506 /* return true if type1 and type2 are exactly the same (including
3507 qualifiers).
3509 static int is_compatible_types(CType *type1, CType *type2)
3511 return compare_types(type1,type2,0);
3514 /* return true if type1 and type2 are the same (ignoring qualifiers).
3516 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3518 return compare_types(type1,type2,1);
3521 static void cast_error(CType *st, CType *dt)
3523 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
3526 /* verify type compatibility to store vtop in 'dt' type */
3527 static void verify_assign_cast(CType *dt)
3529 CType *st, *type1, *type2;
3530 int dbt, sbt, qualwarn, lvl;
3532 st = &vtop->type; /* source type */
3533 dbt = dt->t & VT_BTYPE;
3534 sbt = st->t & VT_BTYPE;
3535 if (dt->t & VT_CONSTANT)
3536 tcc_warning("assignment of read-only location");
3537 switch(dbt) {
3538 case VT_VOID:
3539 if (sbt != dbt)
3540 tcc_error("assignment to void expression");
3541 break;
3542 case VT_PTR:
3543 /* special cases for pointers */
3544 /* '0' can also be a pointer */
3545 if (is_null_pointer(vtop))
3546 break;
3547 /* accept implicit pointer to integer cast with warning */
3548 if (is_integer_btype(sbt)) {
3549 tcc_warning("assignment makes pointer from integer without a cast");
3550 break;
3552 type1 = pointed_type(dt);
3553 if (sbt == VT_PTR)
3554 type2 = pointed_type(st);
3555 else if (sbt == VT_FUNC)
3556 type2 = st; /* a function is implicitly a function pointer */
3557 else
3558 goto error;
3559 if (is_compatible_types(type1, type2))
3560 break;
3561 for (qualwarn = lvl = 0;; ++lvl) {
3562 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3563 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3564 qualwarn = 1;
3565 dbt = type1->t & (VT_BTYPE|VT_LONG);
3566 sbt = type2->t & (VT_BTYPE|VT_LONG);
3567 if (dbt != VT_PTR || sbt != VT_PTR)
3568 break;
3569 type1 = pointed_type(type1);
3570 type2 = pointed_type(type2);
3572 if (!is_compatible_unqualified_types(type1, type2)) {
3573 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3574 /* void * can match anything */
3575 } else if (dbt == sbt
3576 && is_integer_btype(sbt & VT_BTYPE)
3577 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3578 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3579 /* Like GCC don't warn by default for merely changes
3580 in pointer target signedness. Do warn for different
3581 base types, though, in particular for unsigned enums
3582 and signed int targets. */
3583 } else {
3584 tcc_warning("assignment from incompatible pointer type");
3585 break;
3588 if (qualwarn)
3589 tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
3590 break;
3591 case VT_BYTE:
3592 case VT_SHORT:
3593 case VT_INT:
3594 case VT_LLONG:
3595 if (sbt == VT_PTR || sbt == VT_FUNC) {
3596 tcc_warning("assignment makes integer from pointer without a cast");
3597 } else if (sbt == VT_STRUCT) {
3598 goto case_VT_STRUCT;
3600 /* XXX: more tests */
3601 break;
3602 case VT_STRUCT:
3603 case_VT_STRUCT:
3604 if (!is_compatible_unqualified_types(dt, st)) {
3605 error:
3606 cast_error(st, dt);
3608 break;
3612 static void gen_assign_cast(CType *dt)
3614 verify_assign_cast(dt);
3615 gen_cast(dt);
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;           /* full type of the destination lvalue */
    sbt = vtop->type.t & VT_BTYPE;  /* basic type of the source value */
    dbt = ft & VT_BTYPE;            /* basic type of the destination */
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        /* inline copy when the target backend supports it (and bounds
           checking does not require the helper call) */
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            ) {
            gen_struct_copy(size);
        } else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
            vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* booleans normalize the source first, then store as byte */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* bit-field straddles its container: byte-wise packed access */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        /* store to void lvalue: nothing to emit, just drop the value */
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64) */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            incr_offset(PTR_SIZE);
            vswap();
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
{
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        vrotb(3);
        vrotb(3);
    }
    /* add constant */
    vpushi(c - TOK_MID); /* maps the ++/-- token to +1/-1 (token ordering) */
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
/* Parse one or more adjacent string literals and return them concatenated
   and NUL-terminated in the shared 'initstr' buffer (overwritten by the
   next call).  'msg' is the error message if no string is present. */
ST_FUNC CString* parse_mult_str (const char *msg)
{
    /* read the string */
    if (tok != TOK_STR)
        expect(msg);
    cstr_reset(&initstr);
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(&initstr, tokc.str.data, -1);
        next();
    }
    cstr_ccat(&initstr, '\0');
    return &initstr;
}
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  (For any other positive value this yields the
   bit length, i.e. floor(log2(i)) + 1.) */
ST_FUNC int exact_log2p1(int i)
{
    int n;

    if (i == 0)
        return 0;
    /* strip whole bytes first, then the remaining single bits */
    for (n = 1; i >= 1 << 8; n += 8)
        i >>= 8;
    while (i >= 1 << 1)
        i >>= 1, n += 1;
    return n;
}
/* Parse __attribute__((...)) GNUC extension.  Recognized attributes are
   recorded in *ad; unknown attributes produce a warning and their
   parenthesized arguments (if any) are skipped.  Several consecutive
   __attribute__ blocks are all consumed (see the final 'goto redo'). */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    char *astr;

redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_CLEANUP1:
        case TOK_CLEANUP2:
        {
            Sym *s;

            skip('(');
            s = sym_find(tok);
            if (!s) {
                tcc_warning_c(warn_implicit_function_declaration)(
                    "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
                tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
            ad->cleanup_func = s;
            next();
            skip(')');
            break;
        }
        case TOK_CONSTRUCTOR1:
        case TOK_CONSTRUCTOR2:
            ad->f.func_ctor = 1;
            break;
        case TOK_DESTRUCTOR1:
        case TOK_DESTRUCTOR2:
            ad->f.func_dtor = 1;
            break;
        case TOK_ALWAYS_INLINE1:
        case TOK_ALWAYS_INLINE2:
            ad->f.func_alwinl = 1;
            break;
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            astr = parse_mult_str("section name")->data;
            ad->section = find_section(tcc_state, astr);
            skip(')');
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            astr = parse_mult_str("alias(\"target\")")->data;
            /* save string as token, for later */
            ad->alias_target = tok_alloc_const(astr);
            skip(')');
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            astr = parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data;
            if (!strcmp (astr, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                /* bare __attribute__((aligned)) means maximum alignment */
                n = MAX_ALIGN;
            }
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_NODEBUG1:
        case TOK_NODEBUG2:
            ad->a.nodebug = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            ad->f.func_noreturn = 1;
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            skip('(');
            n = expr_const();
            /* clamp to the 0..3 register-parameter range */
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
        case TOK_THISCALL1:
        case TOK_THISCALL2:
        case TOK_THISCALL3:
            ad->f.func_call = FUNC_THISCALL;
            break;
#endif
        case TOK_MODE:
            skip('(');
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    goto redo;
}
4046 static Sym * find_field (CType *type, int v, int *cumofs)
4048 Sym *s = type->ref;
4049 int v1 = v | SYM_FIELD;
4050 if (!(v & SYM_FIELD)) { /* top-level call */
4051 if ((type->t & VT_BTYPE) != VT_STRUCT)
4052 expect("struct or union");
4053 if (v < TOK_UIDENT)
4054 expect("field name");
4055 if (s->c < 0)
4056 tcc_error("dereferencing incomplete type '%s'",
4057 get_tok_str(s->v & ~SYM_STRUCT, 0));
4059 while ((s = s->next) != NULL) {
4060 if (s->v == v1) {
4061 *cumofs = s->c;
4062 return s;
4064 if ((s->type.t & VT_BTYPE) == VT_STRUCT
4065 && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
4066 /* try to find field in anonymous sub-struct/union */
4067 Sym *ret = find_field (&s->type, v1, cumofs);
4068 if (ret) {
4069 *cumofs += s->c;
4070 return ret;
4074 if (!(v & SYM_FIELD))
4075 tcc_error("field not found: %s", get_tok_str(v, NULL));
4076 return s;
4079 static void check_fields (CType *type, int check)
4081 Sym *s = type->ref;
4083 while ((s = s->next) != NULL) {
4084 int v = s->v & ~SYM_FIELD;
4085 if (v < SYM_FIRST_ANOM) {
4086 TokenSym *ts = table_ident[v - TOK_IDENT];
4087 if (check && (ts->tok & SYM_FIELD))
4088 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
4089 ts->tok ^= SYM_FIELD;
4090 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
4091 check_fields (&s->type, check);
/* Compute the offset and alignment of every member of struct/union 'type'
   (the member list was built by struct_decl), store the total size and
   alignment into the type, and finally adjust bit-field access types.
   Supports both the PCC/GCC and the MS bit-field layout models. */
static void struct_layout(CType *type, AttributeDef *ad)
{
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;

    maxalign = 1;
    offset = 0;
    c = 0;          /* running byte offset / size accumulator */
    bit_pos = 0;    /* bit offset within the current byte position */
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;

//#define BF_DEBUG

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1; /* -1 marks a non-bit-field member */
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */

        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            }
        }
        /* some individual align was specified */
        if (a)
            align = a;

        if (type->ref->type.t == VT_UNION) {
            /* all union members start at offset 0; size is the maximum */
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;

        } else if (bit_size < 0) {
            /* plain (non-bit-field) struct member */
            if (pcc)
                c += (bit_pos + 7) >> 3;
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;

        } else {
            /* A bit-field. Layout is more complicated. There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field;
                }

                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;

                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;

                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align). */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    )
                    align = 1;

            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    ) {
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width. */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }

            /* record the bit position in the member's type word */
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        }
        if (align > maxalign)
            maxalign = align;

#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                    BIT_POS(f->type.t),
                    BIT_SIZE(f->type.t)
                    );
        }
        printf("\n");
#endif

        f->c = offset;
        f->r = 0;
    }

    if (pcc)
        c += (bit_pos + 7) >> 3;

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given.  In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    }
    c = (c + a - 1) & -a;
    type->ref->c = c;

#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;

        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);

        /* already accessible through its declared type? */
        if (bit_pos + bit_size <= size * 8 && f->c + size <= c
#ifdef TCC_TARGET_ARM
            && !(f->c & (align - 1))
#endif
            )
            continue;

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        t.t = VT_BYTE;
        for (;;) {
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;    /* aligned container start */
            px = px - (cx << 3);        /* bit offset within container */
            if (c0 == cx)
                break;                  /* no progress: converged */
            s = (px + bit_size + 7) >> 3;
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            }
            s = type_size(&t, &align);
            c0 = cx;
        }

        if (px + bit_size <= s * 8 && cx + s <= c
#ifdef TCC_TARGET_ARM
            && !(cx & (align - 1))
#endif
            ) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
        }
    }
}
4344 static void do_Static_assert(void);
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION.
   Parses either a reference to a (possibly forward-declared) tag or a
   full definition with member list; the resulting type is stored in
   *type. */
static void struct_decl(CType *type, int u)
{
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad, ad1;
    CType type1, btype;

    memset(&ad, 0, sizeof ad);
    next();
    parse_attribute(&ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == s->type.t)
                goto do_decl;
            if (u == VT_ENUM && IS_ENUM(s->type.t))
                goto do_decl;
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
        }
    } else {
        /* unnamed aggregate: generate an anonymous tag */
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token. */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
do_decl:
    type->t = s->type.t;
    type->ref = s;

    if (tok == '{') {
        next();
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        s->c = -2; /* mark as "definition in progress" */
        /* cannot be empty */
        /* non empty enums are not allowed */
        ps = &s->next;
        if (u == VT_ENUM) {
            long long ll = 0, pl = 0, nl = 0; /* current / max / min value */
            CType t;
            t.ref = s;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    ll = expr_const64();
                }
                ss = sym_push(v, &t, VT_CONST, 0);
                ss->enum_val = ll;
                *ps = ss, ps = &ss->next;
                if (ll < nl)
                    nl = ll;
                if (ll > pl)
                    pl = ll;
                if (tok != ',')
                    break;
                next();
                ll++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            skip('}');
            /* set integral type of the enum */
            t.t = VT_INT;
            if (nl >= 0) {
                /* no negative enumerator: unsigned, widened if needed */
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
                t.t |= VT_UNSIGNED;
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            s->c = 0;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                ll = ss->enum_val;
                if (ll == (int)ll) /* default is int if it fits */
                    continue;
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                        continue;
                }
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                             | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            }
        } else {
            c = 0;          /* set once a real (named/struct) field was seen */
            flexible = 0;   /* set when a flexible array member was parsed */
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1, 0)) {
                    if (tok == TOK_STATIC_ASSERT) {
                        do_Static_assert();
                        continue;
                    }
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        if (tok != ';')
                            type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if (v == 0) {
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    /* named (tagged) sub-struct member w/o
                                       declarator: MS extension only */
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & VT_BTYPE) == VT_VOID ||
                            (type1.t & VT_STORAGE))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                   && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                            ;
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                        } else {
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                      | VT_BITFIELD
                                      | (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous). */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                        ss->a = ad1.a;
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            parse_attribute(&ad);
            if (ad.cleanup_func) {
                tcc_warning("attribute '__cleanup__' ignored on type");
            }
            check_fields(type, 1);  /* detect duplicate member names */
            check_fields(type, 0);  /* clear the markers again */
            struct_layout(type, &ad);
            if (debug_modes)
                tcc_debug_fix_anon(tcc_state, type);
        }
    }
}
/* merge the attributes recorded on symbol 's' (e.g. a typedef) into 'ad' */
static void sym_to_attr(AttributeDef *ad, Sym *s)
{
    merge_symattr(&ad->a, &s->a);
    merge_funcattr(&ad->f, &s->f);
}
4578 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4579 are added to the element type, copied because it could be a typedef. */
4580 static void parse_btype_qualify(CType *type, int qualifiers)
4582 while (type->t & VT_ARRAY) {
4583 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4584 type = &type->ref->type;
4586 type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.
   Storage classes, qualifiers, typedefs and GNU attributes encountered
   along the way are accumulated into *type / *ad.  With ignore_label,
   an identifier directly followed by ':' is treated as a label, not a
   typedef name.
 */
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
{
    int t, u, bt, st, type_found, typespec_found, g, n;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    type_found = 0;
    typespec_found = 0;
    t = VT_INT;       /* default type is int */
    bt = st = -1;     /* base type / size modifier seen so far (-1: none) */
    type->ref = NULL;

    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

        /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
        tmbt:       tcc_error("too many basic types");
                st = u;
            } else {
                if (bt != -1 || (st != -1 && u != VT_INT))
                    goto tmbt;
                bt = u;
            }
            if (u != VT_INT)
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_ALIGNAS:
            /* C11 _Alignas(type-name) or _Alignas(constant-expression) */
            { int n;
              AttributeDef ad1;
              next();
              skip('(');
              memset(&ad1, 0, sizeof(AttributeDef));
              if (parse_btype(&type1, &ad1, 0)) {
                  type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                  if (ad1.a.aligned)
                      n = 1 << (ad1.a.aligned - 1);
                  else
                      type_size(&type1, &n);
              } else {
                  n = expr_const();
                  if (n < 0 || (n & (n - 1)) != 0)
                      tcc_error("alignment must be a positive power of two");
              }
              skip(')');
              ad->a.aligned = exact_log2p1(n);
            }
            continue;
        case TOK_LONG:
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                /* second 'long': promote to long long */
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type;
            }
            next();
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_COMPLEX:
            tcc_error("_Complex is not yet supported");
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else {
                u = VT_DOUBLE;
                goto basic_type;
            }
            next();
            break;
        case TOK_ENUM:
            struct_decl(&type1, VT_ENUM);
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
            struct_decl(&type1, VT_STRUCT);
            goto basic_type2;
        case TOK_UNION:
            struct_decl(&type1, VT_UNION);
            goto basic_type2;

        /* type modifiers */
        case TOK__Atomic:
            next();
            type->t = t;
            parse_btype_qualify(type, VT_ATOMIC);
            t = type->t;
            if (tok == '(') {
                /* _Atomic(type-name) form */
                parse_expr_type(&type1);
                /* remove all storage modifiers except typedef */
                type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
                if (type1.ref)
                    sym_to_attr(ad, type1.ref);
                goto basic_type2;
            }
            break;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            type->t = t;
            parse_btype_qualify(type, VT_CONSTANT);
            t = type->t;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            type->t = t;
            parse_btype_qualify(type, VT_VOLATILE);
            t = type->t;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN;
            next();
            typespec_found = 1;
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* these have no effect on the generated code: skip */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

        /* storage */
        case TOK_EXTERN:
            g = VT_EXTERN;
            goto storage;
        case TOK_STATIC:
            g = VT_STATIC;
            goto storage;
        case TOK_TYPEDEF:
            g = VT_TYPEDEF;
            goto storage;
        storage:
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
            t |= g;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;
        case TOK_NORETURN3:
            next();
            ad->f.func_noreturn = 1;
            break;
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                /* __attribute__((mode(...))) forces the basic type */
                u = ad->attr_mode -1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            }
            continue;
        /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (type1.ref)
                sym_to_attr(ad, type1.ref);
            goto basic_type2;
        case TOK_THREAD_LOCAL:
            tcc_error("_Thread_local is not implemented");
        default:
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;

            n = tok, next();
            if (tok == ':' && ignore_label) {
                /* ignore if it's a label */
                unget_tok(n);
                goto the_end;
            }

            /* apply the typedef'ed type, re-applying any qualifiers that
               were collected before the typedef name */
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
            if (t)
                parse_btype_qualify(type, t);
            t = type->t;
            /* get attributes from typedef */
            sym_to_attr(ad, s);
            typespec_found = 1;
            st = bt = -2; /* block further basic-type tokens */
            break;
        }
        type_found = 1;
    }
the_end:
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
    if (bt == VT_LONG)
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
#endif
    type->t = t;
    return type_found;
}
4858 /* convert a function parameter type (array to pointer and function to
4859 function pointer) */
4860 static inline void convert_parameter_type(CType *pt)
4862 /* remove const and volatile qualifiers (XXX: const could be used
4863 to indicate a const function parameter */
4864 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4865 /* array must be transformed to pointer according to ANSI C */
4866 pt->t &= ~(VT_ARRAY | VT_VLA);
4867 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4868 mk_pointer(pt);
/* parse '(' followed by one or more concatenated string literals; the
   closing ')' is left for the caller to consume */
ST_FUNC CString* parse_asm_str(void)
{
    skip('(');
    return parse_mult_str("string constant");
}
/* Parse an asm label and return the token */
static int asm_label_instr(void)
{
    int v;
    char *astr;

    next();
    astr = parse_asm_str()->data;
    skip(')');
#ifdef ASM_DEBUG
    printf("asm_alias: \"%s\"\n", astr);
#endif
    /* intern the label string as a constant token */
    v = tok_alloc_const(astr);
    return v;
}
/* Parse the "post" part of a declarator: a function parameter list
   '(...)' or an array declarator '[...]'.  'type' holds the type built
   so far and is rewritten in place; 'ad' collects attributes; 'storage'
   carries the declaration's storage bits (used to decide VLA vs constant
   array); 'td' is the TYPE_* bitmask controlling the accepted forms.
   Returns 0 when '(' turned out to start a nested declarator that the
   caller must parse itself, 1 otherwise. */
4894 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4896 int n, l, t1, arg_size, align;
4897 Sym **plast, *s, *first;
4898 AttributeDef ad1;
4899 CType pt;
4900 TokenString *vla_array_tok = NULL;
4901 int *vla_array_str = NULL;
4903 if (tok == '(') {
4904 /* function type, or recursive declarator (return if so) */
4905 next();
4906 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
4907 return 0;
/* decide prototype style: empty list, new-style (starts with a type),
   nested declarator, or old K&R identifier list */
4908 if (tok == ')')
4909 l = 0;
4910 else if (parse_btype(&pt, &ad1, 0))
4911 l = FUNC_NEW;
4912 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
4913 merge_attr (ad, &ad1);
4914 return 0;
4915 } else
4916 l = FUNC_OLD;
4918 first = NULL;
4919 plast = &first;
4920 arg_size = 0;
4921 ++local_scope;
4922 if (l) {
4923 for(;;) {
4924 /* read param name and compute offset */
4925 if (l != FUNC_OLD) {
4926 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4927 break;
4928 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
4929 if ((pt.t & VT_BTYPE) == VT_VOID)
4930 tcc_error("parameter declared as void");
4931 if (n == 0)
4932 n = SYM_FIELD;
4933 } else {
4934 n = tok;
4935 pt.t = VT_VOID; /* invalid type */
4936 pt.ref = NULL;
4937 next();
4939 if (n < TOK_UIDENT)
4940 expect("identifier");
4941 convert_parameter_type(&pt);
/* arg_size counts pointer-sized slots, rounded up per parameter */
4942 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4943 /* these symbols may be evaluated for VLArrays (see below, under
4944 nocode_wanted) which is why we push them here as normal symbols
4945 temporarily. Example: int func(int a, int b[++a]); */
4946 s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
4947 *plast = s;
4948 plast = &s->next;
4949 if (tok == ')')
4950 break;
4951 skip(',');
4952 if (l == FUNC_NEW && tok == TOK_DOTS) {
4953 l = FUNC_ELLIPSIS;
4954 next();
4955 break;
4957 if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
4958 tcc_error("invalid type");
4960 } else
4961 /* if no parameters, then old type prototype */
4962 l = FUNC_OLD;
4963 skip(')');
4964 /* remove parameter symbols from token table, keep on stack */
4965 if (first) {
4966 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
4967 for (s = first; s; s = s->next)
4968 s->v |= SYM_FIELD;
4970 --local_scope;
4971 /* NOTE: const is ignored in returned type as it has a special
4972 meaning in gcc / C++ */
4973 type->t &= ~VT_CONSTANT;
4974 /* some ancient pre-K&R C allows a function to return an array
4975 and the array brackets to be put after the arguments, such
4976 that "int c()[]" means something like "int[] c()" */
4977 if (tok == '[') {
4978 next();
4979 skip(']'); /* only handle simple "[]" */
4980 mk_pointer(type);
4982 /* we push a anonymous symbol which will contain the function prototype */
4983 ad->f.func_args = arg_size;
4984 ad->f.func_type = l;
4985 s = sym_push(SYM_FIELD, type, 0, 0);
4986 s->a = ad->a;
4987 s->f = ad->f;
4988 s->next = first;
4989 type->t = VT_FUNC;
4990 type->ref = s;
4991 } else if (tok == '[') {
4992 int saved_nocode_wanted = nocode_wanted;
4993 /* array definition */
4994 next();
4995 n = -1;
4996 t1 = 0;
4997 if (td & TYPE_PARAM) while (1) {
4998 /* XXX The optional type-quals and static should only be accepted
4999 in parameter decls. The '*' as well, and then even only
5000 in prototypes (not function defs). */
5001 switch (tok) {
5002 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5003 case TOK_CONST1:
5004 case TOK_VOLATILE1:
5005 case TOK_STATIC:
5006 case '*':
5007 next();
5008 continue;
5009 default:
5010 break;
5012 if (tok != ']') {
5013 /* Code generation is not done now but has to be done
5014 at start of function. Save code here for later use. */
5015 nocode_wanted = 1;
5016 skip_or_save_block(&vla_array_tok);
5017 unget_tok(0);
5018 vla_array_str = vla_array_tok->str;
5019 begin_macro(vla_array_tok, 2);
5020 next();
5021 gexpr();
5022 end_macro();
5023 next();
5024 goto check;
5026 break;
5028 } else if (tok != ']') {
5029 if (!local_stack || (storage & VT_STATIC))
5030 vpushi(expr_const());
5031 else {
5032 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5033 length must always be evaluated, even under nocode_wanted,
5034 so that its size slot is initialized (e.g. under sizeof
5035 or typeof). */
5036 nocode_wanted = 0;
5037 gexpr();
5039 check:
/* a compile-time constant size gives a fixed array; anything else
   makes it a VLA (n = 0, VT_VLA flag set) */
5040 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5041 n = vtop->c.i;
5042 if (n < 0)
5043 tcc_error("invalid array size");
5044 } else {
5045 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5046 tcc_error("size of variable length array should be an integer");
5047 n = 0;
5048 t1 = VT_VLA;
5051 skip(']');
5052 /* parse next post type */
5053 post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);
5055 if ((type->t & VT_BTYPE) == VT_FUNC)
5056 tcc_error("declaration of an array of functions");
5057 if ((type->t & VT_BTYPE) == VT_VOID
5058 || type_size(type, &align) < 0)
5059 tcc_error("declaration of an array of incomplete type elements");
5061 t1 |= type->t & VT_VLA;
5063 if (t1 & VT_VLA) {
5064 if (n < 0) {
5065 if (td & TYPE_NEST)
5066 tcc_error("need explicit inner array size in VLAs");
5068 else {
/* reserve a stack slot to hold the computed VLA size */
5069 loc -= type_size(&int_type, &align);
5070 loc &= -align;
5071 n = loc;
5073 vpush_type_size(type, &align);
5074 gen_op('*');
5075 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5076 vswap();
5077 vstore();
5080 if (n != -1)
5081 vpop();
5082 nocode_wanted = saved_nocode_wanted;
5084 /* we push an anonymous symbol which will contain the array
5085 element type */
5086 s = sym_push(SYM_FIELD, type, 0, n);
5087 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5088 type->ref = s;
5090 if (vla_array_str) {
5091 /* for function args, the top dimension is converted to pointer */
5092 if ((t1 & VT_VLA) && (td & TYPE_NEST))
5093 s->vla_array_str = vla_array_str;
5094 else
5095 tok_str_free_str(vla_array_str);
5098 return 1;
5101 /* Parse a type declarator (except basic type), and return the type
5102 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5103 expected. 'type' should contain the basic type. 'ad' is the
5104 attribute definition of the basic type. It can be modified by
5105 type_decl(). If this (possibly abstract) declarator is a pointer chain
5106 it returns the innermost pointed to type (equals *type, but is a different
5107 pointer), otherwise returns type itself, that's used for recursive calls. */
5108 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5110 CType *post, *ret;
5111 int qualifiers, storage;
5113 /* recursive type, remove storage bits first, apply them later again */
5114 storage = type->t & VT_STORAGE;
5115 type->t &= ~VT_STORAGE;
5116 post = ret = type;
/* consume '*' and any qualifiers after each one, building the
   pointer chain outside-in */
5118 while (tok == '*') {
5119 qualifiers = 0;
5120 redo:
5121 next();
5122 switch(tok) {
5123 case TOK__Atomic:
5124 qualifiers |= VT_ATOMIC;
5125 goto redo;
5126 case TOK_CONST1:
5127 case TOK_CONST2:
5128 case TOK_CONST3:
5129 qualifiers |= VT_CONSTANT;
5130 goto redo;
5131 case TOK_VOLATILE1:
5132 case TOK_VOLATILE2:
5133 case TOK_VOLATILE3:
5134 qualifiers |= VT_VOLATILE;
5135 goto redo;
5136 case TOK_RESTRICT1:
5137 case TOK_RESTRICT2:
5138 case TOK_RESTRICT3:
/* 'restrict' is accepted but has no effect on the type */
5139 goto redo;
5140 /* XXX: clarify attribute handling */
5141 case TOK_ATTRIBUTE1:
5142 case TOK_ATTRIBUTE2:
5143 parse_attribute(ad);
5144 break;
5146 mk_pointer(type);
5147 type->t |= qualifiers;
5148 if (ret == type)
5149 /* innermost pointed to type is the one for the first derivation */
5150 ret = pointed_type(type);
5153 if (tok == '(') {
5154 /* This is possibly a parameter type list for abstract declarators
5155 ('int ()'), use post_type for testing this. */
5156 if (!post_type(type, ad, 0, td)) {
5157 /* It's not, so it's a nested declarator, and the post operations
5158 apply to the innermost pointed to type (if any). */
5159 /* XXX: this is not correct to modify 'ad' at this point, but
5160 the syntax is not clear */
5161 parse_attribute(ad);
5162 post = type_decl(type, ad, v, td);
5163 skip(')');
5164 } else
5165 goto abstract;
5166 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5167 /* type identifier */
5168 *v = tok;
5169 next();
5170 } else {
5171 abstract:
5172 if (!(td & TYPE_ABSTRACT))
5173 expect("identifier");
5174 *v = 0;
/* apply array/function suffixes; storage bits are passed only when
   they apply to the outermost (non-nested) declarator */
5176 post_type(post, ad, post != ret ? 0 : storage,
5177 td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5178 parse_attribute(ad);
5179 type->t |= storage;
5180 return ret;
5183 /* indirection with full error checking and bound check */
5184 ST_FUNC void indir(void)
5186 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5187 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5188 return;
5189 expect("pointer");
5191 if (vtop->r & VT_LVAL)
5192 gv(RC_INT);
5193 vtop->type = *pointed_type(&vtop->type);
5194 /* Arrays and functions are never lvalues */
5195 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5196 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5197 vtop->r |= VT_LVAL;
5198 /* if bound checking, the referenced pointer must be checked */
5199 #ifdef CONFIG_TCC_BCHECK
5200 if (tcc_state->do_bounds_check)
5201 vtop->r |= VT_MUSTBOUND;
5202 #endif
5206 /* pass a parameter to a function and do type checking and casting */
5207 static void gfunc_param_typed(Sym *func, Sym *arg)
5209 int func_type;
5210 CType type;
5212 func_type = func->f.func_type;
5213 if (func_type == FUNC_OLD ||
5214 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5215 /* default casting : only need to convert float to double */
5216 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5217 gen_cast_s(VT_DOUBLE);
5218 } else if (vtop->type.t & VT_BITFIELD) {
5219 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5220 type.ref = vtop->type.ref;
5221 gen_cast(&type);
5222 } else if (vtop->r & VT_MUSTCAST) {
5223 force_charshort_cast();
5225 } else if (arg == NULL) {
5226 tcc_error("too many arguments to function");
5227 } else {
5228 type = arg->type;
5229 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5230 gen_assign_cast(&type);
5234 /* parse an expression and return its type without any side effect. */
5235 static void expr_type(CType *type, void (*expr_fn)(void))
5237 nocode_wanted++;
5238 expr_fn();
5239 *type = vtop->type;
5240 vpop();
5241 nocode_wanted--;
5244 /* parse an expression of the form '(type)' or '(expr)' and return its
5245 type */
5246 static void parse_expr_type(CType *type)
5248 int n;
5249 AttributeDef ad;
5251 skip('(');
5252 if (parse_btype(type, &ad, 0)) {
5253 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5254 } else {
5255 expr_type(type, gexpr);
5257 skip(')');
5260 static void parse_type(CType *type)
5262 AttributeDef ad;
5263 int n;
5265 if (!parse_btype(type, &ad, 0)) {
5266 expect("type");
5268 type_decl(type, &ad, &n, TYPE_ABSTRACT);
/* Parse the argument list of a builtin according to the format string
   'args': each character describes one argument --
     't'  type name (pushed on the value stack)
     'e'  plain expression (no cast applied)
     'v'  cast to void*            'V'  cast to const void*
     's'  cast to char*            'S'  cast to const char*
     'i'  cast to int              'l'  cast to size_t
   If 'nc' is set, no code is generated for the arguments. */
5271 static void parse_builtin_params(int nc, const char *args)
5273 char c, sep = '(';
5274 CType type;
5275 if (nc)
5276 nocode_wanted++;
5277 next();
/* an empty format still requires the opening parenthesis */
5278 if (*args == 0)
5279 skip(sep);
5280 while ((c = *args++)) {
5281 skip(sep);
5282 sep = ',';
5283 if (c == 't') {
5284 parse_type(&type);
5285 vpush(&type);
5286 continue;
5288 expr_eq();
5289 type.ref = NULL;
5290 type.t = 0;
5291 switch (c) {
5292 case 'e':
5293 continue;
5294 case 'V':
5295 type.t = VT_CONSTANT;
/* fall through: 'V' is a const-qualified 'v' */
5296 case 'v':
5297 type.t |= VT_VOID;
5298 mk_pointer (&type);
5299 break;
5300 case 'S':
5301 type.t = VT_CONSTANT;
/* fall through: 'S' is a const-qualified 's' */
5302 case 's':
5303 type.t |= char_type.t;
5304 mk_pointer (&type);
5305 break;
5306 case 'i':
5307 type.t = VT_INT;
5308 break;
5309 case 'l':
5310 type.t = VT_SIZE_T;
5311 break;
5312 default:
5313 break;
5315 gen_assign_cast(&type);
5317 skip(')');
5318 if (nc)
5319 nocode_wanted--;
/* Parse and generate a call for one of the __atomic_* builtins.
   'atok' is the builtin's token; its argument/return signature is
   looked up in the 'templates' table below and the call is lowered
   to a runtime helper named "<builtin>_<size>". */
5322 static void parse_atomic(int atok)
5324 int size, align, arg, t, save = 0;
5325 CType *atom, *atom_ptr, ct = {0};
5326 SValue store;
5327 char buf[40];
5328 static const char *const templates[] = {
5330 * Each entry consists of callback and function template.
5331 * The template represents argument types and return type.
5333 * ? void (return-only)
5334 * b bool
5335 * a atomic
5336 * A read-only atomic
5337 * p pointer to memory
5338 * v value
5339 * l load pointer
5340 * s save pointer
5341 * m memory model
5344 /* keep in order of appearance in tcctok.h: */
5345 /* __atomic_store */ "alm.?",
5346 /* __atomic_load */ "Asm.v",
5347 /* __atomic_exchange */ "alsm.v",
5348 /* __atomic_compare_exchange */ "aplbmm.b",
5349 /* __atomic_fetch_add */ "avm.v",
5350 /* __atomic_fetch_sub */ "avm.v",
5351 /* __atomic_fetch_or */ "avm.v",
5352 /* __atomic_fetch_xor */ "avm.v",
5353 /* __atomic_fetch_and */ "avm.v",
5354 /* __atomic_fetch_nand */ "avm.v",
5355 /* __atomic_and_fetch */ "avm.v",
5356 /* __atomic_sub_fetch */ "avm.v",
5357 /* __atomic_or_fetch */ "avm.v",
5358 /* __atomic_xor_fetch */ "avm.v",
5359 /* __atomic_and_fetch */ "avm.v",
5360 /* __atomic_nand_fetch */ "avm.v"
/* NOTE(review): the comment labels around entries 11..15 look out of
   sync with the "keep in order of appearance" list (add_fetch /
   and_fetch appear twice as labels) -- verify against tcctok.h */
5362 const char *template = templates[(atok - TOK___atomic_store)];
5364 atom = atom_ptr = NULL;
5365 size = 0; /* pacify compiler */
5366 next();
5367 skip('(');
/* parse each argument as dictated by the template characters */
5368 for (arg = 0;;) {
5369 expr_eq();
5370 switch (template[arg]) {
5371 case 'a':
5372 case 'A':
5373 atom_ptr = &vtop->type;
5374 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5375 expect("pointer");
5376 atom = pointed_type(atom_ptr);
5377 size = type_size(atom, &align);
5378 if (size > 8
5379 || (size & (size - 1))
5380 || (atok > TOK___atomic_compare_exchange
5381 && (0 == btype_size(atom->t & VT_BTYPE)
5382 || (atom->t & VT_BTYPE) == VT_PTR)))
5383 expect("integral or integer-sized pointer target type");
5384 /* GCC does not care either: */
5385 /* if (!(atom->t & VT_ATOMIC))
5386 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5387 break;
5389 case 'p':
5390 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5391 || type_size(pointed_type(&vtop->type), &align) != size)
5392 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5393 gen_assign_cast(atom_ptr);
5394 break;
5395 case 'v':
5396 gen_assign_cast(atom);
5397 break;
5398 case 'l':
5399 indir();
5400 gen_assign_cast(atom);
5401 break;
5402 case 's':
/* remember where to store the result after the call */
5403 save = 1;
5404 indir();
5405 store = *vtop;
5406 vpop();
5407 break;
5408 case 'm':
5409 gen_assign_cast(&int_type);
5410 break;
5411 case 'b':
5412 ct.t = VT_BOOL;
5413 gen_assign_cast(&ct);
5414 break;
5416 if ('.' == template[++arg])
5417 break;
5418 skip(',');
5420 skip(')');
/* determine the call's return type from the template tail */
5422 ct.t = VT_VOID;
5423 switch (template[arg + 1]) {
5424 case 'b':
5425 ct.t = VT_BOOL;
5426 break;
5427 case 'v':
5428 ct = *atom;
5429 break;
/* call the size-specialized runtime helper, e.g. "__atomic_load_4" */
5432 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5433 vpush_helper_func(tok_alloc_const(buf));
5434 vrott(arg - save + 1);
5435 gfunc_call(arg - save);
5437 vpush(&ct);
5438 PUT_R_RET(vtop, ct.t);
5439 t = ct.t & VT_BTYPE;
5440 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5441 #ifdef PROMOTE_RET
5442 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5443 #else
5444 vtop->type.t = VT_INT;
5445 #endif
5447 gen_cast(&ct);
/* for templates with a save pointer ('s'), write the result back */
5448 if (save) {
5449 vpush(&ct);
5450 *vtop = store;
5451 vswap();
5452 vstore();
5456 ST_FUNC void unary(void)
5458 int n, t, align, size, r;
5459 CType type;
5460 Sym *s;
5461 AttributeDef ad;
5463 /* generate line number info */
5464 if (debug_modes)
5465 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
5467 type.ref = NULL;
5468 /* XXX: GCC 2.95.3 does not generate a table although it should be
5469 better here */
5470 tok_next:
5471 switch(tok) {
5472 case TOK_EXTENSION:
5473 next();
5474 goto tok_next;
5475 case TOK_LCHAR:
5476 #ifdef TCC_TARGET_PE
5477 t = VT_SHORT|VT_UNSIGNED;
5478 goto push_tokc;
5479 #endif
5480 case TOK_CINT:
5481 case TOK_CCHAR:
5482 t = VT_INT;
5483 push_tokc:
5484 type.t = t;
5485 vsetc(&type, VT_CONST, &tokc);
5486 next();
5487 break;
5488 case TOK_CUINT:
5489 t = VT_INT | VT_UNSIGNED;
5490 goto push_tokc;
5491 case TOK_CLLONG:
5492 t = VT_LLONG;
5493 goto push_tokc;
5494 case TOK_CULLONG:
5495 t = VT_LLONG | VT_UNSIGNED;
5496 goto push_tokc;
5497 case TOK_CFLOAT:
5498 t = VT_FLOAT;
5499 goto push_tokc;
5500 case TOK_CDOUBLE:
5501 t = VT_DOUBLE;
5502 goto push_tokc;
5503 case TOK_CLDOUBLE:
5504 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5505 t = VT_DOUBLE | VT_LONG;
5506 #else
5507 t = VT_LDOUBLE;
5508 #endif
5509 goto push_tokc;
5510 case TOK_CLONG:
5511 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5512 goto push_tokc;
5513 case TOK_CULONG:
5514 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5515 goto push_tokc;
5516 case TOK___FUNCTION__:
5517 if (!gnu_ext)
5518 goto tok_identifier;
5519 /* fall thru */
5520 case TOK___FUNC__:
5521 tok = TOK_STR;
5522 cstr_reset(&tokcstr);
5523 cstr_cat(&tokcstr, funcname, 0);
5524 tokc.str.size = tokcstr.size;
5525 tokc.str.data = tokcstr.data;
5526 goto case_TOK_STR;
5527 case TOK_LSTR:
5528 #ifdef TCC_TARGET_PE
5529 t = VT_SHORT | VT_UNSIGNED;
5530 #else
5531 t = VT_INT;
5532 #endif
5533 goto str_init;
5534 case TOK_STR:
5535 case_TOK_STR:
5536 /* string parsing */
5537 t = char_type.t;
5538 str_init:
5539 if (tcc_state->warn_write_strings & WARN_ON)
5540 t |= VT_CONSTANT;
5541 type.t = t;
5542 mk_pointer(&type);
5543 type.t |= VT_ARRAY;
5544 memset(&ad, 0, sizeof(AttributeDef));
5545 ad.section = rodata_section;
5546 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5547 break;
5548 case TOK_SOTYPE:
5549 case '(':
5550 t = tok;
5551 next();
5552 /* cast ? */
5553 if (parse_btype(&type, &ad, 0)) {
5554 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5555 skip(')');
5556 /* check ISOC99 compound literal */
5557 if (tok == '{') {
5558 /* data is allocated locally by default */
5559 if (global_expr)
5560 r = VT_CONST;
5561 else
5562 r = VT_LOCAL;
5563 /* all except arrays are lvalues */
5564 if (!(type.t & VT_ARRAY))
5565 r |= VT_LVAL;
5566 memset(&ad, 0, sizeof(AttributeDef));
5567 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5568 } else if (t == TOK_SOTYPE) { /* from sizeof/alignof (...) */
5569 vpush(&type);
5570 return;
5571 } else {
5572 unary();
5573 gen_cast(&type);
5575 } else if (tok == '{') {
5576 int saved_nocode_wanted = nocode_wanted;
5577 if (CONST_WANTED && !NOEVAL_WANTED)
5578 expect("constant");
5579 if (0 == local_scope)
5580 tcc_error("statement expression outside of function");
5581 /* save all registers */
5582 save_regs(0);
5583 /* statement expression : we do not accept break/continue
5584 inside as GCC does. We do retain the nocode_wanted state,
5585 as statement expressions can't ever be entered from the
5586 outside, so any reactivation of code emission (from labels
5587 or loop heads) can be disabled again after the end of it. */
5588 block(STMT_EXPR);
5589 /* If the statement expr can be entered, then we retain the current
5590 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5591 If it can't be entered then the state is that from before the
5592 statement expression. */
5593 if (saved_nocode_wanted)
5594 nocode_wanted = saved_nocode_wanted;
5595 skip(')');
5596 } else {
5597 gexpr();
5598 skip(')');
5600 break;
5601 case '*':
5602 next();
5603 unary();
5604 indir();
5605 break;
5606 case '&':
5607 next();
5608 unary();
5609 /* functions names must be treated as function pointers,
5610 except for unary '&' and sizeof. Since we consider that
5611 functions are not lvalues, we only have to handle it
5612 there and in function calls. */
5613 /* arrays can also be used although they are not lvalues */
5614 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5615 !(vtop->type.t & (VT_ARRAY | VT_VLA)))
5616 test_lvalue();
5617 if (vtop->sym)
5618 vtop->sym->a.addrtaken = 1;
5619 mk_pointer(&vtop->type);
5620 gaddrof();
5621 break;
5622 case '!':
5623 next();
5624 unary();
5625 gen_test_zero(TOK_EQ);
5626 break;
5627 case '~':
5628 next();
5629 unary();
5630 vpushi(-1);
5631 gen_op('^');
5632 break;
5633 case '+':
5634 next();
5635 unary();
5636 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5637 tcc_error("pointer not accepted for unary plus");
5638 /* In order to force cast, we add zero, except for floating point
5639 where we really need an noop (otherwise -0.0 will be transformed
5640 into +0.0). */
5641 if (!is_float(vtop->type.t)) {
5642 vpushi(0);
5643 gen_op('+');
5645 break;
5646 case TOK_SIZEOF:
5647 case TOK_ALIGNOF1:
5648 case TOK_ALIGNOF2:
5649 case TOK_ALIGNOF3:
5650 t = tok;
5651 next();
5652 if (tok == '(')
5653 tok = TOK_SOTYPE;
5654 expr_type(&type, unary);
5655 if (t == TOK_SIZEOF) {
5656 vpush_type_size(&type, &align);
5657 gen_cast_s(VT_SIZE_T);
5658 } else {
5659 type_size(&type, &align);
5660 s = NULL;
5661 if (vtop[1].r & VT_SYM)
5662 s = vtop[1].sym; /* hack: accessing previous vtop */
5663 if (s && s->a.aligned)
5664 align = 1 << (s->a.aligned - 1);
5665 vpushs(align);
5667 break;
5669 case TOK_builtin_expect:
5670 /* __builtin_expect is a no-op for now */
5671 parse_builtin_params(0, "ee");
5672 vpop();
5673 break;
5674 case TOK_builtin_types_compatible_p:
5675 parse_builtin_params(0, "tt");
5676 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5677 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5678 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5679 vtop -= 2;
5680 vpushi(n);
5681 break;
5682 case TOK_builtin_choose_expr:
5684 int64_t c;
5685 next();
5686 skip('(');
5687 c = expr_const64();
5688 skip(',');
5689 if (!c) {
5690 nocode_wanted++;
5692 expr_eq();
5693 if (!c) {
5694 vpop();
5695 nocode_wanted--;
5697 skip(',');
5698 if (c) {
5699 nocode_wanted++;
5701 expr_eq();
5702 if (c) {
5703 vpop();
5704 nocode_wanted--;
5706 skip(')');
5708 break;
5709 case TOK_builtin_constant_p:
5710 parse_builtin_params(1, "e");
5711 n = 1;
5712 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
5713 || ((vtop->r & VT_SYM) && vtop->sym->a.addrtaken)
5715 n = 0;
5716 vtop--;
5717 vpushi(n);
5718 break;
5719 case TOK_builtin_unreachable:
5720 parse_builtin_params(0, ""); /* just skip '()' */
5721 type.t = VT_VOID;
5722 vpush(&type);
5723 CODE_OFF();
5724 break;
5725 case TOK_builtin_frame_address:
5726 case TOK_builtin_return_address:
5728 int tok1 = tok;
5729 int level;
5730 next();
5731 skip('(');
5732 level = expr_const();
5733 if (level < 0)
5734 tcc_error("%s only takes positive integers", get_tok_str(tok1, 0));
5735 skip(')');
5736 type.t = VT_VOID;
5737 mk_pointer(&type);
5738 vset(&type, VT_LOCAL, 0); /* local frame */
5739 while (level--) {
5740 #ifdef TCC_TARGET_RISCV64
5741 vpushi(2*PTR_SIZE);
5742 gen_op('-');
5743 #endif
5744 mk_pointer(&vtop->type);
5745 indir(); /* -> parent frame */
5747 if (tok1 == TOK_builtin_return_address) {
5748 // assume return address is just above frame pointer on stack
5749 #ifdef TCC_TARGET_ARM
5750 vpushi(2*PTR_SIZE);
5751 gen_op('+');
5752 #elif defined TCC_TARGET_RISCV64
5753 vpushi(PTR_SIZE);
5754 gen_op('-');
5755 #else
5756 vpushi(PTR_SIZE);
5757 gen_op('+');
5758 #endif
5759 mk_pointer(&vtop->type);
5760 indir();
5763 break;
5764 #ifdef TCC_TARGET_RISCV64
5765 case TOK_builtin_va_start:
5766 parse_builtin_params(0, "ee");
5767 r = vtop->r & VT_VALMASK;
5768 if (r == VT_LLOCAL)
5769 r = VT_LOCAL;
5770 if (r != VT_LOCAL)
5771 tcc_error("__builtin_va_start expects a local variable");
5772 gen_va_start();
5773 vstore();
5774 break;
5775 #endif
5776 #ifdef TCC_TARGET_X86_64
5777 #ifdef TCC_TARGET_PE
5778 case TOK_builtin_va_start:
5779 parse_builtin_params(0, "ee");
5780 r = vtop->r & VT_VALMASK;
5781 if (r == VT_LLOCAL)
5782 r = VT_LOCAL;
5783 if (r != VT_LOCAL)
5784 tcc_error("__builtin_va_start expects a local variable");
5785 vtop->r = r;
5786 vtop->type = char_pointer_type;
5787 vtop->c.i += 8;
5788 vstore();
5789 break;
5790 #else
5791 case TOK_builtin_va_arg_types:
5792 parse_builtin_params(0, "t");
5793 vpushi(classify_x86_64_va_arg(&vtop->type));
5794 vswap();
5795 vpop();
5796 break;
5797 #endif
5798 #endif
5800 #ifdef TCC_TARGET_ARM64
5801 case TOK_builtin_va_start: {
5802 parse_builtin_params(0, "ee");
5803 //xx check types
5804 gen_va_start();
5805 vpushi(0);
5806 vtop->type.t = VT_VOID;
5807 break;
5809 case TOK_builtin_va_arg: {
5810 parse_builtin_params(0, "et");
5811 type = vtop->type;
5812 vpop();
5813 //xx check types
5814 gen_va_arg(&type);
5815 vtop->type = type;
5816 break;
5818 case TOK___arm64_clear_cache: {
5819 parse_builtin_params(0, "ee");
5820 gen_clear_cache();
5821 vpushi(0);
5822 vtop->type.t = VT_VOID;
5823 break;
5825 #endif
5827 /* atomic operations */
5828 case TOK___atomic_store:
5829 case TOK___atomic_load:
5830 case TOK___atomic_exchange:
5831 case TOK___atomic_compare_exchange:
5832 case TOK___atomic_fetch_add:
5833 case TOK___atomic_fetch_sub:
5834 case TOK___atomic_fetch_or:
5835 case TOK___atomic_fetch_xor:
5836 case TOK___atomic_fetch_and:
5837 case TOK___atomic_fetch_nand:
5838 case TOK___atomic_add_fetch:
5839 case TOK___atomic_sub_fetch:
5840 case TOK___atomic_or_fetch:
5841 case TOK___atomic_xor_fetch:
5842 case TOK___atomic_and_fetch:
5843 case TOK___atomic_nand_fetch:
5844 parse_atomic(tok);
5845 break;
5847 /* pre operations */
5848 case TOK_INC:
5849 case TOK_DEC:
5850 t = tok;
5851 next();
5852 unary();
5853 inc(0, t);
5854 break;
5855 case '-':
5856 next();
5857 unary();
5858 if (is_float(vtop->type.t)) {
5859 gen_opif(TOK_NEG);
5860 } else {
5861 vpushi(0);
5862 vswap();
5863 gen_op('-');
5865 break;
5866 case TOK_LAND:
5867 if (!gnu_ext)
5868 goto tok_identifier;
5869 next();
5870 /* allow to take the address of a label */
5871 if (tok < TOK_UIDENT)
5872 expect("label identifier");
5873 s = label_find(tok);
5874 if (!s) {
5875 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5876 } else {
5877 if (s->r == LABEL_DECLARED)
5878 s->r = LABEL_FORWARD;
5880 if ((s->type.t & VT_BTYPE) != VT_PTR) {
5881 s->type.t = VT_VOID;
5882 mk_pointer(&s->type);
5883 s->type.t |= VT_STATIC;
5885 vpushsym(&s->type, s);
5886 next();
5887 break;
5889 case TOK_GENERIC:
5891 CType controlling_type;
5892 int has_default = 0;
5893 int has_match = 0;
5894 int learn = 0;
5895 TokenString *str = NULL;
5896 int saved_nocode_wanted = nocode_wanted;
5897 nocode_wanted &= ~CONST_WANTED_MASK;
5899 next();
5900 skip('(');
5901 expr_type(&controlling_type, expr_eq);
5902 convert_parameter_type (&controlling_type);
5904 nocode_wanted = saved_nocode_wanted;
5906 for (;;) {
5907 learn = 0;
5908 skip(',');
5909 if (tok == TOK_DEFAULT) {
5910 if (has_default)
5911 tcc_error("too many 'default'");
5912 has_default = 1;
5913 if (!has_match)
5914 learn = 1;
5915 next();
5916 } else {
5917 AttributeDef ad_tmp;
5918 int itmp;
5919 CType cur_type;
5921 parse_btype(&cur_type, &ad_tmp, 0);
5922 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5923 if (compare_types(&controlling_type, &cur_type, 0)) {
5924 if (has_match) {
5925 tcc_error("type match twice");
5927 has_match = 1;
5928 learn = 1;
5931 skip(':');
5932 if (learn) {
5933 if (str)
5934 tok_str_free(str);
5935 skip_or_save_block(&str);
5936 } else {
5937 skip_or_save_block(NULL);
5939 if (tok == ')')
5940 break;
5942 if (!str) {
5943 char buf[60];
5944 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5945 tcc_error("type '%s' does not match any association", buf);
5947 begin_macro(str, 1);
5948 next();
5949 expr_eq();
5950 if (tok != TOK_EOF)
5951 expect(",");
5952 end_macro();
5953 next();
5954 break;
5956 // special qnan , snan and infinity values
5957 case TOK___NAN__:
5958 n = 0x7fc00000;
5959 special_math_val:
5960 vpushi(n);
5961 vtop->type.t = VT_FLOAT;
5962 next();
5963 break;
5964 case TOK___SNAN__:
5965 n = 0x7f800001;
5966 goto special_math_val;
5967 case TOK___INF__:
5968 n = 0x7f800000;
5969 goto special_math_val;
5971 default:
5972 tok_identifier:
5973 if (tok < TOK_UIDENT)
5974 tcc_error("expression expected before '%s'", get_tok_str(tok, &tokc));
5975 t = tok;
5976 next();
5977 s = sym_find(t);
5978 if (!s || IS_ASM_SYM(s)) {
5979 const char *name = get_tok_str(t, NULL);
5980 if (tok != '(')
5981 tcc_error("'%s' undeclared", name);
5982 /* for simple function calls, we tolerate undeclared
5983 external reference to int() function */
5984 tcc_warning_c(warn_implicit_function_declaration)(
5985 "implicit declaration of function '%s'", name);
5986 s = external_global_sym(t, &func_old_type);
5989 r = s->r;
5990 /* A symbol that has a register is a local register variable,
5991 which starts out as VT_LOCAL value. */
5992 if ((r & VT_VALMASK) < VT_CONST)
5993 r = (r & ~VT_VALMASK) | VT_LOCAL;
5995 vset(&s->type, r, s->c);
5996 /* Point to s as backpointer (even without r&VT_SYM).
5997 Will be used by at least the x86 inline asm parser for
5998 regvars. */
5999 vtop->sym = s;
6001 if (r & VT_SYM) {
6002 vtop->c.i = 0;
6003 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
6004 vtop->c.i = s->enum_val;
6006 break;
6009 /* post operations */
6010 while (1) {
6011 if (tok == TOK_INC || tok == TOK_DEC) {
6012 inc(1, tok);
6013 next();
6014 } else if (tok == '.' || tok == TOK_ARROW) {
6015 int qualifiers, cumofs;
6016 /* field */
6017 if (tok == TOK_ARROW)
6018 indir();
6019 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6020 test_lvalue();
6021 /* expect pointer on structure */
6022 next();
6023 s = find_field(&vtop->type, tok, &cumofs);
6024 /* add field offset to pointer */
6025 gaddrof();
6026 vtop->type = char_pointer_type; /* change type to 'char *' */
6027 vpushi(cumofs);
6028 gen_op('+');
6029 /* change type to field type, and set to lvalue */
6030 vtop->type = s->type;
6031 vtop->type.t |= qualifiers;
6032 /* an array is never an lvalue */
6033 if (!(vtop->type.t & VT_ARRAY)) {
6034 vtop->r |= VT_LVAL;
6035 #ifdef CONFIG_TCC_BCHECK
6036 /* if bound checking, the referenced pointer must be checked */
6037 if (tcc_state->do_bounds_check)
6038 vtop->r |= VT_MUSTBOUND;
6039 #endif
6041 next();
6042 } else if (tok == '[') {
6043 next();
6044 gexpr();
6045 gen_op('+');
6046 indir();
6047 skip(']');
6048 } else if (tok == '(') {
6049 SValue ret;
6050 Sym *sa;
6051 int nb_args, ret_nregs, ret_align, regsize, variadic;
6053 /* function call */
6054 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6055 /* pointer test (no array accepted) */
6056 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6057 vtop->type = *pointed_type(&vtop->type);
6058 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6059 goto error_func;
6060 } else {
6061 error_func:
6062 expect("function pointer");
6064 } else {
6065 vtop->r &= ~VT_LVAL; /* no lvalue */
6067 /* get return type */
6068 s = vtop->type.ref;
6069 next();
6070 sa = s->next; /* first parameter */
6071 nb_args = regsize = 0;
6072 ret.r2 = VT_CONST;
6073 /* compute first implicit argument if a structure is returned */
6074 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6075 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6076 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6077 &ret_align, &regsize);
6078 if (ret_nregs <= 0) {
6079 /* get some space for the returned structure */
6080 size = type_size(&s->type, &align);
6081 #ifdef TCC_TARGET_ARM64
6082 /* On arm64, a small struct is return in registers.
6083 It is much easier to write it to memory if we know
6084 that we are allowed to write some extra bytes, so
6085 round the allocated space up to a power of 2: */
6086 if (size < 16)
6087 while (size & (size - 1))
6088 size = (size | (size - 1)) + 1;
6089 #endif
6090 loc = (loc - size) & -align;
6091 ret.type = s->type;
6092 ret.r = VT_LOCAL | VT_LVAL;
6093 /* pass it as 'int' to avoid structure arg passing
6094 problems */
6095 vseti(VT_LOCAL, loc);
6096 #ifdef CONFIG_TCC_BCHECK
6097 if (tcc_state->do_bounds_check)
6098 --loc;
6099 #endif
6100 ret.c = vtop->c;
6101 if (ret_nregs < 0)
6102 vtop--;
6103 else
6104 nb_args++;
6106 } else {
6107 ret_nregs = 1;
6108 ret.type = s->type;
6111 if (ret_nregs > 0) {
6112 /* return in register */
6113 ret.c.i = 0;
6114 PUT_R_RET(&ret, ret.type.t);
6116 if (tok != ')') {
6117 for(;;) {
6118 expr_eq();
6119 gfunc_param_typed(s, sa);
6120 nb_args++;
6121 if (sa)
6122 sa = sa->next;
6123 if (tok == ')')
6124 break;
6125 skip(',');
6128 if (sa)
6129 tcc_error("too few arguments to function");
6130 skip(')');
6131 gfunc_call(nb_args);
6133 if (ret_nregs < 0) {
6134 vsetc(&ret.type, ret.r, &ret.c);
6135 #ifdef TCC_TARGET_RISCV64
6136 arch_transfer_ret_regs(1);
6137 #endif
6138 } else {
6139 /* return value */
6140 n = ret_nregs;
6141 while (n > 1) {
6142 int rc = reg_classes[ret.r] & ~(RC_INT | RC_FLOAT);
6143 /* We assume that when a structure is returned in multiple
6144 registers, their classes are consecutive values of the
6145 suite s(n) = 2^n */
6146 rc <<= --n;
6147 for (r = 0; r < NB_REGS; ++r)
6148 if (reg_classes[r] & rc)
6149 break;
6150 vsetc(&ret.type, r, &ret.c);
6152 vsetc(&ret.type, ret.r, &ret.c);
6153 vtop->r2 = ret.r2;
6155 /* handle packed struct return */
6156 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6157 int addr, offset;
6159 size = type_size(&s->type, &align);
6160 /* We're writing whole regs often, make sure there's enough
6161 space. Assume register size is power of 2. */
6162 size = (size + regsize - 1) & -regsize;
6163 if (ret_align > align)
6164 align = ret_align;
6165 loc = (loc - size) & -align;
6166 addr = loc;
6167 offset = 0;
6168 for (;;) {
6169 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6170 vswap();
6171 vstore();
6172 vtop--;
6173 if (--ret_nregs == 0)
6174 break;
6175 offset += regsize;
6177 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
        /* Promote char/short return values. This matters only
           when calling functions that were not compiled by TCC and
           only on some architectures. For those where it doesn't
           matter we expect things to be already promoted to int,
           but not larger. */
6185 t = s->type.t & VT_BTYPE;
6186 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6187 #ifdef PROMOTE_RET
6188 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6189 #else
6190 vtop->type.t = VT_INT;
6191 #endif
6194 if (s->f.func_noreturn) {
6195 if (debug_modes)
6196 tcc_tcov_block_end(tcc_state, -1);
6197 CODE_OFF();
6199 } else {
6200 break;
6205 #ifndef precedence_parser /* original top-down parser */
6207 static void expr_prod(void)
6209 int t;
6211 unary();
6212 while ((t = tok) == '*' || t == '/' || t == '%') {
6213 next();
6214 unary();
6215 gen_op(t);
6219 static void expr_sum(void)
6221 int t;
6223 expr_prod();
6224 while ((t = tok) == '+' || t == '-') {
6225 next();
6226 expr_prod();
6227 gen_op(t);
6231 static void expr_shift(void)
6233 int t;
6235 expr_sum();
6236 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6237 next();
6238 expr_sum();
6239 gen_op(t);
6243 static void expr_cmp(void)
6245 int t;
6247 expr_shift();
6248 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6249 t == TOK_ULT || t == TOK_UGE) {
6250 next();
6251 expr_shift();
6252 gen_op(t);
6256 static void expr_cmpeq(void)
6258 int t;
6260 expr_cmp();
6261 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6262 next();
6263 expr_cmp();
6264 gen_op(t);
6268 static void expr_and(void)
6270 expr_cmpeq();
6271 while (tok == '&') {
6272 next();
6273 expr_cmpeq();
6274 gen_op('&');
6278 static void expr_xor(void)
6280 expr_and();
6281 while (tok == '^') {
6282 next();
6283 expr_and();
6284 gen_op('^');
6288 static void expr_or(void)
6290 expr_xor();
6291 while (tok == '|') {
6292 next();
6293 expr_xor();
6294 gen_op('|');
6298 static void expr_landor(int op);
/* parse a logical-AND expression; the whole '&&' chain, including its
   short-circuit code generation, is handled by expr_landor() */
static void expr_land(void)
{
    expr_or();
    if (tok == TOK_LAND)
        expr_landor(tok);
}
/* parse a logical-OR expression; the whole '||' chain, including its
   short-circuit code generation, is handled by expr_landor() */
static void expr_lor(void)
{
    expr_land();
    if (tok == TOK_LOR)
        expr_landor(tok);
}
6314 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6315 #else /* defined precedence_parser */
6316 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6317 # define expr_lor() unary(), expr_infix(1)
/* return the binding power of binary operator 'tok' for the
   precedence-climbing parser (higher binds tighter); 0 means
   'tok' is not a binary operator */
static int precedence(int tok)
{
    switch (tok) {
        case TOK_LOR: return 1;
        case TOK_LAND: return 2;
        case '|': return 3;
        case '^': return 4;
        case '&': return 5;
        case TOK_EQ: case TOK_NE: return 6;
        /* the relational operators come in two token ranges; the
           out-of-range ones are redirected here via 'goto relat' */
 relat: case TOK_ULT: case TOK_UGE: return 7;
        case TOK_SHL: case TOK_SAR: return 8;
        case '+': case '-': return 9;
        case '*': case '/': case '%': return 10;
        default:
            if (tok >= TOK_ULE && tok <= TOK_GT)
                goto relat;
            return 0;
    }
}
/* precedence lookup table for single-byte token values, filled once by
   init_prec(); multi-byte tokens fall back to 0 via the macro below */
static unsigned char prec[256];
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
6345 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6347 static void expr_landor(int op);
/* precedence-climbing parser for binary operators: parse every operator
   whose precedence is >= 'p'. The left operand is already on vtop. */
static void expr_infix(int p)
{
    int t = tok, p2;
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            /* short-circuit operators need special code generation */
            expr_landor(t);
        } else {
            next();
            unary();
            /* right operand binds tighter: recurse one level up */
            if (precedence(tok) > p2)
                expr_infix(p2 + 1);
            gen_op(t);
        }
        t = tok;
    }
}
6365 #endif
/* Assuming vtop is a value used in a conditional context
   (i.e. compared with zero) return 0 if it's false, 1 if
   true and -1 if it can't be statically determined. */
static int condition_3way(void)
{
    int c = -1;
    /* only fold plain constants; a weak symbol's address is not known
       at compile time (it may resolve to NULL), so it is excluded */
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
        (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
        vdup();
        gen_cast_s(VT_BOOL);
        c = vtop->c.i;
        vpop();
    }
    return c;
}
/* generate code for a chain of '&&' or '||' operators. 'op' is
   TOK_LAND or TOK_LOR; the first operand is already on vtop.
   i is the value that keeps evaluation going (1 for &&, 0 for ||);
   cc stays set while all operands so far were compile-time constants;
   f is set once an operand statically decided the result, after which
   the remaining operands are parsed under nocode_wanted. */
static void expr_landor(int op)
{
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
    for(;;) {
        c = f ? i : condition_3way();
        if (c < 0)
            save_regs(1), cc = 0; /* runtime value: regs must be in sync at join point */
        else if (c != i)
            nocode_wanted++, f = 1; /* result decided: suppress further code */
        if (tok != op)
            break;
        if (c < 0)
            t = gvtst(i, t); /* extend the short-circuit jump chain */
        else
            vpop();
        next();
        expr_landor_next(op);
    }
    if (cc || f) {
        /* result is known at compile time */
        vpop();
        vpushi(i ^ f);
        gsym(t);
        nocode_wanted -= f;
    } else {
        /* runtime result: leave vtop as VT_CMP with the jump chain attached */
        gvtst_set(i, t);
    }
}
6411 static int is_cond_bool(SValue *sv)
6413 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6414 && (sv->type.t & VT_BTYPE) == VT_INT)
6415 return (unsigned)sv->c.i < 2;
6416 if (sv->r == VT_CMP)
6417 return 1;
6418 return 0;
/* parse a conditional expression 'a ? b : c', including the GNU
   extension 'a ?: c'. c is the 3-way constant status of the condition
   (0/1/-1), g is set for the GNU two-operand form. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way();
        g = (tok == ':' && gnu_ext);
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0); /* jump to ':' branch if condition false */
            } else {
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup(); /* condition value is also the result of 'a ?:' */
            tt = gvtst(0, 0);
        }

        if (c == 0)
            nocode_wanted++;
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0); /* skip over the ':' branch */
            gsym(tt);
        } else
            u = 0;

        if (c == 0)
            nocode_wanted--;
        if (c == 1)
            nocode_wanted++;
        skip(':');
        expr_cond();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            gen_cast(&type);
            //  tcc_warning("two conditions expr_cond");
            return;
        }

        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        if (c == 1)
            nocode_wanted--;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* both branches must deliver the result in the same register */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir();
    }
}
6550 static void expr_eq(void)
6552 int t;
6554 expr_cond();
6555 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6556 test_lvalue();
6557 next();
6558 if (t == '=') {
6559 expr_eq();
6560 } else {
6561 vdup();
6562 expr_eq();
6563 gen_op(TOK_ASSIGN_OP(t));
6565 vstore();
6569 ST_FUNC void gexpr(void)
6571 expr_eq();
6572 if (tok == ',') {
6573 do {
6574 vpop();
6575 next();
6576 expr_eq();
6577 } while (tok == ',');
6579 /* convert array & function to pointer */
6580 convert_parameter_type(&vtop->type);
6582 /* make builtin_constant_p((1,2)) return 0 (like on gcc) */
6583 if ((vtop->r & VT_VALMASK) == VT_CONST && nocode_wanted && !CONST_WANTED)
6584 gv(RC_TYPE(vtop->type.t));
/* parse a constant expression and return value in vtop.  */
static void expr_const1(void)
{
    /* temporarily mark parsing as constant-expression context */
    nocode_wanted += CONST_WANTED_BIT;
    expr_cond();
    nocode_wanted -= CONST_WANTED_BIT;
}
6596 /* parse an integer constant and return its value. */
6597 static inline int64_t expr_const64(void)
6599 int64_t c;
6600 expr_const1();
6601 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
6602 expect("constant expression");
6603 c = vtop->c.i;
6604 vpop();
6605 return c;
6608 /* parse an integer constant and return its value.
6609 Complain if it doesn't fit 32bit (signed or unsigned). */
6610 ST_FUNC int expr_const(void)
6612 int c;
6613 int64_t wc = expr_const64();
6614 c = wc;
6615 if (c != wc && (unsigned)c != wc)
6616 tcc_error("constant exceeds 32 bit");
6617 return c;
6620 /* ------------------------------------------------------------------------- */
6621 /* return from function */
6623 #ifndef TCC_TARGET_ARM64
/* generate code to return the value on vtop from the current function.
   Scalars go to the return register(s); struct returns use either the
   implicit result pointer, multiple registers, or (RISCV) an arch hook,
   as decided by gfunc_sret(). */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc, n;
            size = type_size(func_type,&align);
            /* if the struct is misaligned for whole-register loads,
               bounce it through a suitably aligned stack slot first */
            if ((align & (ret_align - 1))
                && ((vtop->r & VT_VALMASK) < VT_CONST /* pointer to struct */
                    || (vtop->c.i & (ret_align - 1))
                    )) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            //printf("struct return: n:%d t:%02x rc:%02x\n", ret_nregs, ret_type.t, rc);
            for (n = ret_nregs; --n > 0;) {
                vdup();
                gv(rc);
                vswap();
                incr_offset(regsize);
                /* We assume that when a structure is returned in multiple
                   registers, their classes are consecutive values of the
                   suite s(n) = 2^n */
                rc <<= 1;
            }
            gv(rc);
            vtop -= ret_nregs - 1;
        }
    } else {
        /* scalar return: load into the return register class */
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
6683 #endif
6685 static void check_func_return(void)
6687 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6688 return;
6689 if (!strcmp (funcname, "main")
6690 && (func_vt.t & VT_BTYPE) == VT_INT) {
6691 /* main returns 0 by default */
6692 vpushi(0);
6693 gen_assign_cast(&func_vt);
6694 gfunc_return(&func_vt);
6695 } else {
6696 tcc_warning("function might return no value: '%s'", funcname);
6700 /* ------------------------------------------------------------------------- */
6701 /* switch/case */
6703 static int case_cmpi(const void *pa, const void *pb)
6705 int64_t a = (*(struct case_t**) pa)->v1;
6706 int64_t b = (*(struct case_t**) pb)->v1;
6707 return a < b ? -1 : a > b;
6710 static int case_cmpu(const void *pa, const void *pb)
6712 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6713 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6714 return a < b ? -1 : a > b;
/* emit a test on vtop and make the taken branch jump to address 'a';
   't' is an existing jump chain passed through to gvtst() */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
/* generate the dispatch code for a sorted array of switch cases.
   The switch value is on vtop (and stays there). Large sets use a
   binary search on the range bounds, small sets a linear scan.
   *bsym collects the jump chain taken when no case matches. */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0);
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e;
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* single-value case: one equality test */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            /* case range (GNU extension): v1 <= x <= v2 */
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    *bsym = gjmp(*bsym);
}
6778 static void end_switch(void)
6780 struct switch_t *sw = cur_switch;
6781 dynarray_reset(&sw->p, &sw->n);
6782 cur_switch = sw->prev;
6783 tcc_free(sw);
6786 /* ------------------------------------------------------------------------- */
6787 /* __attribute__((cleanup(fn))) */
/* emit calls to the __attribute__((cleanup)) handlers registered in the
   current scope's cleanup chain, from innermost down to (not including)
   'stop'. Each chain entry links the cleanup function (cls->next) and
   the variable it guards (cls->prev_tok). */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;
        Sym *vs = cls->prev_tok;

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        mk_pointer(&vtop->type);
        gaddrof();
        gfunc_call(1); /* fn(&var) */
    }
}
/* for a backward goto: run the cleanups of every scope left between
   the goto and the label. 'cleanupstate' is the cleanup chain that was
   active at the label definition. */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    /* walk the deeper chain up until both depths match (empty bodies) */
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
        ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
        ;
    /* then advance both in lock-step until they meet */
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
        ;

    try_call_scope_cleanup(cc);
}
/* call 'func' for each __attribute__((cleanup(func))) */
/* at the end of scope 'o': re-route every pending forward goto that
   crosses this scope so that it runs the scope's cleanups first */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            if (!jmp)
                jmp = gjmp(0); /* skip cleanup code in normal fall-through */
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0); /* continue the goto after the cleanups */
            if (!o->cl.n)
                goto remove_pending;
            g->c = o->cl.n;
            pg = &g->prev;
        } else {
    remove_pending:
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    try_call_scope_cleanup(o->cl.s);
}
6853 /* ------------------------------------------------------------------------- */
6854 /* VLA */
/* restore the stack pointer from the value saved at stack offset 'loc'
   when VLAs were allocated; 0 means nothing to restore */
static void vla_restore(int loc)
{
    if (loc)
        gen_vla_sp_restore(loc);
}
6862 static void vla_leave(struct scope *o)
6864 struct scope *c = cur_scope, *v = NULL;
6865 for (; c != o && c; c = c->prev)
6866 if (c->vla.num)
6867 v = c;
6868 if (v)
6869 vla_restore(v->vla.locorig);
6872 /* ------------------------------------------------------------------------- */
6873 /* local scopes */
/* enter a new local scope: *o becomes the new cur_scope, inheriting
   the previous scope's state (cleanup chain, break/continue targets) */
static void new_scope(struct scope *o)
{
    /* copy and link previous scope */
    *o = *cur_scope;
    o->prev = cur_scope;
    cur_scope = o;
    cur_scope->vla.num = 0; /* no VLAs allocated in this scope yet */

    /* record local declaration stack position */
    o->lstk = local_stack;
    o->llstk = local_label_stack;
    ++local_scope;
}
/* leave scope 'o': restore VLAs, run pending cleanups, and pop the
   labels and symbols declared inside it */
static void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;
}
6913 /* leave a scope via break/continue(/goto) */
6914 static void leave_scope(struct scope *o)
6916 if (!o)
6917 return;
6918 try_call_scope_cleanup(o->cl.s);
6919 vla_leave(o);
/* short versions for scopes with 'if/do/while/switch' which can
   declare only types (of struct/union/enum) */
static void new_scope_s(struct scope *o)
{
    o->lstk = local_stack;
    ++local_scope;
}
/* counterpart of new_scope_s(): pop the type symbols declared since */
static void prev_scope_s(struct scope *o)
{
    sym_pop(&local_stack, o->lstk, 0);
    --local_scope;
}
6936 /* ------------------------------------------------------------------------- */
6937 /* call block from 'for do while' loops */
/* parse a loop body: temporarily install 'bsym'/'csym' as the
   break/continue jump chains of the current scope, then restore them.
   csym == NULL keeps the enclosing continue target (used by switch). */
static void lblock(int *bsym, int *csym)
{
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
    if (csym) {
        co->csym = csym;
        loop_scope = co;
    }
    co->bsym = bsym;
    block(0);
    co->bsym = b;
    if (csym) {
        co->csym = c;
        loop_scope = lo;
    }
}
/* parse and generate code for one statement. 'flags' may contain
   STMT_EXPR (statement expression: keep the last value on vtop) and
   STMT_COMPOUND (we are directly inside a compound statement). */
static void block(int flags)
{
    int a, b, c, d, e, t;
    struct scope o;
    Sym *s;

    if (flags & STMT_EXPR) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok;
    /* If the token carries a value, next() might destroy it. Only with
       invalid code such as f(){"123"4;} */
    if (TOK_HAS_VALUE(t))
        goto expr;
    next();

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);

    if (t == TOK_IF) {
        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* jump over 'then' branch when false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }
        prev_scope_s(&o);

    } else if (t == TOK_WHILE) {
        new_scope_s(&o);
        d = gind(); /* loop head: condition re-test address */
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* break chain starts with the failed test */
        b = 0;           /* continue chain */
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == '{') {
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                if (flags & STMT_EXPR)
                    vpop(); /* discard value of all but the last statement */
                block(flags | STMT_COMPOUND);
            }
        }

        prev_scope(&o, flags & STMT_EXPR);
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
        if (local_scope)
            next();
        else if (!nocode_wanted)
            check_func_return(); /* end of function body */

    } else if (t == TOK_RETURN) {
        b = (func_vt.t & VT_BTYPE) != VT_VOID;
        if (tok != ';') {
            gexpr();
            if (b) {
                gen_assign_cast(&func_vt);
            } else {
                if (vtop->type.t != VT_VOID)
                    tcc_warning("void function returns a value");
                vtop--;
            }
        } else if (b) {
            tcc_warning("'return' with no value");
            b = 0;
        }
        leave_scope(root_scope);
        if (b)
            gfunc_return(&func_vt);
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        if (debug_modes)
            tcc_tcov_block_end (tcc_state, -1);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (cur_switch && cur_scope->bsym == cur_switch->bsym)
            leave_scope(cur_switch->scope);
        else
            leave_scope(loop_scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        new_scope(&o);

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl(VT_JMP)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind(); /* c: condition address, d: continue target */
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment part: placed after the body, so jump around it */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        new_scope_s(&o);
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b); /* continue lands on the condition test */
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0);
        gsym_addr(c, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == TOK_SWITCH) {
        struct switch_t *sw;

        sw = tcc_mallocz(sizeof *sw);
        sw->bsym = &a;
        sw->scope = cur_scope;
        sw->prev = cur_switch;
        sw->nocode_wanted = nocode_wanted;
        cur_switch = sw;

        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        sw->sv = *vtop--; /* save switch value */
        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);
        prev_scope_s(&o);

        if (sw->nocode_wanted)
            goto skip_switch;
        if (sw->sv.type.t & VT_UNSIGNED)
            qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
        else
            qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
        /* after sorting, adjacent ranges may only overlap if duplicated */
        for (b = 1; b < sw->n; b++)
            if (sw->sv.type.t & VT_UNSIGNED
                ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
                : sw->p[b - 1]->v2 >= sw->p[b]->v1)
                tcc_error("duplicate case value");
        vpushv(&sw->sv);
        gv(RC_INT);
        d = 0, gcase(sw->p, sw->n, &d);
        vpop();
        if (sw->def_sym)
            gsym_addr(d, sw->def_sym);
        else
            gsym(d);
    skip_switch:
        /* break label */
        gsym(a);
        end_switch();

    } else if (t == TOK_CASE) {
        struct case_t *cr;
        if (!cur_switch)
            expect("switch");
        cr = tcc_malloc(sizeof(struct case_t));
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            next();
            cr->v2 = expr_const64();
            if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
                || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
                tcc_warning("empty case range");
        }
        /* case and default are unreachable from a switch under nocode_wanted */
        if (!cur_switch->nocode_wanted)
            cr->sym = gind();
        skip(':');
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = cur_switch->nocode_wanted ? 1 : gind();
        skip(':');
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        vla_restore(cur_scope->vla.locorig);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                /* backward goto: run cleanups of the scopes being left */
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
              {
                /* Accept attributes after labels (e.g. 'unused') */
                AttributeDef ad_tmp;
                parse_attribute(&ad_tmp);
              }
            if (debug_modes)
                tcc_tcov_reset_ind(tcc_state);
            vla_restore(cur_scope->vla.loc);

            if (tok != '}') {
                if (0 == (flags & STMT_COMPOUND))
                    goto again;
                /* C23: insert implicit null-statement within compound statement */
            } else {
                /* we accept this, but it is a mistake */
                tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");
            }
        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
    expr:
                if (flags & STMT_EXPR) {
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
}
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while (1) {
        int t = tok;
        if (level == 0
            && (t == ','
                || t == ';'
                || t == '}'
                || t == ')'
                || t == ']'))
            break;
        if (t == TOK_EOF) {
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        if (str)
            tok_str_add_tok(*str);
        next();
        if (t == '{' || t == '(' || t == '[') {
            level++;
        } else if (t == '}' || t == ')' || t == ']') {
            level--;
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str)
        tok_str_add(*str, TOK_EOF); /* terminate the saved token stream */
}
7361 #define EXPR_CONST 1
7362 #define EXPR_ANY 2
/* parse one initializer element. EXPR_CONST requires a (link-time)
   constant and errors out otherwise; EXPR_ANY accepts any assignment
   expression. */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            /* dllimport symbols have no link-time address */
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
#if 1
/* internal sanity check: an initializer write at 'offset' must stay
   within the space already reserved (section data or local frame) */
static void init_assert(init_params *p, int offset)
{
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
}
#else
#define init_assert(sec, offset)
#endif
/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        /* local object: emit memset(local + c, 0, size) */
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
        vpushi(0);
        vpushs(size);
#if defined TCC_TARGET_ARM && defined TCC_ARM_EABI
        vswap();  /* using __aeabi_memset(void*, size_t, int) */
#endif
        gfunc_call(3);
    }
}
7420 #define DIF_FIRST 1
7421 #define DIF_SIZE_ONLY 2
7422 #define DIF_HAVE_ELEM 4
7423 #define DIF_CLEAR 8
/* delete relocations for specified range c ... c + size. Unfortunatly
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
{
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
        return;
    /* compact the relocation array in place, dropping entries whose
       r_offset falls inside [c, c + size) */
    rel = rel2 = (ElfW_Rel*)sec->reloc->data;
    rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            sec->reloc->data_offset -= sizeof *rel;
        } else {
            if (rel2 != rel)
                memcpy(rel2, rel, sizeof *rel);
            ++rel2;
        }
        ++rel;
    }
}
7446 static void decl_design_flex(init_params *p, Sym *ref, int index)
7448 if (ref == p->flex_array_ref) {
7449 if (index >= ref->c)
7450 ref->c = index + 1;
7451 } else if (ref->c < 0)
7452 tcc_error("flexible array has zero size in this context");
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'flags' is as in decl_initializer.
   'al' contains the already initialized length of the
   current container (starting at c). This returns the new length of that. */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    /* GNU-style designator without '.' or '=': "field: value" */
    if (gnu_ext && tok >= TOK_UIDENT) {
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            /* GNU range designator: [first ... last] */
            if (tok == TOK_DOTS && gnu_ext) {
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            decl_design_flex(p, s, index_last);
            if (index < 0 || index_last >= s->c || index_last < index)
                tcc_error("index exceeds array bounds or range is empty");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            f = find_field(type, l, &cumofs);
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs;
        }
        cur_field = NULL;
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* positional initialization: advance to the next array slot
           or struct field */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            s = type->ref;
            decl_design_flex(p, s, index);
            if (index >= s->c)
                tcc_error("too many initializers");
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
        } else {
            f = *cur_field;
            /* Skip bitfield padding. Also with size 32 and 64. */
            while (f && (f->v & SYM_FIRST_ANOM) &&
                   is_integer_btype(f->type.t & VT_BTYPE))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many initializers");
            type = &f->type;
            c += f->c;
        }
    }

    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);

    /* Using designators the same element can be initialized more
       than once.  In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    }

    decl_initializer(p, type, c, flags & ~DIF_FIRST);

    /* range designator: replicate the just-parsed first element into
       the remaining nb_elems - 1 slots */
    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        Sym aref = {0};
        CType t1;
        int i;
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            aref.c = elem_size;
            t1.t = VT_STRUCT, t1.ref = &aref;
            type = &t1;
        }
        if (p->sec)
            vpush_ref(type, p->sec, c, elem_size);
        else
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            vdup();
            init_putv(p, type, c + elem_size * i);
        }
        vpop();
    }

    c += nb_elems * elem_size;
    if (c - corig > al)
        al = c - corig;
    return al;
}
7587 /* store a value or an expression directly in global data or in local array */
/* Pops one value from the value stack and stores it at offset 'c':
   - static case (p->sec != NULL): the constant is written straight into
     section data, emitting ELF relocations for symbolic addresses and
     handling bitfields and long-double layouts explicitly;
   - dynamic case (sec == NULL): emits a normal vstore() to the stack slot.
   NOTE(review): mangled rendering — some brace-only lines are missing from
   this view; tokens left byte-identical. */
7588 static void init_putv(init_params *p, CType *type, unsigned long c)
7590 int bt;
7591 void *ptr;
7592 CType dtype;
7593 int size, align;
7594 Section *sec = p->sec;
7595 uint64_t val;
7597 dtype = *type;
7598 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7600 size = type_size(type, &align);
/* a bitfield only occupies the bytes its bits actually touch */
7601 if (type->t & VT_BITFIELD)
7602 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7603 init_assert(p, c + size);
7605 if (sec) {
7606 /* XXX: not portable */
7607 /* XXX: generate error if incorrect relocation */
7608 gen_assign_cast(&dtype);
7609 bt = type->t & VT_BTYPE;
/* only pointer-sized scalars can carry a symbol-relative value */
7611 if ((vtop->r & VT_SYM)
7612 && bt != VT_PTR
7613 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7614 || (type->t & VT_BITFIELD))
7615 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7617 tcc_error("initializer element is not computable at load time");
7619 if (NODATA_WANTED) {
7620 vtop--;
7621 return;
7624 ptr = sec->data + c;
7625 val = vtop->c.i;
7627 /* XXX: make code faster ? */
7628 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7629 vtop->sym->v >= SYM_FIRST_ANOM &&
7630 /* XXX This rejects compound literals like
7631 '(void *){ptr}'. The problem is that '&sym' is
7632 represented the same way, which would be ruled out
7633 by the SYM_FIRST_ANOM check above, but also '"string"'
7634 in 'char *p = "string"' is represented the same
7635 with the type being VT_PTR and the symbol being an
7636 anonymous one. That is, there's no difference in vtop
7637 between '(void *){x}' and '&(void *){x}'. Ignore
7638 pointer typed entities here. Hopefully no real code
7639 will ever use compound literals with scalar type. */
7640 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7641 /* These come from compound literals, memcpy stuff over. */
7642 Section *ssec;
7643 ElfSym *esym;
7644 ElfW_Rel *rel;
7645 esym = elfsym(vtop->sym);
7646 ssec = tcc_state->sections[esym->st_shndx];
7647 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7648 if (ssec->reloc) {
7649 /* We need to copy over all memory contents, and that
7650 includes relocations. Use the fact that relocs are
7651 created it order, so look from the end of relocs
7652 until we hit one before the copied region. */
7653 unsigned long relofs = ssec->reloc->data_offset;
7654 while (relofs >= sizeof(*rel)) {
7655 relofs -= sizeof(*rel);
7656 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7657 if (rel->r_offset >= esym->st_value + size)
7658 continue;
7659 if (rel->r_offset < esym->st_value)
7660 break;
/* duplicate the source reloc, rebased onto our destination offset */
7661 put_elf_reloca(symtab_section, sec,
7662 c + rel->r_offset - esym->st_value,
7663 ELFW(R_TYPE)(rel->r_info),
7664 ELFW(R_SYM)(rel->r_info),
7665 #if PTR_SIZE == 8
7666 rel->r_addend
7667 #else
7669 #endif
7673 } else {
7674 if (type->t & VT_BITFIELD) {
/* write the field byte-by-byte, preserving bits outside the mask */
7675 int bit_pos, bit_size, bits, n;
7676 unsigned char *p, v, m;
7677 bit_pos = BIT_POS(vtop->type.t);
7678 bit_size = BIT_SIZE(vtop->type.t);
7679 p = (unsigned char*)ptr + (bit_pos >> 3);
7680 bit_pos &= 7, bits = 0;
7681 while (bit_size) {
7682 n = 8 - bit_pos;
7683 if (n > bit_size)
7684 n = bit_size;
7685 v = val >> bits << bit_pos;
7686 m = ((1 << n) - 1) << bit_pos;
7687 *p = (*p & ~m) | (v & m);
7688 bits += n, bit_size -= n, bit_pos = 0, ++p;
7690 } else
7691 switch(bt) {
7692 case VT_BOOL:
7693 *(char *)ptr = val != 0;
7694 break;
7695 case VT_BYTE:
7696 *(char *)ptr = val;
7697 break;
7698 case VT_SHORT:
7699 write16le(ptr, val);
7700 break;
7701 case VT_FLOAT:
7702 write32le(ptr, val);
7703 break;
7704 case VT_DOUBLE:
7705 write64le(ptr, val);
7706 break;
7707 case VT_LDOUBLE:
7708 #if defined TCC_IS_NATIVE_387
7709 /* Host and target platform may be different but both have x87.
7710 On windows, tcc does not use VT_LDOUBLE, except when it is a
7711 cross compiler. In this case a mingw gcc as host compiler
7712 comes here with 10-byte long doubles, while msvc or tcc won't.
7713 tcc itself can still translate by asm.
7714 In any case we avoid possibly random bytes 11 and 12.
7716 if (sizeof (long double) >= 10)
7717 memcpy(ptr, &vtop->c.ld, 10);
7718 #ifdef __TINYC__
7719 else if (sizeof (long double) == sizeof (double))
7720 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7721 #endif
7722 else
7723 #endif
7724 /* For other platforms it should work natively, but may not work
7725 for cross compilers */
7726 if (sizeof(long double) == LDOUBLE_SIZE)
7727 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7728 else if (sizeof(double) == LDOUBLE_SIZE)
7729 *(double*)ptr = (double)vtop->c.ld;
7730 else if (0 == memcmp(ptr, &vtop->c.ld, LDOUBLE_SIZE))
7731 ; /* nothing to do for 0.0 */
7732 #ifndef TCC_CROSS_TEST
7733 else
7734 tcc_error("can't cross compile long double constants");
7735 #endif
7736 break;
7738 #if PTR_SIZE == 8
7739 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7740 case VT_LLONG:
7741 case VT_PTR:
7742 if (vtop->r & VT_SYM)
7743 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7744 else
7745 write64le(ptr, val);
7746 break;
7747 case VT_INT:
7748 write32le(ptr, val);
7749 break;
7750 #else
7751 case VT_LLONG:
7752 write64le(ptr, val);
7753 break;
7754 case VT_PTR:
7755 case VT_INT:
7756 if (vtop->r & VT_SYM)
7757 greloc(sec, vtop->sym, c, R_DATA_PTR);
7758 write32le(ptr, val);
7759 break;
7760 #endif
7761 default:
7762 //tcc_internal_error("unexpected type");
7763 break;
7766 vtop--;
7767 } else {
/* runtime initialization: store via the value stack */
7768 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7769 vswap();
7770 vstore();
7771 vpop();
7775 /* 't' contains the type and storage info. 'c' is the offset of the
7776 object in section 'sec'. If 'sec' is NULL, it means stack based
7777 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7778 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7779 size only evaluation is wanted (only for arrays). */
/* NOTE(review): mangled rendering — brace-only lines dropped from this
   view; tokens left byte-identical. */
7780 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7782 int len, n, no_oblock, i;
7783 int size1, align1;
7784 Sym *s, *f;
7785 Sym indexsym;
7786 CType *t1;
7788 /* generate line number info */
7789 if (debug_modes && !(flags & DIF_SIZE_ONLY) && !p->sec)
7790 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
7792 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7793 /* In case of strings we have special handling for arrays, so
7794 don't consume them as initializer value (which would commit them
7795 to some anonymous symbol). */
7796 tok != TOK_LSTR && tok != TOK_STR &&
7797 (!(flags & DIF_SIZE_ONLY)
7798 /* a struct may be initialized from a struct of same type, as in
7799 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7800 In that case we need to parse the element in order to check
7801 it for compatibility below */
7802 || (type->t & VT_BTYPE) == VT_STRUCT)
7804 int ncw_prev = nocode_wanted;
/* size-only pass must not emit code for the parsed expression */
7805 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7806 ++nocode_wanted;
7807 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7808 nocode_wanted = ncw_prev;
7809 flags |= DIF_HAVE_ELEM;
7812 if (type->t & VT_ARRAY) {
7813 no_oblock = 1;
7814 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7815 tok == '{') {
7816 skip('{');
7817 no_oblock = 0;
7820 s = type->ref;
7821 n = s->c;
7822 t1 = pointed_type(type);
7823 size1 = type_size(t1, &align1);
7825 /* only parse strings here if correct type (otherwise: handle
7826 them as ((w)char *) expressions */
7827 if ((tok == TOK_LSTR &&
7828 #ifdef TCC_TARGET_PE
7829 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7830 #else
7831 (t1->t & VT_BTYPE) == VT_INT
7832 #endif
7833 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7834 len = 0;
7835 cstr_reset(&initstr);
7836 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7837 tcc_error("unhandled string literal merging");
/* concatenate adjacent string literals, dropping the intermediate
   NUL terminators (initstr.size -= size1) */
7838 while (tok == TOK_STR || tok == TOK_LSTR) {
7839 if (initstr.size)
7840 initstr.size -= size1;
7841 if (tok == TOK_STR)
7842 len += tokc.str.size;
7843 else
7844 len += tokc.str.size / sizeof(nwchar_t);
7845 len--;
7846 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7847 next();
7849 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7850 && tok != TOK_EOF) {
7851 /* Not a lone literal but part of a bigger expression. */
7852 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7853 tokc.str.size = initstr.size;
7854 tokc.str.data = initstr.data;
7855 goto do_init_array;
7858 decl_design_flex(p, s, len);
7859 if (!(flags & DIF_SIZE_ONLY)) {
7860 int nb = n, ch;
7861 if (len < nb)
7862 nb = len;
7863 if (len > nb)
7864 tcc_warning("initializer-string for array is too long");
7865 /* in order to go faster for common case (char
7866 string in global variable, we handle it
7867 specifically */
7868 if (p->sec && size1 == 1) {
7869 init_assert(p, c + nb);
7870 if (!NODATA_WANTED)
7871 memcpy(p->sec->data + c, initstr.data, nb);
7872 } else {
7873 for(i=0;i<n;i++) {
7874 if (i >= nb) {
7875 /* only add trailing zero if enough storage (no
7876 warning in this case since it is standard) */
7877 if (flags & DIF_CLEAR)
7878 break;
7879 if (n - i >= 4) {
7880 init_putz(p, c + i * size1, (n - i) * size1);
7881 break;
7883 ch = 0;
7884 } else if (size1 == 1)
7885 ch = ((unsigned char *)initstr.data)[i];
7886 else
7887 ch = ((nwchar_t *)initstr.data)[i];
7888 vpushi(ch);
7889 init_putv(p, t1, c + i * size1);
7893 } else {
7895 do_init_array:
/* indexsym tracks the implicit array cursor for decl_designator() */
7896 indexsym.c = 0;
7897 f = &indexsym;
7899 do_init_list:
7900 /* zero memory once in advance */
7901 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7902 init_putz(p, c, n*size1);
7903 flags |= DIF_CLEAR;
7906 len = 0;
7907 /* GNU extension: if the initializer is empty for a flex array,
7908 it's size is zero. We won't enter the loop, so set the size
7909 now. */
7910 decl_design_flex(p, s, len);
7911 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7912 len = decl_designator(p, type, c, &f, flags, len);
7913 flags &= ~DIF_HAVE_ELEM;
7914 if (type->t & VT_ARRAY) {
7915 ++indexsym.c;
7916 /* special test for multi dimensional arrays (may not
7917 be strictly correct if designators are used at the
7918 same time) */
7919 if (no_oblock && len >= n*size1)
7920 break;
7921 } else {
7922 if (s->type.t == VT_UNION)
7923 f = NULL;
7924 else
7925 f = f->next;
7926 if (no_oblock && f == NULL)
7927 break;
7930 if (tok == '}')
7931 break;
7932 skip(',');
7935 if (!no_oblock)
7936 skip('}');
7938 } else if ((flags & DIF_HAVE_ELEM)
7939 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7940 The source type might have VT_CONSTANT set, which is
7941 of course assignable to non-const elements. */
7942 && is_compatible_unqualified_types(type, &vtop->type)) {
7943 goto one_elem;
7945 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7946 no_oblock = 1;
7947 if ((flags & DIF_FIRST) || tok == '{') {
7948 skip('{');
7949 no_oblock = 0;
7951 s = type->ref;
7952 f = s->next;
7953 n = s->c;
7954 size1 = 1;
/* share the brace-list loop with the array case above */
7955 goto do_init_list;
7957 } else if (tok == '{') {
7958 if (flags & DIF_HAVE_ELEM)
7959 skip(';');
7960 next();
/* scalar braced initializer: "int x = {1};" */
7961 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7962 skip('}');
7964 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
7965 /* If we supported only ISO C we wouldn't have to accept calling
7966 this on anything than an array if DIF_SIZE_ONLY (and even then
7967 only on the outermost level, so no recursion would be needed),
7968 because initializing a flex array member isn't supported.
7969 But GNU C supports it, so we need to recurse even into
7970 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7971 /* just skip expression */
7972 if (flags & DIF_HAVE_ELEM)
7973 vpop();
7974 else
7975 skip_or_save_block(NULL);
7977 } else {
7978 if (!(flags & DIF_HAVE_ELEM)) {
7979 /* This should happen only when we haven't parsed
7980 the init element above for fear of committing a
7981 string constant to memory too early. */
7982 if (tok != TOK_STR && tok != TOK_LSTR)
7983 expect("string constant");
7984 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7986 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
7987 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
7988 && vtop->c.i == 0
7989 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
7991 vpop();
7992 else
7993 init_putv(p, type, c);
7997 /* parse an initializer for type 't' if 'has_init' is non zero, and
7998 allocate space in local or global data space ('r' is either
7999 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8000 variable 'v' of scope 'scope' is declared before initializers
8001 are parsed. If 'v' is zero, then a reference to the new object
8002 is put in the value stack. If 'has_init' is 2, a special parsing
8003 is done to handle string constants. */
/* NOTE(review): mangled rendering — brace-only lines dropped from this
   view; tokens left byte-identical. */
8004 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
8005 int has_init, int v, int global)
8007 int size, align, addr;
8008 TokenString *init_str = NULL;
8010 Section *sec;
8011 Sym *flexible_array;
8012 Sym *sym;
8013 int saved_nocode_wanted = nocode_wanted;
8014 #ifdef CONFIG_TCC_BCHECK
8015 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
8016 #endif
8017 init_params p = {0};
8019 /* Always allocate static or global variables */
8020 if (v && (r & VT_VALMASK) == VT_CONST)
8021 nocode_wanted |= DATA_ONLY_WANTED;
8023 flexible_array = NULL;
8024 size = type_size(type, &align);
8026 /* exactly one flexible array may be initialized, either the
8027 toplevel array or the last member of the toplevel struct */
8029 if (size < 0) {
8030 // error out except for top-level incomplete arrays
8031 // (arrays of incomplete types are handled in array parsing)
8032 if (!(type->t & VT_ARRAY))
8033 tcc_error("initialization of incomplete type");
8035 /* If the base type itself was an array type of unspecified size
8036 (like in 'typedef int arr[]; arr x = {1};') then we will
8037 overwrite the unknown size by the real one for this decl.
8038 We need to unshare the ref symbol holding that size. */
8039 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8040 p.flex_array_ref = type->ref;
8042 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
/* check whether the struct's last member is a flexible array */
8043 Sym *field = type->ref->next;
8044 if (field) {
8045 while (field->next)
8046 field = field->next;
8047 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
8048 flexible_array = field;
8049 p.flex_array_ref = field->type.ref;
8050 size = -1;
8055 if (size < 0) {
8056 /* If unknown size, do a dry-run 1st pass */
8057 if (!has_init)
8058 tcc_error("unknown type size");
8059 if (has_init == 2) {
8060 /* only get strings */
8061 init_str = tok_str_alloc();
8062 while (tok == TOK_STR || tok == TOK_LSTR) {
8063 tok_str_add_tok(init_str);
8064 next();
8066 tok_str_add(init_str, TOK_EOF);
8067 } else
8068 skip_or_save_block(&init_str);
8069 unget_tok(0);
8071 /* compute size */
/* re-play the saved tokens once to discover the real size ... */
8072 begin_macro(init_str, 1);
8073 next();
8074 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8075 /* prepare second initializer parsing */
/* ... then rewind for the real (code/data emitting) pass below */
8076 macro_ptr = init_str->str;
8077 next();
8079 /* if still unknown size, error */
8080 size = type_size(type, &align);
8081 if (size < 0)
8082 tcc_error("unknown type size");
8084 /* If there's a flex member and it was used in the initializer
8085 adjust size. */
8086 if (flexible_array && flexible_array->type.ref->c > 0)
8087 size += flexible_array->type.ref->c
8088 * pointed_size(&flexible_array->type);
8091 /* take into account specified alignment if bigger */
8092 if (ad->a.aligned) {
8093 int speca = 1 << (ad->a.aligned - 1);
8094 if (speca > align)
8095 align = speca;
8096 } else if (ad->a.packed) {
8097 align = 1;
8100 if (!v && NODATA_WANTED)
8101 size = 0, align = 1;
8103 if ((r & VT_VALMASK) == VT_LOCAL) {
/* stack-based (automatic) object */
8104 sec = NULL;
8105 #ifdef CONFIG_TCC_BCHECK
8106 if (bcheck && v) {
8107 /* add padding between stack variables for bound checking */
8108 loc -= align;
8110 #endif
8111 loc = (loc - size) & -align;
8112 addr = loc;
8113 p.local_offset = addr + size;
8114 #ifdef CONFIG_TCC_BCHECK
8115 if (bcheck && v) {
8116 /* add padding between stack variables for bound checking */
8117 loc -= align;
8119 #endif
8120 if (v) {
8121 /* local variable */
8122 #ifdef CONFIG_TCC_ASM
8123 if (ad->asm_label) {
8124 int reg = asm_parse_regvar(ad->asm_label);
8125 if (reg >= 0)
8126 r = (r & ~VT_VALMASK) | reg;
8128 #endif
8129 sym = sym_push(v, type, r, addr);
/* register a cleanup (attribute((cleanup))) for scope exit */
8130 if (ad->cleanup_func) {
8131 Sym *cls = sym_push2(&all_cleanups,
8132 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8133 cls->prev_tok = sym;
8134 cls->next = ad->cleanup_func;
8135 cls->ncl = cur_scope->cl.s;
8136 cur_scope->cl.s = cls;
8139 sym->a = ad->a;
8140 } else {
8141 /* push local reference */
8142 vset(type, r, addr);
8144 } else {
/* static/global object */
8145 sym = NULL;
8146 if (v && global) {
8147 /* see if the symbol was already defined */
8148 sym = sym_find(v);
8149 if (sym) {
8150 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8151 && sym->type.ref->c > type->ref->c) {
8152 /* flex array was already declared with explicit size
8153 extern int arr[10];
8154 int arr[] = { 1,2,3 }; */
8155 type->ref->c = sym->type.ref->c;
8156 size = type_size(type, &align);
8158 patch_storage(sym, ad, type);
8159 /* we accept several definitions of the same global variable. */
8160 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8161 goto no_alloc;
8165 /* allocate symbol in corresponding section */
8166 sec = ad->section;
8167 if (!sec) {
8168 CType *tp = type;
8169 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8170 tp = &tp->ref->type;
8171 if (tp->t & VT_CONSTANT) {
8172 sec = rodata_section;
8173 } else if (has_init) {
8174 sec = data_section;
8175 /*if (tcc_state->g_debug & 4)
8176 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8177 } else if (tcc_state->nocommon)
8178 sec = bss_section;
8181 if (sec) {
8182 addr = section_add(sec, size, align);
8183 #ifdef CONFIG_TCC_BCHECK
8184 /* add padding if bound check */
8185 if (bcheck)
8186 section_add(sec, 1, 1);
8187 #endif
8188 } else {
8189 addr = align; /* SHN_COMMON is special, symbol value is align */
8190 sec = common_section;
8193 if (v) {
8194 if (!sym) {
8195 sym = sym_push(v, type, r | VT_SYM, 0);
8196 patch_storage(sym, ad, NULL);
8198 /* update symbol definition */
8199 put_extern_sym(sym, sec, addr, size);
8200 } else {
8201 /* push global reference */
8202 vpush_ref(type, sec, addr, size);
8203 sym = vtop->sym;
8204 vtop->r |= r;
8207 #ifdef CONFIG_TCC_BCHECK
8208 /* handles bounds now because the symbol must be defined
8209 before for the relocation */
8210 if (bcheck) {
8211 addr_t *bounds_ptr;
8213 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8214 /* then add global bound info */
8215 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8216 bounds_ptr[0] = 0; /* relocated */
8217 bounds_ptr[1] = size;
8219 #endif
8222 if (type->t & VT_VLA) {
8223 int a;
8225 if (NODATA_WANTED)
8226 goto no_alloc;
8228 /* save before-VLA stack pointer if needed */
8229 if (cur_scope->vla.num == 0) {
8230 if (cur_scope->prev && cur_scope->prev->vla.num) {
8231 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8232 } else {
8233 gen_vla_sp_save(loc -= PTR_SIZE);
8234 cur_scope->vla.locorig = loc;
8238 vpush_type_size(type, &a);
8239 gen_vla_alloc(type, a);
8240 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8241 /* on _WIN64, because of the function args scratch area, the
8242 result of alloca differs from RSP and is returned in RAX. */
8243 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8244 #endif
8245 gen_vla_sp_save(addr);
8246 cur_scope->vla.loc = addr;
8247 cur_scope->vla.num++;
8248 } else if (has_init) {
8249 p.sec = sec;
8250 decl_initializer(&p, type, addr, DIF_FIRST);
8251 /* patch flexible array member size back to -1, */
8252 /* for possible subsequent similar declarations */
8253 if (flexible_array)
8254 flexible_array->type.ref->c = -1;
8257 no_alloc:
8258 /* restore parse state if needed */
8259 if (init_str) {
8260 end_macro();
8261 next();
8264 nocode_wanted = saved_nocode_wanted;
8267 /* generate vla code saved in post_type() */
/* Recursively emits, at function entry, the code that evaluates the saved
   array-size expression of a VLA parameter type and stores the computed
   byte size in a stack slot (arg->type.ref->c). The expression tokens
   were recorded in vla_array_str by post_type(). */
8268 static void func_vla_arg_code(Sym *arg)
8270 int align;
8271 TokenString *vla_array_tok = NULL;
/* handle inner dimensions first (multi-dimensional VLAs) */
8273 if (arg->type.ref)
8274 func_vla_arg_code(arg->type.ref);
8276 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
/* reserve an int-sized, aligned stack slot for the size value */
8277 loc -= type_size(&int_type, &align);
8278 loc &= -align;
8279 arg->type.ref->c = loc;
8281 unget_tok(0);
/* re-play the saved size-expression tokens through the macro stream */
8282 vla_array_tok = tok_str_alloc();
8283 vla_array_tok->str = arg->type.ref->vla_array_str;
8284 begin_macro(vla_array_tok, 1);
8285 next();
8286 gexpr();
8287 end_macro();
8288 next();
/* size = expr * sizeof(element); store it into the reserved slot */
8289 vpush_type_size(&arg->type.ref->type, &align);
8290 gen_op('*');
8291 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8292 vswap();
8293 vstore();
8294 vpop();
8298 static void func_vla_arg(Sym *sym)
8300 Sym *arg;
8302 for (arg = sym->type.ref->next; arg; arg = arg->next)
8303 if ((arg->type.t & VT_BTYPE) == VT_PTR && (arg->type.ref->type.t & VT_VLA))
8304 func_vla_arg_code(arg->type.ref);
8307 /* parse a function defined by symbol 'sym' and generate its code in
8308 'cur_text_section' */
/* NOTE(review): mangled rendering — brace-only lines dropped; tokens
   left byte-identical. */
8309 static void gen_function(Sym *sym)
8311 struct scope f = { 0 };
8312 cur_scope = root_scope = &f;
8313 nocode_wanted = 0;
8315 ind = cur_text_section->data_offset;
/* honor __attribute__((aligned)) on the function by padding with nops */
8316 if (sym->a.aligned) {
8317 size_t newoff = section_add(cur_text_section, 0,
8318 1 << (sym->a.aligned - 1));
8319 gen_fill_nops(newoff - ind);
8322 funcname = get_tok_str(sym->v, NULL);
8323 func_ind = ind;
8324 func_vt = sym->type.ref->type;
8325 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8327 /* NOTE: we patch the symbol size later */
8328 put_extern_sym(sym, cur_text_section, ind, 0);
/* constructors/destructors get registered in the init/fini arrays */
8330 if (sym->type.ref->f.func_ctor)
8331 add_array (tcc_state, ".init_array", sym->c);
8332 if (sym->type.ref->f.func_dtor)
8333 add_array (tcc_state, ".fini_array", sym->c);
8335 /* put debug symbol */
8336 tcc_debug_funcstart(tcc_state, sym);
8338 /* push a dummy symbol to enable local sym storage */
8339 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8340 local_scope = 1; /* for function parameters */
8341 gfunc_prolog(sym);
8342 tcc_debug_prolog_epilog(tcc_state, 0);
8344 local_scope = 0;
8345 rsym = 0;
8346 clear_temp_local_var_list();
8347 func_vla_arg(sym);
/* compile the function body; rsym collects 'return' jumps */
8348 block(0);
8349 gsym(rsym);
8351 nocode_wanted = 0;
8352 /* reset local stack */
8353 pop_local_syms(NULL, 0);
8354 tcc_debug_prolog_epilog(tcc_state, 1);
8355 gfunc_epilog();
8357 /* end of function */
8358 tcc_debug_funcend(tcc_state, ind - func_ind);
8360 /* patch symbol size */
8361 elfsym(sym)->st_size = ind - func_ind;
8363 cur_text_section->data_offset = ind;
8364 local_scope = 0;
8365 label_pop(&global_label_stack, NULL, 0);
8366 sym_pop(&all_cleanups, NULL, 0);
8368 /* It's better to crash than to generate wrong code */
8369 cur_text_section = NULL;
8370 funcname = ""; /* for safety */
8371 func_vt.t = VT_VOID; /* for safety */
8372 func_var = 0; /* for safety */
8373 ind = 0; /* for safety */
8374 func_ind = -1;
8375 nocode_wanted = DATA_ONLY_WANTED;
8376 check_vstack();
8378 /* do this after funcend debug info */
8379 next();
8382 static void gen_inline_functions(TCCState *s)
8384 Sym *sym;
8385 int inline_generated, i;
8386 struct InlineFunc *fn;
8388 tcc_open_bf(s, ":inline:", 0);
8389 /* iterate while inline function are referenced */
8390 do {
8391 inline_generated = 0;
8392 for (i = 0; i < s->nb_inline_fns; ++i) {
8393 fn = s->inline_fns[i];
8394 sym = fn->sym;
8395 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8396 /* the function was used or forced (and then not internal):
8397 generate its code and convert it to a normal function */
8398 fn->sym = NULL;
8399 tccpp_putfile(fn->filename);
8400 begin_macro(fn->func_str, 1);
8401 next();
8402 cur_text_section = text_section;
8403 gen_function(sym);
8404 end_macro();
8406 inline_generated = 1;
8409 } while (inline_generated);
8410 tcc_close();
8413 static void free_inline_functions(TCCState *s)
8415 int i;
8416 /* free tokens of unused inline functions */
8417 for (i = 0; i < s->nb_inline_fns; ++i) {
8418 struct InlineFunc *fn = s->inline_fns[i];
8419 if (fn->sym)
8420 tok_str_free(fn->func_str);
8422 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8425 static void do_Static_assert(void)
8427 int c;
8428 const char *msg;
8430 next();
8431 skip('(');
8432 c = expr_const();
8433 msg = "_Static_assert fail";
8434 if (tok == ',') {
8435 next();
8436 msg = parse_mult_str("string constant")->data;
8438 skip(')');
8439 if (c == 0)
8440 tcc_error("%s", msg);
8441 skip(';');
8444 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8445 or VT_CMP if parsing old style parameter list
8446 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
/* Top-level declaration parser: handles type specifiers, declarators,
   function definitions, typedefs, K&R parameter lists and initializers.
   Returns nonzero only in the VT_JMP (for-declaration) case when a
   declaration was actually parsed.
   NOTE(review): mangled rendering — brace-only lines dropped from this
   view; tokens left byte-identical. */
8447 static int decl(int l)
8449 int v, has_init, r, oldint;
8450 CType type, btype;
8451 Sym *sym;
8452 AttributeDef ad, adbase;
8454 while (1) {
8456 oldint = 0;
8457 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
8458 if (l == VT_JMP)
8459 return 0;
8460 /* skip redundant ';' if not in old parameter decl scope */
8461 if (tok == ';' && l != VT_CMP) {
8462 next();
8463 continue;
8465 if (tok == TOK_STATIC_ASSERT) {
8466 do_Static_assert();
8467 continue;
8469 if (l != VT_CONST)
8470 break;
8471 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8472 /* global asm block */
8473 asm_global_instr();
8474 continue;
8476 if (tok >= TOK_UIDENT) {
8477 /* special test for old K&R protos without explicit int
8478 type. Only accepted when defining global data */
8479 btype.t = VT_INT;
8480 oldint = 1;
8481 } else {
8482 if (tok != TOK_EOF)
8483 expect("declaration");
8484 break;
8488 if (tok == ';') {
/* bare type declaration, e.g. "struct foo;" or an enum */
8489 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8490 v = btype.ref->v;
8491 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8492 tcc_warning("unnamed struct/union that defines no instances");
8493 next();
8494 continue;
8496 if (IS_ENUM(btype.t)) {
8497 next();
8498 continue;
8502 while (1) { /* iterate thru each declaration */
8503 type = btype;
8504 ad = adbase;
8505 type_decl(&type, &ad, &v, TYPE_DIRECT);
8506 #if 0
8508 char buf[500];
8509 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8510 printf("type = '%s'\n", buf);
8512 #endif
8513 if ((type.t & VT_BTYPE) == VT_FUNC) {
8514 if ((type.t & VT_STATIC) && (l != VT_CONST))
8515 tcc_error("function without file scope cannot be static");
8516 /* if old style function prototype, we accept a
8517 declaration list */
8518 sym = type.ref;
8519 if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
8520 func_vt = type;
/* recurse with VT_CMP to read the K&R parameter declarations */
8521 decl(VT_CMP);
8523 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8524 if (sym->f.func_alwinl
8525 && ((type.t & (VT_EXTERN | VT_INLINE))
8526 == (VT_EXTERN | VT_INLINE))) {
8527 /* always_inline functions must be handled as if they
8528 don't generate multiple global defs, even if extern
8529 inline, i.e. GNU inline semantics for those. Rewrite
8530 them into static inline. */
8531 type.t &= ~VT_EXTERN;
8532 type.t |= VT_STATIC;
8534 #endif
8535 /* always compile 'extern inline' */
8536 if (type.t & VT_EXTERN)
8537 type.t &= ~VT_INLINE;
8539 } else if (oldint) {
8540 tcc_warning("type defaults to int");
8543 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8544 ad.asm_label = asm_label_instr();
8545 /* parse one last attribute list, after asm label */
8546 parse_attribute(&ad);
8547 #if 0
8548 /* gcc does not allow __asm__("label") with function definition,
8549 but why not ... */
8550 if (tok == '{')
8551 expect(";");
8552 #endif
8555 #ifdef TCC_TARGET_PE
8556 if (ad.a.dllimport || ad.a.dllexport) {
8557 if (type.t & VT_STATIC)
8558 tcc_error("cannot have dll linkage with static");
8559 if (type.t & VT_TYPEDEF) {
8560 tcc_warning("'%s' attribute ignored for typedef",
8561 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8562 (ad.a.dllexport = 0, "dllexport"));
8563 } else if (ad.a.dllimport) {
8564 if ((type.t & VT_BTYPE) == VT_FUNC)
8565 ad.a.dllimport = 0;
8566 else
8567 type.t |= VT_EXTERN;
8570 #endif
8571 if (tok == '{') {
/* function definition */
8572 if (l != VT_CONST)
8573 tcc_error("cannot use local functions");
8574 if ((type.t & VT_BTYPE) != VT_FUNC)
8575 expect("function definition");
8577 /* reject abstract declarators in function definition
8578 make old style params without decl have int type */
8579 sym = type.ref;
8580 while ((sym = sym->next) != NULL) {
8581 if (!(sym->v & ~SYM_FIELD))
8582 expect("identifier");
8583 if (sym->type.t == VT_VOID)
8584 sym->type = int_type;
8587 /* apply post-declaraton attributes */
8588 merge_funcattr(&type.ref->f, &ad.f);
8590 /* put function symbol */
8591 type.t &= ~VT_EXTERN;
8592 sym = external_sym(v, &type, 0, &ad);
8594 /* static inline functions are just recorded as a kind
8595 of macro. Their code will be emitted at the end of
8596 the compilation unit only if they are used */
8597 if (sym->type.t & VT_INLINE) {
8598 struct InlineFunc *fn;
8599 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8600 strcpy(fn->filename, file->filename);
8601 fn->sym = sym;
8602 dynarray_add(&tcc_state->inline_fns,
8603 &tcc_state->nb_inline_fns, fn);
8604 skip_or_save_block(&fn->func_str);
8605 } else {
8606 /* compute text section */
8607 cur_text_section = ad.section;
8608 if (!cur_text_section)
8609 cur_text_section = text_section;
8610 else if (cur_text_section->sh_num > bss_section->sh_num)
8611 cur_text_section->sh_flags = text_section->sh_flags;
8612 gen_function(sym);
8614 break;
8615 } else {
8616 if (l == VT_CMP) {
8617 /* find parameter in function parameter list */
8618 for (sym = func_vt.ref->next; sym; sym = sym->next)
8619 if ((sym->v & ~SYM_FIELD) == v)
8620 goto found;
8621 tcc_error("declaration for parameter '%s' but no such parameter",
8622 get_tok_str(v, NULL));
8623 found:
8624 if (type.t & VT_STORAGE) /* 'register' is okay */
8625 tcc_error("storage class specified for '%s'",
8626 get_tok_str(v, NULL));
8627 if (sym->type.t != VT_VOID)
8628 tcc_error("redefinition of parameter '%s'",
8629 get_tok_str(v, NULL));
8630 convert_parameter_type(&type);
8631 sym->type = type;
8632 } else if (type.t & VT_TYPEDEF) {
8633 /* save typedefed type */
8634 /* XXX: test storage specifiers ? */
8635 sym = sym_find(v);
8636 if (sym && sym->sym_scope == local_scope) {
8637 if (!is_compatible_types(&sym->type, &type)
8638 || !(sym->type.t & VT_TYPEDEF))
8639 tcc_error("incompatible redefinition of '%s'",
8640 get_tok_str(v, NULL));
8641 sym->type = type;
8642 } else {
8643 sym = sym_push(v, &type, 0, 0);
8645 sym->a = ad.a;
8646 if ((type.t & VT_BTYPE) == VT_FUNC)
8647 merge_funcattr(&sym->type.ref->f, &ad.f);
8648 if (debug_modes)
8649 tcc_debug_typedef (tcc_state, sym);
8650 } else if ((type.t & VT_BTYPE) == VT_VOID
8651 && !(type.t & VT_EXTERN)) {
8652 tcc_error("declaration of void object");
8653 } else {
8654 r = 0;
8655 if ((type.t & VT_BTYPE) == VT_FUNC) {
8656 /* external function definition */
8657 /* specific case for func_call attribute */
8658 merge_funcattr(&type.ref->f, &ad.f);
8659 } else if (!(type.t & VT_ARRAY)) {
8660 /* not lvalue if array */
8661 r |= VT_LVAL;
8663 has_init = (tok == '=');
8664 if (has_init && (type.t & VT_VLA))
8665 tcc_error("variable length array cannot be initialized");
8667 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8668 || (type.t & VT_BTYPE) == VT_FUNC
8669 /* as with GCC, uninitialized global arrays with no size
8670 are considered extern: */
8671 || ((type.t & VT_ARRAY) && !has_init
8672 && l == VT_CONST && type.ref->c < 0)
8674 /* external variable or function */
8675 type.t |= VT_EXTERN;
8676 sym = external_sym(v, &type, r, &ad);
8677 } else {
8678 if (l == VT_CONST || (type.t & VT_STATIC))
8679 r |= VT_CONST;
8680 else
8681 r |= VT_LOCAL;
8682 if (has_init)
8683 next();
8684 else if (l == VT_CONST)
8685 /* uninitialized global variables may be overridden */
8686 type.t |= VT_EXTERN;
8687 decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
8690 if (ad.alias_target && l == VT_CONST) {
8691 /* Aliases need to be emitted when their target symbol
8692 is emitted, even if perhaps unreferenced.
8693 We only support the case where the base is already
8694 defined, otherwise we would need deferring to emit
8695 the aliases until the end of the compile unit. */
8696 Sym *alias_target = sym_find(ad.alias_target);
8697 ElfSym *esym = elfsym(alias_target);
8698 if (!esym)
8699 tcc_error("unsupported forward __alias__ attribute");
8700 put_extern_sym2(sym_find(v), esym->st_shndx,
8701 esym->st_value, esym->st_size, 1);
8704 if (tok != ',') {
8705 if (l == VT_JMP)
8706 return 1;
8707 skip(';');
8708 break;
8710 next();
8714 return 0;
8717 /* ------------------------------------------------------------------------- */
8718 #undef gjmp_addr
8719 #undef gjmp
8720 /* ------------------------------------------------------------------------- */