2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
46 ST_DATA
char debug_modes
;
49 static SValue _vstack
[1 + VSTACK_SIZE
];
50 #define vstack (_vstack + 1)
52 ST_DATA
int nocode_wanted
; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
56 /* no code output after unconditional jumps such as with if (0) ... */
57 #define CODE_OFF_BIT 0x20000000
58 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
59 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
61 /* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
62 #define NOEVAL_MASK 0x0000FFFF
63 #define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)
65 /* no code output when parsing constant expressions */
66 #define CONST_WANTED_BIT 0x00010000
67 #define CONST_WANTED_MASK 0x0FFF0000
68 #define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)
70 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
71 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
72 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
75 ST_DATA
const char *funcname
;
76 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
77 static CString initstr
;
80 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
81 #define VT_PTRDIFF_T VT_INT
83 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
84 #define VT_PTRDIFF_T VT_LLONG
86 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
87 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
90 static struct switch_t
{
94 } **p
; int n
; /* list of case ranges */
95 int def_sym
; /* default symbol */
99 struct switch_t
*prev
;
101 } *cur_switch
; /* current switch */
103 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
104 /*list of temporary local variables on the stack in current function. */
105 static struct temp_local_variable
{
106 int location
; //offset on stack. Svalue.c.i
109 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
110 static int nb_temp_local_vars
;
112 static struct scope
{
114 struct { int loc
, locorig
, num
; } vla
;
115 struct { Sym
*s
; int n
; } cl
;
118 } *cur_scope
, *loop_scope
, *root_scope
;
127 #define precedence_parser
128 static void init_prec(void);
131 static void block(int flags
);
133 #define STMT_COMPOUND 2
135 static void gen_cast(CType
*type
);
136 static void gen_cast_s(int t
);
137 static inline CType
*pointed_type(CType
*type
);
138 static int is_compatible_types(CType
*type1
, CType
*type2
);
139 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
);
140 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
141 static void parse_expr_type(CType
*type
);
142 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
143 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
144 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
145 static int decl(int l
);
146 static void expr_eq(void);
147 static void vpush_type_size(CType
*type
, int *a
);
148 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
149 static inline int64_t expr_const64(void);
150 static void vpush64(int ty
, unsigned long long v
);
151 static void vpush(CType
*type
);
152 static int gvtst(int inv
, int t
);
153 static void gen_inline_functions(TCCState
*s
);
154 static void free_inline_functions(TCCState
*s
);
155 static void skip_or_save_block(TokenString
**str
);
156 static void gv_dup(void);
157 static int get_temp_local_var(int size
,int align
);
158 static void clear_temp_local_var_list();
159 static void cast_error(CType
*st
, CType
*dt
);
160 static void end_switch(void);
162 /* ------------------------------------------------------------------------- */
163 /* Automagical code suppression */
165 /* Clear 'nocode_wanted' at forward label if it was used */
166 ST_FUNC
void gsym(int t
)
174 /* Clear 'nocode_wanted' if current pc is a label */
180 tcc_tcov_block_begin(tcc_state
);
184 /* Set 'nocode_wanted' after unconditional (backwards) jump */
185 static void gjmp_addr_acs(int t
)
191 /* Set 'nocode_wanted' after unconditional (forwards) jump */
192 static int gjmp_acs(int t
)
199 /* These are #undef'd at the end of this file */
200 #define gjmp_addr gjmp_addr_acs
201 #define gjmp gjmp_acs
202 /* ------------------------------------------------------------------------- */
204 ST_INLN
int is_float(int t
)
206 int bt
= t
& VT_BTYPE
;
207 return bt
== VT_LDOUBLE
213 static inline int is_integer_btype(int bt
)
222 static int btype_size(int bt
)
224 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
228 bt
== VT_PTR
? PTR_SIZE
: 0;
231 /* returns function return register from type */
232 static int R_RET(int t
)
236 #ifdef TCC_TARGET_X86_64
237 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
239 #elif defined TCC_TARGET_RISCV64
240 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
246 /* returns 2nd function return register, if any */
247 static int R2_RET(int t
)
253 #elif defined TCC_TARGET_X86_64
258 #elif defined TCC_TARGET_RISCV64
265 /* returns true for two-word types */
266 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
268 /* put function return registers to stack value */
269 static void PUT_R_RET(SValue
*sv
, int t
)
271 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
274 /* returns function return register class for type t */
275 static int RC_RET(int t
)
277 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
280 /* returns generic register class for type t */
281 static int RC_TYPE(int t
)
285 #ifdef TCC_TARGET_X86_64
286 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
288 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
290 #elif defined TCC_TARGET_RISCV64
291 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
297 /* returns 2nd register class corresponding to t and rc */
298 static int RC2_TYPE(int t
, int rc
)
300 if (!USING_TWO_WORDS(t
))
315 /* we use our own 'finite' function to avoid potential problems with
316 non standard math libs */
317 /* XXX: endianness dependent */
318 ST_FUNC
int ieee_finite(double d
)
321 memcpy(p
, &d
, sizeof(double));
322 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
325 /* compiling intel long double natively */
326 #if (defined __i386__ || defined __x86_64__) \
327 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
328 # define TCC_IS_NATIVE_387
331 ST_FUNC
void test_lvalue(void)
333 if (!(vtop
->r
& VT_LVAL
))
337 ST_FUNC
void check_vstack(void)
339 if (vtop
!= vstack
- 1)
340 tcc_error("internal compiler error: vstack leak (%d)",
341 (int)(vtop
- vstack
+ 1));
344 /* vstack debugging aid */
346 void pv (const char *lbl
, int a
, int b
)
349 for (i
= a
; i
< a
+ b
; ++i
) {
350 SValue
*p
= &vtop
[-i
];
351 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
352 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
357 /* ------------------------------------------------------------------------- */
358 /* initialize vstack and types. This must be done also for tcc -E */
359 ST_FUNC
void tccgen_init(TCCState
*s1
)
362 memset(vtop
, 0, sizeof *vtop
);
364 /* define some often used types */
367 char_type
.t
= VT_BYTE
;
368 if (s1
->char_is_unsigned
)
369 char_type
.t
|= VT_UNSIGNED
;
370 char_pointer_type
= char_type
;
371 mk_pointer(&char_pointer_type
);
373 func_old_type
.t
= VT_FUNC
;
374 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
375 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
376 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
377 #ifdef precedence_parser
383 ST_FUNC
int tccgen_compile(TCCState
*s1
)
387 anon_sym
= SYM_FIRST_ANOM
;
388 nocode_wanted
= DATA_ONLY_WANTED
; /* no code outside of functions */
389 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
393 #ifdef TCC_TARGET_ARM
397 printf("%s: **** new file\n", file
->filename
);
399 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
402 gen_inline_functions(s1
);
404 /* end of translation unit info */
410 ST_FUNC
void tccgen_finish(TCCState
*s1
)
412 tcc_debug_end(s1
); /* just in case of errors: free memory */
413 free_inline_functions(s1
);
414 sym_pop(&global_stack
, NULL
, 0);
415 sym_pop(&local_stack
, NULL
, 0);
416 /* free preprocessor macros */
419 dynarray_reset(&sym_pools
, &nb_sym_pools
);
421 dynarray_reset(&stk_data
, &nb_stk_data
);
427 pending_gotos
= NULL
;
428 nb_temp_local_vars
= 0;
429 global_label_stack
= NULL
;
430 local_label_stack
= NULL
;
431 cur_text_section
= NULL
;
432 sym_free_first
= NULL
;
435 /* ------------------------------------------------------------------------- */
436 ST_FUNC ElfSym
*elfsym(Sym
*s
)
440 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
443 /* apply storage attributes to Elf symbol */
444 ST_FUNC
void update_storage(Sym
*sym
)
447 int sym_bind
, old_sym_bind
;
453 if (sym
->a
.visibility
)
454 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
457 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
458 sym_bind
= STB_LOCAL
;
459 else if (sym
->a
.weak
)
462 sym_bind
= STB_GLOBAL
;
463 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
464 if (sym_bind
!= old_sym_bind
) {
465 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
469 if (sym
->a
.dllimport
)
470 esym
->st_other
|= ST_PE_IMPORT
;
471 if (sym
->a
.dllexport
)
472 esym
->st_other
|= ST_PE_EXPORT
;
476 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
477 get_tok_str(sym
->v
, NULL
),
478 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
486 /* ------------------------------------------------------------------------- */
487 /* update sym->c so that it points to an external symbol in section
488 'section' with value 'value' */
490 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
491 addr_t value
, unsigned long size
,
492 int can_add_underscore
)
494 int sym_type
, sym_bind
, info
, other
, t
;
500 name
= get_tok_str(sym
->v
, NULL
);
502 if ((t
& VT_BTYPE
) == VT_FUNC
) {
504 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
505 sym_type
= STT_NOTYPE
;
506 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
509 sym_type
= STT_OBJECT
;
511 if (t
& (VT_STATIC
| VT_INLINE
))
512 sym_bind
= STB_LOCAL
;
514 sym_bind
= STB_GLOBAL
;
518 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
519 Sym
*ref
= sym
->type
.ref
;
520 if (ref
->a
.nodecorate
) {
521 can_add_underscore
= 0;
523 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
524 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
526 other
|= ST_PE_STDCALL
;
527 can_add_underscore
= 0;
532 if (sym
->asm_label
) {
533 name
= get_tok_str(sym
->asm_label
, NULL
);
534 can_add_underscore
= 0;
537 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
539 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
543 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
544 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
547 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
551 esym
->st_value
= value
;
552 esym
->st_size
= size
;
553 esym
->st_shndx
= sh_num
;
558 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
560 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
562 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
565 /* add a new relocation entry to symbol 'sym' in section 's' */
566 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
571 if (nocode_wanted
&& s
== cur_text_section
)
576 put_extern_sym(sym
, NULL
, 0, 0);
580 /* now we can add ELF relocation info */
581 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
585 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
587 greloca(s
, sym
, offset
, type
, 0);
591 /* ------------------------------------------------------------------------- */
592 /* symbol allocator */
593 static Sym
*__sym_malloc(void)
595 Sym
*sym_pool
, *sym
, *last_sym
;
598 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
599 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
601 last_sym
= sym_free_first
;
603 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
604 sym
->next
= last_sym
;
608 sym_free_first
= last_sym
;
612 static inline Sym
*sym_malloc(void)
616 sym
= sym_free_first
;
618 sym
= __sym_malloc();
619 sym_free_first
= sym
->next
;
622 sym
= tcc_malloc(sizeof(Sym
));
627 ST_INLN
void sym_free(Sym
*sym
)
630 sym
->next
= sym_free_first
;
631 sym_free_first
= sym
;
637 /* push, without hashing */
638 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
643 memset(s
, 0, sizeof *s
);
653 /* find a symbol and return its associated structure. 's' is the top
654 of the symbol stack */
655 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
665 /* structure lookup */
666 ST_INLN Sym
*struct_find(int v
)
669 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
671 return table_ident
[v
]->sym_struct
;
674 /* find an identifier */
675 ST_INLN Sym
*sym_find(int v
)
678 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
680 return table_ident
[v
]->sym_identifier
;
683 static int sym_scope(Sym
*s
)
685 if (IS_ENUM_VAL (s
->type
.t
))
686 return s
->type
.ref
->sym_scope
;
691 /* push a given symbol on the symbol stack */
692 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
701 s
= sym_push2(ps
, v
, type
->t
, c
);
702 s
->type
.ref
= type
->ref
;
704 /* don't record fields or anonymous symbols */
706 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
707 /* record symbol in token array */
708 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
710 ps
= &ts
->sym_struct
;
712 ps
= &ts
->sym_identifier
;
715 s
->sym_scope
= local_scope
;
716 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
717 tcc_error("redeclaration of '%s'",
718 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
723 /* push a global identifier */
724 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
727 s
= sym_push2(&global_stack
, v
, t
, c
);
728 s
->r
= VT_CONST
| VT_SYM
;
729 /* don't record anonymous symbol */
730 if (v
< SYM_FIRST_ANOM
) {
731 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
732 /* modify the top most local identifier, so that sym_identifier will
733 point to 's' when popped; happens when called from inline asm */
734 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
735 ps
= &(*ps
)->prev_tok
;
742 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
743 pop them yet from the list, but do remove them from the token array. */
744 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
754 /* remove symbol in token array */
756 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
757 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
759 ps
= &ts
->sym_struct
;
761 ps
= &ts
->sym_identifier
;
773 ST_FUNC Sym
*label_find(int v
)
776 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
778 return table_ident
[v
]->sym_label
;
781 ST_FUNC Sym
*label_push(Sym
**ptop
, int v
, int flags
)
784 s
= sym_push2(ptop
, v
, VT_STATIC
, 0);
786 ps
= &table_ident
[v
- TOK_IDENT
]->sym_label
;
787 if (ptop
== &global_label_stack
) {
788 /* modify the top most local identifier, so that
789 sym_identifier will point to 's' when popped */
791 ps
= &(*ps
)->prev_tok
;
798 /* pop labels until element last is reached. Look if any labels are
799 undefined. Define symbols if '&&label' was used. */
800 ST_FUNC
void label_pop(Sym
**ptop
, Sym
*slast
, int keep
)
803 for(s
= *ptop
; s
!= slast
; s
= s1
) {
805 if (s
->r
== LABEL_DECLARED
) {
806 tcc_warning_c(warn_all
)("label '%s' declared but not used", get_tok_str(s
->v
, NULL
));
807 } else if (s
->r
== LABEL_FORWARD
) {
808 tcc_error("label '%s' used but not defined",
809 get_tok_str(s
->v
, NULL
));
812 /* define corresponding symbol. A size of
814 put_extern_sym(s
, cur_text_section
, s
->jnext
, 1);
818 if (s
->r
!= LABEL_GONE
)
819 table_ident
[s
->v
- TOK_IDENT
]->sym_label
= s
->prev_tok
;
829 /* ------------------------------------------------------------------------- */
830 static void vcheck_cmp(void)
832 /* cannot let cpu flags if other instruction are generated. Also
833 avoid leaving VT_JMP anywhere except on the top of the stack
834 because it would complicate the code generator.
836 Don't do this when nocode_wanted. vtop might come from
837 !nocode_wanted regions (see 88_codeopt.c) and transforming
838 it to a register without actually generating code is wrong
839 as their value might still be used for real. All values
840 we push under nocode_wanted will eventually be popped
841 again, so that the VT_CMP/VT_JMP value will be in vtop
842 when code is unsuppressed again. */
844 /* However if it's just automatic suppression via CODE_OFF/ON()
845 then it seems that we better let things work undisturbed.
846 How can it work at all under nocode_wanted? Well, gv() will
847 actually clear it at the gsym() in load()/VT_JMP in the
848 generator backends */
850 if (vtop
->r
== VT_CMP
&& 0 == (nocode_wanted
& ~CODE_OFF_BIT
))
854 static void vsetc(CType
*type
, int r
, CValue
*vc
)
856 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
857 tcc_error("memory full (vstack)");
867 ST_FUNC
void vswap(void)
877 /* pop stack value */
878 ST_FUNC
void vpop(void)
881 v
= vtop
->r
& VT_VALMASK
;
882 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
883 /* for x86, we need to pop the FP stack */
885 o(0xd8dd); /* fstp %st(0) */
889 /* need to put correct jump if && or || without test */
896 /* push constant of type "type" with useless value */
897 static void vpush(CType
*type
)
899 vset(type
, VT_CONST
, 0);
902 /* push arbitrary 64bit constant */
903 static void vpush64(int ty
, unsigned long long v
)
910 vsetc(&ctype
, VT_CONST
, &cval
);
913 /* push integer constant */
914 ST_FUNC
void vpushi(int v
)
919 /* push a pointer sized constant */
920 static void vpushs(addr_t v
)
922 vpush64(VT_SIZE_T
, v
);
925 /* push long long constant */
926 static inline void vpushll(long long v
)
928 vpush64(VT_LLONG
, v
);
931 ST_FUNC
void vset(CType
*type
, int r
, int v
)
935 vsetc(type
, r
, &cval
);
938 static void vseti(int r
, int v
)
946 ST_FUNC
void vpushv(SValue
*v
)
948 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
949 tcc_error("memory full (vstack)");
954 static void vdup(void)
959 /* rotate n first stack elements to the bottom
960 I1 ... In -> I2 ... In I1 [top is right]
962 ST_FUNC
void vrotb(int n
)
974 /* rotate the n elements before entry e towards the top
975 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
977 ST_FUNC
void vrote(SValue
*e
, int n
)
984 for(i
= 0;i
< n
- 1; i
++)
989 /* rotate n first stack elements to the top
990 I1 ... In -> In I1 ... I(n-1) [top is right]
992 ST_FUNC
void vrott(int n
)
997 /* ------------------------------------------------------------------------- */
998 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1000 /* called from generators to set the result from relational ops */
1001 ST_FUNC
void vset_VT_CMP(int op
)
1009 /* called once before asking generators to load VT_CMP to a register */
1010 static void vset_VT_JMP(void)
1012 int op
= vtop
->cmp_op
;
1014 if (vtop
->jtrue
|| vtop
->jfalse
) {
1015 int origt
= vtop
->type
.t
;
1016 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1017 int inv
= op
& (op
< 2); /* small optimization */
1018 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1019 vtop
->type
.t
|= origt
& (VT_UNSIGNED
| VT_DEFSIGN
);
1021 /* otherwise convert flags (rsp. 0/1) to register */
1023 if (op
< 2) /* doesn't seem to happen */
1028 /* Set CPU Flags, doesn't yet jump */
1029 static void gvtst_set(int inv
, int t
)
1033 if (vtop
->r
!= VT_CMP
) {
1036 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1037 vset_VT_CMP(vtop
->c
.i
!= 0);
1040 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1041 *p
= gjmp_append(*p
, t
);
1044 /* Generate value test
1046 * Generate a test for any value (jump, comparison and integers) */
1047 static int gvtst(int inv
, int t
)
1052 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1054 x
= u
, u
= t
, t
= x
;
1057 /* jump to the wanted target */
1059 t
= gjmp_cond(op
^ inv
, t
);
1062 /* resolve complementary jumps to here */
1069 /* generate a zero or nozero test */
1070 static void gen_test_zero(int op
)
1072 if (vtop
->r
== VT_CMP
) {
1076 vtop
->jfalse
= vtop
->jtrue
;
1086 /* ------------------------------------------------------------------------- */
1087 /* push a symbol value of TYPE */
1088 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1092 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1096 /* Return a static symbol pointing to a section */
1097 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1103 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1104 sym
->type
.t
|= VT_STATIC
;
1105 put_extern_sym(sym
, sec
, offset
, size
);
1109 /* push a reference to a section offset by adding a dummy symbol */
1110 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1112 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1115 /* define a new external reference to a symbol 'v' of type 'u' */
1116 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1122 /* push forward reference */
1123 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1124 s
->type
.ref
= type
->ref
;
1125 } else if (IS_ASM_SYM(s
)) {
1126 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1127 s
->type
.ref
= type
->ref
;
1133 /* create an external reference with no specific type similar to asm labels.
1134 This avoids type conflicts if the symbol is used from C too */
1135 ST_FUNC Sym
*external_helper_sym(int v
)
1137 CType ct
= { VT_ASM_FUNC
, NULL
};
1138 return external_global_sym(v
, &ct
);
1141 /* push a reference to an helper function (such as memmove) */
1142 ST_FUNC
void vpush_helper_func(int v
)
1144 vpushsym(&func_old_type
, external_helper_sym(v
));
1147 /* Merge symbol attributes. */
1148 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1150 if (sa1
->aligned
&& !sa
->aligned
)
1151 sa
->aligned
= sa1
->aligned
;
1152 sa
->packed
|= sa1
->packed
;
1153 sa
->weak
|= sa1
->weak
;
1154 sa
->nodebug
|= sa1
->nodebug
;
1155 if (sa1
->visibility
!= STV_DEFAULT
) {
1156 int vis
= sa
->visibility
;
1157 if (vis
== STV_DEFAULT
1158 || vis
> sa1
->visibility
)
1159 vis
= sa1
->visibility
;
1160 sa
->visibility
= vis
;
1162 sa
->dllexport
|= sa1
->dllexport
;
1163 sa
->nodecorate
|= sa1
->nodecorate
;
1164 sa
->dllimport
|= sa1
->dllimport
;
1167 /* Merge function attributes. */
1168 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1170 if (fa1
->func_call
&& !fa
->func_call
)
1171 fa
->func_call
= fa1
->func_call
;
1172 if (fa1
->func_type
&& !fa
->func_type
)
1173 fa
->func_type
= fa1
->func_type
;
1174 if (fa1
->func_args
&& !fa
->func_args
)
1175 fa
->func_args
= fa1
->func_args
;
1176 if (fa1
->func_noreturn
)
1177 fa
->func_noreturn
= 1;
1184 /* Merge attributes. */
1185 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1187 merge_symattr(&ad
->a
, &ad1
->a
);
1188 merge_funcattr(&ad
->f
, &ad1
->f
);
1191 ad
->section
= ad1
->section
;
1192 if (ad1
->alias_target
)
1193 ad
->alias_target
= ad1
->alias_target
;
1195 ad
->asm_label
= ad1
->asm_label
;
1197 ad
->attr_mode
= ad1
->attr_mode
;
1200 /* Merge some type attributes. */
1201 static void patch_type(Sym
*sym
, CType
*type
)
1203 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1204 if (!(sym
->type
.t
& VT_EXTERN
))
1205 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1206 sym
->type
.t
&= ~VT_EXTERN
;
1209 if (IS_ASM_SYM(sym
)) {
1210 /* stay static if both are static */
1211 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1212 sym
->type
.ref
= type
->ref
;
1213 if ((type
->t
& VT_BTYPE
) != VT_FUNC
&& !(type
->t
& VT_ARRAY
))
1217 if (!is_compatible_types(&sym
->type
, type
)) {
1218 tcc_error("incompatible types for redefinition of '%s'",
1219 get_tok_str(sym
->v
, NULL
));
1221 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1222 int static_proto
= sym
->type
.t
& VT_STATIC
;
1223 /* warn if static follows non-static function declaration */
1224 if ((type
->t
& VT_STATIC
) && !static_proto
1225 /* XXX this test for inline shouldn't be here. Until we
1226 implement gnu-inline mode again it silences a warning for
1227 mingw caused by our workarounds. */
1228 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1229 tcc_warning("static storage ignored for redefinition of '%s'",
1230 get_tok_str(sym
->v
, NULL
));
1232 /* set 'inline' if both agree or if one has static */
1233 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1234 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1235 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1236 static_proto
|= VT_INLINE
;
1239 if (0 == (type
->t
& VT_EXTERN
)) {
1240 struct FuncAttr f
= sym
->type
.ref
->f
;
1241 /* put complete type, use static from prototype */
1242 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1243 sym
->type
.ref
= type
->ref
;
1244 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1246 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1249 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1250 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1251 sym
->type
.ref
= type
->ref
;
1255 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1256 /* set array size if it was omitted in extern declaration */
1257 sym
->type
.ref
->c
= type
->ref
->c
;
1259 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1260 tcc_warning("storage mismatch for redefinition of '%s'",
1261 get_tok_str(sym
->v
, NULL
));
1265 /* Merge some storage attributes. */
1266 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1269 patch_type(sym
, type
);
1271 #ifdef TCC_TARGET_PE
1272 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1273 tcc_error("incompatible dll linkage for redefinition of '%s'",
1274 get_tok_str(sym
->v
, NULL
));
1276 merge_symattr(&sym
->a
, &ad
->a
);
1278 sym
->asm_label
= ad
->asm_label
;
1279 update_storage(sym
);
1282 /* copy sym to other stack */
1283 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1286 s
= sym_malloc(), *s
= *s0
;
1287 s
->prev
= *ps
, *ps
= s
;
1288 if (s
->v
< SYM_FIRST_ANOM
) {
1289 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1290 s
->prev_tok
= *ps
, *ps
= s
;
1295 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1296 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1298 int bt
= s
->type
.t
& VT_BTYPE
;
1299 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1300 Sym
**sp
= &s
->type
.ref
;
1301 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1302 Sym
*s2
= sym_copy(s
, ps
);
1303 sp
= &(*sp
= s2
)->next
;
1304 sym_copy_ref(s2
, ps
);
1309 /* define a new external reference to a symbol 'v' */
1310 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1314 /* look for global symbol */
1316 while (s
&& s
->sym_scope
)
1320 /* push forward reference */
1321 s
= global_identifier_push(v
, type
->t
, 0);
1324 s
->asm_label
= ad
->asm_label
;
1325 s
->type
.ref
= type
->ref
;
1326 /* copy type to the global stack */
1328 sym_copy_ref(s
, &global_stack
);
1330 patch_storage(s
, ad
, type
);
1332 /* push variables on local_stack if any */
1333 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1334 s
= sym_copy(s
, &local_stack
);
1338 /* save registers up to (vtop - n) stack entry */
1339 ST_FUNC
void save_regs(int n
)
1342 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1346 /* save r to the memory stack, and mark it as being free */
1347 ST_FUNC
void save_reg(int r
)
1349 save_reg_upstack(r
, 0);
1352 /* save r to the memory stack, and mark it as being free,
1353 if seen up to (vtop - n) stack entry */
1354 ST_FUNC
void save_reg_upstack(int r
, int n
)
1356 int l
, size
, align
, bt
;
1359 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1364 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1365 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1366 /* must save value on stack if not already done */
1368 bt
= p
->type
.t
& VT_BTYPE
;
1371 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1374 size
= type_size(&sv
.type
, &align
);
1375 l
= get_temp_local_var(size
,align
);
1376 sv
.r
= VT_LOCAL
| VT_LVAL
;
1378 store(p
->r
& VT_VALMASK
, &sv
);
1379 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1380 /* x86 specific: need to pop fp register ST0 if saved */
1381 if (r
== TREG_ST0
) {
1382 o(0xd8dd); /* fstp %st(0) */
1385 /* special long long case */
1386 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1391 /* mark that stack entry as being saved on the stack */
1392 if (p
->r
& VT_LVAL
) {
1393 /* also clear the bounded flag because the
1394 relocation address of the function was stored in
1396 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1398 p
->r
= VT_LVAL
| VT_LOCAL
;
1399 p
->type
.t
&= ~VT_ARRAY
; /* cannot combine VT_LVAL with VT_ARRAY */
1408 #ifdef TCC_TARGET_ARM
1409 /* find a register of class 'rc2' with at most one reference on stack.
1410 * If none, call get_reg(rc) */
1411 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1416 for(r
=0;r
<NB_REGS
;r
++) {
1417 if (reg_classes
[r
] & rc2
) {
1420 for(p
= vstack
; p
<= vtop
; p
++) {
1421 if ((p
->r
& VT_VALMASK
) == r
||
1433 /* find a free register of class 'rc'. If none, save one register */
1434 ST_FUNC
int get_reg(int rc
)
1439 /* find a free register */
1440 for(r
=0;r
<NB_REGS
;r
++) {
1441 if (reg_classes
[r
] & rc
) {
1444 for(p
=vstack
;p
<=vtop
;p
++) {
1445 if ((p
->r
& VT_VALMASK
) == r
||
1454 /* no register left : free the first one on the stack (VERY
1455 IMPORTANT to start from the bottom to ensure that we don't
1456 spill registers used in gen_opi()) */
1457 for(p
=vstack
;p
<=vtop
;p
++) {
1458 /* look at second register (if long long) */
1460 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1462 r
= p
->r
& VT_VALMASK
;
1463 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1469 /* Should never comes here */
1473 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1474 static int get_temp_local_var(int size
,int align
){
1476 struct temp_local_variable
*temp_var
;
1483 for(i
=0;i
<nb_temp_local_vars
;i
++){
1484 temp_var
=&arr_temp_local_vars
[i
];
1485 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1488 /*check if temp_var is free*/
1490 for(p
=vstack
;p
<=vtop
;p
++) {
1492 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1493 if(p
->c
.i
==temp_var
->location
){
1500 found_var
=temp_var
->location
;
1506 loc
= (loc
- size
) & -align
;
1507 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1508 temp_var
=&arr_temp_local_vars
[i
];
1509 temp_var
->location
=loc
;
1510 temp_var
->size
=size
;
1511 temp_var
->align
=align
;
1512 nb_temp_local_vars
++;
1519 static void clear_temp_local_var_list(){
1520 nb_temp_local_vars
=0;
1523 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1525 static void move_reg(int r
, int s
, int t
)
1539 /* get address of vtop (vtop MUST BE an lvalue) */
1540 ST_FUNC
void gaddrof(void)
1542 vtop
->r
&= ~VT_LVAL
;
1543 /* tricky: if saved lvalue, then we can go back to lvalue */
1544 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1545 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1548 #ifdef CONFIG_TCC_BCHECK
1549 /* generate a bounded pointer addition */
1550 static void gen_bounded_ptr_add(void)
1552 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1557 vpush_helper_func(TOK___bound_ptr_add
);
1562 /* returned pointer is in REG_IRET */
1563 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1566 /* relocation offset of the bounding function call point */
1567 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1570 /* patch pointer addition in vtop so that pointer dereferencing is
1572 static void gen_bounded_ptr_deref(void)
1582 size
= type_size(&vtop
->type
, &align
);
1584 case 1: func
= TOK___bound_ptr_indir1
; break;
1585 case 2: func
= TOK___bound_ptr_indir2
; break;
1586 case 4: func
= TOK___bound_ptr_indir4
; break;
1587 case 8: func
= TOK___bound_ptr_indir8
; break;
1588 case 12: func
= TOK___bound_ptr_indir12
; break;
1589 case 16: func
= TOK___bound_ptr_indir16
; break;
1591 /* may happen with struct member access */
1594 sym
= external_helper_sym(func
);
1596 put_extern_sym(sym
, NULL
, 0, 0);
1597 /* patch relocation */
1598 /* XXX: find a better solution ? */
1599 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1600 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1603 /* generate lvalue bound code */
1604 static void gbound(void)
1608 vtop
->r
&= ~VT_MUSTBOUND
;
1609 /* if lvalue, then use checking code before dereferencing */
1610 if (vtop
->r
& VT_LVAL
) {
1611 /* if not VT_BOUNDED value, then make one */
1612 if (!(vtop
->r
& VT_BOUNDED
)) {
1613 /* must save type because we must set it to int to get pointer */
1615 vtop
->type
.t
= VT_PTR
;
1618 gen_bounded_ptr_add();
1622 /* then check for dereferencing */
1623 gen_bounded_ptr_deref();
1627 /* we need to call __bound_ptr_add before we start to load function
1628 args into registers */
1629 ST_FUNC
void gbound_args(int nb_args
)
1634 for (i
= 1; i
<= nb_args
; ++i
)
1635 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1641 sv
= vtop
- nb_args
;
1642 if (sv
->r
& VT_SYM
) {
1646 #ifndef TCC_TARGET_PE
1647 || v
== TOK_sigsetjmp
1648 || v
== TOK___sigsetjmp
1651 vpush_helper_func(TOK___bound_setjmp
);
1654 func_bound_add_epilog
= 1;
1656 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1657 if (v
== TOK_alloca
)
1658 func_bound_add_epilog
= 1;
1661 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1662 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1667 /* Add bounds for local symbols from S to E (via ->prev) */
1668 static void add_local_bounds(Sym
*s
, Sym
*e
)
1670 for (; s
!= e
; s
= s
->prev
) {
1671 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1673 /* Add arrays/structs/unions because we always take address */
1674 if ((s
->type
.t
& VT_ARRAY
)
1675 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1676 || s
->a
.addrtaken
) {
1677 /* add local bound info */
1678 int align
, size
= type_size(&s
->type
, &align
);
1679 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1680 2 * sizeof(addr_t
));
1681 bounds_ptr
[0] = s
->c
;
1682 bounds_ptr
[1] = size
;
1688 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1689 static void pop_local_syms(Sym
*b
, int keep
)
1691 #ifdef CONFIG_TCC_BCHECK
1692 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1693 add_local_bounds(local_stack
, b
);
1696 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1697 sym_pop(&local_stack
, b
, keep
);
1700 /* increment an lvalue pointer */
1701 static void incr_offset(int offset
)
1703 int t
= vtop
->type
.t
;
1704 gaddrof(); /* remove VT_LVAL */
1705 vtop
->type
.t
= VT_PTRDIFF_T
; /* set scalar type */
1712 static void incr_bf_adr(int o
)
1714 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1718 /* single-byte load mode for packed or otherwise unaligned bitfields */
1719 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1722 save_reg_upstack(vtop
->r
, 1);
1723 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1724 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1733 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1735 vpushi((1 << n
) - 1), gen_op('&');
1738 vpushi(bits
), gen_op(TOK_SHL
);
1741 bits
+= n
, bit_size
-= n
, o
= 1;
1744 if (!(type
->t
& VT_UNSIGNED
)) {
1745 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1746 vpushi(n
), gen_op(TOK_SHL
);
1747 vpushi(n
), gen_op(TOK_SAR
);
1751 /* single-byte store mode for packed or otherwise unaligned bitfields */
1752 static void store_packed_bf(int bit_pos
, int bit_size
)
1754 int bits
, n
, o
, m
, c
;
1755 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1757 save_reg_upstack(vtop
->r
, 1);
1758 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1760 incr_bf_adr(o
); // X B
1762 c
? vdup() : gv_dup(); // B V X
1765 vpushi(bits
), gen_op(TOK_SHR
);
1767 vpushi(bit_pos
), gen_op(TOK_SHL
);
1772 m
= ((1 << n
) - 1) << bit_pos
;
1773 vpushi(m
), gen_op('&'); // X B V1
1774 vpushv(vtop
-1); // X B V1 B
1775 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1776 gen_op('&'); // X B V1 B1
1777 gen_op('|'); // X B V2
1779 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1780 vstore(), vpop(); // X B
1781 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1786 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1789 if (0 == sv
->type
.ref
)
1791 t
= sv
->type
.ref
->auxtype
;
1792 if (t
!= -1 && t
!= VT_STRUCT
) {
1793 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1799 /* store vtop a register belonging to class 'rc'. lvalues are
1800 converted to values. Cannot be used if cannot be converted to
1801 register value (such as structures). */
1802 ST_FUNC
int gv(int rc
)
1804 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1805 int bit_pos
, bit_size
, size
, align
;
1807 /* NOTE: get_reg can modify vstack[] */
1808 if (vtop
->type
.t
& VT_BITFIELD
) {
1811 bit_pos
= BIT_POS(vtop
->type
.t
);
1812 bit_size
= BIT_SIZE(vtop
->type
.t
);
1813 /* remove bit field info to avoid loops */
1814 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1817 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1818 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1819 type
.t
|= VT_UNSIGNED
;
1821 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1823 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1828 if (r
== VT_STRUCT
) {
1829 load_packed_bf(&type
, bit_pos
, bit_size
);
1831 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1832 /* cast to int to propagate signedness in following ops */
1834 /* generate shifts */
1835 vpushi(bits
- (bit_pos
+ bit_size
));
1837 vpushi(bits
- bit_size
);
1838 /* NOTE: transformed to SHR if unsigned */
1843 if (is_float(vtop
->type
.t
) &&
1844 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1845 /* CPUs usually cannot use float constants, so we store them
1846 generically in data segment */
1847 init_params p
= { rodata_section
};
1848 unsigned long offset
;
1849 size
= type_size(&vtop
->type
, &align
);
1851 size
= 0, align
= 1;
1852 offset
= section_add(p
.sec
, size
, align
);
1853 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1855 init_putv(&p
, &vtop
->type
, offset
);
1858 #ifdef CONFIG_TCC_BCHECK
1859 if (vtop
->r
& VT_MUSTBOUND
)
1863 bt
= vtop
->type
.t
& VT_BTYPE
;
1865 #ifdef TCC_TARGET_RISCV64
1867 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1870 rc2
= RC2_TYPE(bt
, rc
);
1872 /* need to reload if:
1874 - lvalue (need to dereference pointer)
1875 - already a register, but not in the right class */
1876 r
= vtop
->r
& VT_VALMASK
;
1877 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1878 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1880 if (!r_ok
|| !r2_ok
) {
1883 if (1 /* we can 'mov (r),r' in cases */
1885 && (reg_classes
[r
] & rc
)
1888 save_reg_upstack(r
, 1);
1894 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1895 int original_type
= vtop
->type
.t
;
1897 /* two register type load :
1898 expand to two words temporarily */
1899 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1901 unsigned long long ll
= vtop
->c
.i
;
1902 vtop
->c
.i
= ll
; /* first word */
1904 vtop
->r
= r
; /* save register value */
1905 vpushi(ll
>> 32); /* second word */
1906 } else if (vtop
->r
& VT_LVAL
) {
1907 /* We do not want to modifier the long long pointer here.
1908 So we save any other instances down the stack */
1909 save_reg_upstack(vtop
->r
, 1);
1910 /* load from memory */
1911 vtop
->type
.t
= load_type
;
1914 vtop
[-1].r
= r
; /* save register value */
1915 /* increment pointer to get second word */
1916 incr_offset(PTR_SIZE
);
1918 /* move registers */
1921 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1924 vtop
[-1].r
= r
; /* save register value */
1925 vtop
->r
= vtop
[-1].r2
;
1927 /* Allocate second register. Here we rely on the fact that
1928 get_reg() tries first to free r2 of an SValue. */
1932 /* write second register */
1935 vtop
->type
.t
= original_type
;
1937 if (vtop
->r
== VT_CMP
)
1939 /* one register type load */
1944 #ifdef TCC_TARGET_C67
1945 /* uses register pairs for doubles */
1946 if (bt
== VT_DOUBLE
)
1953 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1954 ST_FUNC
void gv2(int rc1
, int rc2
)
1956 /* generate more generic register first. But VT_JMP or VT_CMP
1957 values must be generated first in all cases to avoid possible
1959 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1964 /* test if reload is needed for first register */
1965 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1975 /* test if reload is needed for first register */
1976 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1983 /* expand 64bit on stack in two ints */
1984 ST_FUNC
void lexpand(void)
1987 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1988 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1989 if (v
== VT_CONST
) {
1992 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1998 vtop
[0].r
= vtop
[-1].r2
;
1999 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2001 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2006 /* build a long long from two ints */
2007 static void lbuild(int t
)
2009 gv2(RC_INT
, RC_INT
);
2010 vtop
[-1].r2
= vtop
[0].r
;
2011 vtop
[-1].type
.t
= t
;
2016 /* convert stack entry to register and duplicate its value in another
2018 static void gv_dup(void)
2024 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2025 if (t
& VT_BITFIELD
) {
2035 /* stack: H L L1 H1 */
2045 /* duplicate value */
2055 /* generate CPU independent (unsigned) long long operations */
2056 static void gen_opl(int op
)
2058 int t
, a
, b
, op1
, c
, i
;
2060 unsigned short reg_iret
= REG_IRET
;
2061 unsigned short reg_lret
= REG_IRE2
;
2067 func
= TOK___divdi3
;
2070 func
= TOK___udivdi3
;
2073 func
= TOK___moddi3
;
2076 func
= TOK___umoddi3
;
2083 /* call generic long long function */
2084 vpush_helper_func(func
);
2089 vtop
->r2
= reg_lret
;
2097 //pv("gen_opl A",0,2);
2103 /* stack: L1 H1 L2 H2 */
2108 vtop
[-2] = vtop
[-3];
2111 /* stack: H1 H2 L1 L2 */
2112 //pv("gen_opl B",0,4);
2118 /* stack: H1 H2 L1 L2 ML MH */
2121 /* stack: ML MH H1 H2 L1 L2 */
2125 /* stack: ML MH H1 L2 H2 L1 */
2130 /* stack: ML MH M1 M2 */
2133 } else if (op
== '+' || op
== '-') {
2134 /* XXX: add non carry method too (for MIPS or alpha) */
2140 /* stack: H1 H2 (L1 op L2) */
2143 gen_op(op1
+ 1); /* TOK_xxxC2 */
2146 /* stack: H1 H2 (L1 op L2) */
2149 /* stack: (L1 op L2) H1 H2 */
2151 /* stack: (L1 op L2) (H1 op H2) */
2159 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2160 t
= vtop
[-1].type
.t
;
2164 /* stack: L H shift */
2166 /* constant: simpler */
2167 /* NOTE: all comments are for SHL. the other cases are
2168 done by swapping words */
2179 if (op
!= TOK_SAR
) {
2212 /* XXX: should provide a faster fallback on x86 ? */
2215 func
= TOK___ashrdi3
;
2218 func
= TOK___lshrdi3
;
2221 func
= TOK___ashldi3
;
2227 /* compare operations */
2233 /* stack: L1 H1 L2 H2 */
2235 vtop
[-1] = vtop
[-2];
2237 /* stack: L1 L2 H1 H2 */
2241 /* when values are equal, we need to compare low words. since
2242 the jump is inverted, we invert the test too. */
2245 else if (op1
== TOK_GT
)
2247 else if (op1
== TOK_ULT
)
2249 else if (op1
== TOK_UGT
)
2259 /* generate non equal test */
2261 vset_VT_CMP(TOK_NE
);
2265 /* compare low. Always unsigned */
2269 else if (op1
== TOK_LE
)
2271 else if (op1
== TOK_GT
)
2273 else if (op1
== TOK_GE
)
2276 #if 0//def TCC_TARGET_I386
2277 if (op
== TOK_NE
) { gsym(b
); break; }
2278 if (op
== TOK_EQ
) { gsym(a
); break; }
2287 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
2289 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
2290 return (a
^ b
) >> 63 ? -x
: x
;
2293 static int gen_opic_lt(uint64_t a
, uint64_t b
)
2295 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
2298 /* handle integer constant optimizations and various machine
2300 static void gen_opic(int op
)
2302 SValue
*v1
= vtop
- 1;
2304 int t1
= v1
->type
.t
& VT_BTYPE
;
2305 int t2
= v2
->type
.t
& VT_BTYPE
;
2306 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2307 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2308 uint64_t l1
= c1
? v1
->c
.i
: 0;
2309 uint64_t l2
= c2
? v2
->c
.i
: 0;
2310 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2313 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2314 l1
= ((uint32_t)l1
|
2315 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2316 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2317 l2
= ((uint32_t)l2
|
2318 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2322 case '+': l1
+= l2
; break;
2323 case '-': l1
-= l2
; break;
2324 case '&': l1
&= l2
; break;
2325 case '^': l1
^= l2
; break;
2326 case '|': l1
|= l2
; break;
2327 case '*': l1
*= l2
; break;
2334 /* if division by zero, generate explicit division */
2336 if (CONST_WANTED
&& !NOEVAL_WANTED
)
2337 tcc_error("division by zero in constant");
2341 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2342 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2343 case TOK_UDIV
: l1
= l1
/ l2
; break;
2344 case TOK_UMOD
: l1
= l1
% l2
; break;
2347 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2348 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2350 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2353 case TOK_ULT
: l1
= l1
< l2
; break;
2354 case TOK_UGE
: l1
= l1
>= l2
; break;
2355 case TOK_EQ
: l1
= l1
== l2
; break;
2356 case TOK_NE
: l1
= l1
!= l2
; break;
2357 case TOK_ULE
: l1
= l1
<= l2
; break;
2358 case TOK_UGT
: l1
= l1
> l2
; break;
2359 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2360 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2361 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2362 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2364 case TOK_LAND
: l1
= l1
&& l2
; break;
2365 case TOK_LOR
: l1
= l1
|| l2
; break;
2369 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2370 l1
= ((uint32_t)l1
|
2371 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2373 v1
->r
|= v2
->r
& VT_NONCONST
;
2376 /* if commutative ops, put c2 as constant */
2377 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2378 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2380 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2381 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2383 if (c1
&& ((l1
== 0 &&
2384 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2385 (l1
== -1 && op
== TOK_SAR
))) {
2386 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2388 } else if (c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2390 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2391 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2392 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2397 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2400 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2401 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2404 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2405 /* filter out NOP operations like x*1, x-0, x&-1... */
2407 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2408 /* try to use shifts instead of muls or divs */
2409 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2418 else if (op
== TOK_PDIV
)
2424 } else if (c2
&& (op
== '+' || op
== '-') &&
2425 (r
= vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
),
2426 r
== (VT_CONST
| VT_SYM
) || r
== VT_LOCAL
)) {
2427 /* symbol + constant case */
2431 /* The backends can't always deal with addends to symbols
2432 larger than +-1<<31. Don't construct such. */
2439 /* call low level op generator */
2440 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2441 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2446 if (vtop
->r
== VT_CONST
)
2447 vtop
->r
|= VT_NONCONST
; /* is const, but only by optimization */
2451 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2452 # define gen_negf gen_opf
2453 #elif defined TCC_TARGET_ARM
2454 void gen_negf(int op
)
2456 /* arm will detect 0-x and replace by vneg */
2457 vpushi(0), vswap(), gen_op('-');
2460 /* XXX: implement in gen_opf() for other backends too */
2461 void gen_negf(int op
)
2463 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2464 subtract(-0, x), but with them it's really a sign flip
2465 operation. We implement this with bit manipulation and have
2466 to do some type reinterpretation for this, which TCC can do
2469 int align
, size
, bt
;
2471 size
= type_size(&vtop
->type
, &align
);
2472 bt
= vtop
->type
.t
& VT_BTYPE
;
2473 save_reg(gv(RC_TYPE(bt
)));
2475 incr_bf_adr(size
- 1);
2477 vpushi(0x80); /* flip sign */
2484 /* generate a floating point operation with constant propagation */
2485 static void gen_opif(int op
)
2489 #if defined _MSC_VER && defined __x86_64__
2490 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2499 bt
= v1
->type
.t
& VT_BTYPE
;
2501 /* currently, we cannot do computations with forward symbols */
2502 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2503 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2505 if (bt
== VT_FLOAT
) {
2508 } else if (bt
== VT_DOUBLE
) {
2515 /* NOTE: we only do constant propagation if finite number (not
2516 NaN or infinity) (ANSI spec) */
2517 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !CONST_WANTED
)
2520 case '+': f1
+= f2
; break;
2521 case '-': f1
-= f2
; break;
2522 case '*': f1
*= f2
; break;
2525 union { float f
; unsigned u
; } x1
, x2
, y
;
2526 /* If not in initializer we need to potentially generate
2527 FP exceptions at runtime, otherwise we want to fold. */
2530 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2531 when used to compile the f1 /= f2 below, would be -nan */
2532 x1
.f
= f1
, x2
.f
= f2
;
2534 y
.u
= 0x7fc00000; /* nan */
2536 y
.u
= 0x7f800000; /* infinity */
2537 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2572 /* XXX: overflow test ? */
2573 if (bt
== VT_FLOAT
) {
2575 } else if (bt
== VT_DOUBLE
) {
2582 if (op
== TOK_NEG
) {
2590 /* print a type. If 'varstr' is not NULL, then the variable is also
2591 printed in the type */
2593 /* XXX: add array and function pointers */
2594 static void type_to_str(char *buf
, int buf_size
,
2595 CType
*type
, const char *varstr
)
2607 pstrcat(buf
, buf_size
, "extern ");
2609 pstrcat(buf
, buf_size
, "static ");
2611 pstrcat(buf
, buf_size
, "typedef ");
2613 pstrcat(buf
, buf_size
, "inline ");
2615 if (t
& VT_VOLATILE
)
2616 pstrcat(buf
, buf_size
, "volatile ");
2617 if (t
& VT_CONSTANT
)
2618 pstrcat(buf
, buf_size
, "const ");
2620 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2621 || ((t
& VT_UNSIGNED
)
2622 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2625 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2627 buf_size
-= strlen(buf
);
2663 tstr
= "long double";
2665 pstrcat(buf
, buf_size
, tstr
);
2672 pstrcat(buf
, buf_size
, tstr
);
2673 v
= type
->ref
->v
& ~SYM_STRUCT
;
2674 if (v
>= SYM_FIRST_ANOM
)
2675 pstrcat(buf
, buf_size
, "<anonymous>");
2677 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2682 if (varstr
&& '*' == *varstr
) {
2683 pstrcat(buf1
, sizeof(buf1
), "(");
2684 pstrcat(buf1
, sizeof(buf1
), varstr
);
2685 pstrcat(buf1
, sizeof(buf1
), ")");
2687 pstrcat(buf1
, buf_size
, "(");
2689 while (sa
!= NULL
) {
2691 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2692 pstrcat(buf1
, sizeof(buf1
), buf2
);
2695 pstrcat(buf1
, sizeof(buf1
), ", ");
2697 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2698 pstrcat(buf1
, sizeof(buf1
), ", ...");
2699 pstrcat(buf1
, sizeof(buf1
), ")");
2700 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2704 if (t
& (VT_ARRAY
|VT_VLA
)) {
2705 if (varstr
&& '*' == *varstr
)
2706 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2708 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2709 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2712 pstrcpy(buf1
, sizeof(buf1
), "*");
2713 if (t
& VT_CONSTANT
)
2714 pstrcat(buf1
, buf_size
, "const ");
2715 if (t
& VT_VOLATILE
)
2716 pstrcat(buf1
, buf_size
, "volatile ");
2718 pstrcat(buf1
, sizeof(buf1
), varstr
);
2719 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2723 pstrcat(buf
, buf_size
, " ");
2724 pstrcat(buf
, buf_size
, varstr
);
2729 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2731 char buf1
[256], buf2
[256];
2732 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2733 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2734 tcc_error(fmt
, buf1
, buf2
);
2737 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2739 char buf1
[256], buf2
[256];
2740 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2741 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2742 tcc_warning(fmt
, buf1
, buf2
);
2745 static int pointed_size(CType
*type
)
2748 return type_size(pointed_type(type
), &align
);
2751 static inline int is_null_pointer(SValue
*p
)
2753 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
2755 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2756 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2757 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2758 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2759 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2760 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2764 /* compare function types. OLD functions match any new functions */
2765 static int is_compatible_func(CType
*type1
, CType
*type2
)
2771 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2773 if (s1
->f
.func_type
!= s2
->f
.func_type
2774 && s1
->f
.func_type
!= FUNC_OLD
2775 && s2
->f
.func_type
!= FUNC_OLD
)
2778 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2780 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2791 /* return true if type1 and type2 are the same. If unqualified is
2792 true, qualifiers on the types are ignored.
2794 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2798 t1
= type1
->t
& VT_TYPE
;
2799 t2
= type2
->t
& VT_TYPE
;
2801 /* strip qualifiers before comparing */
2802 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2803 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2806 /* Default Vs explicit signedness only matters for char */
2807 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2811 /* XXX: bitfields ? */
2816 && !(type1
->ref
->c
< 0
2817 || type2
->ref
->c
< 0
2818 || type1
->ref
->c
== type2
->ref
->c
))
2821 /* test more complicated cases */
2822 bt1
= t1
& VT_BTYPE
;
2823 if (bt1
== VT_PTR
) {
2824 type1
= pointed_type(type1
);
2825 type2
= pointed_type(type2
);
2826 return is_compatible_types(type1
, type2
);
2827 } else if (bt1
== VT_STRUCT
) {
2828 return (type1
->ref
== type2
->ref
);
2829 } else if (bt1
== VT_FUNC
) {
2830 return is_compatible_func(type1
, type2
);
2831 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2832 /* If both are enums then they must be the same, if only one is then
2833 t1 and t2 must be equal, which was checked above already. */
2834 return type1
->ref
== type2
->ref
;
2841 #define SHIFT_OP 'S'
2843 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2844 type is stored in DEST if non-null (except for pointer plus/minus) . */
2845 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2847 CType
*type1
, *type2
, type
;
2848 int t1
, t2
, bt1
, bt2
;
2851 /* for shifts, 'combine' only left operand */
2855 type1
= &op1
->type
, type2
= &op2
->type
;
2856 t1
= type1
->t
, t2
= type2
->t
;
2857 bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2862 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2863 ret
= op
== '?' ? 1 : 0;
2864 /* NOTE: as an extension, we accept void on only one side */
2866 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2868 if (!is_integer_btype(bt1
== VT_PTR
? bt2
: bt1
))
2871 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2872 /* If one is a null ptr constant the result type is the other. */
2873 else if (is_null_pointer (op2
)) type
= *type1
;
2874 else if (is_null_pointer (op1
)) type
= *type2
;
2875 else if (bt1
!= bt2
) {
2876 /* accept comparison or cond-expr between pointer and integer
2878 if ((op
== '?' || op
== CMP_OP
)
2879 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2880 tcc_warning("pointer/integer mismatch in %s",
2881 op
== '?' ? "conditional expression" : "comparison");
2882 else if (op
!= '-' || !is_integer_btype(bt2
))
2884 type
= *(bt1
== VT_PTR
? type1
: type2
);
2886 CType
*pt1
= pointed_type(type1
);
2887 CType
*pt2
= pointed_type(type2
);
2888 int pbt1
= pt1
->t
& VT_BTYPE
;
2889 int pbt2
= pt2
->t
& VT_BTYPE
;
2890 int newquals
, copied
= 0;
2891 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2892 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2893 if (op
!= '?' && op
!= CMP_OP
)
2896 type_incompatibility_warning(type1
, type2
,
2898 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2899 : "pointer type mismatch in comparison('%s' and '%s')");
2902 /* pointers to void get preferred, otherwise the
2903 pointed to types minus qualifs should be compatible */
2904 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2905 /* combine qualifs */
2906 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2907 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2910 /* copy the pointer target symbol */
2911 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2914 pointed_type(&type
)->t
|= newquals
;
2916 /* pointers to incomplete arrays get converted to
2917 pointers to completed ones if possible */
2918 if (pt1
->t
& VT_ARRAY
2919 && pt2
->t
& VT_ARRAY
2920 && pointed_type(&type
)->ref
->c
< 0
2921 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2924 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2926 pointed_type(&type
)->ref
=
2927 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2928 0, pointed_type(&type
)->ref
->c
);
2929 pointed_type(&type
)->ref
->c
=
2930 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2936 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2937 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2940 } else if (is_float(bt1
) || is_float(bt2
)) {
2941 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2942 type
.t
= VT_LDOUBLE
;
2943 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2948 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2949 /* cast to biggest op */
2950 type
.t
= VT_LLONG
| VT_LONG
;
2951 if (bt1
== VT_LLONG
)
2953 if (bt2
== VT_LLONG
)
2955 /* convert to unsigned if it does not fit in a long long */
2956 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2957 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2958 type
.t
|= VT_UNSIGNED
;
2960 /* integer operations */
2961 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2962 /* convert to unsigned if it does not fit in an integer */
2963 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2964 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2965 type
.t
|= VT_UNSIGNED
;
2972 /* generic gen_op: handles types problems */
2973 ST_FUNC
void gen_op(int op
)
2975 int t1
, t2
, bt1
, bt2
, t
;
2976 CType type1
, combtype
;
2979 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2980 op_class
= SHIFT_OP
;
2981 else if (TOK_ISCOND(op
)) /* == != > ... */
2985 t1
= vtop
[-1].type
.t
;
2986 t2
= vtop
[0].type
.t
;
2987 bt1
= t1
& VT_BTYPE
;
2988 bt2
= t2
& VT_BTYPE
;
2990 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2991 if (bt2
== VT_FUNC
) {
2992 mk_pointer(&vtop
->type
);
2995 if (bt1
== VT_FUNC
) {
2997 mk_pointer(&vtop
->type
);
3002 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op_class
)) {
3004 tcc_error("invalid operand types for binary operation");
3005 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3006 /* at least one operand is a pointer */
3007 /* relational op: must be both pointers */
3009 if (op_class
== CMP_OP
)
3011 /* if both pointers, then it must be the '-' op */
3012 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3015 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
3016 vtop
->type
.t
&= ~VT_UNSIGNED
;
3019 vtop
->type
.t
= VT_PTRDIFF_T
;
3023 /* exactly one pointer : must be '+' or '-'. */
3024 if (op
!= '-' && op
!= '+')
3026 /* Put pointer as first operand */
3027 if (bt2
== VT_PTR
) {
3029 t
= t1
, t1
= t2
, t2
= t
;
3033 if (bt2
== VT_LLONG
)
3034 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3037 type1
= vtop
[-1].type
;
3038 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
3040 #ifdef CONFIG_TCC_BCHECK
3041 if (tcc_state
->do_bounds_check
&& !CONST_WANTED
) {
3042 /* if bounded pointers, we generate a special code to
3049 gen_bounded_ptr_add();
3055 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
3056 /* put again type if gen_opic() swaped operands */
3060 /* floats can only be used for a few operations */
3061 if (is_float(combtype
.t
)
3062 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3063 && op_class
!= CMP_OP
) {
3067 t
= t2
= combtype
.t
;
3068 /* special case for shifts and long long: we keep the shift as
3070 if (op_class
== SHIFT_OP
)
3072 /* XXX: currently, some unsigned operations are explicit, so
3073 we modify them here */
3074 if (t
& VT_UNSIGNED
) {
3081 else if (op
== TOK_LT
)
3083 else if (op
== TOK_GT
)
3085 else if (op
== TOK_LE
)
3087 else if (op
== TOK_GE
)
3098 if (op_class
== CMP_OP
) {
3099 /* relational op: the result is an int */
3100 vtop
->type
.t
= VT_INT
;
3105 // Make sure that we have converted to an rvalue:
3106 if (vtop
->r
& VT_LVAL
)
3107 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3110 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3111 #define gen_cvt_itof1 gen_cvt_itof
3113 /* generic itof for unsigned long long case */
3114 static void gen_cvt_itof1(int t
)
3116 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3117 (VT_LLONG
| VT_UNSIGNED
)) {
3120 vpush_helper_func(TOK___floatundisf
);
3121 #if LDOUBLE_SIZE != 8
3122 else if (t
== VT_LDOUBLE
)
3123 vpush_helper_func(TOK___floatundixf
);
3126 vpush_helper_func(TOK___floatundidf
);
3137 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3138 #define gen_cvt_ftoi1 gen_cvt_ftoi
3140 /* generic ftoi for unsigned long long case */
3141 static void gen_cvt_ftoi1(int t
)
3144 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3145 /* not handled natively */
3146 st
= vtop
->type
.t
& VT_BTYPE
;
3148 vpush_helper_func(TOK___fixunssfdi
);
3149 #if LDOUBLE_SIZE != 8
3150 else if (st
== VT_LDOUBLE
)
3151 vpush_helper_func(TOK___fixunsxfdi
);
3154 vpush_helper_func(TOK___fixunsdfdi
);
3165 /* special delayed cast for char/short */
3166 static void force_charshort_cast(void)
3168 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3169 int dbt
= vtop
->type
.t
;
3170 vtop
->r
&= ~VT_MUSTCAST
;
3172 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3176 static void gen_cast_s(int t
)
3184 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3185 static void gen_cast(CType
*type
)
3187 int sbt
, dbt
, sf
, df
, c
;
3188 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3190 /* special delayed cast for char/short */
3191 if (vtop
->r
& VT_MUSTCAST
)
3192 force_charshort_cast();
3194 /* bitfields first get cast to ints */
3195 if (vtop
->type
.t
& VT_BITFIELD
)
3198 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3199 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3207 dbt_bt
= dbt
& VT_BTYPE
;
3208 sbt_bt
= sbt
& VT_BTYPE
;
3209 if (dbt_bt
== VT_VOID
)
3211 if (sbt_bt
== VT_VOID
) {
3213 cast_error(&vtop
->type
, type
);
3216 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3217 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3218 /* don't try to convert to ldouble when cross-compiling
3219 (except when it's '0' which is needed for arm:gen_negf()) */
3220 if (dbt_bt
== VT_LDOUBLE
&& !nocode_wanted
&& (sf
|| vtop
->c
.i
!= 0))
3224 /* constant case: we can do it now */
3225 /* XXX: in ISOC, cannot do it if error in convert */
3226 if (sbt
== VT_FLOAT
)
3227 vtop
->c
.ld
= vtop
->c
.f
;
3228 else if (sbt
== VT_DOUBLE
)
3229 vtop
->c
.ld
= vtop
->c
.d
;
3232 if (sbt_bt
== VT_LLONG
) {
3233 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3234 vtop
->c
.ld
= vtop
->c
.i
;
3236 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3238 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3239 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3241 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3244 if (dbt
== VT_FLOAT
)
3245 vtop
->c
.f
= (float)vtop
->c
.ld
;
3246 else if (dbt
== VT_DOUBLE
)
3247 vtop
->c
.d
= (double)vtop
->c
.ld
;
3248 } else if (sf
&& dbt
== VT_BOOL
) {
3249 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3252 vtop
->c
.i
= vtop
->c
.ld
;
3253 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3255 else if (sbt
& VT_UNSIGNED
)
3256 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3258 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3260 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3262 else if (dbt
== VT_BOOL
)
3263 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3265 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3266 dbt_bt
== VT_SHORT
? 0xffff :
3269 if (!(dbt
& VT_UNSIGNED
))
3270 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3275 } else if (dbt
== VT_BOOL
3276 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3277 == (VT_CONST
| VT_SYM
)) {
3278 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3284 /* cannot generate code for global or static initializers */
3285 if (nocode_wanted
& DATA_ONLY_WANTED
)
3288 /* non constant case: generate code */
3289 if (dbt
== VT_BOOL
) {
3290 gen_test_zero(TOK_NE
);
3296 /* convert from fp to fp */
3299 /* convert int to fp */
3302 /* convert fp to int */
3304 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3307 goto again
; /* may need char/short cast */
3312 ds
= btype_size(dbt_bt
);
3313 ss
= btype_size(sbt_bt
);
3314 if (ds
== 0 || ss
== 0)
3317 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3318 tcc_error("cast to incomplete type");
3320 /* same size and no sign conversion needed */
3321 if (ds
== ss
&& ds
>= 4)
3323 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3324 tcc_warning("cast between pointer and integer of different size");
3325 if (sbt_bt
== VT_PTR
) {
3326 /* put integer type to allow logical operations below */
3327 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3331 /* processor allows { int a = 0, b = *(char*)&a; }
3332 That means that if we cast to less width, we can just
3333 change the type and read it still later. */
3334 #define ALLOW_SUBTYPE_ACCESS 1
3336 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3337 /* value still in memory */
3341 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3343 goto done
; /* no 64bit envolved */
3351 /* generate high word */
3352 if (sbt
& VT_UNSIGNED
) {
3361 } else if (ss
== 8) {
3362 /* from long long: just take low order word */
3370 /* need to convert from 32bit to 64bit */
3371 if (sbt
& VT_UNSIGNED
) {
3372 #if defined(TCC_TARGET_RISCV64)
3373 /* RISC-V keeps 32bit vals in registers sign-extended.
3374 So here we need a zero-extension. */
3383 ss
= ds
, ds
= 4, dbt
= sbt
;
3384 } else if (ss
== 8) {
3385 /* RISC-V keeps 32bit vals in registers sign-extended.
3386 So here we need a sign-extension for signed types and
3387 zero-extension. for unsigned types. */
3388 #if !defined(TCC_TARGET_RISCV64)
3389 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3398 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3404 bits
= (ss
- ds
) * 8;
3405 /* for unsigned, gen_op will convert SAR to SHR */
3406 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3409 vpushi(bits
- trunc
);
3416 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3419 /* return type size as known at compile time. Put alignment at 'a' */
3420 ST_FUNC
int type_size(CType
*type
, int *a
)
3425 bt
= type
->t
& VT_BTYPE
;
3426 if (bt
== VT_STRUCT
) {
3431 } else if (bt
== VT_PTR
) {
3432 if (type
->t
& VT_ARRAY
) {
3435 ts
= type_size(&s
->type
, a
);
3436 if (ts
< 0 && s
->c
< 0)
3443 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3445 return -1; /* incomplete enum */
3446 } else if (bt
== VT_LDOUBLE
) {
3448 return LDOUBLE_SIZE
;
3449 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3450 #if (defined TCC_TARGET_I386 && !defined TCC_TARGET_PE) \
3451 || (defined TCC_TARGET_ARM && !defined TCC_ARM_EABI)
3457 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3460 } else if (bt
== VT_SHORT
) {
3463 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3467 /* char, void, function, _Bool */
3473 /* push type size as known at runtime time on top of value stack. Put
3475 static void vpush_type_size(CType
*type
, int *a
)
3477 if (type
->t
& VT_VLA
) {
3478 type_size(&type
->ref
->type
, a
);
3479 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3481 int size
= type_size(type
, a
);
3483 tcc_error("unknown type size");
3488 /* return the pointed type of t */
3489 static inline CType
*pointed_type(CType
*type
)
3491 return &type
->ref
->type
;
3494 /* modify type so that its it is a pointer to type. */
3495 ST_FUNC
void mk_pointer(CType
*type
)
3498 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3499 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3503 /* return true if type1 and type2 are exactly the same (including
3506 static int is_compatible_types(CType
*type1
, CType
*type2
)
3508 return compare_types(type1
,type2
,0);
3511 /* return true if type1 and type2 are the same (ignoring qualifiers).
3513 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3515 return compare_types(type1
,type2
,1);
3518 static void cast_error(CType
*st
, CType
*dt
)
3520 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3523 /* verify type compatibility to store vtop in 'dt' type */
3524 static void verify_assign_cast(CType
*dt
)
3526 CType
*st
, *type1
, *type2
;
3527 int dbt
, sbt
, qualwarn
, lvl
;
3529 st
= &vtop
->type
; /* source type */
3530 dbt
= dt
->t
& VT_BTYPE
;
3531 sbt
= st
->t
& VT_BTYPE
;
3532 if (dt
->t
& VT_CONSTANT
)
3533 tcc_warning("assignment of read-only location");
3537 tcc_error("assignment to void expression");
3540 /* special cases for pointers */
3541 /* '0' can also be a pointer */
3542 if (is_null_pointer(vtop
))
3544 /* accept implicit pointer to integer cast with warning */
3545 if (is_integer_btype(sbt
)) {
3546 tcc_warning("assignment makes pointer from integer without a cast");
3549 type1
= pointed_type(dt
);
3551 type2
= pointed_type(st
);
3552 else if (sbt
== VT_FUNC
)
3553 type2
= st
; /* a function is implicitly a function pointer */
3556 if (is_compatible_types(type1
, type2
))
3558 for (qualwarn
= lvl
= 0;; ++lvl
) {
3559 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3560 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3562 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3563 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3564 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3566 type1
= pointed_type(type1
);
3567 type2
= pointed_type(type2
);
3569 if (!is_compatible_unqualified_types(type1
, type2
)) {
3570 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3571 /* void * can match anything */
3572 } else if (dbt
== sbt
3573 && is_integer_btype(sbt
& VT_BTYPE
)
3574 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3575 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3576 /* Like GCC don't warn by default for merely changes
3577 in pointer target signedness. Do warn for different
3578 base types, though, in particular for unsigned enums
3579 and signed int targets. */
3581 tcc_warning("assignment from incompatible pointer type");
3586 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3592 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3593 tcc_warning("assignment makes integer from pointer without a cast");
3594 } else if (sbt
== VT_STRUCT
) {
3595 goto case_VT_STRUCT
;
3597 /* XXX: more tests */
3601 if (!is_compatible_unqualified_types(dt
, st
)) {
3609 static void gen_assign_cast(CType
*dt
)
3611 verify_assign_cast(dt
);
3615 /* store vtop in lvalue pushed on stack */
3616 ST_FUNC
void vstore(void)
3618 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3620 ft
= vtop
[-1].type
.t
;
3621 sbt
= vtop
->type
.t
& VT_BTYPE
;
3622 dbt
= ft
& VT_BTYPE
;
3623 verify_assign_cast(&vtop
[-1].type
);
3625 if (sbt
== VT_STRUCT
) {
3626 /* if structure, only generate pointer */
3627 /* structure assignment : generate memcpy */
3628 size
= type_size(&vtop
->type
, &align
);
3629 /* destination, keep on stack() as result */
3631 #ifdef CONFIG_TCC_BCHECK
3632 if (vtop
->r
& VT_MUSTBOUND
)
3633 gbound(); /* check would be wrong after gaddrof() */
3635 vtop
->type
.t
= VT_PTR
;
3639 #ifdef CONFIG_TCC_BCHECK
3640 if (vtop
->r
& VT_MUSTBOUND
)
3643 vtop
->type
.t
= VT_PTR
;
3646 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3648 #ifdef CONFIG_TCC_BCHECK
3649 && !tcc_state
->do_bounds_check
3652 gen_struct_copy(size
);
3658 /* Use memmove, rather than memcpy, as dest and src may be same: */
3661 vpush_helper_func(TOK_memmove8
);
3662 else if(!(align
& 3))
3663 vpush_helper_func(TOK_memmove4
);
3666 vpush_helper_func(TOK_memmove
);
3671 } else if (ft
& VT_BITFIELD
) {
3672 /* bitfield store handling */
3674 /* save lvalue as expression result (example: s.b = s.a = n;) */
3675 vdup(), vtop
[-1] = vtop
[-2];
3677 bit_pos
= BIT_POS(ft
);
3678 bit_size
= BIT_SIZE(ft
);
3679 /* remove bit field info to avoid loops */
3680 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3682 if (dbt
== VT_BOOL
) {
3683 gen_cast(&vtop
[-1].type
);
3684 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3686 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3687 if (dbt
!= VT_BOOL
) {
3688 gen_cast(&vtop
[-1].type
);
3689 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3691 if (r
== VT_STRUCT
) {
3692 store_packed_bf(bit_pos
, bit_size
);
3694 unsigned long long mask
= (1ULL << bit_size
) - 1;
3695 if (dbt
!= VT_BOOL
) {
3697 if (dbt
== VT_LLONG
)
3700 vpushi((unsigned)mask
);
3707 /* duplicate destination */
3710 /* load destination, mask and or with source */
3711 if (dbt
== VT_LLONG
)
3712 vpushll(~(mask
<< bit_pos
));
3714 vpushi(~((unsigned)mask
<< bit_pos
));
3719 /* ... and discard */
3722 } else if (dbt
== VT_VOID
) {
3725 /* optimize char/short casts */
3727 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3728 && is_integer_btype(sbt
)
3730 if ((vtop
->r
& VT_MUSTCAST
)
3731 && btype_size(dbt
) > btype_size(sbt
)
3733 force_charshort_cast();
3736 gen_cast(&vtop
[-1].type
);
3739 #ifdef CONFIG_TCC_BCHECK
3740 /* bound check case */
3741 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3747 gv(RC_TYPE(dbt
)); /* generate value */
3750 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3751 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3752 vtop
->type
.t
= ft
& VT_TYPE
;
3755 /* if lvalue was saved on stack, must read it */
3756 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3758 r
= get_reg(RC_INT
);
3759 sv
.type
.t
= VT_PTRDIFF_T
;
3760 sv
.r
= VT_LOCAL
| VT_LVAL
;
3761 sv
.c
.i
= vtop
[-1].c
.i
;
3763 vtop
[-1].r
= r
| VT_LVAL
;
3766 r
= vtop
->r
& VT_VALMASK
;
3767 /* two word case handling :
3768 store second register at word + 4 (or +8 for x86-64) */
3769 if (USING_TWO_WORDS(dbt
)) {
3770 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3771 vtop
[-1].type
.t
= load_type
;
3774 incr_offset(PTR_SIZE
);
3776 /* XXX: it works because r2 is spilled last ! */
3777 store(vtop
->r2
, vtop
- 1);
3783 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3787 /* post defines POST/PRE add. c is the token ++ or -- */
3788 ST_FUNC
void inc(int post
, int c
)
3791 vdup(); /* save lvalue */
3793 gv_dup(); /* duplicate value */
3798 vpushi(c
- TOK_MID
);
3800 vstore(); /* store value */
3802 vpop(); /* if post op, return saved value */
3805 ST_FUNC CString
* parse_mult_str (const char *msg
)
3807 /* read the string */
3810 cstr_reset(&initstr
);
3811 while (tok
== TOK_STR
) {
3812 /* XXX: add \0 handling too ? */
3813 cstr_cat(&initstr
, tokc
.str
.data
, -1);
3816 cstr_ccat(&initstr
, '\0');
3820 /* If I is >= 1 and a power of two, returns log2(i)+1.
3821 If I is 0 returns 0. */
3822 ST_FUNC
int exact_log2p1(int i
)
3827 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3838 /* Parse __attribute__((...)) GNUC extension. */
3839 static void parse_attribute(AttributeDef
*ad
)
3845 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3850 while (tok
!= ')') {
3851 if (tok
< TOK_IDENT
)
3852 expect("attribute name");
3864 tcc_warning_c(warn_implicit_function_declaration
)(
3865 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3866 s
= external_global_sym(tok
, &func_old_type
);
3867 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3868 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3869 ad
->cleanup_func
= s
;
3874 case TOK_CONSTRUCTOR1
:
3875 case TOK_CONSTRUCTOR2
:
3876 ad
->f
.func_ctor
= 1;
3878 case TOK_DESTRUCTOR1
:
3879 case TOK_DESTRUCTOR2
:
3880 ad
->f
.func_dtor
= 1;
3882 case TOK_ALWAYS_INLINE1
:
3883 case TOK_ALWAYS_INLINE2
:
3884 ad
->f
.func_alwinl
= 1;
3889 astr
= parse_mult_str("section name")->data
;
3890 ad
->section
= find_section(tcc_state
, astr
);
3896 astr
= parse_mult_str("alias(\"target\")")->data
;
3897 /* save string as token, for later */
3898 ad
->alias_target
= tok_alloc_const(astr
);
3901 case TOK_VISIBILITY1
:
3902 case TOK_VISIBILITY2
:
3904 astr
= parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data
;
3905 if (!strcmp (astr
, "default"))
3906 ad
->a
.visibility
= STV_DEFAULT
;
3907 else if (!strcmp (astr
, "hidden"))
3908 ad
->a
.visibility
= STV_HIDDEN
;
3909 else if (!strcmp (astr
, "internal"))
3910 ad
->a
.visibility
= STV_INTERNAL
;
3911 else if (!strcmp (astr
, "protected"))
3912 ad
->a
.visibility
= STV_PROTECTED
;
3914 expect("visibility(\"default|hidden|internal|protected\")");
3922 if (n
<= 0 || (n
& (n
- 1)) != 0)
3923 tcc_error("alignment must be a positive power of two");
3928 ad
->a
.aligned
= exact_log2p1(n
);
3929 if (n
!= 1 << (ad
->a
.aligned
- 1))
3930 tcc_error("alignment of %d is larger than implemented", n
);
3946 /* currently, no need to handle it because tcc does not
3947 track unused objects */
3951 ad
->f
.func_noreturn
= 1;
3956 ad
->f
.func_call
= FUNC_CDECL
;
3961 ad
->f
.func_call
= FUNC_STDCALL
;
3963 #ifdef TCC_TARGET_I386
3973 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3979 ad
->f
.func_call
= FUNC_FASTCALLW
;
3984 ad
->f
.func_call
= FUNC_THISCALL
;
3991 ad
->attr_mode
= VT_LLONG
+ 1;
3994 ad
->attr_mode
= VT_BYTE
+ 1;
3997 ad
->attr_mode
= VT_SHORT
+ 1;
4001 ad
->attr_mode
= VT_INT
+ 1;
4004 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4011 ad
->a
.dllexport
= 1;
4013 case TOK_NODECORATE
:
4014 ad
->a
.nodecorate
= 1;
4017 ad
->a
.dllimport
= 1;
4020 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
4021 /* skip parameters */
4023 int parenthesis
= 0;
4027 else if (tok
== ')')
4030 } while (parenthesis
&& tok
!= -1);
4043 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4046 int v1
= v
| SYM_FIELD
;
4047 if (!(v
& SYM_FIELD
)) { /* top-level call */
4048 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
4049 expect("struct or union");
4051 expect("field name");
4053 tcc_error("dereferencing incomplete type '%s'",
4054 get_tok_str(s
->v
& ~SYM_STRUCT
, 0));
4056 while ((s
= s
->next
) != NULL
) {
4061 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
4062 && s
->v
>= (SYM_FIRST_ANOM
| SYM_FIELD
)) {
4063 /* try to find field in anonymous sub-struct/union */
4064 Sym
*ret
= find_field (&s
->type
, v1
, cumofs
);
4071 if (!(v
& SYM_FIELD
))
4072 tcc_error("field not found: %s", get_tok_str(v
, NULL
));
4076 static void check_fields (CType
*type
, int check
)
4080 while ((s
= s
->next
) != NULL
) {
4081 int v
= s
->v
& ~SYM_FIELD
;
4082 if (v
< SYM_FIRST_ANOM
) {
4083 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4084 if (check
&& (ts
->tok
& SYM_FIELD
))
4085 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
4086 ts
->tok
^= SYM_FIELD
;
4087 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
4088 check_fields (&s
->type
, check
);
4092 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4094 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4095 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4096 int pcc
= !tcc_state
->ms_bitfields
;
4097 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4104 prevbt
= VT_STRUCT
; /* make it never match */
4109 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4110 if (f
->type
.t
& VT_BITFIELD
)
4111 bit_size
= BIT_SIZE(f
->type
.t
);
4114 size
= type_size(&f
->type
, &align
);
4115 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4118 if (pcc
&& bit_size
== 0) {
4119 /* in pcc mode, packing does not affect zero-width bitfields */
4122 /* in pcc mode, attribute packed overrides if set. */
4123 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4126 /* pragma pack overrides align if lesser and packs bitfields always */
4129 if (pragma_pack
< align
)
4130 align
= pragma_pack
;
4131 /* in pcc mode pragma pack also overrides individual align */
4132 if (pcc
&& pragma_pack
< a
)
4136 /* some individual align was specified */
4140 if (type
->ref
->type
.t
== VT_UNION
) {
4141 if (pcc
&& bit_size
>= 0)
4142 size
= (bit_size
+ 7) >> 3;
4147 } else if (bit_size
< 0) {
4149 c
+= (bit_pos
+ 7) >> 3;
4150 c
= (c
+ align
- 1) & -align
;
4159 /* A bit-field. Layout is more complicated. There are two
4160 options: PCC (GCC) compatible and MS compatible */
4162 /* In PCC layout a bit-field is placed adjacent to the
4163 preceding bit-fields, except if:
4165 - an individual alignment was given
4166 - it would overflow its base type container and
4167 there is no packing */
4168 if (bit_size
== 0) {
4170 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4172 } else if (f
->a
.aligned
) {
4174 } else if (!packed
) {
4176 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4177 if (ofs
> size
/ align
)
4181 /* in pcc mode, long long bitfields have type int if they fit */
4182 if (size
== 8 && bit_size
<= 32)
4183 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4185 while (bit_pos
>= align
* 8)
4186 c
+= align
, bit_pos
-= align
* 8;
4189 /* In PCC layout named bit-fields influence the alignment
4190 of the containing struct using the base types alignment,
4191 except for packed fields (which here have correct align). */
4192 if (f
->v
& SYM_FIRST_ANOM
4193 // && bit_size // ??? gcc on ARM/rpi does that
4198 bt
= f
->type
.t
& VT_BTYPE
;
4199 if ((bit_pos
+ bit_size
> size
* 8)
4200 || (bit_size
> 0) == (bt
!= prevbt
)
4202 c
= (c
+ align
- 1) & -align
;
4205 /* In MS bitfield mode a bit-field run always uses
4206 at least as many bits as the underlying type.
4207 To start a new run it's also required that this
4208 or the last bit-field had non-zero width. */
4209 if (bit_size
|| prev_bit_size
)
4212 /* In MS layout the records alignment is normally
4213 influenced by the field, except for a zero-width
4214 field at the start of a run (but by further zero-width
4215 fields it is again). */
4216 if (bit_size
== 0 && prevbt
!= bt
)
4219 prev_bit_size
= bit_size
;
4222 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4223 | (bit_pos
<< VT_STRUCT_SHIFT
);
4224 bit_pos
+= bit_size
;
4226 if (align
> maxalign
)
4230 printf("set field %s offset %-2d size %-2d align %-2d",
4231 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4232 if (f
->type
.t
& VT_BITFIELD
) {
4233 printf(" pos %-2d bits %-2d",
4246 c
+= (bit_pos
+ 7) >> 3;
4248 /* store size and alignment */
4249 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4253 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4254 /* can happen if individual align for some member was given. In
4255 this case MSVC ignores maxalign when aligning the size */
4260 c
= (c
+ a
- 1) & -a
;
4264 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4267 /* check whether we can access bitfields by their type */
4268 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4272 if (0 == (f
->type
.t
& VT_BITFIELD
))
4276 bit_size
= BIT_SIZE(f
->type
.t
);
4279 bit_pos
= BIT_POS(f
->type
.t
);
4280 size
= type_size(&f
->type
, &align
);
4282 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4283 #ifdef TCC_TARGET_ARM
4284 && !(f
->c
& (align
- 1))
4289 /* try to access the field using a different type */
4290 c0
= -1, s
= align
= 1;
4293 px
= f
->c
* 8 + bit_pos
;
4294 cx
= (px
>> 3) & -align
;
4295 px
= px
- (cx
<< 3);
4298 s
= (px
+ bit_size
+ 7) >> 3;
4308 s
= type_size(&t
, &align
);
4312 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4313 #ifdef TCC_TARGET_ARM
4314 && !(cx
& (align
- 1))
4317 /* update offset and bit position */
4320 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4321 | (bit_pos
<< VT_STRUCT_SHIFT
);
4325 printf("FIX field %s offset %-2d size %-2d align %-2d "
4326 "pos %-2d bits %-2d\n",
4327 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4328 cx
, s
, align
, px
, bit_size
);
4331 /* fall back to load/store single-byte wise */
4332 f
->auxtype
= VT_STRUCT
;
4334 printf("FIX field %s : load byte-wise\n",
4335 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4341 static void do_Static_assert(void);
4343 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4344 static void struct_decl(CType
*type
, int u
)
4346 int v
, c
, size
, align
, flexible
;
4347 int bit_size
, bsize
, bt
;
4349 AttributeDef ad
, ad1
;
4352 memset(&ad
, 0, sizeof ad
);
4354 parse_attribute(&ad
);
4358 /* struct already defined ? return it */
4360 expect("struct/union/enum name");
4362 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4365 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4367 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4372 /* Record the original enum/struct/union token. */
4373 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4375 /* we put an undefined size for struct/union */
4376 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4377 s
->r
= 0; /* default alignment is zero as gcc */
4379 type
->t
= s
->type
.t
;
4385 tcc_error("struct/union/enum already defined");
4387 /* cannot be empty */
4388 /* non empty enums are not allowed */
4391 long long ll
= 0, pl
= 0, nl
= 0;
4394 /* enum symbols have static storage */
4395 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4399 expect("identifier");
4401 if (ss
&& !local_stack
)
4402 tcc_error("redefinition of enumerator '%s'",
4403 get_tok_str(v
, NULL
));
4407 ll
= expr_const64();
4409 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4411 *ps
= ss
, ps
= &ss
->next
;
4420 /* NOTE: we accept a trailing comma */
4425 /* set integral type of the enum */
4428 if (pl
!= (unsigned)pl
)
4429 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4431 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4432 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4433 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4435 /* set type for enum members */
4436 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4438 if (ll
== (int)ll
) /* default is int if it fits */
4440 if (t
.t
& VT_UNSIGNED
) {
4441 ss
->type
.t
|= VT_UNSIGNED
;
4442 if (ll
== (unsigned)ll
)
4445 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4446 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4451 while (tok
!= '}') {
4452 if (!parse_btype(&btype
, &ad1
, 0)) {
4453 if (tok
== TOK_STATIC_ASSERT
) {
4462 tcc_error("flexible array member '%s' not at the end of struct",
4463 get_tok_str(v
, NULL
));
4469 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4471 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4472 expect("identifier");
4474 int v
= btype
.ref
->v
;
4475 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4476 if (tcc_state
->ms_extensions
== 0)
4477 expect("identifier");
4481 if (type_size(&type1
, &align
) < 0) {
4482 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4485 tcc_error("field '%s' has incomplete type",
4486 get_tok_str(v
, NULL
));
4488 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4489 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4490 (type1
.t
& VT_STORAGE
))
4491 tcc_error("invalid type for '%s'",
4492 get_tok_str(v
, NULL
));
4496 bit_size
= expr_const();
4497 /* XXX: handle v = 0 case for messages */
4499 tcc_error("negative width in bit-field '%s'",
4500 get_tok_str(v
, NULL
));
4501 if (v
&& bit_size
== 0)
4502 tcc_error("zero width for bit-field '%s'",
4503 get_tok_str(v
, NULL
));
4504 parse_attribute(&ad1
);
4506 size
= type_size(&type1
, &align
);
4507 if (bit_size
>= 0) {
4508 bt
= type1
.t
& VT_BTYPE
;
4514 tcc_error("bitfields must have scalar type");
4516 if (bit_size
> bsize
) {
4517 tcc_error("width of '%s' exceeds its type",
4518 get_tok_str(v
, NULL
));
4519 } else if (bit_size
== bsize
4520 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4521 /* no need for bit fields */
4523 } else if (bit_size
== 64) {
4524 tcc_error("field width 64 not implemented");
4526 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4528 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4531 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4532 /* Remember we've seen a real field to check
4533 for placement of flexible array member. */
4536 /* If member is a struct or bit-field, enforce
4537 placing into the struct (as anonymous). */
4539 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4544 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4549 if (tok
== ';' || tok
== TOK_EOF
)
4556 parse_attribute(&ad
);
4557 if (ad
.cleanup_func
) {
4558 tcc_warning("attribute '__cleanup__' ignored on type");
4560 check_fields(type
, 1);
4561 check_fields(type
, 0);
4562 struct_layout(type
, &ad
);
4564 tcc_debug_fix_anon(tcc_state
, type
);
4569 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4571 merge_symattr(&ad
->a
, &s
->a
);
4572 merge_funcattr(&ad
->f
, &s
->f
);
4575 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4576 are added to the element type, copied because it could be a typedef. */
4577 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4579 while (type
->t
& VT_ARRAY
) {
4580 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4581 type
= &type
->ref
->type
;
4583 type
->t
|= qualifiers
;
4586 /* return 0 if no type declaration. otherwise, return the basic type
4589 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4591 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4595 memset(ad
, 0, sizeof(AttributeDef
));
4605 /* currently, we really ignore extension */
4615 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4616 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4617 tmbt
: tcc_error("too many basic types");
4620 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4625 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4642 memset(&ad1
, 0, sizeof(AttributeDef
));
4643 if (parse_btype(&type1
, &ad1
, 0)) {
4644 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4646 n
= 1 << (ad1
.a
.aligned
- 1);
4648 type_size(&type1
, &n
);
4651 if (n
< 0 || (n
& (n
- 1)) != 0)
4652 tcc_error("alignment must be a positive power of two");
4655 ad
->a
.aligned
= exact_log2p1(n
);
4659 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4660 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4661 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4662 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4669 #ifdef TCC_TARGET_ARM64
4671 /* GCC's __uint128_t appears in some Linux header files. Make it a
4672 synonym for long double to get the size and alignment right. */
4680 tcc_error("_Complex is not yet supported");
4685 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4686 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4694 struct_decl(&type1
, VT_ENUM
);
4697 type
->ref
= type1
.ref
;
4700 struct_decl(&type1
, VT_STRUCT
);
4703 struct_decl(&type1
, VT_UNION
);
4706 /* type modifiers */
4710 parse_btype_qualify(type
, VT_ATOMIC
);
4713 parse_expr_type(&type1
);
4714 /* remove all storage modifiers except typedef */
4715 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4717 sym_to_attr(ad
, type1
.ref
);
4725 parse_btype_qualify(type
, VT_CONSTANT
);
4733 parse_btype_qualify(type
, VT_VOLATILE
);
4740 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4741 tcc_error("signed and unsigned modifier");
4754 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4755 tcc_error("signed and unsigned modifier");
4756 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4772 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4773 tcc_error("multiple storage classes");
4785 ad
->f
.func_noreturn
= 1;
4787 /* GNUC attribute */
4788 case TOK_ATTRIBUTE1
:
4789 case TOK_ATTRIBUTE2
:
4790 parse_attribute(ad
);
4791 if (ad
->attr_mode
) {
4792 u
= ad
->attr_mode
-1;
4793 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4801 parse_expr_type(&type1
);
4802 /* remove all storage modifiers except typedef */
4803 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4805 sym_to_attr(ad
, type1
.ref
);
4807 case TOK_THREAD_LOCAL
:
4808 tcc_error("_Thread_local is not implemented");
4813 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4817 if (tok
== ':' && ignore_label
) {
4818 /* ignore if it's a label */
4823 t
&= ~(VT_BTYPE
|VT_LONG
);
4824 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4825 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4826 type
->ref
= s
->type
.ref
;
4828 parse_btype_qualify(type
, t
);
4830 /* get attributes from typedef */
4839 if (tcc_state
->char_is_unsigned
) {
4840 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4843 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4844 bt
= t
& (VT_BTYPE
|VT_LONG
);
4846 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4847 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4848 if (bt
== VT_LDOUBLE
)
4849 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4855 /* convert a function parameter type (array to pointer and function to
4856 function pointer) */
4857 static inline void convert_parameter_type(CType
*pt
)
4859 /* remove const and volatile qualifiers (XXX: const could be used
4860 to indicate a const function parameter */
4861 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4862 /* array must be transformed to pointer according to ANSI C */
4863 pt
->t
&= ~(VT_ARRAY
| VT_VLA
);
4864 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4869 ST_FUNC CString
* parse_asm_str(void)
4872 return parse_mult_str("string constant");
4875 /* Parse an asm label and return the token */
4876 static int asm_label_instr(void)
4882 astr
= parse_asm_str()->data
;
4885 printf("asm_alias: \"%s\"\n", astr
);
4887 v
= tok_alloc_const(astr
);
4891 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4893 int n
, l
, t1
, arg_size
, align
;
4894 Sym
**plast
, *s
, *first
;
4897 TokenString
*vla_array_tok
= NULL
;
4898 int *vla_array_str
= NULL
;
4901 /* function type, or recursive declarator (return if so) */
4903 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4907 else if (parse_btype(&pt
, &ad1
, 0))
4909 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4910 merge_attr (ad
, &ad1
);
4921 /* read param name and compute offset */
4922 if (l
!= FUNC_OLD
) {
4923 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4925 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4926 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4927 tcc_error("parameter declared as void");
4932 pt
.t
= VT_VOID
; /* invalid type */
4937 expect("identifier");
4938 convert_parameter_type(&pt
);
4939 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4940 /* these symbols may be evaluated for VLArrays (see below, under
4941 nocode_wanted) which is why we push them here as normal symbols
4942 temporarily. Example: int func(int a, int b[++a]); */
4943 s
= sym_push(n
, &pt
, VT_LOCAL
|VT_LVAL
, 0);
4949 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4954 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4955 tcc_error("invalid type");
4958 /* if no parameters, then old type prototype */
4961 /* remove parameter symbols from token table, keep on stack */
4963 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4964 for (s
= first
; s
; s
= s
->next
)
4968 /* NOTE: const is ignored in returned type as it has a special
4969 meaning in gcc / C++ */
4970 type
->t
&= ~VT_CONSTANT
;
4971 /* some ancient pre-K&R C allows a function to return an array
4972 and the array brackets to be put after the arguments, such
4973 that "int c()[]" means something like "int[] c()" */
4976 skip(']'); /* only handle simple "[]" */
4979 /* we push a anonymous symbol which will contain the function prototype */
4980 ad
->f
.func_args
= arg_size
;
4981 ad
->f
.func_type
= l
;
4982 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4988 } else if (tok
== '[') {
4989 int saved_nocode_wanted
= nocode_wanted
;
4990 /* array definition */
4994 if (td
& TYPE_PARAM
) while (1) {
4995 /* XXX The optional type-quals and static should only be accepted
4996 in parameter decls. The '*' as well, and then even only
4997 in prototypes (not function defs). */
4999 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5010 /* Code generation is not done now but has to be done
5011 at start of function. Save code here for later use. */
5013 skip_or_save_block(&vla_array_tok
);
5015 vla_array_str
= vla_array_tok
->str
;
5016 begin_macro(vla_array_tok
, 2);
5025 } else if (tok
!= ']') {
5026 if (!local_stack
|| (storage
& VT_STATIC
))
5027 vpushi(expr_const());
5029 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5030 length must always be evaluated, even under nocode_wanted,
5031 so that its size slot is initialized (e.g. under sizeof
5037 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5040 tcc_error("invalid array size");
5042 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5043 tcc_error("size of variable length array should be an integer");
5049 /* parse next post type */
5050 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
5052 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5053 tcc_error("declaration of an array of functions");
5054 if ((type
->t
& VT_BTYPE
) == VT_VOID
5055 || type_size(type
, &align
) < 0)
5056 tcc_error("declaration of an array of incomplete type elements");
5058 t1
|= type
->t
& VT_VLA
;
5063 tcc_error("need explicit inner array size in VLAs");
5066 loc
-= type_size(&int_type
, &align
);
5070 vpush_type_size(type
, &align
);
5072 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5079 nocode_wanted
= saved_nocode_wanted
;
5081 /* we push an anonymous symbol which will contain the array
5083 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5084 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5087 if (vla_array_str
) {
5088 /* for function args, the top dimension is converted to pointer */
5089 if ((t1
& VT_VLA
) && (td
& TYPE_NEST
))
5090 s
->vla_array_str
= vla_array_str
;
5092 tok_str_free_str(vla_array_str
);
5098 /* Parse a type declarator (except basic type), and return the type
5099 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5100 expected. 'type' should contain the basic type. 'ad' is the
5101 attribute definition of the basic type. It can be modified by
5102 type_decl(). If this (possibly abstract) declarator is a pointer chain
5103 it returns the innermost pointed to type (equals *type, but is a different
5104 pointer), otherwise returns type itself, that's used for recursive calls. */
5105 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5108 int qualifiers
, storage
;
5110 /* recursive type, remove storage bits first, apply them later again */
5111 storage
= type
->t
& VT_STORAGE
;
5112 type
->t
&= ~VT_STORAGE
;
5115 while (tok
== '*') {
5121 qualifiers
|= VT_ATOMIC
;
5126 qualifiers
|= VT_CONSTANT
;
5131 qualifiers
|= VT_VOLATILE
;
5137 /* XXX: clarify attribute handling */
5138 case TOK_ATTRIBUTE1
:
5139 case TOK_ATTRIBUTE2
:
5140 parse_attribute(ad
);
5144 type
->t
|= qualifiers
;
5146 /* innermost pointed to type is the one for the first derivation */
5147 ret
= pointed_type(type
);
5151 /* This is possibly a parameter type list for abstract declarators
5152 ('int ()'), use post_type for testing this. */
5153 if (!post_type(type
, ad
, 0, td
)) {
5154 /* It's not, so it's a nested declarator, and the post operations
5155 apply to the innermost pointed to type (if any). */
5156 /* XXX: this is not correct to modify 'ad' at this point, but
5157 the syntax is not clear */
5158 parse_attribute(ad
);
5159 post
= type_decl(type
, ad
, v
, td
);
5163 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5164 /* type identifier */
5169 if (!(td
& TYPE_ABSTRACT
))
5170 expect("identifier");
5173 post_type(post
, ad
, post
!= ret
? 0 : storage
,
5174 td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5175 parse_attribute(ad
);
5180 /* indirection with full error checking and bound check */
5181 ST_FUNC
void indir(void)
5183 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5184 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5188 if (vtop
->r
& VT_LVAL
)
5190 vtop
->type
= *pointed_type(&vtop
->type
);
5191 /* Arrays and functions are never lvalues */
5192 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5193 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5195 /* if bound checking, the referenced pointer must be checked */
5196 #ifdef CONFIG_TCC_BCHECK
5197 if (tcc_state
->do_bounds_check
)
5198 vtop
->r
|= VT_MUSTBOUND
;
5203 /* pass a parameter to a function and do type checking and casting */
5204 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5209 func_type
= func
->f
.func_type
;
5210 if (func_type
== FUNC_OLD
||
5211 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5212 /* default casting : only need to convert float to double */
5213 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5214 gen_cast_s(VT_DOUBLE
);
5215 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5216 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5217 type
.ref
= vtop
->type
.ref
;
5219 } else if (vtop
->r
& VT_MUSTCAST
) {
5220 force_charshort_cast();
5222 } else if (arg
== NULL
) {
5223 tcc_error("too many arguments to function");
5226 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5227 gen_assign_cast(&type
);
5231 /* parse an expression and return its type without any side effect. */
5232 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5241 /* parse an expression of the form '(type)' or '(expr)' and return its
5243 static void parse_expr_type(CType
*type
)
5249 if (parse_btype(type
, &ad
, 0)) {
5250 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5252 expr_type(type
, gexpr
);
5257 static void parse_type(CType
*type
)
5262 if (!parse_btype(type
, &ad
, 0)) {
5265 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5268 static void parse_builtin_params(int nc
, const char *args
)
5277 while ((c
= *args
++)) {
5292 type
.t
= VT_CONSTANT
;
5298 type
.t
= VT_CONSTANT
;
5300 type
.t
|= char_type
.t
;
5312 gen_assign_cast(&type
);
5319 static void parse_atomic(int atok
)
5321 int size
, align
, arg
, t
, save
= 0;
5322 CType
*atom
, *atom_ptr
, ct
= {0};
5325 static const char *const templates
[] = {
5327 * Each entry consists of callback and function template.
5328 * The template represents argument types and return type.
5330 * ? void (return-only)
5333 * A read-only atomic
5334 * p pointer to memory
5341 /* keep in order of appearance in tcctok.h: */
5342 /* __atomic_store */ "alm.?",
5343 /* __atomic_load */ "Asm.v",
5344 /* __atomic_exchange */ "alsm.v",
5345 /* __atomic_compare_exchange */ "aplbmm.b",
5346 /* __atomic_fetch_add */ "avm.v",
5347 /* __atomic_fetch_sub */ "avm.v",
5348 /* __atomic_fetch_or */ "avm.v",
5349 /* __atomic_fetch_xor */ "avm.v",
5350 /* __atomic_fetch_and */ "avm.v",
5351 /* __atomic_fetch_nand */ "avm.v",
5352 /* __atomic_and_fetch */ "avm.v",
5353 /* __atomic_sub_fetch */ "avm.v",
5354 /* __atomic_or_fetch */ "avm.v",
5355 /* __atomic_xor_fetch */ "avm.v",
5356 /* __atomic_and_fetch */ "avm.v",
5357 /* __atomic_nand_fetch */ "avm.v"
5359 const char *template = templates
[(atok
- TOK___atomic_store
)];
5361 atom
= atom_ptr
= NULL
;
5362 size
= 0; /* pacify compiler */
5367 switch (template[arg
]) {
5370 atom_ptr
= &vtop
->type
;
5371 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5373 atom
= pointed_type(atom_ptr
);
5374 size
= type_size(atom
, &align
);
5376 || (size
& (size
- 1))
5377 || (atok
> TOK___atomic_compare_exchange
5378 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5379 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5380 expect("integral or integer-sized pointer target type");
5381 /* GCC does not care either: */
5382 /* if (!(atom->t & VT_ATOMIC))
5383 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5387 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5388 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5389 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5390 gen_assign_cast(atom_ptr
);
5393 gen_assign_cast(atom
);
5397 gen_assign_cast(atom
);
5406 gen_assign_cast(&int_type
);
5410 gen_assign_cast(&ct
);
5413 if ('.' == template[++arg
])
5420 switch (template[arg
+ 1]) {
5429 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5430 vpush_helper_func(tok_alloc_const(buf
));
5431 vrott(arg
- save
+ 1);
5432 gfunc_call(arg
- save
);
5435 PUT_R_RET(vtop
, ct
.t
);
5436 t
= ct
.t
& VT_BTYPE
;
5437 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5439 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5441 vtop
->type
.t
= VT_INT
;
5453 ST_FUNC
void unary(void)
5455 int n
, t
, align
, size
, r
;
5460 /* generate line number info */
5462 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5465 /* XXX: GCC 2.95.3 does not generate a table although it should be
5473 #ifdef TCC_TARGET_PE
5474 t
= VT_SHORT
|VT_UNSIGNED
;
5482 vsetc(&type
, VT_CONST
, &tokc
);
5486 t
= VT_INT
| VT_UNSIGNED
;
5492 t
= VT_LLONG
| VT_UNSIGNED
;
5501 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5502 t
= VT_DOUBLE
| VT_LONG
;
5508 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5511 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5513 case TOK___FUNCTION__
:
5515 goto tok_identifier
;
5519 cstr_reset(&tokcstr
);
5520 cstr_cat(&tokcstr
, funcname
, 0);
5521 tokc
.str
.size
= tokcstr
.size
;
5522 tokc
.str
.data
= tokcstr
.data
;
5525 #ifdef TCC_TARGET_PE
5526 t
= VT_SHORT
| VT_UNSIGNED
;
5533 /* string parsing */
5536 if (tcc_state
->warn_write_strings
& WARN_ON
)
5541 memset(&ad
, 0, sizeof(AttributeDef
));
5542 ad
.section
= rodata_section
;
5543 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5550 if (parse_btype(&type
, &ad
, 0)) {
5551 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5553 /* check ISOC99 compound literal */
5555 /* data is allocated locally by default */
5560 /* all except arrays are lvalues */
5561 if (!(type
.t
& VT_ARRAY
))
5563 memset(&ad
, 0, sizeof(AttributeDef
));
5564 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5565 } else if (t
== TOK_SOTYPE
) { /* from sizeof/alignof (...) */
5572 } else if (tok
== '{') {
5573 int saved_nocode_wanted
= nocode_wanted
;
5574 if (CONST_WANTED
&& !NOEVAL_WANTED
)
5576 if (0 == local_scope
)
5577 tcc_error("statement expression outside of function");
5578 /* save all registers */
5580 /* statement expression : we do not accept break/continue
5581 inside as GCC does. We do retain the nocode_wanted state,
5582 as statement expressions can't ever be entered from the
5583 outside, so any reactivation of code emission (from labels
5584 or loop heads) can be disabled again after the end of it. */
5586 /* If the statement expr can be entered, then we retain the current
5587 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5588 If it can't be entered then the state is that from before the
5589 statement expression. */
5590 if (saved_nocode_wanted
)
5591 nocode_wanted
= saved_nocode_wanted
;
5606 /* functions names must be treated as function pointers,
5607 except for unary '&' and sizeof. Since we consider that
5608 functions are not lvalues, we only have to handle it
5609 there and in function calls. */
5610 /* arrays can also be used although they are not lvalues */
5611 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5612 !(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
)))
5615 vtop
->sym
->a
.addrtaken
= 1;
5616 mk_pointer(&vtop
->type
);
5622 gen_test_zero(TOK_EQ
);
5633 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5634 tcc_error("pointer not accepted for unary plus");
5635 /* In order to force cast, we add zero, except for floating point
5636 where we really need an noop (otherwise -0.0 will be transformed
5638 if (!is_float(vtop
->type
.t
)) {
5651 expr_type(&type
, unary
);
5652 if (t
== TOK_SIZEOF
) {
5653 vpush_type_size(&type
, &align
);
5654 gen_cast_s(VT_SIZE_T
);
5656 type_size(&type
, &align
);
5658 if (vtop
[1].r
& VT_SYM
)
5659 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5660 if (s
&& s
->a
.aligned
)
5661 align
= 1 << (s
->a
.aligned
- 1);
5666 case TOK_builtin_expect
:
5667 /* __builtin_expect is a no-op for now */
5668 parse_builtin_params(0, "ee");
5671 case TOK_builtin_types_compatible_p
:
5672 parse_builtin_params(0, "tt");
5673 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5674 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5675 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5679 case TOK_builtin_choose_expr
:
5706 case TOK_builtin_constant_p
:
5707 parse_builtin_params(1, "e");
5709 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5710 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
)
5716 case TOK_builtin_frame_address
:
5717 case TOK_builtin_return_address
:
5723 level
= expr_const();
5725 tcc_error("%s only takes positive integers", get_tok_str(tok1
, 0));
5729 vset(&type
, VT_LOCAL
, 0); /* local frame */
5731 #ifdef TCC_TARGET_RISCV64
5735 mk_pointer(&vtop
->type
);
5736 indir(); /* -> parent frame */
5738 if (tok1
== TOK_builtin_return_address
) {
5739 // assume return address is just above frame pointer on stack
5740 #ifdef TCC_TARGET_ARM
5743 #elif defined TCC_TARGET_RISCV64
5750 mk_pointer(&vtop
->type
);
5755 #ifdef TCC_TARGET_RISCV64
5756 case TOK_builtin_va_start
:
5757 parse_builtin_params(0, "ee");
5758 r
= vtop
->r
& VT_VALMASK
;
5762 tcc_error("__builtin_va_start expects a local variable");
5767 #ifdef TCC_TARGET_X86_64
5768 #ifdef TCC_TARGET_PE
5769 case TOK_builtin_va_start
:
5770 parse_builtin_params(0, "ee");
5771 r
= vtop
->r
& VT_VALMASK
;
5775 tcc_error("__builtin_va_start expects a local variable");
5777 vtop
->type
= char_pointer_type
;
5782 case TOK_builtin_va_arg_types
:
5783 parse_builtin_params(0, "t");
5784 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5791 #ifdef TCC_TARGET_ARM64
5792 case TOK_builtin_va_start
: {
5793 parse_builtin_params(0, "ee");
5797 vtop
->type
.t
= VT_VOID
;
5800 case TOK_builtin_va_arg
: {
5801 parse_builtin_params(0, "et");
5809 case TOK___arm64_clear_cache
: {
5810 parse_builtin_params(0, "ee");
5813 vtop
->type
.t
= VT_VOID
;
5818 /* atomic operations */
5819 case TOK___atomic_store
:
5820 case TOK___atomic_load
:
5821 case TOK___atomic_exchange
:
5822 case TOK___atomic_compare_exchange
:
5823 case TOK___atomic_fetch_add
:
5824 case TOK___atomic_fetch_sub
:
5825 case TOK___atomic_fetch_or
:
5826 case TOK___atomic_fetch_xor
:
5827 case TOK___atomic_fetch_and
:
5828 case TOK___atomic_fetch_nand
:
5829 case TOK___atomic_add_fetch
:
5830 case TOK___atomic_sub_fetch
:
5831 case TOK___atomic_or_fetch
:
5832 case TOK___atomic_xor_fetch
:
5833 case TOK___atomic_and_fetch
:
5834 case TOK___atomic_nand_fetch
:
5838 /* pre operations */
5849 if (is_float(vtop
->type
.t
)) {
5859 goto tok_identifier
;
5861 /* allow to take the address of a label */
5862 if (tok
< TOK_UIDENT
)
5863 expect("label identifier");
5864 s
= label_find(tok
);
5866 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5868 if (s
->r
== LABEL_DECLARED
)
5869 s
->r
= LABEL_FORWARD
;
5871 if ((s
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5872 s
->type
.t
= VT_VOID
;
5873 mk_pointer(&s
->type
);
5874 s
->type
.t
|= VT_STATIC
;
5876 vpushsym(&s
->type
, s
);
5882 CType controlling_type
;
5883 int has_default
= 0;
5886 TokenString
*str
= NULL
;
5887 int saved_nocode_wanted
= nocode_wanted
;
5888 nocode_wanted
&= ~CONST_WANTED_MASK
;
5892 expr_type(&controlling_type
, expr_eq
);
5893 convert_parameter_type (&controlling_type
);
5895 nocode_wanted
= saved_nocode_wanted
;
5900 if (tok
== TOK_DEFAULT
) {
5902 tcc_error("too many 'default'");
5908 AttributeDef ad_tmp
;
5912 parse_btype(&cur_type
, &ad_tmp
, 0);
5913 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5914 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5916 tcc_error("type match twice");
5926 skip_or_save_block(&str
);
5928 skip_or_save_block(NULL
);
5935 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5936 tcc_error("type '%s' does not match any association", buf
);
5938 begin_macro(str
, 1);
5947 // special qnan , snan and infinity values
5952 vtop
->type
.t
= VT_FLOAT
;
5957 goto special_math_val
;
5960 goto special_math_val
;
5964 if (tok
< TOK_UIDENT
)
5965 tcc_error("expression expected before '%s'", get_tok_str(tok
, &tokc
));
5969 if (!s
|| IS_ASM_SYM(s
)) {
5970 const char *name
= get_tok_str(t
, NULL
);
5972 tcc_error("'%s' undeclared", name
);
5973 /* for simple function calls, we tolerate undeclared
5974 external reference to int() function */
5975 tcc_warning_c(warn_implicit_function_declaration
)(
5976 "implicit declaration of function '%s'", name
);
5977 s
= external_global_sym(t
, &func_old_type
);
5981 /* A symbol that has a register is a local register variable,
5982 which starts out as VT_LOCAL value. */
5983 if ((r
& VT_VALMASK
) < VT_CONST
)
5984 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5986 vset(&s
->type
, r
, s
->c
);
5987 /* Point to s as backpointer (even without r&VT_SYM).
5988 Will be used by at least the x86 inline asm parser for
5994 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5995 vtop
->c
.i
= s
->enum_val
;
6000 /* post operations */
6002 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6005 } else if (tok
== '.' || tok
== TOK_ARROW
) {
6006 int qualifiers
, cumofs
;
6008 if (tok
== TOK_ARROW
)
6010 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6012 /* expect pointer on structure */
6014 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6015 /* add field offset to pointer */
6017 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6020 /* change type to field type, and set to lvalue */
6021 vtop
->type
= s
->type
;
6022 vtop
->type
.t
|= qualifiers
;
6023 /* an array is never an lvalue */
6024 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6026 #ifdef CONFIG_TCC_BCHECK
6027 /* if bound checking, the referenced pointer must be checked */
6028 if (tcc_state
->do_bounds_check
)
6029 vtop
->r
|= VT_MUSTBOUND
;
6033 } else if (tok
== '[') {
6039 } else if (tok
== '(') {
6042 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6045 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6046 /* pointer test (no array accepted) */
6047 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6048 vtop
->type
= *pointed_type(&vtop
->type
);
6049 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6053 expect("function pointer");
6056 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6058 /* get return type */
6061 sa
= s
->next
; /* first parameter */
6062 nb_args
= regsize
= 0;
6064 /* compute first implicit argument if a structure is returned */
6065 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6066 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6067 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6068 &ret_align
, ®size
);
6069 if (ret_nregs
<= 0) {
6070 /* get some space for the returned structure */
6071 size
= type_size(&s
->type
, &align
);
6072 #ifdef TCC_TARGET_ARM64
6073 /* On arm64, a small struct is return in registers.
6074 It is much easier to write it to memory if we know
6075 that we are allowed to write some extra bytes, so
6076 round the allocated space up to a power of 2: */
6078 while (size
& (size
- 1))
6079 size
= (size
| (size
- 1)) + 1;
6081 loc
= (loc
- size
) & -align
;
6083 ret
.r
= VT_LOCAL
| VT_LVAL
;
6084 /* pass it as 'int' to avoid structure arg passing
6086 vseti(VT_LOCAL
, loc
);
6087 #ifdef CONFIG_TCC_BCHECK
6088 if (tcc_state
->do_bounds_check
)
6102 if (ret_nregs
> 0) {
6103 /* return in register */
6105 PUT_R_RET(&ret
, ret
.type
.t
);
6110 gfunc_param_typed(s
, sa
);
6120 tcc_error("too few arguments to function");
6122 gfunc_call(nb_args
);
6124 if (ret_nregs
< 0) {
6125 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6126 #ifdef TCC_TARGET_RISCV64
6127 arch_transfer_ret_regs(1);
6133 int rc
= reg_classes
[ret
.r
] & ~(RC_INT
| RC_FLOAT
);
6134 /* We assume that when a structure is returned in multiple
6135 registers, their classes are consecutive values of the
6138 for (r
= 0; r
< NB_REGS
; ++r
)
6139 if (reg_classes
[r
] & rc
)
6141 vsetc(&ret
.type
, r
, &ret
.c
);
6143 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6146 /* handle packed struct return */
6147 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6150 size
= type_size(&s
->type
, &align
);
6151 /* We're writing whole regs often, make sure there's enough
6152 space. Assume register size is power of 2. */
6153 size
= (size
+ regsize
- 1) & -regsize
;
6154 if (ret_align
> align
)
6156 loc
= (loc
- size
) & -align
;
6160 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6164 if (--ret_nregs
== 0)
6168 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6171 /* Promote char/short return values. This is matters only
6172 for calling function that were not compiled by TCC and
6173 only on some architectures. For those where it doesn't
6174 matter we expect things to be already promoted to int,
6176 t
= s
->type
.t
& VT_BTYPE
;
6177 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6179 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6181 vtop
->type
.t
= VT_INT
;
6185 if (s
->f
.func_noreturn
) {
6187 tcc_tcov_block_end(tcc_state
, -1);
6196 #ifndef precedence_parser /* original top-down parser */
6198 static void expr_prod(void)
6203 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6210 static void expr_sum(void)
6215 while ((t
= tok
) == '+' || t
== '-') {
6222 static void expr_shift(void)
6227 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6234 static void expr_cmp(void)
6239 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6240 t
== TOK_ULT
|| t
== TOK_UGE
) {
6247 static void expr_cmpeq(void)
6252 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6259 static void expr_and(void)
6262 while (tok
== '&') {
6269 static void expr_xor(void)
6272 while (tok
== '^') {
6279 static void expr_or(void)
6282 while (tok
== '|') {
6289 static void expr_landor(int op
);
6291 static void expr_land(void)
6294 if (tok
== TOK_LAND
)
6298 static void expr_lor(void)
6305 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6306 #else /* defined precedence_parser */
6307 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6308 # define expr_lor() unary(), expr_infix(1)
6310 static int precedence(int tok
)
6313 case TOK_LOR
: return 1;
6314 case TOK_LAND
: return 2;
6318 case TOK_EQ
: case TOK_NE
: return 6;
6319 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6320 case TOK_SHL
: case TOK_SAR
: return 8;
6321 case '+': case '-': return 9;
6322 case '*': case '/': case '%': return 10;
6324 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
/* Operator-precedence lookup table for single-byte token values,
   filled once by init_prec() from the precedence() function above. */
static unsigned char prec[256];

/* Precompute the precedence of every possible one-byte token. */
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
6336 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6338 static void expr_landor(int op
);
6340 static void expr_infix(int p
)
6343 while ((p2
= precedence(t
)) >= p
) {
6344 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6349 if (precedence(tok
) > p2
)
6358 /* Assuming vtop is a value used in a conditional context
6359 (i.e. compared with zero) return 0 if it's false, 1 if
6360 true and -1 if it can't be statically determined. */
6361 static int condition_3way(void)
6364 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6365 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6367 gen_cast_s(VT_BOOL
);
6374 static void expr_landor(int op
)
6376 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6378 c
= f
? i
: condition_3way();
6380 save_regs(1), cc
= 0;
6382 nocode_wanted
++, f
= 1;
6390 expr_landor_next(op
);
6402 static int is_cond_bool(SValue
*sv
)
6404 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6405 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6406 return (unsigned)sv
->c
.i
< 2;
6407 if (sv
->r
== VT_CMP
)
6412 static void expr_cond(void)
6414 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6421 c
= condition_3way();
6422 g
= (tok
== ':' && gnu_ext
);
6432 /* needed to avoid having different registers saved in
6444 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6445 mk_pointer(&vtop
->type
);
6446 sv
= *vtop
; /* save value to handle it later */
6447 vtop
--; /* no vpop so that FP stack is not flushed */
6464 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6465 mk_pointer(&vtop
->type
);
6467 /* cast operands to correct type according to ISOC rules */
6468 if (!combine_types(&type
, &sv
, vtop
, '?'))
6469 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6470 "type mismatch in conditional expression (have '%s' and '%s')");
6472 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6473 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6474 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6475 this code jumps directly to the if's then/else branches. */
6480 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6484 // tcc_warning("two conditions expr_cond");
6488 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6489 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6490 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6492 /* now we convert second operand */
6496 mk_pointer(&vtop
->type
);
6498 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6502 rc
= RC_TYPE(type
.t
);
6503 /* for long longs, we use fixed registers to avoid having
6504 to handle a complicated move */
6505 if (USING_TWO_WORDS(type
.t
))
6506 rc
= RC_RET(type
.t
);
6517 /* this is horrible, but we must also convert first
6523 mk_pointer(&vtop
->type
);
6525 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6531 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6541 static void expr_eq(void)
6546 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6554 gen_op(TOK_ASSIGN_OP(t
));
6560 ST_FUNC
void gexpr(void)
6568 } while (tok
== ',');
6570 /* convert array & function to pointer */
6571 convert_parameter_type(&vtop
->type
);
6573 /* make builtin_constant_p((1,2)) return 0 (like on gcc) */
6574 if ((vtop
->r
& VT_VALMASK
) == VT_CONST
&& nocode_wanted
&& !CONST_WANTED
)
6575 gv(RC_TYPE(vtop
->type
.t
));
6579 /* parse a constant expression and return value in vtop. */
6580 static void expr_const1(void)
6582 nocode_wanted
+= CONST_WANTED_BIT
;
6584 nocode_wanted
-= CONST_WANTED_BIT
;
6587 /* parse an integer constant and return its value. */
6588 static inline int64_t expr_const64(void)
6592 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
6593 expect("constant expression");
6599 /* parse an integer constant and return its value.
6600 Complain if it doesn't fit 32bit (signed or unsigned). */
6601 ST_FUNC
int expr_const(void)
6604 int64_t wc
= expr_const64();
6606 if (c
!= wc
&& (unsigned)c
!= wc
)
6607 tcc_error("constant exceeds 32 bit");
6611 /* ------------------------------------------------------------------------- */
6612 /* return from function */
6614 #ifndef TCC_TARGET_ARM64
6615 static void gfunc_return(CType
*func_type
)
6617 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6618 CType type
, ret_type
;
6619 int ret_align
, ret_nregs
, regsize
;
6620 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6621 &ret_align
, ®size
);
6622 if (ret_nregs
< 0) {
6623 #ifdef TCC_TARGET_RISCV64
6624 arch_transfer_ret_regs(0);
6626 } else if (0 == ret_nregs
) {
6627 /* if returning structure, must copy it to implicit
6628 first pointer arg location */
6631 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6634 /* copy structure value to pointer */
6637 /* returning structure packed into registers */
6638 int size
, addr
, align
, rc
, n
;
6639 size
= type_size(func_type
,&align
);
6640 if ((align
& (ret_align
- 1))
6641 && ((vtop
->r
& VT_VALMASK
) < VT_CONST
/* pointer to struct */
6642 || (vtop
->c
.i
& (ret_align
- 1))
6644 loc
= (loc
- size
) & -ret_align
;
6647 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6651 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6653 vtop
->type
= ret_type
;
6654 rc
= RC_RET(ret_type
.t
);
6655 //printf("struct return: n:%d t:%02x rc:%02x\n", ret_nregs, ret_type.t, rc);
6656 for (n
= ret_nregs
; --n
> 0;) {
6660 incr_offset(regsize
);
6661 /* We assume that when a structure is returned in multiple
6662 registers, their classes are consecutive values of the
6667 vtop
-= ret_nregs
- 1;
6670 gv(RC_RET(func_type
->t
));
6672 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6676 static void check_func_return(void)
6678 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6680 if (!strcmp (funcname
, "main")
6681 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6682 /* main returns 0 by default */
6684 gen_assign_cast(&func_vt
);
6685 gfunc_return(&func_vt
);
6687 tcc_warning("function might return no value: '%s'", funcname
);
6691 /* ------------------------------------------------------------------------- */
6694 static int case_cmpi(const void *pa
, const void *pb
)
6696 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6697 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6698 return a
< b
? -1 : a
> b
;
6701 static int case_cmpu(const void *pa
, const void *pb
)
6703 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6704 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6705 return a
< b
? -1 : a
> b
;
6708 static void gtst_addr(int t
, int a
)
6710 gsym_addr(gvtst(0, t
), a
);
6713 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6717 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6734 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6736 gcase(base
, len
/2, bsym
);
6740 base
+= e
; len
-= e
;
6750 if (p
->v1
== p
->v2
) {
6752 gtst_addr(0, p
->sym
);
6762 gtst_addr(0, p
->sym
);
6766 *bsym
= gjmp(*bsym
);
6769 static void end_switch(void)
6771 struct switch_t
*sw
= cur_switch
;
6772 dynarray_reset(&sw
->p
, &sw
->n
);
6773 cur_switch
= sw
->prev
;
6777 /* ------------------------------------------------------------------------- */
6778 /* __attribute__((cleanup(fn))) */
6780 static void try_call_scope_cleanup(Sym
*stop
)
6782 Sym
*cls
= cur_scope
->cl
.s
;
6784 for (; cls
!= stop
; cls
= cls
->ncl
) {
6785 Sym
*fs
= cls
->next
;
6786 Sym
*vs
= cls
->prev_tok
;
6788 vpushsym(&fs
->type
, fs
);
6789 vset(&vs
->type
, vs
->r
, vs
->c
);
6791 mk_pointer(&vtop
->type
);
6797 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6802 if (!cur_scope
->cl
.s
)
6805 /* search NCA of both cleanup chains given parents and initial depth */
6806 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6807 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6809 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6811 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6814 try_call_scope_cleanup(cc
);
6817 /* call 'func' for each __attribute__((cleanup(func))) */
6818 static void block_cleanup(struct scope
*o
)
6822 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6823 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6828 try_call_scope_cleanup(o
->cl
.s
);
6829 pcl
->jnext
= gjmp(0);
6831 goto remove_pending
;
6841 try_call_scope_cleanup(o
->cl
.s
);
6844 /* ------------------------------------------------------------------------- */
6847 static void vla_restore(int loc
)
6850 gen_vla_sp_restore(loc
);
6853 static void vla_leave(struct scope
*o
)
6855 struct scope
*c
= cur_scope
, *v
= NULL
;
6856 for (; c
!= o
&& c
; c
= c
->prev
)
6860 vla_restore(v
->vla
.locorig
);
6863 /* ------------------------------------------------------------------------- */
6866 static void new_scope(struct scope
*o
)
6868 /* copy and link previous scope */
6870 o
->prev
= cur_scope
;
6872 cur_scope
->vla
.num
= 0;
6874 /* record local declaration stack position */
6875 o
->lstk
= local_stack
;
6876 o
->llstk
= local_label_stack
;
6880 static void prev_scope(struct scope
*o
, int is_expr
)
6884 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6885 block_cleanup(o
->prev
);
6887 /* pop locally defined labels */
6888 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6890 /* In the is_expr case (a statement expression is finished here),
6891 vtop might refer to symbols on the local_stack. Either via the
6892 type or via vtop->sym. We can't pop those nor any that in turn
6893 might be referred to. To make it easier we don't roll back
6894 any symbols in that case; some upper level call to block() will
6895 do that. We do have to remove such symbols from the lookup
6896 tables, though. sym_pop will do that. */
6898 /* pop locally defined symbols */
6899 pop_local_syms(o
->lstk
, is_expr
);
6900 cur_scope
= o
->prev
;
6904 /* leave a scope via break/continue(/goto) */
6905 static void leave_scope(struct scope
*o
)
6909 try_call_scope_cleanup(o
->cl
.s
);
6913 /* short versiona for scopes with 'if/do/while/switch' which can
6914 declare only types (of struct/union/enum) */
6915 static void new_scope_s(struct scope
*o
)
6917 o
->lstk
= local_stack
;
6921 static void prev_scope_s(struct scope
*o
)
6923 sym_pop(&local_stack
, o
->lstk
, 0);
6927 /* ------------------------------------------------------------------------- */
6928 /* call block from 'for do while' loops */
6930 static void lblock(int *bsym
, int *csym
)
6932 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6933 int *b
= co
->bsym
, *c
= co
->csym
;
6947 static void block(int flags
)
6949 int a
, b
, c
, d
, e
, t
;
6953 if (flags
& STMT_EXPR
) {
6954 /* default return value is (void) */
6956 vtop
->type
.t
= VT_VOID
;
6961 /* If the token carries a value, next() might destroy it. Only with
6962 invalid code such as f(){"123"4;} */
6963 if (TOK_HAS_VALUE(t
))
6968 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6977 if (tok
== TOK_ELSE
) {
6982 gsym(d
); /* patch else jmp */
6988 } else if (t
== TOK_WHILE
) {
7002 } else if (t
== '{') {
7004 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
7007 /* handle local labels declarations */
7008 while (tok
== TOK_LABEL
) {
7011 if (tok
< TOK_UIDENT
)
7012 expect("label identifier");
7013 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7015 } while (tok
== ',');
7019 while (tok
!= '}') {
7022 if (flags
& STMT_EXPR
)
7024 block(flags
| STMT_COMPOUND
);
7028 prev_scope(&o
, flags
& STMT_EXPR
);
7030 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
7033 else if (!nocode_wanted
)
7034 check_func_return();
7036 } else if (t
== TOK_RETURN
) {
7037 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7041 gen_assign_cast(&func_vt
);
7043 if (vtop
->type
.t
!= VT_VOID
)
7044 tcc_warning("void function returns a value");
7048 tcc_warning("'return' with no value");
7051 leave_scope(root_scope
);
7053 gfunc_return(&func_vt
);
7055 /* jump unless last stmt in top-level block */
7056 if (tok
!= '}' || local_scope
!= 1)
7059 tcc_tcov_block_end (tcc_state
, -1);
7062 } else if (t
== TOK_BREAK
) {
7064 if (!cur_scope
->bsym
)
7065 tcc_error("cannot break");
7066 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7067 leave_scope(cur_switch
->scope
);
7069 leave_scope(loop_scope
);
7070 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7073 } else if (t
== TOK_CONTINUE
) {
7075 if (!cur_scope
->csym
)
7076 tcc_error("cannot continue");
7077 leave_scope(loop_scope
);
7078 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7081 } else if (t
== TOK_FOR
) {
7086 /* c99 for-loop init decl? */
7087 if (!decl(VT_JMP
)) {
7088 /* no, regular for-loop init expr */
7116 } else if (t
== TOK_DO
) {
7132 } else if (t
== TOK_SWITCH
) {
7133 struct switch_t
*sw
;
7135 sw
= tcc_mallocz(sizeof *sw
);
7137 sw
->scope
= cur_scope
;
7138 sw
->prev
= cur_switch
;
7139 sw
->nocode_wanted
= nocode_wanted
;
7146 sw
->sv
= *vtop
--; /* save switch value */
7148 b
= gjmp(0); /* jump to first case */
7150 a
= gjmp(a
); /* add implicit break */
7155 if (sw
->nocode_wanted
)
7157 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7158 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7160 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7161 for (b
= 1; b
< sw
->n
; b
++)
7162 if (sw
->sv
.type
.t
& VT_UNSIGNED
7163 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7164 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7165 tcc_error("duplicate case value");
7168 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7171 gsym_addr(d
, sw
->def_sym
);
7179 } else if (t
== TOK_CASE
) {
7183 cr
= tcc_malloc(sizeof(struct case_t
));
7184 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7185 cr
->v1
= cr
->v2
= expr_const64();
7186 if (gnu_ext
&& tok
== TOK_DOTS
) {
7188 cr
->v2
= expr_const64();
7189 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7190 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7191 tcc_warning("empty case range");
7193 /* case and default are unreachable from a switch under nocode_wanted */
7194 if (!cur_switch
->nocode_wanted
)
7197 goto block_after_label
;
7199 } else if (t
== TOK_DEFAULT
) {
7202 if (cur_switch
->def_sym
)
7203 tcc_error("too many 'default'");
7204 cur_switch
->def_sym
= cur_switch
->nocode_wanted
? 1 : gind();
7206 goto block_after_label
;
7208 } else if (t
== TOK_GOTO
) {
7209 vla_restore(cur_scope
->vla
.locorig
);
7210 if (tok
== '*' && gnu_ext
) {
7214 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7218 } else if (tok
>= TOK_UIDENT
) {
7219 s
= label_find(tok
);
7220 /* put forward definition if needed */
7222 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7223 else if (s
->r
== LABEL_DECLARED
)
7224 s
->r
= LABEL_FORWARD
;
7226 if (s
->r
& LABEL_FORWARD
) {
7227 /* start new goto chain for cleanups, linked via label->next */
7228 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7229 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7230 pending_gotos
->prev_tok
= s
;
7231 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7232 pending_gotos
->next
= s
;
7234 s
->jnext
= gjmp(s
->jnext
);
7236 try_call_cleanup_goto(s
->cleanupstate
);
7237 gjmp_addr(s
->jnext
);
7242 expect("label identifier");
7246 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7250 if (tok
== ':' && t
>= TOK_UIDENT
) {
7255 if (s
->r
== LABEL_DEFINED
)
7256 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7257 s
->r
= LABEL_DEFINED
;
7259 Sym
*pcl
; /* pending cleanup goto */
7260 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7262 sym_pop(&s
->next
, NULL
, 0);
7266 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7269 s
->cleanupstate
= cur_scope
->cl
.s
;
7273 /* Accept attributes after labels (e.g. 'unused') */
7274 AttributeDef ad_tmp
;
7275 parse_attribute(&ad_tmp
);
7278 tcc_tcov_reset_ind(tcc_state
);
7279 vla_restore(cur_scope
->vla
.loc
);
7282 if (0 == (flags
& STMT_COMPOUND
))
7284 /* C23: insert implicit null-statement whithin compound statement */
7286 /* we accept this, but it is a mistake */
7287 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7290 /* expression case */
7294 if (flags
& STMT_EXPR
) {
7307 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
7310 /* This skips over a stream of tokens containing balanced {} and ()
7311 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7312 with a '{'). If STR then allocates and stores the skipped tokens
7313 in *STR. This doesn't check if () and {} are nested correctly,
7314 i.e. "({)}" is accepted. */
7315 static void skip_or_save_block(TokenString
**str
)
7317 int braces
= tok
== '{';
7320 *str
= tok_str_alloc();
7332 if (str
|| level
> 0)
7333 tcc_error("unexpected end of file");
7338 tok_str_add_tok(*str
);
7340 if (t
== '{' || t
== '(' || t
== '[') {
7342 } else if (t
== '}' || t
== ')' || t
== ']') {
7344 if (level
== 0 && braces
&& t
== '}')
7349 tok_str_add(*str
, TOK_EOF
);
7352 #define EXPR_CONST 1
7355 static void parse_init_elem(int expr_type
)
7357 int saved_global_expr
;
7360 /* compound literals must be allocated globally in this case */
7361 saved_global_expr
= global_expr
;
7364 global_expr
= saved_global_expr
;
7365 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7366 (compound literals). */
7367 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7368 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7369 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7370 #ifdef TCC_TARGET_PE
7371 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7374 tcc_error("initializer element is not constant");
7383 static void init_assert(init_params
*p
, int offset
)
7385 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7386 : !nocode_wanted
&& offset
> p
->local_offset
)
7387 tcc_internal_error("initializer overflow");
7390 #define init_assert(sec, offset)
7393 /* put zeros for variable based init */
7394 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7396 init_assert(p
, c
+ size
);
7398 /* nothing to do because globals are already set to zero */
7400 vpush_helper_func(TOK_memset
);
7404 #if defined TCC_TARGET_ARM && defined TCC_ARM_EABI
7405 vswap(); /* using __aeabi_memset(void*, size_t, int) */
7412 #define DIF_SIZE_ONLY 2
7413 #define DIF_HAVE_ELEM 4
7416 /* delete relocations for specified range c ... c + size. Unfortunatly
7417 in very special cases, relocations may occur unordered */
7418 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7420 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7421 if (!sec
|| !sec
->reloc
)
7423 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7424 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7425 while (rel
< rel_end
) {
7426 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7427 sec
->reloc
->data_offset
-= sizeof *rel
;
7430 memcpy(rel2
, rel
, sizeof *rel
);
7437 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7439 if (ref
== p
->flex_array_ref
) {
7440 if (index
>= ref
->c
)
7442 } else if (ref
->c
< 0)
7443 tcc_error("flexible array has zero size in this context");
7446 /* t is the array or struct type. c is the array or struct
7447 address. cur_field is the pointer to the current
7448 field, for arrays the 'c' member contains the current start
7449 index. 'flags' is as in decl_initializer.
7450 'al' contains the already initialized length of the
7451 current container (starting at c). This returns the new length of that. */
7452 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7453 Sym
**cur_field
, int flags
, int al
)
7456 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7457 unsigned long corig
= c
;
7462 if (flags
& DIF_HAVE_ELEM
)
7465 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7472 /* NOTE: we only support ranges for last designator */
7473 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7475 if (!(type
->t
& VT_ARRAY
))
7476 expect("array type");
7478 index
= index_last
= expr_const();
7479 if (tok
== TOK_DOTS
&& gnu_ext
) {
7481 index_last
= expr_const();
7485 decl_design_flex(p
, s
, index_last
);
7486 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7487 tcc_error("index exceeds array bounds or range is empty");
7489 (*cur_field
)->c
= index_last
;
7490 type
= pointed_type(type
);
7491 elem_size
= type_size(type
, &align
);
7492 c
+= index
* elem_size
;
7493 nb_elems
= index_last
- index
+ 1;
7500 f
= find_field(type
, l
, &cumofs
);
7511 } else if (!gnu_ext
) {
7516 if (type
->t
& VT_ARRAY
) {
7517 index
= (*cur_field
)->c
;
7519 decl_design_flex(p
, s
, index
);
7521 tcc_error("too many initializers");
7522 type
= pointed_type(type
);
7523 elem_size
= type_size(type
, &align
);
7524 c
+= index
* elem_size
;
7527 /* Skip bitfield padding. Also with size 32 and 64. */
7528 while (f
&& (f
->v
& SYM_FIRST_ANOM
) &&
7529 is_integer_btype(f
->type
.t
& VT_BTYPE
))
7530 *cur_field
= f
= f
->next
;
7532 tcc_error("too many initializers");
7538 if (!elem_size
) /* for structs */
7539 elem_size
= type_size(type
, &align
);
7541 /* Using designators the same element can be initialized more
7542 than once. In that case we need to delete possibly already
7543 existing relocations. */
7544 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7545 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7546 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7549 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7551 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7555 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7556 /* make init_putv/vstore believe it were a struct */
7558 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7562 vpush_ref(type
, p
->sec
, c
, elem_size
);
7564 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7565 for (i
= 1; i
< nb_elems
; i
++) {
7567 init_putv(p
, type
, c
+ elem_size
* i
);
7572 c
+= nb_elems
* elem_size
;
7578 /* store a value or an expression directly in global data or in local array */
7579 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7585 Section
*sec
= p
->sec
;
7589 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7591 size
= type_size(type
, &align
);
7592 if (type
->t
& VT_BITFIELD
)
7593 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7594 init_assert(p
, c
+ size
);
7597 /* XXX: not portable */
7598 /* XXX: generate error if incorrect relocation */
7599 gen_assign_cast(&dtype
);
7600 bt
= type
->t
& VT_BTYPE
;
7602 if ((vtop
->r
& VT_SYM
)
7604 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7605 || (type
->t
& VT_BITFIELD
))
7606 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7608 tcc_error("initializer element is not computable at load time");
7610 if (NODATA_WANTED
) {
7615 ptr
= sec
->data
+ c
;
7618 /* XXX: make code faster ? */
7619 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7620 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7621 /* XXX This rejects compound literals like
7622 '(void *){ptr}'. The problem is that '&sym' is
7623 represented the same way, which would be ruled out
7624 by the SYM_FIRST_ANOM check above, but also '"string"'
7625 in 'char *p = "string"' is represented the same
7626 with the type being VT_PTR and the symbol being an
7627 anonymous one. That is, there's no difference in vtop
7628 between '(void *){x}' and '&(void *){x}'. Ignore
7629 pointer typed entities here. Hopefully no real code
7630 will ever use compound literals with scalar type. */
7631 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7632 /* These come from compound literals, memcpy stuff over. */
7636 esym
= elfsym(vtop
->sym
);
7637 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7638 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7640 /* We need to copy over all memory contents, and that
7641 includes relocations. Use the fact that relocs are
7642 created it order, so look from the end of relocs
7643 until we hit one before the copied region. */
7644 unsigned long relofs
= ssec
->reloc
->data_offset
;
7645 while (relofs
>= sizeof(*rel
)) {
7646 relofs
-= sizeof(*rel
);
7647 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7648 if (rel
->r_offset
>= esym
->st_value
+ size
)
7650 if (rel
->r_offset
< esym
->st_value
)
7652 put_elf_reloca(symtab_section
, sec
,
7653 c
+ rel
->r_offset
- esym
->st_value
,
7654 ELFW(R_TYPE
)(rel
->r_info
),
7655 ELFW(R_SYM
)(rel
->r_info
),
7665 if (type
->t
& VT_BITFIELD
) {
7666 int bit_pos
, bit_size
, bits
, n
;
7667 unsigned char *p
, v
, m
;
7668 bit_pos
= BIT_POS(vtop
->type
.t
);
7669 bit_size
= BIT_SIZE(vtop
->type
.t
);
7670 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7671 bit_pos
&= 7, bits
= 0;
7676 v
= val
>> bits
<< bit_pos
;
7677 m
= ((1 << n
) - 1) << bit_pos
;
7678 *p
= (*p
& ~m
) | (v
& m
);
7679 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7684 *(char *)ptr
= val
!= 0;
7690 write16le(ptr
, val
);
7693 write32le(ptr
, val
);
7696 write64le(ptr
, val
);
7699 #if defined TCC_IS_NATIVE_387
7700 /* Host and target platform may be different but both have x87.
7701 On windows, tcc does not use VT_LDOUBLE, except when it is a
7702 cross compiler. In this case a mingw gcc as host compiler
7703 comes here with 10-byte long doubles, while msvc or tcc won't.
7704 tcc itself can still translate by asm.
7705 In any case we avoid possibly random bytes 11 and 12.
7707 if (sizeof (long double) >= 10)
7708 memcpy(ptr
, &vtop
->c
.ld
, 10);
7710 else if (sizeof (long double) == sizeof (double))
7711 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7715 /* For other platforms it should work natively, but may not work
7716 for cross compilers */
7717 if (sizeof(long double) == LDOUBLE_SIZE
)
7718 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7719 else if (sizeof(double) == LDOUBLE_SIZE
)
7720 *(double*)ptr
= (double)vtop
->c
.ld
;
7721 else if (0 == memcmp(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
))
7722 ; /* nothing to do for 0.0 */
7723 #ifndef TCC_CROSS_TEST
7725 tcc_error("can't cross compile long double constants");
7730 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7733 if (vtop
->r
& VT_SYM
)
7734 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7736 write64le(ptr
, val
);
7739 write32le(ptr
, val
);
7743 write64le(ptr
, val
);
7747 if (vtop
->r
& VT_SYM
)
7748 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7749 write32le(ptr
, val
);
7753 //tcc_internal_error("unexpected type");
7759 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7766 /* 't' contains the type and storage info. 'c' is the offset of the
7767 object in section 'sec'. If 'sec' is NULL, it means stack based
7768 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7769 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7770 size only evaluation is wanted (only for arrays). */
7771 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7773 int len
, n
, no_oblock
, i
;
7779 /* generate line number info */
7780 if (debug_modes
&& !(flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7781 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
7783 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7784 /* In case of strings we have special handling for arrays, so
7785 don't consume them as initializer value (which would commit them
7786 to some anonymous symbol). */
7787 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7788 (!(flags
& DIF_SIZE_ONLY
)
7789 /* a struct may be initialized from a struct of same type, as in
7790 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7791 In that case we need to parse the element in order to check
7792 it for compatibility below */
7793 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7795 int ncw_prev
= nocode_wanted
;
7796 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7798 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7799 nocode_wanted
= ncw_prev
;
7800 flags
|= DIF_HAVE_ELEM
;
7803 if (type
->t
& VT_ARRAY
) {
7805 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7813 t1
= pointed_type(type
);
7814 size1
= type_size(t1
, &align1
);
7816 /* only parse strings here if correct type (otherwise: handle
7817 them as ((w)char *) expressions */
7818 if ((tok
== TOK_LSTR
&&
7819 #ifdef TCC_TARGET_PE
7820 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7822 (t1
->t
& VT_BTYPE
) == VT_INT
7824 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7826 cstr_reset(&initstr
);
7827 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7828 tcc_error("unhandled string literal merging");
7829 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7831 initstr
.size
-= size1
;
7833 len
+= tokc
.str
.size
;
7835 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7837 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7840 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7841 && tok
!= TOK_EOF
) {
7842 /* Not a lone literal but part of a bigger expression. */
7843 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7844 tokc
.str
.size
= initstr
.size
;
7845 tokc
.str
.data
= initstr
.data
;
7849 decl_design_flex(p
, s
, len
);
7850 if (!(flags
& DIF_SIZE_ONLY
)) {
7855 tcc_warning("initializer-string for array is too long");
7856 /* in order to go faster for common case (char
7857 string in global variable, we handle it
7859 if (p
->sec
&& size1
== 1) {
7860 init_assert(p
, c
+ nb
);
7862 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7866 /* only add trailing zero if enough storage (no
7867 warning in this case since it is standard) */
7868 if (flags
& DIF_CLEAR
)
7871 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7875 } else if (size1
== 1)
7876 ch
= ((unsigned char *)initstr
.data
)[i
];
7878 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7880 init_putv(p
, t1
, c
+ i
* size1
);
7891 /* zero memory once in advance */
7892 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7893 init_putz(p
, c
, n
*size1
);
7898 /* GNU extension: if the initializer is empty for a flex array,
7899 it's size is zero. We won't enter the loop, so set the size
7901 decl_design_flex(p
, s
, len
);
7902 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7903 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7904 flags
&= ~DIF_HAVE_ELEM
;
7905 if (type
->t
& VT_ARRAY
) {
7907 /* special test for multi dimensional arrays (may not
7908 be strictly correct if designators are used at the
7910 if (no_oblock
&& len
>= n
*size1
)
7913 if (s
->type
.t
== VT_UNION
)
7917 if (no_oblock
&& f
== NULL
)
7929 } else if ((flags
& DIF_HAVE_ELEM
)
7930 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7931 The source type might have VT_CONSTANT set, which is
7932 of course assignable to non-const elements. */
7933 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7936 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7938 if ((flags
& DIF_FIRST
) || tok
== '{') {
7948 } else if (tok
== '{') {
7949 if (flags
& DIF_HAVE_ELEM
)
7952 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7955 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
7956 /* If we supported only ISO C we wouldn't have to accept calling
7957 this on anything than an array if DIF_SIZE_ONLY (and even then
7958 only on the outermost level, so no recursion would be needed),
7959 because initializing a flex array member isn't supported.
7960 But GNU C supports it, so we need to recurse even into
7961 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7962 /* just skip expression */
7963 if (flags
& DIF_HAVE_ELEM
)
7966 skip_or_save_block(NULL
);
7969 if (!(flags
& DIF_HAVE_ELEM
)) {
7970 /* This should happen only when we haven't parsed
7971 the init element above for fear of committing a
7972 string constant to memory too early. */
7973 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7974 expect("string constant");
7975 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7977 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7978 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7980 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7984 init_putv(p
, type
, c
);
7988 /* parse an initializer for type 't' if 'has_init' is non zero, and
7989 allocate space in local or global data space ('r' is either
7990 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7991 variable 'v' of scope 'scope' is declared before initializers
7992 are parsed. If 'v' is zero, then a reference to the new object
7993 is put in the value stack. If 'has_init' is 2, a special parsing
7994 is done to handle string constants. */
7995 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7996 int has_init
, int v
, int global
)
7998 int size
, align
, addr
;
7999 TokenString
*init_str
= NULL
;
8002 Sym
*flexible_array
;
8004 int saved_nocode_wanted
= nocode_wanted
;
8005 #ifdef CONFIG_TCC_BCHECK
8006 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
8008 init_params p
= {0};
8010 /* Always allocate static or global variables */
8011 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
8012 nocode_wanted
|= DATA_ONLY_WANTED
;
8014 flexible_array
= NULL
;
8015 size
= type_size(type
, &align
);
8017 /* exactly one flexible array may be initialized, either the
8018 toplevel array or the last member of the toplevel struct */
8021 // error out except for top-level incomplete arrays
8022 // (arrays of incomplete types are handled in array parsing)
8023 if (!(type
->t
& VT_ARRAY
))
8024 tcc_error("initialization of incomplete type");
8026 /* If the base type itself was an array type of unspecified size
8027 (like in 'typedef int arr[]; arr x = {1};') then we will
8028 overwrite the unknown size by the real one for this decl.
8029 We need to unshare the ref symbol holding that size. */
8030 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
8031 p
.flex_array_ref
= type
->ref
;
8033 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8034 Sym
*field
= type
->ref
->next
;
8037 field
= field
->next
;
8038 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
8039 flexible_array
= field
;
8040 p
.flex_array_ref
= field
->type
.ref
;
8047 /* If unknown size, do a dry-run 1st pass */
8049 tcc_error("unknown type size");
8050 if (has_init
== 2) {
8051 /* only get strings */
8052 init_str
= tok_str_alloc();
8053 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8054 tok_str_add_tok(init_str
);
8057 tok_str_add(init_str
, TOK_EOF
);
8059 skip_or_save_block(&init_str
);
8063 begin_macro(init_str
, 1);
8065 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8066 /* prepare second initializer parsing */
8067 macro_ptr
= init_str
->str
;
8070 /* if still unknown size, error */
8071 size
= type_size(type
, &align
);
8073 tcc_error("unknown type size");
8075 /* If there's a flex member and it was used in the initializer
8077 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8078 size
+= flexible_array
->type
.ref
->c
8079 * pointed_size(&flexible_array
->type
);
8082 /* take into account specified alignment if bigger */
8083 if (ad
->a
.aligned
) {
8084 int speca
= 1 << (ad
->a
.aligned
- 1);
8087 } else if (ad
->a
.packed
) {
8091 if (!v
&& NODATA_WANTED
)
8092 size
= 0, align
= 1;
8094 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8096 #ifdef CONFIG_TCC_BCHECK
8098 /* add padding between stack variables for bound checking */
8102 loc
= (loc
- size
) & -align
;
8104 p
.local_offset
= addr
+ size
;
8105 #ifdef CONFIG_TCC_BCHECK
8107 /* add padding between stack variables for bound checking */
8112 /* local variable */
8113 #ifdef CONFIG_TCC_ASM
8114 if (ad
->asm_label
) {
8115 int reg
= asm_parse_regvar(ad
->asm_label
);
8117 r
= (r
& ~VT_VALMASK
) | reg
;
8120 sym
= sym_push(v
, type
, r
, addr
);
8121 if (ad
->cleanup_func
) {
8122 Sym
*cls
= sym_push2(&all_cleanups
,
8123 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8124 cls
->prev_tok
= sym
;
8125 cls
->next
= ad
->cleanup_func
;
8126 cls
->ncl
= cur_scope
->cl
.s
;
8127 cur_scope
->cl
.s
= cls
;
8132 /* push local reference */
8133 vset(type
, r
, addr
);
8138 /* see if the symbol was already defined */
8141 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
8142 && sym
->type
.ref
->c
> type
->ref
->c
) {
8143 /* flex array was already declared with explicit size
8145 int arr[] = { 1,2,3 }; */
8146 type
->ref
->c
= sym
->type
.ref
->c
;
8147 size
= type_size(type
, &align
);
8149 patch_storage(sym
, ad
, type
);
8150 /* we accept several definitions of the same global variable. */
8151 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8156 /* allocate symbol in corresponding section */
8160 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8161 tp
= &tp
->ref
->type
;
8162 if (tp
->t
& VT_CONSTANT
) {
8163 sec
= rodata_section
;
8164 } else if (has_init
) {
8166 /*if (tcc_state->g_debug & 4)
8167 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8168 } else if (tcc_state
->nocommon
)
8173 addr
= section_add(sec
, size
, align
);
8174 #ifdef CONFIG_TCC_BCHECK
8175 /* add padding if bound check */
8177 section_add(sec
, 1, 1);
8180 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8181 sec
= common_section
;
8186 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8187 patch_storage(sym
, ad
, NULL
);
8189 /* update symbol definition */
8190 put_extern_sym(sym
, sec
, addr
, size
);
8192 /* push global reference */
8193 vpush_ref(type
, sec
, addr
, size
);
8198 #ifdef CONFIG_TCC_BCHECK
8199 /* handles bounds now because the symbol must be defined
8200 before for the relocation */
8204 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8205 /* then add global bound info */
8206 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8207 bounds_ptr
[0] = 0; /* relocated */
8208 bounds_ptr
[1] = size
;
8213 if (type
->t
& VT_VLA
) {
8219 /* save before-VLA stack pointer if needed */
8220 if (cur_scope
->vla
.num
== 0) {
8221 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8222 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8224 gen_vla_sp_save(loc
-= PTR_SIZE
);
8225 cur_scope
->vla
.locorig
= loc
;
8229 vpush_type_size(type
, &a
);
8230 gen_vla_alloc(type
, a
);
8231 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8232 /* on _WIN64, because of the function args scratch area, the
8233 result of alloca differs from RSP and is returned in RAX. */
8234 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8236 gen_vla_sp_save(addr
);
8237 cur_scope
->vla
.loc
= addr
;
8238 cur_scope
->vla
.num
++;
8239 } else if (has_init
) {
8241 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8242 /* patch flexible array member size back to -1, */
8243 /* for possible subsequent similar declarations */
8245 flexible_array
->type
.ref
->c
= -1;
8249 /* restore parse state if needed */
8255 nocode_wanted
= saved_nocode_wanted
;
8258 /* generate vla code saved in post_type() */
8259 static void func_vla_arg_code(Sym
*arg
)
8262 TokenString
*vla_array_tok
= NULL
;
8265 func_vla_arg_code(arg
->type
.ref
);
8267 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
8268 loc
-= type_size(&int_type
, &align
);
8270 arg
->type
.ref
->c
= loc
;
8273 vla_array_tok
= tok_str_alloc();
8274 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8275 begin_macro(vla_array_tok
, 1);
8280 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8282 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
8289 static void func_vla_arg(Sym
*sym
)
8293 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
8294 if ((arg
->type
.t
& VT_BTYPE
) == VT_PTR
&& (arg
->type
.ref
->type
.t
& VT_VLA
))
8295 func_vla_arg_code(arg
->type
.ref
);
8298 /* parse a function defined by symbol 'sym' and generate its code in
8299 'cur_text_section' */
8300 static void gen_function(Sym
*sym
)
8302 struct scope f
= { 0 };
8303 cur_scope
= root_scope
= &f
;
8306 cur_text_section
->sh_flags
|= SHF_EXECINSTR
;
8307 ind
= cur_text_section
->data_offset
;
8308 if (sym
->a
.aligned
) {
8309 size_t newoff
= section_add(cur_text_section
, 0,
8310 1 << (sym
->a
.aligned
- 1));
8311 gen_fill_nops(newoff
- ind
);
8314 funcname
= get_tok_str(sym
->v
, NULL
);
8316 func_vt
= sym
->type
.ref
->type
;
8317 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8319 /* NOTE: we patch the symbol size later */
8320 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8322 if (sym
->type
.ref
->f
.func_ctor
)
8323 add_array (tcc_state
, ".init_array", sym
->c
);
8324 if (sym
->type
.ref
->f
.func_dtor
)
8325 add_array (tcc_state
, ".fini_array", sym
->c
);
8327 /* put debug symbol */
8328 tcc_debug_funcstart(tcc_state
, sym
);
8330 /* push a dummy symbol to enable local sym storage */
8331 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8332 local_scope
= 1; /* for function parameters */
8334 tcc_debug_prolog_epilog(tcc_state
, 0);
8338 clear_temp_local_var_list();
8344 /* reset local stack */
8345 pop_local_syms(NULL
, 0);
8346 tcc_debug_prolog_epilog(tcc_state
, 1);
8349 /* end of function */
8350 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8352 /* patch symbol size */
8353 elfsym(sym
)->st_size
= ind
- func_ind
;
8355 cur_text_section
->data_offset
= ind
;
8357 label_pop(&global_label_stack
, NULL
, 0);
8358 sym_pop(&all_cleanups
, NULL
, 0);
8360 /* It's better to crash than to generate wrong code */
8361 cur_text_section
= NULL
;
8362 funcname
= ""; /* for safety */
8363 func_vt
.t
= VT_VOID
; /* for safety */
8364 func_var
= 0; /* for safety */
8365 ind
= 0; /* for safety */
8367 nocode_wanted
= DATA_ONLY_WANTED
;
8370 /* do this after funcend debug info */
8374 static void gen_inline_functions(TCCState
*s
)
8377 int inline_generated
, i
;
8378 struct InlineFunc
*fn
;
8380 tcc_open_bf(s
, ":inline:", 0);
8381 /* iterate while inline function are referenced */
8383 inline_generated
= 0;
8384 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8385 fn
= s
->inline_fns
[i
];
8387 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8388 /* the function was used or forced (and then not internal):
8389 generate its code and convert it to a normal function */
8391 tccpp_putfile(fn
->filename
);
8392 begin_macro(fn
->func_str
, 1);
8394 cur_text_section
= text_section
;
8398 inline_generated
= 1;
8401 } while (inline_generated
);
8405 static void free_inline_functions(TCCState
*s
)
8408 /* free tokens of unused inline functions */
8409 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8410 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8412 tok_str_free(fn
->func_str
);
8414 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8417 static void do_Static_assert(void)
8425 msg
= "_Static_assert fail";
8428 msg
= parse_mult_str("string constant")->data
;
8432 tcc_error("%s", msg
);
8436 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8437 or VT_CMP if parsing old style parameter list
8438 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
8439 static int decl(int l
)
8441 int v
, has_init
, r
, oldint
;
8444 AttributeDef ad
, adbase
;
8449 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8452 /* skip redundant ';' if not in old parameter decl scope */
8453 if (tok
== ';' && l
!= VT_CMP
) {
8457 if (tok
== TOK_STATIC_ASSERT
) {
8463 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8464 /* global asm block */
8468 if (tok
>= TOK_UIDENT
) {
8469 /* special test for old K&R protos without explicit int
8470 type. Only accepted when defining global data */
8475 expect("declaration");
8481 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8483 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8484 tcc_warning("unnamed struct/union that defines no instances");
8488 if (IS_ENUM(btype
.t
)) {
8494 while (1) { /* iterate thru each declaration */
8497 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8501 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8502 printf("type = '%s'\n", buf
);
8505 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8506 if ((type
.t
& VT_STATIC
) && (l
!= VT_CONST
))
8507 tcc_error("function without file scope cannot be static");
8508 /* if old style function prototype, we accept a
8511 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
) {
8515 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8516 if (sym
->f
.func_alwinl
8517 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8518 == (VT_EXTERN
| VT_INLINE
))) {
8519 /* always_inline functions must be handled as if they
8520 don't generate multiple global defs, even if extern
8521 inline, i.e. GNU inline semantics for those. Rewrite
8522 them into static inline. */
8523 type
.t
&= ~VT_EXTERN
;
8524 type
.t
|= VT_STATIC
;
8527 /* always compile 'extern inline' */
8528 if (type
.t
& VT_EXTERN
)
8529 type
.t
&= ~VT_INLINE
;
8531 } else if (oldint
) {
8532 tcc_warning("type defaults to int");
8535 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8536 ad
.asm_label
= asm_label_instr();
8537 /* parse one last attribute list, after asm label */
8538 parse_attribute(&ad
);
8540 /* gcc does not allow __asm__("label") with function definition,
8547 #ifdef TCC_TARGET_PE
8548 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8549 if (type
.t
& VT_STATIC
)
8550 tcc_error("cannot have dll linkage with static");
8551 if (type
.t
& VT_TYPEDEF
) {
8552 tcc_warning("'%s' attribute ignored for typedef",
8553 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8554 (ad
.a
.dllexport
= 0, "dllexport"));
8555 } else if (ad
.a
.dllimport
) {
8556 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8559 type
.t
|= VT_EXTERN
;
8565 tcc_error("cannot use local functions");
8566 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8567 expect("function definition");
8569 /* reject abstract declarators in function definition
8570 make old style params without decl have int type */
8572 while ((sym
= sym
->next
) != NULL
) {
8573 if (!(sym
->v
& ~SYM_FIELD
))
8574 expect("identifier");
8575 if (sym
->type
.t
== VT_VOID
)
8576 sym
->type
= int_type
;
8579 /* apply post-declaraton attributes */
8580 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8582 /* put function symbol */
8583 type
.t
&= ~VT_EXTERN
;
8584 sym
= external_sym(v
, &type
, 0, &ad
);
8586 /* static inline functions are just recorded as a kind
8587 of macro. Their code will be emitted at the end of
8588 the compilation unit only if they are used */
8589 if (sym
->type
.t
& VT_INLINE
) {
8590 struct InlineFunc
*fn
;
8591 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8592 strcpy(fn
->filename
, file
->filename
);
8594 dynarray_add(&tcc_state
->inline_fns
,
8595 &tcc_state
->nb_inline_fns
, fn
);
8596 skip_or_save_block(&fn
->func_str
);
8598 /* compute text section */
8599 cur_text_section
= ad
.section
;
8600 if (!cur_text_section
)
8601 cur_text_section
= text_section
;
8607 /* find parameter in function parameter list */
8608 for (sym
= func_vt
.ref
->next
; sym
; sym
= sym
->next
)
8609 if ((sym
->v
& ~SYM_FIELD
) == v
)
8611 tcc_error("declaration for parameter '%s' but no such parameter",
8612 get_tok_str(v
, NULL
));
8614 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8615 tcc_error("storage class specified for '%s'",
8616 get_tok_str(v
, NULL
));
8617 if (sym
->type
.t
!= VT_VOID
)
8618 tcc_error("redefinition of parameter '%s'",
8619 get_tok_str(v
, NULL
));
8620 convert_parameter_type(&type
);
8622 } else if (type
.t
& VT_TYPEDEF
) {
8623 /* save typedefed type */
8624 /* XXX: test storage specifiers ? */
8626 if (sym
&& sym
->sym_scope
== local_scope
) {
8627 if (!is_compatible_types(&sym
->type
, &type
)
8628 || !(sym
->type
.t
& VT_TYPEDEF
))
8629 tcc_error("incompatible redefinition of '%s'",
8630 get_tok_str(v
, NULL
));
8633 sym
= sym_push(v
, &type
, 0, 0);
8636 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8637 merge_funcattr(&sym
->type
.ref
->f
, &ad
.f
);
8639 tcc_debug_typedef (tcc_state
, sym
);
8640 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8641 && !(type
.t
& VT_EXTERN
)) {
8642 tcc_error("declaration of void object");
8645 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8646 /* external function definition */
8647 /* specific case for func_call attribute */
8648 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8649 } else if (!(type
.t
& VT_ARRAY
)) {
8650 /* not lvalue if array */
8653 has_init
= (tok
== '=');
8654 if (has_init
&& (type
.t
& VT_VLA
))
8655 tcc_error("variable length array cannot be initialized");
8657 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8658 || (type
.t
& VT_BTYPE
) == VT_FUNC
8659 /* as with GCC, uninitialized global arrays with no size
8660 are considered extern: */
8661 || ((type
.t
& VT_ARRAY
) && !has_init
8662 && l
== VT_CONST
&& type
.ref
->c
< 0)
8664 /* external variable or function */
8665 type
.t
|= VT_EXTERN
;
8666 sym
= external_sym(v
, &type
, r
, &ad
);
8668 if (l
== VT_CONST
|| (type
.t
& VT_STATIC
))
8674 else if (l
== VT_CONST
)
8675 /* uninitialized global variables may be overridden */
8676 type
.t
|= VT_EXTERN
;
8677 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
== VT_CONST
);
8680 if (ad
.alias_target
&& l
== VT_CONST
) {
8681 /* Aliases need to be emitted when their target symbol
8682 is emitted, even if perhaps unreferenced.
8683 We only support the case where the base is already
8684 defined, otherwise we would need deferring to emit
8685 the aliases until the end of the compile unit. */
8686 Sym
*alias_target
= sym_find(ad
.alias_target
);
8687 ElfSym
*esym
= elfsym(alias_target
);
8689 tcc_error("unsupported forward __alias__ attribute");
8690 put_extern_sym2(sym_find(v
), esym
->st_shndx
,
8691 esym
->st_value
, esym
->st_size
, 1);
8707 /* ------------------------------------------------------------------------- */
8710 /* ------------------------------------------------------------------------- */