/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index
*/
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
46 ST_DATA
char debug_modes
;
49 static SValue _vstack
[1 + VSTACK_SIZE
];
50 #define vstack (_vstack + 1)
52 ST_DATA
int nocode_wanted
; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
56 /* no code output after unconditional jumps such as with if (0) ... */
57 #define CODE_OFF_BIT 0x20000000
58 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
59 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
61 /* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
62 #define NOEVAL_MASK 0x0000FFFF
63 #define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)
65 /* no code output when parsing constant expressions */
66 #define CONST_WANTED_BIT 0x00010000
67 #define CONST_WANTED_MASK 0x0FFF0000
68 #define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)
70 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
71 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
72 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
75 ST_DATA
const char *funcname
;
76 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
77 static CString initstr
;
/* NOTE(review): the three VT_SIZE_T/VT_PTRDIFF_T variants must be
   mutually exclusive; the conditional guards were lost in this copy
   and are restored here — verify against upstream tcc. */
#if PTR_SIZE == 4
#define VT_SIZE_T (VT_INT | VT_UNSIGNED)
#define VT_PTRDIFF_T VT_INT
#elif LONG_SIZE == 4
#define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
#define VT_PTRDIFF_T VT_LLONG
#else
#define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
#define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
#endif
90 static struct switch_t
{
94 } **p
; int n
; /* list of case ranges */
95 int def_sym
; /* default symbol */
99 struct switch_t
*prev
;
101 } *cur_switch
; /* current switch */
103 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
104 /*list of temporary local variables on the stack in current function. */
105 static struct temp_local_variable
{
106 int location
; //offset on stack. Svalue.c.i
109 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
110 static int nb_temp_local_vars
;
112 static struct scope
{
114 struct { int loc
, locorig
, num
; } vla
;
115 struct { Sym
*s
; int n
; } cl
;
118 } *cur_scope
, *loop_scope
, *root_scope
;
127 #define precedence_parser
128 static void init_prec(void);
131 static void block(int flags
);
133 #define STMT_COMPOUND 2
135 static void gen_cast(CType
*type
);
136 static void gen_cast_s(int t
);
137 static inline CType
*pointed_type(CType
*type
);
138 static int is_compatible_types(CType
*type1
, CType
*type2
);
139 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
);
140 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
141 static void parse_expr_type(CType
*type
);
142 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
143 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
144 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
145 static int decl(int l
);
146 static void expr_eq(void);
147 static void vpush_type_size(CType
*type
, int *a
);
148 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
149 static inline int64_t expr_const64(void);
150 static void vpush64(int ty
, unsigned long long v
);
151 static void vpush(CType
*type
);
152 static int gvtst(int inv
, int t
);
153 static void gen_inline_functions(TCCState
*s
);
154 static void free_inline_functions(TCCState
*s
);
155 static void skip_or_save_block(TokenString
**str
);
156 static void gv_dup(void);
157 static int get_temp_local_var(int size
,int align
);
158 static void clear_temp_local_var_list();
159 static void cast_error(CType
*st
, CType
*dt
);
160 static void end_switch(void);
162 /* ------------------------------------------------------------------------- */
163 /* Automagical code suppression */
165 /* Clear 'nocode_wanted' at forward label if it was used */
166 ST_FUNC
void gsym(int t
)
174 /* Clear 'nocode_wanted' if current pc is a label */
180 tcc_tcov_block_begin(tcc_state
);
184 /* Set 'nocode_wanted' after unconditional (backwards) jump */
185 static void gjmp_addr_acs(int t
)
191 /* Set 'nocode_wanted' after unconditional (forwards) jump */
192 static int gjmp_acs(int t
)
199 /* These are #undef'd at the end of this file */
200 #define gjmp_addr gjmp_addr_acs
201 #define gjmp gjmp_acs
202 /* ------------------------------------------------------------------------- */
204 ST_INLN
int is_float(int t
)
206 int bt
= t
& VT_BTYPE
;
207 return bt
== VT_LDOUBLE
213 static inline int is_integer_btype(int bt
)
222 static int btype_size(int bt
)
224 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
228 bt
== VT_PTR
? PTR_SIZE
: 0;
231 /* returns function return register from type */
232 static int R_RET(int t
)
236 #ifdef TCC_TARGET_X86_64
237 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
239 #elif defined TCC_TARGET_RISCV64
240 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
246 /* returns 2nd function return register, if any */
247 static int R2_RET(int t
)
253 #elif defined TCC_TARGET_X86_64
258 #elif defined TCC_TARGET_RISCV64
265 /* returns true for two-word types */
266 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
268 /* put function return registers to stack value */
269 static void PUT_R_RET(SValue
*sv
, int t
)
271 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
274 /* returns function return register class for type t */
275 static int RC_RET(int t
)
277 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
280 /* returns generic register class for type t */
281 static int RC_TYPE(int t
)
285 #ifdef TCC_TARGET_X86_64
286 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
288 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
290 #elif defined TCC_TARGET_RISCV64
291 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
297 /* returns 2nd register class corresponding to t and rc */
298 static int RC2_TYPE(int t
, int rc
)
300 if (!USING_TWO_WORDS(t
))
315 /* we use our own 'finite' function to avoid potential problems with
316 non standard math libs */
317 /* XXX: endianness dependent */
318 ST_FUNC
int ieee_finite(double d
)
321 memcpy(p
, &d
, sizeof(double));
322 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
325 /* compiling intel long double natively */
326 #if (defined __i386__ || defined __x86_64__) \
327 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
328 # define TCC_IS_NATIVE_387
331 ST_FUNC
void test_lvalue(void)
333 if (!(vtop
->r
& VT_LVAL
))
337 ST_FUNC
void check_vstack(void)
339 if (vtop
!= vstack
- 1)
340 tcc_error("internal compiler error: vstack leak (%d)",
341 (int)(vtop
- vstack
+ 1));
344 /* vstack debugging aid */
346 void pv (const char *lbl
, int a
, int b
)
349 for (i
= a
; i
< a
+ b
; ++i
) {
350 SValue
*p
= &vtop
[-i
];
351 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
352 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
357 /* ------------------------------------------------------------------------- */
358 /* initialize vstack and types. This must be done also for tcc -E */
359 ST_FUNC
void tccgen_init(TCCState
*s1
)
362 memset(vtop
, 0, sizeof *vtop
);
364 /* define some often used types */
367 char_type
.t
= VT_BYTE
;
368 if (s1
->char_is_unsigned
)
369 char_type
.t
|= VT_UNSIGNED
;
370 char_pointer_type
= char_type
;
371 mk_pointer(&char_pointer_type
);
373 func_old_type
.t
= VT_FUNC
;
374 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
375 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
376 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
377 #ifdef precedence_parser
383 ST_FUNC
int tccgen_compile(TCCState
*s1
)
387 anon_sym
= SYM_FIRST_ANOM
;
388 nocode_wanted
= DATA_ONLY_WANTED
; /* no code outside of functions */
389 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
393 #ifdef TCC_TARGET_ARM
397 printf("%s: **** new file\n", file
->filename
);
399 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
402 gen_inline_functions(s1
);
404 /* end of translation unit info */
410 ST_FUNC
void tccgen_finish(TCCState
*s1
)
412 tcc_debug_end(s1
); /* just in case of errors: free memory */
413 free_inline_functions(s1
);
414 sym_pop(&global_stack
, NULL
, 0);
415 sym_pop(&local_stack
, NULL
, 0);
416 /* free preprocessor macros */
419 dynarray_reset(&sym_pools
, &nb_sym_pools
);
421 dynarray_reset(&stk_data
, &nb_stk_data
);
427 pending_gotos
= NULL
;
428 nb_temp_local_vars
= 0;
429 global_label_stack
= NULL
;
430 local_label_stack
= NULL
;
431 cur_text_section
= NULL
;
432 sym_free_first
= NULL
;
435 /* ------------------------------------------------------------------------- */
436 ST_FUNC ElfSym
*elfsym(Sym
*s
)
440 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
443 /* apply storage attributes to Elf symbol */
444 ST_FUNC
void update_storage(Sym
*sym
)
447 int sym_bind
, old_sym_bind
;
453 if (sym
->a
.visibility
)
454 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
457 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
458 sym_bind
= STB_LOCAL
;
459 else if (sym
->a
.weak
)
462 sym_bind
= STB_GLOBAL
;
463 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
464 if (sym_bind
!= old_sym_bind
) {
465 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
469 if (sym
->a
.dllimport
)
470 esym
->st_other
|= ST_PE_IMPORT
;
471 if (sym
->a
.dllexport
)
472 esym
->st_other
|= ST_PE_EXPORT
;
476 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
477 get_tok_str(sym
->v
, NULL
),
478 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
486 /* ------------------------------------------------------------------------- */
487 /* update sym->c so that it points to an external symbol in section
488 'section' with value 'value' */
490 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
491 addr_t value
, unsigned long size
,
492 int can_add_underscore
)
494 int sym_type
, sym_bind
, info
, other
, t
;
500 name
= get_tok_str(sym
->v
, NULL
);
502 if ((t
& VT_BTYPE
) == VT_FUNC
) {
504 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
505 sym_type
= STT_NOTYPE
;
506 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
509 sym_type
= STT_OBJECT
;
511 if (t
& (VT_STATIC
| VT_INLINE
))
512 sym_bind
= STB_LOCAL
;
514 sym_bind
= STB_GLOBAL
;
518 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
519 Sym
*ref
= sym
->type
.ref
;
520 if (ref
->a
.nodecorate
) {
521 can_add_underscore
= 0;
523 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
524 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
526 other
|= ST_PE_STDCALL
;
527 can_add_underscore
= 0;
532 if (sym
->asm_label
) {
533 name
= get_tok_str(sym
->asm_label
, NULL
);
534 can_add_underscore
= 0;
537 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
539 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
543 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
544 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
547 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
551 esym
->st_value
= value
;
552 esym
->st_size
= size
;
553 esym
->st_shndx
= sh_num
;
558 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
560 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
562 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
565 /* add a new relocation entry to symbol 'sym' in section 's' */
566 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
571 if (nocode_wanted
&& s
== cur_text_section
)
576 put_extern_sym(sym
, NULL
, 0, 0);
580 /* now we can add ELF relocation info */
581 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
585 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
587 greloca(s
, sym
, offset
, type
, 0);
591 /* ------------------------------------------------------------------------- */
592 /* symbol allocator */
593 static Sym
*__sym_malloc(void)
595 Sym
*sym_pool
, *sym
, *last_sym
;
598 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
599 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
601 last_sym
= sym_free_first
;
603 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
604 sym
->next
= last_sym
;
608 sym_free_first
= last_sym
;
612 static inline Sym
*sym_malloc(void)
616 sym
= sym_free_first
;
618 sym
= __sym_malloc();
619 sym_free_first
= sym
->next
;
622 sym
= tcc_malloc(sizeof(Sym
));
627 ST_INLN
void sym_free(Sym
*sym
)
630 sym
->next
= sym_free_first
;
631 sym_free_first
= sym
;
637 /* push, without hashing */
638 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
643 memset(s
, 0, sizeof *s
);
653 /* find a symbol and return its associated structure. 's' is the top
654 of the symbol stack */
655 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
665 /* structure lookup */
666 ST_INLN Sym
*struct_find(int v
)
669 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
671 return table_ident
[v
]->sym_struct
;
674 /* find an identifier */
675 ST_INLN Sym
*sym_find(int v
)
678 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
680 return table_ident
[v
]->sym_identifier
;
683 static int sym_scope(Sym
*s
)
685 if (IS_ENUM_VAL (s
->type
.t
))
686 return s
->type
.ref
->sym_scope
;
691 /* push a given symbol on the symbol stack */
692 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
701 s
= sym_push2(ps
, v
, type
->t
, c
);
702 s
->type
.ref
= type
->ref
;
704 /* don't record fields or anonymous symbols */
706 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
707 /* record symbol in token array */
708 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
710 ps
= &ts
->sym_struct
;
712 ps
= &ts
->sym_identifier
;
715 s
->sym_scope
= local_scope
;
716 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
717 tcc_error("redeclaration of '%s'",
718 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
723 /* push a global identifier */
724 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
727 s
= sym_push2(&global_stack
, v
, t
, c
);
728 s
->r
= VT_CONST
| VT_SYM
;
729 /* don't record anonymous symbol */
730 if (v
< SYM_FIRST_ANOM
) {
731 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
732 /* modify the top most local identifier, so that sym_identifier will
733 point to 's' when popped; happens when called from inline asm */
734 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
735 ps
= &(*ps
)->prev_tok
;
742 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
743 pop them yet from the list, but do remove them from the token array. */
744 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
754 /* remove symbol in token array */
756 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
757 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
759 ps
= &ts
->sym_struct
;
761 ps
= &ts
->sym_identifier
;
773 ST_FUNC Sym
*label_find(int v
)
776 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
778 return table_ident
[v
]->sym_label
;
781 ST_FUNC Sym
*label_push(Sym
**ptop
, int v
, int flags
)
784 s
= sym_push2(ptop
, v
, VT_STATIC
, 0);
786 ps
= &table_ident
[v
- TOK_IDENT
]->sym_label
;
787 if (ptop
== &global_label_stack
) {
788 /* modify the top most local identifier, so that
789 sym_identifier will point to 's' when popped */
791 ps
= &(*ps
)->prev_tok
;
798 /* pop labels until element last is reached. Look if any labels are
799 undefined. Define symbols if '&&label' was used. */
800 ST_FUNC
void label_pop(Sym
**ptop
, Sym
*slast
, int keep
)
803 for(s
= *ptop
; s
!= slast
; s
= s1
) {
805 if (s
->r
== LABEL_DECLARED
) {
806 tcc_warning_c(warn_all
)("label '%s' declared but not used", get_tok_str(s
->v
, NULL
));
807 } else if (s
->r
== LABEL_FORWARD
) {
808 tcc_error("label '%s' used but not defined",
809 get_tok_str(s
->v
, NULL
));
812 /* define corresponding symbol. A size of
814 put_extern_sym(s
, cur_text_section
, s
->jnext
, 1);
818 if (s
->r
!= LABEL_GONE
)
819 table_ident
[s
->v
- TOK_IDENT
]->sym_label
= s
->prev_tok
;
829 /* ------------------------------------------------------------------------- */
830 static void vcheck_cmp(void)
832 /* cannot let cpu flags if other instruction are generated. Also
833 avoid leaving VT_JMP anywhere except on the top of the stack
834 because it would complicate the code generator.
836 Don't do this when nocode_wanted. vtop might come from
837 !nocode_wanted regions (see 88_codeopt.c) and transforming
838 it to a register without actually generating code is wrong
839 as their value might still be used for real. All values
840 we push under nocode_wanted will eventually be popped
841 again, so that the VT_CMP/VT_JMP value will be in vtop
842 when code is unsuppressed again. */
844 /* However if it's just automatic suppression via CODE_OFF/ON()
845 then it seems that we better let things work undisturbed.
846 How can it work at all under nocode_wanted? Well, gv() will
847 actually clear it at the gsym() in load()/VT_JMP in the
848 generator backends */
850 if (vtop
->r
== VT_CMP
&& 0 == (nocode_wanted
& ~CODE_OFF_BIT
))
854 static void vsetc(CType
*type
, int r
, CValue
*vc
)
856 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
857 tcc_error("memory full (vstack)");
867 ST_FUNC
void vswap(void)
877 /* pop stack value */
878 ST_FUNC
void vpop(void)
881 v
= vtop
->r
& VT_VALMASK
;
882 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
883 /* for x86, we need to pop the FP stack */
885 o(0xd8dd); /* fstp %st(0) */
889 /* need to put correct jump if && or || without test */
896 /* push constant of type "type" with useless value */
897 static void vpush(CType
*type
)
899 vset(type
, VT_CONST
, 0);
902 /* push arbitrary 64bit constant */
903 static void vpush64(int ty
, unsigned long long v
)
910 vsetc(&ctype
, VT_CONST
, &cval
);
913 /* push integer constant */
914 ST_FUNC
void vpushi(int v
)
919 /* push a pointer sized constant */
920 static void vpushs(addr_t v
)
922 vpush64(VT_SIZE_T
, v
);
925 /* push long long constant */
926 static inline void vpushll(long long v
)
928 vpush64(VT_LLONG
, v
);
931 ST_FUNC
void vset(CType
*type
, int r
, int v
)
935 vsetc(type
, r
, &cval
);
938 static void vseti(int r
, int v
)
946 ST_FUNC
void vpushv(SValue
*v
)
948 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
949 tcc_error("memory full (vstack)");
954 static void vdup(void)
959 /* rotate n first stack elements to the bottom
960 I1 ... In -> I2 ... In I1 [top is right]
962 ST_FUNC
void vrotb(int n
)
974 /* rotate the n elements before entry e towards the top
975 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
977 ST_FUNC
void vrote(SValue
*e
, int n
)
984 for(i
= 0;i
< n
- 1; i
++)
989 /* rotate n first stack elements to the top
990 I1 ... In -> In I1 ... I(n-1) [top is right]
992 ST_FUNC
void vrott(int n
)
997 /* ------------------------------------------------------------------------- */
998 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1000 /* called from generators to set the result from relational ops */
1001 ST_FUNC
void vset_VT_CMP(int op
)
1009 /* called once before asking generators to load VT_CMP to a register */
1010 static void vset_VT_JMP(void)
1012 int op
= vtop
->cmp_op
;
1014 if (vtop
->jtrue
|| vtop
->jfalse
) {
1015 int origt
= vtop
->type
.t
;
1016 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1017 int inv
= op
& (op
< 2); /* small optimization */
1018 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1019 vtop
->type
.t
|= origt
& (VT_UNSIGNED
| VT_DEFSIGN
);
1021 /* otherwise convert flags (rsp. 0/1) to register */
1023 if (op
< 2) /* doesn't seem to happen */
1028 /* Set CPU Flags, doesn't yet jump */
1029 static void gvtst_set(int inv
, int t
)
1033 if (vtop
->r
!= VT_CMP
) {
1036 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1037 vset_VT_CMP(vtop
->c
.i
!= 0);
1040 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1041 *p
= gjmp_append(*p
, t
);
1044 /* Generate value test
1046 * Generate a test for any value (jump, comparison and integers) */
1047 static int gvtst(int inv
, int t
)
1052 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1054 x
= u
, u
= t
, t
= x
;
1057 /* jump to the wanted target */
1059 t
= gjmp_cond(op
^ inv
, t
);
1062 /* resolve complementary jumps to here */
1069 /* generate a zero or nozero test */
1070 static void gen_test_zero(int op
)
1072 if (vtop
->r
== VT_CMP
) {
1076 vtop
->jfalse
= vtop
->jtrue
;
1086 /* ------------------------------------------------------------------------- */
1087 /* push a symbol value of TYPE */
1088 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1092 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1096 /* Return a static symbol pointing to a section */
1097 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1103 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1104 sym
->type
.t
|= VT_STATIC
;
1105 put_extern_sym(sym
, sec
, offset
, size
);
1109 /* push a reference to a section offset by adding a dummy symbol */
1110 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1112 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1115 /* define a new external reference to a symbol 'v' of type 'u' */
1116 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1122 /* push forward reference */
1123 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1124 s
->type
.ref
= type
->ref
;
1125 } else if (IS_ASM_SYM(s
)) {
1126 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1127 s
->type
.ref
= type
->ref
;
1133 /* create an external reference with no specific type similar to asm labels.
1134 This avoids type conflicts if the symbol is used from C too */
1135 ST_FUNC Sym
*external_helper_sym(int v
)
1137 CType ct
= { VT_ASM_FUNC
, NULL
};
1138 return external_global_sym(v
, &ct
);
1141 /* push a reference to an helper function (such as memmove) */
1142 ST_FUNC
void vpush_helper_func(int v
)
1144 vpushsym(&func_old_type
, external_helper_sym(v
));
1147 /* Merge symbol attributes. */
1148 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1150 if (sa1
->aligned
&& !sa
->aligned
)
1151 sa
->aligned
= sa1
->aligned
;
1152 sa
->packed
|= sa1
->packed
;
1153 sa
->weak
|= sa1
->weak
;
1154 sa
->nodebug
|= sa1
->nodebug
;
1155 if (sa1
->visibility
!= STV_DEFAULT
) {
1156 int vis
= sa
->visibility
;
1157 if (vis
== STV_DEFAULT
1158 || vis
> sa1
->visibility
)
1159 vis
= sa1
->visibility
;
1160 sa
->visibility
= vis
;
1162 sa
->dllexport
|= sa1
->dllexport
;
1163 sa
->nodecorate
|= sa1
->nodecorate
;
1164 sa
->dllimport
|= sa1
->dllimport
;
1167 /* Merge function attributes. */
1168 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1170 if (fa1
->func_call
&& !fa
->func_call
)
1171 fa
->func_call
= fa1
->func_call
;
1172 if (fa1
->func_type
&& !fa
->func_type
)
1173 fa
->func_type
= fa1
->func_type
;
1174 if (fa1
->func_args
&& !fa
->func_args
)
1175 fa
->func_args
= fa1
->func_args
;
1176 if (fa1
->func_noreturn
)
1177 fa
->func_noreturn
= 1;
1184 /* Merge attributes. */
1185 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1187 merge_symattr(&ad
->a
, &ad1
->a
);
1188 merge_funcattr(&ad
->f
, &ad1
->f
);
1191 ad
->section
= ad1
->section
;
1192 if (ad1
->alias_target
)
1193 ad
->alias_target
= ad1
->alias_target
;
1195 ad
->asm_label
= ad1
->asm_label
;
1197 ad
->attr_mode
= ad1
->attr_mode
;
1200 /* Merge some type attributes. */
1201 static void patch_type(Sym
*sym
, CType
*type
)
1203 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1204 if (!(sym
->type
.t
& VT_EXTERN
))
1205 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1206 sym
->type
.t
&= ~VT_EXTERN
;
1209 if (IS_ASM_SYM(sym
)) {
1210 /* stay static if both are static */
1211 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1212 sym
->type
.ref
= type
->ref
;
1213 if ((type
->t
& VT_BTYPE
) != VT_FUNC
&& !(type
->t
& VT_ARRAY
))
1217 if (!is_compatible_types(&sym
->type
, type
)) {
1218 tcc_error("incompatible types for redefinition of '%s'",
1219 get_tok_str(sym
->v
, NULL
));
1221 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1222 int static_proto
= sym
->type
.t
& VT_STATIC
;
1223 /* warn if static follows non-static function declaration */
1224 if ((type
->t
& VT_STATIC
) && !static_proto
1225 /* XXX this test for inline shouldn't be here. Until we
1226 implement gnu-inline mode again it silences a warning for
1227 mingw caused by our workarounds. */
1228 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1229 tcc_warning("static storage ignored for redefinition of '%s'",
1230 get_tok_str(sym
->v
, NULL
));
1232 /* set 'inline' if both agree or if one has static */
1233 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1234 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1235 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1236 static_proto
|= VT_INLINE
;
1239 if (0 == (type
->t
& VT_EXTERN
)) {
1240 struct FuncAttr f
= sym
->type
.ref
->f
;
1241 /* put complete type, use static from prototype */
1242 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1243 sym
->type
.ref
= type
->ref
;
1244 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1246 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1249 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1250 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1251 sym
->type
.ref
= type
->ref
;
1255 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1256 /* set array size if it was omitted in extern declaration */
1257 sym
->type
.ref
->c
= type
->ref
->c
;
1259 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1260 tcc_warning("storage mismatch for redefinition of '%s'",
1261 get_tok_str(sym
->v
, NULL
));
1265 /* Merge some storage attributes. */
1266 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1269 patch_type(sym
, type
);
1271 #ifdef TCC_TARGET_PE
1272 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1273 tcc_error("incompatible dll linkage for redefinition of '%s'",
1274 get_tok_str(sym
->v
, NULL
));
1276 merge_symattr(&sym
->a
, &ad
->a
);
1278 sym
->asm_label
= ad
->asm_label
;
1279 update_storage(sym
);
1282 /* copy sym to other stack */
1283 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1286 s
= sym_malloc(), *s
= *s0
;
1287 s
->prev
= *ps
, *ps
= s
;
1288 if (s
->v
< SYM_FIRST_ANOM
) {
1289 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1290 s
->prev_tok
= *ps
, *ps
= s
;
1295 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1296 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1298 int bt
= s
->type
.t
& VT_BTYPE
;
1299 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1300 Sym
**sp
= &s
->type
.ref
;
1301 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1302 Sym
*s2
= sym_copy(s
, ps
);
1303 sp
= &(*sp
= s2
)->next
;
1304 sym_copy_ref(s2
, ps
);
1309 /* define a new external reference to a symbol 'v' */
1310 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1314 /* look for global symbol */
1316 while (s
&& s
->sym_scope
)
1320 /* push forward reference */
1321 s
= global_identifier_push(v
, type
->t
, 0);
1324 s
->asm_label
= ad
->asm_label
;
1325 s
->type
.ref
= type
->ref
;
1326 /* copy type to the global stack */
1328 sym_copy_ref(s
, &global_stack
);
1330 patch_storage(s
, ad
, type
);
1332 /* push variables on local_stack if any */
1333 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1334 s
= sym_copy(s
, &local_stack
);
1338 /* save registers up to (vtop - n) stack entry */
1339 ST_FUNC
void save_regs(int n
)
1342 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1346 /* save r to the memory stack, and mark it as being free */
1347 ST_FUNC
void save_reg(int r
)
1349 save_reg_upstack(r
, 0);
1352 /* save r to the memory stack, and mark it as being free,
1353 if seen up to (vtop - n) stack entry */
1354 ST_FUNC
void save_reg_upstack(int r
, int n
)
1356 int l
, size
, align
, bt
;
1359 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1364 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1365 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1366 /* must save value on stack if not already done */
1368 bt
= p
->type
.t
& VT_BTYPE
;
1371 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1374 size
= type_size(&sv
.type
, &align
);
1375 l
= get_temp_local_var(size
,align
);
1376 sv
.r
= VT_LOCAL
| VT_LVAL
;
1378 store(p
->r
& VT_VALMASK
, &sv
);
1379 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1380 /* x86 specific: need to pop fp register ST0 if saved */
1381 if (r
== TREG_ST0
) {
1382 o(0xd8dd); /* fstp %st(0) */
1385 /* special long long case */
1386 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1391 /* mark that stack entry as being saved on the stack */
1392 if (p
->r
& VT_LVAL
) {
1393 /* also clear the bounded flag because the
1394 relocation address of the function was stored in
1396 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1398 p
->r
= VT_LVAL
| VT_LOCAL
;
1399 p
->type
.t
&= ~VT_ARRAY
; /* cannot combine VT_LVAL with VT_ARRAY */
1408 #ifdef TCC_TARGET_ARM
1409 /* find a register of class 'rc2' with at most one reference on stack.
1410 * If none, call get_reg(rc) */
1411 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1416 for(r
=0;r
<NB_REGS
;r
++) {
1417 if (reg_classes
[r
] & rc2
) {
1420 for(p
= vstack
; p
<= vtop
; p
++) {
1421 if ((p
->r
& VT_VALMASK
) == r
||
1433 /* find a free register of class 'rc'. If none, save one register */
1434 ST_FUNC
int get_reg(int rc
)
1439 /* find a free register */
1440 for(r
=0;r
<NB_REGS
;r
++) {
1441 if (reg_classes
[r
] & rc
) {
1444 for(p
=vstack
;p
<=vtop
;p
++) {
1445 if ((p
->r
& VT_VALMASK
) == r
||
1454 /* no register left : free the first one on the stack (VERY
1455 IMPORTANT to start from the bottom to ensure that we don't
1456 spill registers used in gen_opi()) */
1457 for(p
=vstack
;p
<=vtop
;p
++) {
1458 /* look at second register (if long long) */
1460 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1462 r
= p
->r
& VT_VALMASK
;
1463 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1469 /* Should never comes here */
1473 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1474 static int get_temp_local_var(int size
,int align
){
1476 struct temp_local_variable
*temp_var
;
1483 for(i
=0;i
<nb_temp_local_vars
;i
++){
1484 temp_var
=&arr_temp_local_vars
[i
];
1485 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1488 /*check if temp_var is free*/
1490 for(p
=vstack
;p
<=vtop
;p
++) {
1492 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1493 if(p
->c
.i
==temp_var
->location
){
1500 found_var
=temp_var
->location
;
1506 loc
= (loc
- size
) & -align
;
1507 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1508 temp_var
=&arr_temp_local_vars
[i
];
1509 temp_var
->location
=loc
;
1510 temp_var
->size
=size
;
1511 temp_var
->align
=align
;
1512 nb_temp_local_vars
++;
1519 static void clear_temp_local_var_list(){
1520 nb_temp_local_vars
=0;
1523 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1525 static void move_reg(int r
, int s
, int t
)
/* get address of vtop (vtop MUST BE an lvalue): clear the dereference
   flag so vtop now denotes the pointer value itself */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue: the
       address lives in a local slot (VT_LLOCAL), so re-address that
       slot as a plain local lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
1548 #ifdef CONFIG_TCC_BCHECK
1549 /* generate a bounded pointer addition */
1550 static void gen_bounded_ptr_add(void)
1552 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1557 vpush_helper_func(TOK___bound_ptr_add
);
1562 /* returned pointer is in REG_IRET */
1563 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1566 /* relocation offset of the bounding function call point */
1567 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1570 /* patch pointer addition in vtop so that pointer dereferencing is
1572 static void gen_bounded_ptr_deref(void)
1582 size
= type_size(&vtop
->type
, &align
);
1584 case 1: func
= TOK___bound_ptr_indir1
; break;
1585 case 2: func
= TOK___bound_ptr_indir2
; break;
1586 case 4: func
= TOK___bound_ptr_indir4
; break;
1587 case 8: func
= TOK___bound_ptr_indir8
; break;
1588 case 12: func
= TOK___bound_ptr_indir12
; break;
1589 case 16: func
= TOK___bound_ptr_indir16
; break;
1591 /* may happen with struct member access */
1594 sym
= external_helper_sym(func
);
1596 put_extern_sym(sym
, NULL
, 0, 0);
1597 /* patch relocation */
1598 /* XXX: find a better solution ? */
1599 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1600 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1603 /* generate lvalue bound code */
1604 static void gbound(void)
1608 vtop
->r
&= ~VT_MUSTBOUND
;
1609 /* if lvalue, then use checking code before dereferencing */
1610 if (vtop
->r
& VT_LVAL
) {
1611 /* if not VT_BOUNDED value, then make one */
1612 if (!(vtop
->r
& VT_BOUNDED
)) {
1613 /* must save type because we must set it to int to get pointer */
1615 vtop
->type
.t
= VT_PTR
;
1618 gen_bounded_ptr_add();
1622 /* then check for dereferencing */
1623 gen_bounded_ptr_deref();
1627 /* we need to call __bound_ptr_add before we start to load function
1628 args into registers */
1629 ST_FUNC
void gbound_args(int nb_args
)
1634 for (i
= 1; i
<= nb_args
; ++i
)
1635 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1641 sv
= vtop
- nb_args
;
1642 if (sv
->r
& VT_SYM
) {
1646 #ifndef TCC_TARGET_PE
1647 || v
== TOK_sigsetjmp
1648 || v
== TOK___sigsetjmp
1651 vpush_helper_func(TOK___bound_setjmp
);
1654 func_bound_add_epilog
= 1;
1656 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1657 if (v
== TOK_alloca
)
1658 func_bound_add_epilog
= 1;
1661 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1662 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1667 /* Add bounds for local symbols from S to E (via ->prev) */
1668 static void add_local_bounds(Sym
*s
, Sym
*e
)
1670 for (; s
!= e
; s
= s
->prev
) {
1671 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1673 /* Add arrays/structs/unions because we always take address */
1674 if ((s
->type
.t
& VT_ARRAY
)
1675 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1676 || s
->a
.addrtaken
) {
1677 /* add local bound info */
1678 int align
, size
= type_size(&s
->type
, &align
);
1679 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1680 2 * sizeof(addr_t
));
1681 bounds_ptr
[0] = s
->c
;
1682 bounds_ptr
[1] = size
;
1688 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1689 static void pop_local_syms(Sym
*b
, int keep
)
1691 #ifdef CONFIG_TCC_BCHECK
1692 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1693 add_local_bounds(local_stack
, b
);
1696 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1697 sym_pop(&local_stack
, b
, keep
);
1700 /* increment an lvalue pointer */
1701 static void incr_offset(int offset
)
1703 int t
= vtop
->type
.t
;
1704 gaddrof(); /* remove VT_LVAL */
1705 vtop
->type
.t
= VT_PTRDIFF_T
; /* set scalar type */
1712 static void incr_bf_adr(int o
)
1714 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1718 /* single-byte load mode for packed or otherwise unaligned bitfields */
1719 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1722 save_reg_upstack(vtop
->r
, 1);
1723 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1724 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1733 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1735 vpushi((1 << n
) - 1), gen_op('&');
1738 vpushi(bits
), gen_op(TOK_SHL
);
1741 bits
+= n
, bit_size
-= n
, o
= 1;
1744 if (!(type
->t
& VT_UNSIGNED
)) {
1745 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1746 vpushi(n
), gen_op(TOK_SHL
);
1747 vpushi(n
), gen_op(TOK_SAR
);
1751 /* single-byte store mode for packed or otherwise unaligned bitfields */
1752 static void store_packed_bf(int bit_pos
, int bit_size
)
1754 int bits
, n
, o
, m
, c
;
1755 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1757 save_reg_upstack(vtop
->r
, 1);
1758 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1760 incr_bf_adr(o
); // X B
1762 c
? vdup() : gv_dup(); // B V X
1765 vpushi(bits
), gen_op(TOK_SHR
);
1767 vpushi(bit_pos
), gen_op(TOK_SHL
);
1772 m
= ((1 << n
) - 1) << bit_pos
;
1773 vpushi(m
), gen_op('&'); // X B V1
1774 vpushv(vtop
-1); // X B V1 B
1775 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1776 gen_op('&'); // X B V1 B1
1777 gen_op('|'); // X B V2
1779 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1780 vstore(), vpop(); // X B
1781 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1786 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1789 if (0 == sv
->type
.ref
)
1791 t
= sv
->type
.ref
->auxtype
;
1792 if (t
!= -1 && t
!= VT_STRUCT
) {
1793 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1799 /* store vtop a register belonging to class 'rc'. lvalues are
1800 converted to values. Cannot be used if cannot be converted to
1801 register value (such as structures). */
1802 ST_FUNC
int gv(int rc
)
1804 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1805 int bit_pos
, bit_size
, size
, align
;
1807 /* NOTE: get_reg can modify vstack[] */
1808 if (vtop
->type
.t
& VT_BITFIELD
) {
1811 bit_pos
= BIT_POS(vtop
->type
.t
);
1812 bit_size
= BIT_SIZE(vtop
->type
.t
);
1813 /* remove bit field info to avoid loops */
1814 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1817 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1818 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1819 type
.t
|= VT_UNSIGNED
;
1821 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1823 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1828 if (r
== VT_STRUCT
) {
1829 load_packed_bf(&type
, bit_pos
, bit_size
);
1831 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1832 /* cast to int to propagate signedness in following ops */
1834 /* generate shifts */
1835 vpushi(bits
- (bit_pos
+ bit_size
));
1837 vpushi(bits
- bit_size
);
1838 /* NOTE: transformed to SHR if unsigned */
1843 if (is_float(vtop
->type
.t
) &&
1844 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1845 /* CPUs usually cannot use float constants, so we store them
1846 generically in data segment */
1847 init_params p
= { rodata_section
};
1848 unsigned long offset
;
1849 size
= type_size(&vtop
->type
, &align
);
1851 size
= 0, align
= 1;
1852 offset
= section_add(p
.sec
, size
, align
);
1853 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1855 init_putv(&p
, &vtop
->type
, offset
);
1858 #ifdef CONFIG_TCC_BCHECK
1859 if (vtop
->r
& VT_MUSTBOUND
)
1863 bt
= vtop
->type
.t
& VT_BTYPE
;
1865 #ifdef TCC_TARGET_RISCV64
1867 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1870 rc2
= RC2_TYPE(bt
, rc
);
1872 /* need to reload if:
1874 - lvalue (need to dereference pointer)
1875 - already a register, but not in the right class */
1876 r
= vtop
->r
& VT_VALMASK
;
1877 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1878 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1880 if (!r_ok
|| !r2_ok
) {
1883 if (1 /* we can 'mov (r),r' in cases */
1885 && (reg_classes
[r
] & rc
)
1888 save_reg_upstack(r
, 1);
1894 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1895 int original_type
= vtop
->type
.t
;
1897 /* two register type load :
1898 expand to two words temporarily */
1899 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1901 unsigned long long ll
= vtop
->c
.i
;
1902 vtop
->c
.i
= ll
; /* first word */
1904 vtop
->r
= r
; /* save register value */
1905 vpushi(ll
>> 32); /* second word */
1906 } else if (vtop
->r
& VT_LVAL
) {
1907 /* We do not want to modifier the long long pointer here.
1908 So we save any other instances down the stack */
1909 save_reg_upstack(vtop
->r
, 1);
1910 /* load from memory */
1911 vtop
->type
.t
= load_type
;
1914 vtop
[-1].r
= r
; /* save register value */
1915 /* increment pointer to get second word */
1916 incr_offset(PTR_SIZE
);
1918 /* move registers */
1921 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1924 vtop
[-1].r
= r
; /* save register value */
1925 vtop
->r
= vtop
[-1].r2
;
1927 /* Allocate second register. Here we rely on the fact that
1928 get_reg() tries first to free r2 of an SValue. */
1932 /* write second register */
1935 vtop
->type
.t
= original_type
;
1937 if (vtop
->r
== VT_CMP
)
1939 /* one register type load */
1944 #ifdef TCC_TARGET_C67
1945 /* uses register pairs for doubles */
1946 if (bt
== VT_DOUBLE
)
1953 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1954 ST_FUNC
void gv2(int rc1
, int rc2
)
1956 /* generate more generic register first. But VT_JMP or VT_CMP
1957 values must be generated first in all cases to avoid possible
1959 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1964 /* test if reload is needed for first register */
1965 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1975 /* test if reload is needed for first register */
1976 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1983 /* expand 64bit on stack in two ints */
1984 ST_FUNC
void lexpand(void)
1987 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1988 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1989 if (v
== VT_CONST
) {
1992 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1998 vtop
[0].r
= vtop
[-1].r2
;
1999 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2001 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2006 /* build a long long from two ints */
2007 static void lbuild(int t
)
2009 gv2(RC_INT
, RC_INT
);
2010 vtop
[-1].r2
= vtop
[0].r
;
2011 vtop
[-1].type
.t
= t
;
2016 /* convert stack entry to register and duplicate its value in another
2018 static void gv_dup(void)
2024 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2025 if (t
& VT_BITFIELD
) {
2035 /* stack: H L L1 H1 */
2045 /* duplicate value */
2055 /* generate CPU independent (unsigned) long long operations */
2056 static void gen_opl(int op
)
2058 int t
, a
, b
, op1
, c
, i
;
2060 unsigned short reg_iret
= REG_IRET
;
2061 unsigned short reg_lret
= REG_IRE2
;
2067 func
= TOK___divdi3
;
2070 func
= TOK___udivdi3
;
2073 func
= TOK___moddi3
;
2076 func
= TOK___umoddi3
;
2083 /* call generic long long function */
2084 vpush_helper_func(func
);
2089 vtop
->r2
= reg_lret
;
2097 //pv("gen_opl A",0,2);
2103 /* stack: L1 H1 L2 H2 */
2108 vtop
[-2] = vtop
[-3];
2111 /* stack: H1 H2 L1 L2 */
2112 //pv("gen_opl B",0,4);
2118 /* stack: H1 H2 L1 L2 ML MH */
2121 /* stack: ML MH H1 H2 L1 L2 */
2125 /* stack: ML MH H1 L2 H2 L1 */
2130 /* stack: ML MH M1 M2 */
2133 } else if (op
== '+' || op
== '-') {
2134 /* XXX: add non carry method too (for MIPS or alpha) */
2140 /* stack: H1 H2 (L1 op L2) */
2143 gen_op(op1
+ 1); /* TOK_xxxC2 */
2146 /* stack: H1 H2 (L1 op L2) */
2149 /* stack: (L1 op L2) H1 H2 */
2151 /* stack: (L1 op L2) (H1 op H2) */
2159 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2160 t
= vtop
[-1].type
.t
;
2164 /* stack: L H shift */
2166 /* constant: simpler */
2167 /* NOTE: all comments are for SHL. the other cases are
2168 done by swapping words */
2179 if (op
!= TOK_SAR
) {
2212 /* XXX: should provide a faster fallback on x86 ? */
2215 func
= TOK___ashrdi3
;
2218 func
= TOK___lshrdi3
;
2221 func
= TOK___ashldi3
;
2227 /* compare operations */
2233 /* stack: L1 H1 L2 H2 */
2235 vtop
[-1] = vtop
[-2];
2237 /* stack: L1 L2 H1 H2 */
2241 /* when values are equal, we need to compare low words. since
2242 the jump is inverted, we invert the test too. */
2245 else if (op1
== TOK_GT
)
2247 else if (op1
== TOK_ULT
)
2249 else if (op1
== TOK_UGT
)
2259 /* generate non equal test */
2261 vset_VT_CMP(TOK_NE
);
2265 /* compare low. Always unsigned */
2269 else if (op1
== TOK_LE
)
2271 else if (op1
== TOK_GT
)
2273 else if (op1
== TOK_GE
)
2276 #if 0//def TCC_TARGET_I386
2277 if (op
== TOK_NE
) { gsym(b
); break; }
2278 if (op
== TOK_EQ
) { gsym(a
); break; }
/* signed 64-bit division carried out on the unsigned representations:
   divide the magnitudes, then negate the quotient when the operand
   signs differ.  Using unsigned arithmetic (which wraps) avoids host
   undefined behavior for cases like INT64_MIN / -1. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t am = a;
    uint64_t bm = b;
    uint64_t q;

    if (am >> 63)
        am = -am;           /* magnitude of a */
    if (bm >> 63)
        bm = -bm;           /* magnitude of b */
    q = am / bm;
    if ((a ^ b) >> 63)      /* exactly one operand negative? */
        q = -q;
    return q;
}
/* signed 64-bit "less than" on the unsigned representations: flipping
   the sign bit of both operands maps the signed ordering onto the
   unsigned ordering, so a plain unsigned compare gives the answer */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2298 /* handle integer constant optimizations and various machine
2300 static void gen_opic(int op
)
2302 SValue
*v1
= vtop
- 1;
2304 int t1
= v1
->type
.t
& VT_BTYPE
;
2305 int t2
= v2
->type
.t
& VT_BTYPE
;
2306 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2307 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2308 uint64_t l1
= c1
? v1
->c
.i
: 0;
2309 uint64_t l2
= c2
? v2
->c
.i
: 0;
2310 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2313 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2314 l1
= ((uint32_t)l1
|
2315 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2316 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2317 l2
= ((uint32_t)l2
|
2318 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2322 case '+': l1
+= l2
; break;
2323 case '-': l1
-= l2
; break;
2324 case '&': l1
&= l2
; break;
2325 case '^': l1
^= l2
; break;
2326 case '|': l1
|= l2
; break;
2327 case '*': l1
*= l2
; break;
2334 /* if division by zero, generate explicit division */
2336 if (CONST_WANTED
&& !NOEVAL_WANTED
)
2337 tcc_error("division by zero in constant");
2341 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2342 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2343 case TOK_UDIV
: l1
= l1
/ l2
; break;
2344 case TOK_UMOD
: l1
= l1
% l2
; break;
2347 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2348 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2350 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2353 case TOK_ULT
: l1
= l1
< l2
; break;
2354 case TOK_UGE
: l1
= l1
>= l2
; break;
2355 case TOK_EQ
: l1
= l1
== l2
; break;
2356 case TOK_NE
: l1
= l1
!= l2
; break;
2357 case TOK_ULE
: l1
= l1
<= l2
; break;
2358 case TOK_UGT
: l1
= l1
> l2
; break;
2359 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2360 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2361 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2362 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2364 case TOK_LAND
: l1
= l1
&& l2
; break;
2365 case TOK_LOR
: l1
= l1
|| l2
; break;
2369 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2370 l1
= ((uint32_t)l1
|
2371 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2373 v1
->r
|= v2
->r
& VT_NONCONST
;
2376 /* if commutative ops, put c2 as constant */
2377 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2378 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2380 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2381 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2383 if (c1
&& ((l1
== 0 &&
2384 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2385 (l1
== -1 && op
== TOK_SAR
))) {
2386 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2388 } else if (c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2390 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2391 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2392 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2397 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2400 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2401 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2404 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2405 /* filter out NOP operations like x*1, x-0, x&-1... */
2407 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2408 /* try to use shifts instead of muls or divs */
2409 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2418 else if (op
== TOK_PDIV
)
2424 } else if (c2
&& (op
== '+' || op
== '-') &&
2425 (r
= vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
),
2426 r
== (VT_CONST
| VT_SYM
) || r
== VT_LOCAL
)) {
2427 /* symbol + constant case */
2431 /* The backends can't always deal with addends to symbols
2432 larger than +-1<<31. Don't construct such. */
2439 /* call low level op generator */
2440 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2441 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2446 if (vtop
->r
== VT_CONST
)
2447 vtop
->r
|= VT_NONCONST
; /* is const, but only by optimization */
2451 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2452 # define gen_negf gen_opf
2453 #elif defined TCC_TARGET_ARM
/* floating-point negate for the ARM backend; 'op' is unused here
   (kept for a uniform gen_negf signature across backends) */
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
2460 /* XXX: implement in gen_opf() for other backends too */
2461 void gen_negf(int op
)
2463 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2464 subtract(-0, x), but with them it's really a sign flip
2465 operation. We implement this with bit manipulation and have
2466 to do some type reinterpretation for this, which TCC can do
2469 int align
, size
, bt
;
2471 size
= type_size(&vtop
->type
, &align
);
2472 bt
= vtop
->type
.t
& VT_BTYPE
;
2473 save_reg(gv(RC_TYPE(bt
)));
2475 incr_bf_adr(size
- 1);
2477 vpushi(0x80); /* flip sign */
2484 /* generate a floating point operation with constant propagation */
2485 static void gen_opif(int op
)
2489 #if defined _MSC_VER && defined __x86_64__
2490 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2499 bt
= v1
->type
.t
& VT_BTYPE
;
2501 /* currently, we cannot do computations with forward symbols */
2502 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2503 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2505 if (bt
== VT_FLOAT
) {
2508 } else if (bt
== VT_DOUBLE
) {
2515 /* NOTE: we only do constant propagation if finite number (not
2516 NaN or infinity) (ANSI spec) */
2517 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !CONST_WANTED
)
2520 case '+': f1
+= f2
; break;
2521 case '-': f1
-= f2
; break;
2522 case '*': f1
*= f2
; break;
2525 union { float f
; unsigned u
; } x1
, x2
, y
;
2526 /* If not in initializer we need to potentially generate
2527 FP exceptions at runtime, otherwise we want to fold. */
2530 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2531 when used to compile the f1 /= f2 below, would be -nan */
2532 x1
.f
= f1
, x2
.f
= f2
;
2534 y
.u
= 0x7fc00000; /* nan */
2536 y
.u
= 0x7f800000; /* infinity */
2537 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2572 /* XXX: overflow test ? */
2573 if (bt
== VT_FLOAT
) {
2575 } else if (bt
== VT_DOUBLE
) {
2582 if (op
== TOK_NEG
) {
2590 /* print a type. If 'varstr' is not NULL, then the variable is also
2591 printed in the type */
2593 /* XXX: add array and function pointers */
2594 static void type_to_str(char *buf
, int buf_size
,
2595 CType
*type
, const char *varstr
)
2607 pstrcat(buf
, buf_size
, "extern ");
2609 pstrcat(buf
, buf_size
, "static ");
2611 pstrcat(buf
, buf_size
, "typedef ");
2613 pstrcat(buf
, buf_size
, "inline ");
2615 if (t
& VT_VOLATILE
)
2616 pstrcat(buf
, buf_size
, "volatile ");
2617 if (t
& VT_CONSTANT
)
2618 pstrcat(buf
, buf_size
, "const ");
2620 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2621 || ((t
& VT_UNSIGNED
)
2622 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2625 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2627 buf_size
-= strlen(buf
);
2663 tstr
= "long double";
2665 pstrcat(buf
, buf_size
, tstr
);
2672 pstrcat(buf
, buf_size
, tstr
);
2673 v
= type
->ref
->v
& ~SYM_STRUCT
;
2674 if (v
>= SYM_FIRST_ANOM
)
2675 pstrcat(buf
, buf_size
, "<anonymous>");
2677 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2682 if (varstr
&& '*' == *varstr
) {
2683 pstrcat(buf1
, sizeof(buf1
), "(");
2684 pstrcat(buf1
, sizeof(buf1
), varstr
);
2685 pstrcat(buf1
, sizeof(buf1
), ")");
2687 pstrcat(buf1
, buf_size
, "(");
2689 while (sa
!= NULL
) {
2691 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2692 pstrcat(buf1
, sizeof(buf1
), buf2
);
2695 pstrcat(buf1
, sizeof(buf1
), ", ");
2697 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2698 pstrcat(buf1
, sizeof(buf1
), ", ...");
2699 pstrcat(buf1
, sizeof(buf1
), ")");
2700 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2704 if (t
& (VT_ARRAY
|VT_VLA
)) {
2705 if (varstr
&& '*' == *varstr
)
2706 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2708 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2709 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2712 pstrcpy(buf1
, sizeof(buf1
), "*");
2713 if (t
& VT_CONSTANT
)
2714 pstrcat(buf1
, buf_size
, "const ");
2715 if (t
& VT_VOLATILE
)
2716 pstrcat(buf1
, buf_size
, "volatile ");
2718 pstrcat(buf1
, sizeof(buf1
), varstr
);
2719 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2723 pstrcat(buf
, buf_size
, " ");
2724 pstrcat(buf
, buf_size
, varstr
);
2729 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2731 char buf1
[256], buf2
[256];
2732 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2733 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2734 tcc_error(fmt
, buf1
, buf2
);
2737 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2739 char buf1
[256], buf2
[256];
2740 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2741 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2742 tcc_warning(fmt
, buf1
, buf2
);
2745 static int pointed_size(CType
*type
)
2748 return type_size(pointed_type(type
), &align
);
2751 static inline int is_null_pointer(SValue
*p
)
2753 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
2755 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2756 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2757 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2758 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2759 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2760 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2764 /* compare function types. OLD functions match any new functions */
2765 static int is_compatible_func(CType
*type1
, CType
*type2
)
2771 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2773 if (s1
->f
.func_type
!= s2
->f
.func_type
2774 && s1
->f
.func_type
!= FUNC_OLD
2775 && s2
->f
.func_type
!= FUNC_OLD
)
2778 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2780 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2791 /* return true if type1 and type2 are the same. If unqualified is
2792 true, qualifiers on the types are ignored.
2794 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2798 t1
= type1
->t
& VT_TYPE
;
2799 t2
= type2
->t
& VT_TYPE
;
2801 /* strip qualifiers before comparing */
2802 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2803 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2806 /* Default Vs explicit signedness only matters for char */
2807 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2811 /* XXX: bitfields ? */
2816 && !(type1
->ref
->c
< 0
2817 || type2
->ref
->c
< 0
2818 || type1
->ref
->c
== type2
->ref
->c
))
2821 /* test more complicated cases */
2822 bt1
= t1
& VT_BTYPE
;
2823 if (bt1
== VT_PTR
) {
2824 type1
= pointed_type(type1
);
2825 type2
= pointed_type(type2
);
2826 return is_compatible_types(type1
, type2
);
2827 } else if (bt1
== VT_STRUCT
) {
2828 return (type1
->ref
== type2
->ref
);
2829 } else if (bt1
== VT_FUNC
) {
2830 return is_compatible_func(type1
, type2
);
2831 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2832 /* If both are enums then they must be the same, if only one is then
2833 t1 and t2 must be equal, which was checked above already. */
2834 return type1
->ref
== type2
->ref
;
2841 #define SHIFT_OP 'S'
2843 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2844 type is stored in DEST if non-null (except for pointer plus/minus) . */
2845 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2847 CType
*type1
, *type2
, type
;
2848 int t1
, t2
, bt1
, bt2
;
2851 /* for shifts, 'combine' only left operand */
2855 type1
= &op1
->type
, type2
= &op2
->type
;
2856 t1
= type1
->t
, t2
= type2
->t
;
2857 bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2862 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2863 ret
= op
== '?' ? 1 : 0;
2864 /* NOTE: as an extension, we accept void on only one side */
2866 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2868 if (!is_integer_btype(bt1
== VT_PTR
? bt2
: bt1
))
2871 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2872 /* If one is a null ptr constant the result type is the other. */
2873 else if (is_null_pointer (op2
)) type
= *type1
;
2874 else if (is_null_pointer (op1
)) type
= *type2
;
2875 else if (bt1
!= bt2
) {
2876 /* accept comparison or cond-expr between pointer and integer
2878 if ((op
== '?' || op
== CMP_OP
)
2879 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2880 tcc_warning("pointer/integer mismatch in %s",
2881 op
== '?' ? "conditional expression" : "comparison");
2882 else if (op
!= '-' || !is_integer_btype(bt2
))
2884 type
= *(bt1
== VT_PTR
? type1
: type2
);
2886 CType
*pt1
= pointed_type(type1
);
2887 CType
*pt2
= pointed_type(type2
);
2888 int pbt1
= pt1
->t
& VT_BTYPE
;
2889 int pbt2
= pt2
->t
& VT_BTYPE
;
2890 int newquals
, copied
= 0;
2891 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2892 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2893 if (op
!= '?' && op
!= CMP_OP
)
2896 type_incompatibility_warning(type1
, type2
,
2898 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2899 : "pointer type mismatch in comparison('%s' and '%s')");
2902 /* pointers to void get preferred, otherwise the
2903 pointed to types minus qualifs should be compatible */
2904 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2905 /* combine qualifs */
2906 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2907 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2910 /* copy the pointer target symbol */
2911 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2914 pointed_type(&type
)->t
|= newquals
;
2916 /* pointers to incomplete arrays get converted to
2917 pointers to completed ones if possible */
2918 if (pt1
->t
& VT_ARRAY
2919 && pt2
->t
& VT_ARRAY
2920 && pointed_type(&type
)->ref
->c
< 0
2921 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2924 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2926 pointed_type(&type
)->ref
=
2927 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2928 0, pointed_type(&type
)->ref
->c
);
2929 pointed_type(&type
)->ref
->c
=
2930 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2936 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2937 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2940 } else if (is_float(bt1
) || is_float(bt2
)) {
2941 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2942 type
.t
= VT_LDOUBLE
;
2943 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2948 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2949 /* cast to biggest op */
2950 type
.t
= VT_LLONG
| VT_LONG
;
2951 if (bt1
== VT_LLONG
)
2953 if (bt2
== VT_LLONG
)
2955 /* convert to unsigned if it does not fit in a long long */
2956 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2957 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2958 type
.t
|= VT_UNSIGNED
;
2960 /* integer operations */
2961 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2962 /* convert to unsigned if it does not fit in an integer */
2963 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2964 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2965 type
.t
|= VT_UNSIGNED
;
2972 /* generic gen_op: handles types problems */
2973 ST_FUNC
void gen_op(int op
)
2975 int t1
, t2
, bt1
, bt2
, t
;
2976 CType type1
, combtype
;
2979 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2980 op_class
= SHIFT_OP
;
2981 else if (TOK_ISCOND(op
)) /* == != > ... */
2985 t1
= vtop
[-1].type
.t
;
2986 t2
= vtop
[0].type
.t
;
2987 bt1
= t1
& VT_BTYPE
;
2988 bt2
= t2
& VT_BTYPE
;
2990 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2991 if (bt2
== VT_FUNC
) {
2992 mk_pointer(&vtop
->type
);
2995 if (bt1
== VT_FUNC
) {
2997 mk_pointer(&vtop
->type
);
3002 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op_class
)) {
3004 tcc_error("invalid operand types for binary operation");
3005 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3006 /* at least one operand is a pointer */
3007 /* relational op: must be both pointers */
3009 if (op_class
== CMP_OP
)
3011 /* if both pointers, then it must be the '-' op */
3012 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3015 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
3016 vtop
->type
.t
&= ~VT_UNSIGNED
;
3019 vtop
->type
.t
= VT_PTRDIFF_T
;
3023 /* exactly one pointer : must be '+' or '-'. */
3024 if (op
!= '-' && op
!= '+')
3026 /* Put pointer as first operand */
3027 if (bt2
== VT_PTR
) {
3029 t
= t1
, t1
= t2
, t2
= t
;
3033 if (bt2
== VT_LLONG
)
3034 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3037 type1
= vtop
[-1].type
;
3038 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
3040 #ifdef CONFIG_TCC_BCHECK
3041 if (tcc_state
->do_bounds_check
&& !CONST_WANTED
) {
3042 /* if bounded pointers, we generate a special code to
3049 gen_bounded_ptr_add();
3055 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
3056 /* put again type if gen_opic() swaped operands */
3060 /* floats can only be used for a few operations */
3061 if (is_float(combtype
.t
)
3062 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3063 && op_class
!= CMP_OP
) {
3067 t
= t2
= combtype
.t
;
3068 /* special case for shifts and long long: we keep the shift as
3070 if (op_class
== SHIFT_OP
)
3072 /* XXX: currently, some unsigned operations are explicit, so
3073 we modify them here */
3074 if (t
& VT_UNSIGNED
) {
3081 else if (op
== TOK_LT
)
3083 else if (op
== TOK_GT
)
3085 else if (op
== TOK_LE
)
3087 else if (op
== TOK_GE
)
3098 if (op_class
== CMP_OP
) {
3099 /* relational op: the result is an int */
3100 vtop
->type
.t
= VT_INT
;
3105 // Make sure that we have converted to an rvalue:
3106 if (vtop
->r
& VT_LVAL
)
3107 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3110 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3111 #define gen_cvt_itof1 gen_cvt_itof
3113 /* generic itof for unsigned long long case */
3114 static void gen_cvt_itof1(int t
)
3116 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3117 (VT_LLONG
| VT_UNSIGNED
)) {
3120 vpush_helper_func(TOK___floatundisf
);
3121 #if LDOUBLE_SIZE != 8
3122 else if (t
== VT_LDOUBLE
)
3123 vpush_helper_func(TOK___floatundixf
);
3126 vpush_helper_func(TOK___floatundidf
);
3137 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3138 #define gen_cvt_ftoi1 gen_cvt_ftoi
3140 /* generic ftoi for unsigned long long case */
3141 static void gen_cvt_ftoi1(int t
)
3144 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3145 /* not handled natively */
3146 st
= vtop
->type
.t
& VT_BTYPE
;
3148 vpush_helper_func(TOK___fixunssfdi
);
3149 #if LDOUBLE_SIZE != 8
3150 else if (st
== VT_LDOUBLE
)
3151 vpush_helper_func(TOK___fixunsxfdi
);
3154 vpush_helper_func(TOK___fixunsdfdi
);
3165 /* special delayed cast for char/short */
3166 static void force_charshort_cast(void)
3168 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3169 int dbt
= vtop
->type
.t
;
3170 vtop
->r
&= ~VT_MUSTCAST
;
3172 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3176 static void gen_cast_s(int t
)
3184 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3185 static void gen_cast(CType
*type
)
3187 int sbt
, dbt
, sf
, df
, c
;
3188 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3190 /* special delayed cast for char/short */
3191 if (vtop
->r
& VT_MUSTCAST
)
3192 force_charshort_cast();
3194 /* bitfields first get cast to ints */
3195 if (vtop
->type
.t
& VT_BITFIELD
)
3198 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3199 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3207 dbt_bt
= dbt
& VT_BTYPE
;
3208 sbt_bt
= sbt
& VT_BTYPE
;
3209 if (dbt_bt
== VT_VOID
)
3211 if (sbt_bt
== VT_VOID
) {
3213 cast_error(&vtop
->type
, type
);
3216 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3217 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3218 /* don't try to convert to ldouble when cross-compiling
3219 (except when it's '0' which is needed for arm:gen_negf()) */
3220 if (dbt_bt
== VT_LDOUBLE
&& !nocode_wanted
&& (sf
|| vtop
->c
.i
!= 0))
3224 /* constant case: we can do it now */
3225 /* XXX: in ISOC, cannot do it if error in convert */
3226 if (sbt
== VT_FLOAT
)
3227 vtop
->c
.ld
= vtop
->c
.f
;
3228 else if (sbt
== VT_DOUBLE
)
3229 vtop
->c
.ld
= vtop
->c
.d
;
3232 if (sbt_bt
== VT_LLONG
) {
3233 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3234 vtop
->c
.ld
= vtop
->c
.i
;
3236 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3238 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3239 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3241 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3244 if (dbt
== VT_FLOAT
)
3245 vtop
->c
.f
= (float)vtop
->c
.ld
;
3246 else if (dbt
== VT_DOUBLE
)
3247 vtop
->c
.d
= (double)vtop
->c
.ld
;
3248 } else if (sf
&& dbt
== VT_BOOL
) {
3249 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3252 /* the range of [int64_t] is enough to hold the integer part of any float value.
3253 Meanwhile, converting negative double to unsigned integer is UB.
3254 So first convert to [int64_t] here. */
3255 vtop
->c
.i
= (int64_t)vtop
->c
.ld
;
3256 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3258 else if (sbt
& VT_UNSIGNED
)
3259 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3261 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3263 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3265 else if (dbt
== VT_BOOL
)
3266 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3268 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3269 dbt_bt
== VT_SHORT
? 0xffff :
3272 if (!(dbt
& VT_UNSIGNED
))
3273 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3278 } else if (dbt
== VT_BOOL
3279 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3280 == (VT_CONST
| VT_SYM
)) {
3281 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3287 /* cannot generate code for global or static initializers */
3288 if (nocode_wanted
& DATA_ONLY_WANTED
)
3291 /* non constant case: generate code */
3292 if (dbt
== VT_BOOL
) {
3293 gen_test_zero(TOK_NE
);
3299 /* convert from fp to fp */
3302 /* convert int to fp */
3305 /* convert fp to int */
3307 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3310 goto again
; /* may need char/short cast */
3315 ds
= btype_size(dbt_bt
);
3316 ss
= btype_size(sbt_bt
);
3317 if (ds
== 0 || ss
== 0)
3320 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3321 tcc_error("cast to incomplete type");
3323 /* same size and no sign conversion needed */
3324 if (ds
== ss
&& ds
>= 4)
3326 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3327 tcc_warning("cast between pointer and integer of different size");
3328 if (sbt_bt
== VT_PTR
) {
3329 /* put integer type to allow logical operations below */
3330 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3334 /* processor allows { int a = 0, b = *(char*)&a; }
3335 That means that if we cast to less width, we can just
3336 change the type and read it still later. */
3337 #define ALLOW_SUBTYPE_ACCESS 1
3339 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3340 /* value still in memory */
3344 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3346 goto done
; /* no 64bit envolved */
3354 /* generate high word */
3355 if (sbt
& VT_UNSIGNED
) {
3364 } else if (ss
== 8) {
3365 /* from long long: just take low order word */
3373 /* need to convert from 32bit to 64bit */
3374 if (sbt
& VT_UNSIGNED
) {
3375 #if defined(TCC_TARGET_RISCV64)
3376 /* RISC-V keeps 32bit vals in registers sign-extended.
3377 So here we need a zero-extension. */
3386 ss
= ds
, ds
= 4, dbt
= sbt
;
3387 } else if (ss
== 8) {
3388 /* RISC-V keeps 32bit vals in registers sign-extended.
3389 So here we need a sign-extension for signed types and
3390 zero-extension. for unsigned types. */
3391 #if !defined(TCC_TARGET_RISCV64)
3392 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3401 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3407 bits
= (ss
- ds
) * 8;
3408 /* for unsigned, gen_op will convert SAR to SHR */
3409 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3412 vpushi(bits
- trunc
);
3419 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3422 /* return type size as known at compile time. Put alignment at 'a' */
3423 ST_FUNC
int type_size(CType
*type
, int *a
)
3428 bt
= type
->t
& VT_BTYPE
;
3429 if (bt
== VT_STRUCT
) {
3434 } else if (bt
== VT_PTR
) {
3435 if (type
->t
& VT_ARRAY
) {
3438 ts
= type_size(&s
->type
, a
);
3439 if (ts
< 0 && s
->c
< 0)
3446 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3448 return -1; /* incomplete enum */
3449 } else if (bt
== VT_LDOUBLE
) {
3451 return LDOUBLE_SIZE
;
3452 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3453 #if (defined TCC_TARGET_I386 && !defined TCC_TARGET_PE) \
3454 || (defined TCC_TARGET_ARM && !defined TCC_ARM_EABI)
3460 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3463 } else if (bt
== VT_SHORT
) {
3466 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3470 /* char, void, function, _Bool */
3476 /* push type size as known at runtime time on top of value stack. Put
3478 static void vpush_type_size(CType
*type
, int *a
)
3480 if (type
->t
& VT_VLA
) {
3481 type_size(&type
->ref
->type
, a
);
3482 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3484 int size
= type_size(type
, a
);
3486 tcc_error("unknown type size");
3491 /* return the pointed type of t */
3492 static inline CType
*pointed_type(CType
*type
)
3494 return &type
->ref
->type
;
3497 /* modify type so that its it is a pointer to type. */
3498 ST_FUNC
void mk_pointer(CType
*type
)
3501 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3502 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3506 /* return true if type1 and type2 are exactly the same (including
3509 static int is_compatible_types(CType
*type1
, CType
*type2
)
3511 return compare_types(type1
,type2
,0);
3514 /* return true if type1 and type2 are the same (ignoring qualifiers).
3516 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3518 return compare_types(type1
,type2
,1);
3521 static void cast_error(CType
*st
, CType
*dt
)
3523 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3526 /* verify type compatibility to store vtop in 'dt' type */
3527 static void verify_assign_cast(CType
*dt
)
3529 CType
*st
, *type1
, *type2
;
3530 int dbt
, sbt
, qualwarn
, lvl
;
3532 st
= &vtop
->type
; /* source type */
3533 dbt
= dt
->t
& VT_BTYPE
;
3534 sbt
= st
->t
& VT_BTYPE
;
3535 if (dt
->t
& VT_CONSTANT
)
3536 tcc_warning("assignment of read-only location");
3540 tcc_error("assignment to void expression");
3543 /* special cases for pointers */
3544 /* '0' can also be a pointer */
3545 if (is_null_pointer(vtop
))
3547 /* accept implicit pointer to integer cast with warning */
3548 if (is_integer_btype(sbt
)) {
3549 tcc_warning("assignment makes pointer from integer without a cast");
3552 type1
= pointed_type(dt
);
3554 type2
= pointed_type(st
);
3555 else if (sbt
== VT_FUNC
)
3556 type2
= st
; /* a function is implicitly a function pointer */
3559 if (is_compatible_types(type1
, type2
))
3561 for (qualwarn
= lvl
= 0;; ++lvl
) {
3562 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3563 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3565 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3566 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3567 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3569 type1
= pointed_type(type1
);
3570 type2
= pointed_type(type2
);
3572 if (!is_compatible_unqualified_types(type1
, type2
)) {
3573 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3574 /* void * can match anything */
3575 } else if (dbt
== sbt
3576 && is_integer_btype(sbt
& VT_BTYPE
)
3577 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3578 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3579 /* Like GCC don't warn by default for merely changes
3580 in pointer target signedness. Do warn for different
3581 base types, though, in particular for unsigned enums
3582 and signed int targets. */
3584 tcc_warning("assignment from incompatible pointer type");
3589 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3595 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3596 tcc_warning("assignment makes integer from pointer without a cast");
3597 } else if (sbt
== VT_STRUCT
) {
3598 goto case_VT_STRUCT
;
3600 /* XXX: more tests */
3604 if (!is_compatible_unqualified_types(dt
, st
)) {
3612 static void gen_assign_cast(CType
*dt
)
3614 verify_assign_cast(dt
);
3618 /* store vtop in lvalue pushed on stack */
3619 ST_FUNC
void vstore(void)
3621 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3623 ft
= vtop
[-1].type
.t
;
3624 sbt
= vtop
->type
.t
& VT_BTYPE
;
3625 dbt
= ft
& VT_BTYPE
;
3626 verify_assign_cast(&vtop
[-1].type
);
3628 if (sbt
== VT_STRUCT
) {
3629 /* if structure, only generate pointer */
3630 /* structure assignment : generate memcpy */
3631 size
= type_size(&vtop
->type
, &align
);
3632 /* destination, keep on stack() as result */
3634 #ifdef CONFIG_TCC_BCHECK
3635 if (vtop
->r
& VT_MUSTBOUND
)
3636 gbound(); /* check would be wrong after gaddrof() */
3638 vtop
->type
.t
= VT_PTR
;
3642 #ifdef CONFIG_TCC_BCHECK
3643 if (vtop
->r
& VT_MUSTBOUND
)
3646 vtop
->type
.t
= VT_PTR
;
3649 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3651 #ifdef CONFIG_TCC_BCHECK
3652 && !tcc_state
->do_bounds_check
3655 gen_struct_copy(size
);
3661 /* Use memmove, rather than memcpy, as dest and src may be same: */
3664 vpush_helper_func(TOK_memmove8
);
3665 else if(!(align
& 3))
3666 vpush_helper_func(TOK_memmove4
);
3669 vpush_helper_func(TOK_memmove
);
3674 } else if (ft
& VT_BITFIELD
) {
3675 /* bitfield store handling */
3677 /* save lvalue as expression result (example: s.b = s.a = n;) */
3678 vdup(), vtop
[-1] = vtop
[-2];
3680 bit_pos
= BIT_POS(ft
);
3681 bit_size
= BIT_SIZE(ft
);
3682 /* remove bit field info to avoid loops */
3683 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3685 if (dbt
== VT_BOOL
) {
3686 gen_cast(&vtop
[-1].type
);
3687 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3689 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3690 if (dbt
!= VT_BOOL
) {
3691 gen_cast(&vtop
[-1].type
);
3692 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3694 if (r
== VT_STRUCT
) {
3695 store_packed_bf(bit_pos
, bit_size
);
3697 unsigned long long mask
= (1ULL << bit_size
) - 1;
3698 if (dbt
!= VT_BOOL
) {
3700 if (dbt
== VT_LLONG
)
3703 vpushi((unsigned)mask
);
3710 /* duplicate destination */
3713 /* load destination, mask and or with source */
3714 if (dbt
== VT_LLONG
)
3715 vpushll(~(mask
<< bit_pos
));
3717 vpushi(~((unsigned)mask
<< bit_pos
));
3722 /* ... and discard */
3725 } else if (dbt
== VT_VOID
) {
3728 /* optimize char/short casts */
3730 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3731 && is_integer_btype(sbt
)
3733 if ((vtop
->r
& VT_MUSTCAST
)
3734 && btype_size(dbt
) > btype_size(sbt
)
3736 force_charshort_cast();
3739 gen_cast(&vtop
[-1].type
);
3742 #ifdef CONFIG_TCC_BCHECK
3743 /* bound check case */
3744 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3750 gv(RC_TYPE(dbt
)); /* generate value */
3753 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3754 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3755 vtop
->type
.t
= ft
& VT_TYPE
;
3758 /* if lvalue was saved on stack, must read it */
3759 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3761 r
= get_reg(RC_INT
);
3762 sv
.type
.t
= VT_PTRDIFF_T
;
3763 sv
.r
= VT_LOCAL
| VT_LVAL
;
3764 sv
.c
.i
= vtop
[-1].c
.i
;
3766 vtop
[-1].r
= r
| VT_LVAL
;
3769 r
= vtop
->r
& VT_VALMASK
;
3770 /* two word case handling :
3771 store second register at word + 4 (or +8 for x86-64) */
3772 if (USING_TWO_WORDS(dbt
)) {
3773 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3774 vtop
[-1].type
.t
= load_type
;
3777 incr_offset(PTR_SIZE
);
3779 /* XXX: it works because r2 is spilled last ! */
3780 store(vtop
->r2
, vtop
- 1);
3786 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3790 /* post defines POST/PRE add. c is the token ++ or -- */
3791 ST_FUNC
void inc(int post
, int c
)
3794 vdup(); /* save lvalue */
3796 gv_dup(); /* duplicate value */
3801 vpushi(c
- TOK_MID
);
3803 vstore(); /* store value */
3805 vpop(); /* if post op, return saved value */
3808 ST_FUNC CString
* parse_mult_str (const char *msg
)
3810 /* read the string */
3813 cstr_reset(&initstr
);
3814 while (tok
== TOK_STR
) {
3815 /* XXX: add \0 handling too ? */
3816 cstr_cat(&initstr
, tokc
.str
.data
, -1);
3819 cstr_ccat(&initstr
, '\0');
3823 /* If I is >= 1 and a power of two, returns log2(i)+1.
3824 If I is 0 returns 0. */
3825 ST_FUNC
int exact_log2p1(int i
)
3830 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3841 /* Parse __attribute__((...)) GNUC extension. */
3842 static void parse_attribute(AttributeDef
*ad
)
3848 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3853 while (tok
!= ')') {
3854 if (tok
< TOK_IDENT
)
3855 expect("attribute name");
3867 tcc_warning_c(warn_implicit_function_declaration
)(
3868 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3869 s
= external_global_sym(tok
, &func_old_type
);
3870 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3871 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3872 ad
->cleanup_func
= s
;
3877 case TOK_CONSTRUCTOR1
:
3878 case TOK_CONSTRUCTOR2
:
3879 ad
->f
.func_ctor
= 1;
3881 case TOK_DESTRUCTOR1
:
3882 case TOK_DESTRUCTOR2
:
3883 ad
->f
.func_dtor
= 1;
3885 case TOK_ALWAYS_INLINE1
:
3886 case TOK_ALWAYS_INLINE2
:
3887 ad
->f
.func_alwinl
= 1;
3892 astr
= parse_mult_str("section name")->data
;
3893 ad
->section
= find_section(tcc_state
, astr
);
3899 astr
= parse_mult_str("alias(\"target\")")->data
;
3900 /* save string as token, for later */
3901 ad
->alias_target
= tok_alloc_const(astr
);
3904 case TOK_VISIBILITY1
:
3905 case TOK_VISIBILITY2
:
3907 astr
= parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data
;
3908 if (!strcmp (astr
, "default"))
3909 ad
->a
.visibility
= STV_DEFAULT
;
3910 else if (!strcmp (astr
, "hidden"))
3911 ad
->a
.visibility
= STV_HIDDEN
;
3912 else if (!strcmp (astr
, "internal"))
3913 ad
->a
.visibility
= STV_INTERNAL
;
3914 else if (!strcmp (astr
, "protected"))
3915 ad
->a
.visibility
= STV_PROTECTED
;
3917 expect("visibility(\"default|hidden|internal|protected\")");
3925 if (n
<= 0 || (n
& (n
- 1)) != 0)
3926 tcc_error("alignment must be a positive power of two");
3931 ad
->a
.aligned
= exact_log2p1(n
);
3932 if (n
!= 1 << (ad
->a
.aligned
- 1))
3933 tcc_error("alignment of %d is larger than implemented", n
);
3949 /* currently, no need to handle it because tcc does not
3950 track unused objects */
3954 ad
->f
.func_noreturn
= 1;
3959 ad
->f
.func_call
= FUNC_CDECL
;
3964 ad
->f
.func_call
= FUNC_STDCALL
;
3966 #ifdef TCC_TARGET_I386
3976 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3982 ad
->f
.func_call
= FUNC_FASTCALLW
;
3987 ad
->f
.func_call
= FUNC_THISCALL
;
3994 ad
->attr_mode
= VT_LLONG
+ 1;
3997 ad
->attr_mode
= VT_BYTE
+ 1;
4000 ad
->attr_mode
= VT_SHORT
+ 1;
4004 ad
->attr_mode
= VT_INT
+ 1;
4007 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4014 ad
->a
.dllexport
= 1;
4016 case TOK_NODECORATE
:
4017 ad
->a
.nodecorate
= 1;
4020 ad
->a
.dllimport
= 1;
4023 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
4024 /* skip parameters */
4026 int parenthesis
= 0;
4030 else if (tok
== ')')
4033 } while (parenthesis
&& tok
!= -1);
4046 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4049 int v1
= v
| SYM_FIELD
;
4050 if (!(v
& SYM_FIELD
)) { /* top-level call */
4051 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
4052 expect("struct or union");
4054 expect("field name");
4056 tcc_error("dereferencing incomplete type '%s'",
4057 get_tok_str(s
->v
& ~SYM_STRUCT
, 0));
4059 while ((s
= s
->next
) != NULL
) {
4064 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
4065 && s
->v
>= (SYM_FIRST_ANOM
| SYM_FIELD
)) {
4066 /* try to find field in anonymous sub-struct/union */
4067 Sym
*ret
= find_field (&s
->type
, v1
, cumofs
);
4074 if (!(v
& SYM_FIELD
))
4075 tcc_error("field not found: %s", get_tok_str(v
, NULL
));
4079 static void check_fields (CType
*type
, int check
)
4083 while ((s
= s
->next
) != NULL
) {
4084 int v
= s
->v
& ~SYM_FIELD
;
4085 if (v
< SYM_FIRST_ANOM
) {
4086 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4087 if (check
&& (ts
->tok
& SYM_FIELD
))
4088 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
4089 ts
->tok
^= SYM_FIELD
;
4090 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
4091 check_fields (&s
->type
, check
);
4095 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4097 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4098 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4099 int pcc
= !tcc_state
->ms_bitfields
;
4100 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4107 prevbt
= VT_STRUCT
; /* make it never match */
4112 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4113 if (f
->type
.t
& VT_BITFIELD
)
4114 bit_size
= BIT_SIZE(f
->type
.t
);
4117 size
= type_size(&f
->type
, &align
);
4118 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4121 if (pcc
&& bit_size
== 0) {
4122 /* in pcc mode, packing does not affect zero-width bitfields */
4125 /* in pcc mode, attribute packed overrides if set. */
4126 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4129 /* pragma pack overrides align if lesser and packs bitfields always */
4132 if (pragma_pack
< align
)
4133 align
= pragma_pack
;
4134 /* in pcc mode pragma pack also overrides individual align */
4135 if (pcc
&& pragma_pack
< a
)
4139 /* some individual align was specified */
4143 if (type
->ref
->type
.t
== VT_UNION
) {
4144 if (pcc
&& bit_size
>= 0)
4145 size
= (bit_size
+ 7) >> 3;
4150 } else if (bit_size
< 0) {
4152 c
+= (bit_pos
+ 7) >> 3;
4153 c
= (c
+ align
- 1) & -align
;
4162 /* A bit-field. Layout is more complicated. There are two
4163 options: PCC (GCC) compatible and MS compatible */
4165 /* In PCC layout a bit-field is placed adjacent to the
4166 preceding bit-fields, except if:
4168 - an individual alignment was given
4169 - it would overflow its base type container and
4170 there is no packing */
4171 if (bit_size
== 0) {
4173 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4175 } else if (f
->a
.aligned
) {
4177 } else if (!packed
) {
4179 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4180 if (ofs
> size
/ align
)
4184 /* in pcc mode, long long bitfields have type int if they fit */
4185 if (size
== 8 && bit_size
<= 32)
4186 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4188 while (bit_pos
>= align
* 8)
4189 c
+= align
, bit_pos
-= align
* 8;
4192 /* In PCC layout named bit-fields influence the alignment
4193 of the containing struct using the base types alignment,
4194 except for packed fields (which here have correct align). */
4195 if (f
->v
& SYM_FIRST_ANOM
4196 // && bit_size // ??? gcc on ARM/rpi does that
4201 bt
= f
->type
.t
& VT_BTYPE
;
4202 if ((bit_pos
+ bit_size
> size
* 8)
4203 || (bit_size
> 0) == (bt
!= prevbt
)
4205 c
= (c
+ align
- 1) & -align
;
4208 /* In MS bitfield mode a bit-field run always uses
4209 at least as many bits as the underlying type.
4210 To start a new run it's also required that this
4211 or the last bit-field had non-zero width. */
4212 if (bit_size
|| prev_bit_size
)
4215 /* In MS layout the records alignment is normally
4216 influenced by the field, except for a zero-width
4217 field at the start of a run (but by further zero-width
4218 fields it is again). */
4219 if (bit_size
== 0 && prevbt
!= bt
)
4222 prev_bit_size
= bit_size
;
4225 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4226 | (bit_pos
<< VT_STRUCT_SHIFT
);
4227 bit_pos
+= bit_size
;
4229 if (align
> maxalign
)
4233 printf("set field %s offset %-2d size %-2d align %-2d",
4234 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4235 if (f
->type
.t
& VT_BITFIELD
) {
4236 printf(" pos %-2d bits %-2d",
4249 c
+= (bit_pos
+ 7) >> 3;
4251 /* store size and alignment */
4252 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4256 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4257 /* can happen if individual align for some member was given. In
4258 this case MSVC ignores maxalign when aligning the size */
4263 c
= (c
+ a
- 1) & -a
;
4267 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4270 /* check whether we can access bitfields by their type */
4271 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4275 if (0 == (f
->type
.t
& VT_BITFIELD
))
4279 bit_size
= BIT_SIZE(f
->type
.t
);
4282 bit_pos
= BIT_POS(f
->type
.t
);
4283 size
= type_size(&f
->type
, &align
);
4285 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4286 #ifdef TCC_TARGET_ARM
4287 && !(f
->c
& (align
- 1))
4292 /* try to access the field using a different type */
4293 c0
= -1, s
= align
= 1;
4296 px
= f
->c
* 8 + bit_pos
;
4297 cx
= (px
>> 3) & -align
;
4298 px
= px
- (cx
<< 3);
4301 s
= (px
+ bit_size
+ 7) >> 3;
4311 s
= type_size(&t
, &align
);
4315 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4316 #ifdef TCC_TARGET_ARM
4317 && !(cx
& (align
- 1))
4320 /* update offset and bit position */
4323 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4324 | (bit_pos
<< VT_STRUCT_SHIFT
);
4328 printf("FIX field %s offset %-2d size %-2d align %-2d "
4329 "pos %-2d bits %-2d\n",
4330 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4331 cx
, s
, align
, px
, bit_size
);
4334 /* fall back to load/store single-byte wise */
4335 f
->auxtype
= VT_STRUCT
;
4337 printf("FIX field %s : load byte-wise\n",
4338 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4344 static void do_Static_assert(void);
4346 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4347 static void struct_decl(CType
*type
, int u
)
4349 int v
, c
, size
, align
, flexible
;
4350 int bit_size
, bsize
, bt
;
4352 AttributeDef ad
, ad1
;
4355 memset(&ad
, 0, sizeof ad
);
4357 parse_attribute(&ad
);
4361 /* struct already defined ? return it */
4363 expect("struct/union/enum name");
4365 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4368 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4370 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4375 /* Record the original enum/struct/union token. */
4376 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4378 /* we put an undefined size for struct/union */
4379 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4380 s
->r
= 0; /* default alignment is zero as gcc */
4382 type
->t
= s
->type
.t
;
4388 tcc_error("struct/union/enum already defined");
4390 /* cannot be empty */
4391 /* non empty enums are not allowed */
4394 long long ll
= 0, pl
= 0, nl
= 0;
4397 /* enum symbols have static storage */
4398 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4402 expect("identifier");
4404 if (ss
&& !local_stack
)
4405 tcc_error("redefinition of enumerator '%s'",
4406 get_tok_str(v
, NULL
));
4410 ll
= expr_const64();
4412 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4414 *ps
= ss
, ps
= &ss
->next
;
4423 /* NOTE: we accept a trailing comma */
4428 /* set integral type of the enum */
4431 if (pl
!= (unsigned)pl
)
4432 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4434 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4435 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4436 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4438 /* set type for enum members */
4439 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4441 if (ll
== (int)ll
) /* default is int if it fits */
4443 if (t
.t
& VT_UNSIGNED
) {
4444 ss
->type
.t
|= VT_UNSIGNED
;
4445 if (ll
== (unsigned)ll
)
4448 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4449 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4454 while (tok
!= '}') {
4455 if (!parse_btype(&btype
, &ad1
, 0)) {
4456 if (tok
== TOK_STATIC_ASSERT
) {
4465 tcc_error("flexible array member '%s' not at the end of struct",
4466 get_tok_str(v
, NULL
));
4472 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4474 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4475 expect("identifier");
4477 int v
= btype
.ref
->v
;
4478 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4479 if (tcc_state
->ms_extensions
== 0)
4480 expect("identifier");
4484 if (type_size(&type1
, &align
) < 0) {
4485 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4488 tcc_error("field '%s' has incomplete type",
4489 get_tok_str(v
, NULL
));
4491 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4492 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4493 (type1
.t
& VT_STORAGE
))
4494 tcc_error("invalid type for '%s'",
4495 get_tok_str(v
, NULL
));
4499 bit_size
= expr_const();
4500 /* XXX: handle v = 0 case for messages */
4502 tcc_error("negative width in bit-field '%s'",
4503 get_tok_str(v
, NULL
));
4504 if (v
&& bit_size
== 0)
4505 tcc_error("zero width for bit-field '%s'",
4506 get_tok_str(v
, NULL
));
4507 parse_attribute(&ad1
);
4509 size
= type_size(&type1
, &align
);
4510 if (bit_size
>= 0) {
4511 bt
= type1
.t
& VT_BTYPE
;
4517 tcc_error("bitfields must have scalar type");
4519 if (bit_size
> bsize
) {
4520 tcc_error("width of '%s' exceeds its type",
4521 get_tok_str(v
, NULL
));
4522 } else if (bit_size
== bsize
4523 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4524 /* no need for bit fields */
4526 } else if (bit_size
== 64) {
4527 tcc_error("field width 64 not implemented");
4529 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4531 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4534 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4535 /* Remember we've seen a real field to check
4536 for placement of flexible array member. */
4539 /* If member is a struct or bit-field, enforce
4540 placing into the struct (as anonymous). */
4542 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4547 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4552 if (tok
== ';' || tok
== TOK_EOF
)
4559 parse_attribute(&ad
);
4560 if (ad
.cleanup_func
) {
4561 tcc_warning("attribute '__cleanup__' ignored on type");
4563 check_fields(type
, 1);
4564 check_fields(type
, 0);
4565 struct_layout(type
, &ad
);
4567 tcc_debug_fix_anon(tcc_state
, type
);
4572 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4574 merge_symattr(&ad
->a
, &s
->a
);
4575 merge_funcattr(&ad
->f
, &s
->f
);
4578 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4579 are added to the element type, copied because it could be a typedef. */
4580 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4582 while (type
->t
& VT_ARRAY
) {
4583 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4584 type
= &type
->ref
->type
;
4586 type
->t
|= qualifiers
;
4589 /* return 0 if no type declaration. otherwise, return the basic type
4592 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4594 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4598 memset(ad
, 0, sizeof(AttributeDef
));
4608 /* currently, we really ignore extension */
4618 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4619 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4620 tmbt
: tcc_error("too many basic types");
4623 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4628 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4645 memset(&ad1
, 0, sizeof(AttributeDef
));
4646 if (parse_btype(&type1
, &ad1
, 0)) {
4647 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4649 n
= 1 << (ad1
.a
.aligned
- 1);
4651 type_size(&type1
, &n
);
4654 if (n
< 0 || (n
& (n
- 1)) != 0)
4655 tcc_error("alignment must be a positive power of two");
4658 ad
->a
.aligned
= exact_log2p1(n
);
4662 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4663 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4664 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4665 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4672 #ifdef TCC_TARGET_ARM64
4674 /* GCC's __uint128_t appears in some Linux header files. Make it a
4675 synonym for long double to get the size and alignment right. */
4683 tcc_error("_Complex is not yet supported");
4688 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4689 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4697 struct_decl(&type1
, VT_ENUM
);
4700 type
->ref
= type1
.ref
;
4703 struct_decl(&type1
, VT_STRUCT
);
4706 struct_decl(&type1
, VT_UNION
);
4709 /* type modifiers */
4713 parse_btype_qualify(type
, VT_ATOMIC
);
4716 parse_expr_type(&type1
);
4717 /* remove all storage modifiers except typedef */
4718 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4720 sym_to_attr(ad
, type1
.ref
);
4728 parse_btype_qualify(type
, VT_CONSTANT
);
4736 parse_btype_qualify(type
, VT_VOLATILE
);
4743 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4744 tcc_error("signed and unsigned modifier");
4757 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4758 tcc_error("signed and unsigned modifier");
4759 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4775 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4776 tcc_error("multiple storage classes");
4788 ad
->f
.func_noreturn
= 1;
4790 /* GNUC attribute */
4791 case TOK_ATTRIBUTE1
:
4792 case TOK_ATTRIBUTE2
:
4793 parse_attribute(ad
);
4794 if (ad
->attr_mode
) {
4795 u
= ad
->attr_mode
-1;
4796 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4804 parse_expr_type(&type1
);
4805 /* remove all storage modifiers except typedef */
4806 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4808 sym_to_attr(ad
, type1
.ref
);
4810 case TOK_THREAD_LOCAL
:
4811 tcc_error("_Thread_local is not implemented");
4816 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4820 if (tok
== ':' && ignore_label
) {
4821 /* ignore if it's a label */
4826 t
&= ~(VT_BTYPE
|VT_LONG
);
4827 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4828 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4829 type
->ref
= s
->type
.ref
;
4831 parse_btype_qualify(type
, t
);
4833 /* get attributes from typedef */
4842 if (tcc_state
->char_is_unsigned
) {
4843 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4846 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4847 bt
= t
& (VT_BTYPE
|VT_LONG
);
4849 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4850 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4851 if (bt
== VT_LDOUBLE
)
4852 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4858 /* convert a function parameter type (array to pointer and function to
4859 function pointer) */
4860 static inline void convert_parameter_type(CType
*pt
)
4862 /* remove const and volatile qualifiers (XXX: const could be used
4863 to indicate a const function parameter */
4864 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4865 /* array must be transformed to pointer according to ANSI C */
4866 pt
->t
&= ~(VT_ARRAY
| VT_VLA
);
4867 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4872 ST_FUNC CString
* parse_asm_str(void)
4875 return parse_mult_str("string constant");
4878 /* Parse an asm label and return the token */
4879 static int asm_label_instr(void)
4885 astr
= parse_asm_str()->data
;
4888 printf("asm_alias: \"%s\"\n", astr
);
4890 v
= tok_alloc_const(astr
);
4894 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4896 int n
, l
, t1
, arg_size
, align
;
4897 Sym
**plast
, *s
, *first
;
4900 TokenString
*vla_array_tok
= NULL
;
4901 int *vla_array_str
= NULL
;
4904 /* function type, or recursive declarator (return if so) */
4906 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4910 else if (parse_btype(&pt
, &ad1
, 0))
4912 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4913 merge_attr (ad
, &ad1
);
4924 /* read param name and compute offset */
4925 if (l
!= FUNC_OLD
) {
4926 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4928 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4929 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4930 tcc_error("parameter declared as void");
4935 pt
.t
= VT_VOID
; /* invalid type */
4940 expect("identifier");
4941 convert_parameter_type(&pt
);
4942 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4943 /* these symbols may be evaluated for VLArrays (see below, under
4944 nocode_wanted) which is why we push them here as normal symbols
4945 temporarily. Example: int func(int a, int b[++a]); */
4946 s
= sym_push(n
, &pt
, VT_LOCAL
|VT_LVAL
, 0);
4952 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4957 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4958 tcc_error("invalid type");
4961 /* if no parameters, then old type prototype */
4964 /* remove parameter symbols from token table, keep on stack */
4966 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4967 for (s
= first
; s
; s
= s
->next
)
4971 /* NOTE: const is ignored in returned type as it has a special
4972 meaning in gcc / C++ */
4973 type
->t
&= ~VT_CONSTANT
;
4974 /* some ancient pre-K&R C allows a function to return an array
4975 and the array brackets to be put after the arguments, such
4976 that "int c()[]" means something like "int[] c()" */
4979 skip(']'); /* only handle simple "[]" */
4982 /* we push a anonymous symbol which will contain the function prototype */
4983 ad
->f
.func_args
= arg_size
;
4984 ad
->f
.func_type
= l
;
4985 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4991 } else if (tok
== '[') {
4992 int saved_nocode_wanted
= nocode_wanted
;
4993 /* array definition */
4997 if (td
& TYPE_PARAM
) while (1) {
4998 /* XXX The optional type-quals and static should only be accepted
4999 in parameter decls. The '*' as well, and then even only
5000 in prototypes (not function defs). */
5002 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5013 /* Code generation is not done now but has to be done
5014 at start of function. Save code here for later use. */
5016 skip_or_save_block(&vla_array_tok
);
5018 vla_array_str
= vla_array_tok
->str
;
5019 begin_macro(vla_array_tok
, 2);
5028 } else if (tok
!= ']') {
5029 if (!local_stack
|| (storage
& VT_STATIC
))
5030 vpushi(expr_const());
5032 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5033 length must always be evaluated, even under nocode_wanted,
5034 so that its size slot is initialized (e.g. under sizeof
5040 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5043 tcc_error("invalid array size");
5045 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5046 tcc_error("size of variable length array should be an integer");
5052 /* parse next post type */
5053 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
5055 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5056 tcc_error("declaration of an array of functions");
5057 if ((type
->t
& VT_BTYPE
) == VT_VOID
5058 || type_size(type
, &align
) < 0)
5059 tcc_error("declaration of an array of incomplete type elements");
5061 t1
|= type
->t
& VT_VLA
;
5066 tcc_error("need explicit inner array size in VLAs");
5069 loc
-= type_size(&int_type
, &align
);
5073 vpush_type_size(type
, &align
);
5075 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5082 nocode_wanted
= saved_nocode_wanted
;
5084 /* we push an anonymous symbol which will contain the array
5086 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5087 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5090 if (vla_array_str
) {
5091 /* for function args, the top dimension is converted to pointer */
5092 if ((t1
& VT_VLA
) && (td
& TYPE_NEST
))
5093 s
->vla_array_str
= vla_array_str
;
5095 tok_str_free_str(vla_array_str
);
5101 /* Parse a type declarator (except basic type), and return the type
5102 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5103 expected. 'type' should contain the basic type. 'ad' is the
5104 attribute definition of the basic type. It can be modified by
5105 type_decl(). If this (possibly abstract) declarator is a pointer chain
5106 it returns the innermost pointed to type (equals *type, but is a different
5107 pointer), otherwise returns type itself, that's used for recursive calls. */
5108 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5111 int qualifiers
, storage
;
5113 /* recursive type, remove storage bits first, apply them later again */
5114 storage
= type
->t
& VT_STORAGE
;
5115 type
->t
&= ~VT_STORAGE
;
5118 while (tok
== '*') {
5124 qualifiers
|= VT_ATOMIC
;
5129 qualifiers
|= VT_CONSTANT
;
5134 qualifiers
|= VT_VOLATILE
;
5140 /* XXX: clarify attribute handling */
5141 case TOK_ATTRIBUTE1
:
5142 case TOK_ATTRIBUTE2
:
5143 parse_attribute(ad
);
5147 type
->t
|= qualifiers
;
5149 /* innermost pointed to type is the one for the first derivation */
5150 ret
= pointed_type(type
);
5154 /* This is possibly a parameter type list for abstract declarators
5155 ('int ()'), use post_type for testing this. */
5156 if (!post_type(type
, ad
, 0, td
)) {
5157 /* It's not, so it's a nested declarator, and the post operations
5158 apply to the innermost pointed to type (if any). */
5159 /* XXX: this is not correct to modify 'ad' at this point, but
5160 the syntax is not clear */
5161 parse_attribute(ad
);
5162 post
= type_decl(type
, ad
, v
, td
);
5166 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5167 /* type identifier */
5172 if (!(td
& TYPE_ABSTRACT
))
5173 expect("identifier");
5176 post_type(post
, ad
, post
!= ret
? 0 : storage
,
5177 td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5178 parse_attribute(ad
);
5183 /* indirection with full error checking and bound check */
5184 ST_FUNC
void indir(void)
5186 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5187 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5191 if (vtop
->r
& VT_LVAL
)
5193 vtop
->type
= *pointed_type(&vtop
->type
);
5194 /* Arrays and functions are never lvalues */
5195 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5196 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5198 /* if bound checking, the referenced pointer must be checked */
5199 #ifdef CONFIG_TCC_BCHECK
5200 if (tcc_state
->do_bounds_check
)
5201 vtop
->r
|= VT_MUSTBOUND
;
5206 /* pass a parameter to a function and do type checking and casting */
5207 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5212 func_type
= func
->f
.func_type
;
5213 if (func_type
== FUNC_OLD
||
5214 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5215 /* default casting : only need to convert float to double */
5216 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5217 gen_cast_s(VT_DOUBLE
);
5218 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5219 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5220 type
.ref
= vtop
->type
.ref
;
5222 } else if (vtop
->r
& VT_MUSTCAST
) {
5223 force_charshort_cast();
5225 } else if (arg
== NULL
) {
5226 tcc_error("too many arguments to function");
5229 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5230 gen_assign_cast(&type
);
5234 /* parse an expression and return its type without any side effect. */
5235 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5244 /* parse an expression of the form '(type)' or '(expr)' and return its
5246 static void parse_expr_type(CType
*type
)
5252 if (parse_btype(type
, &ad
, 0)) {
5253 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5255 expr_type(type
, gexpr
);
5260 static void parse_type(CType
*type
)
5265 if (!parse_btype(type
, &ad
, 0)) {
5268 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5271 static void parse_builtin_params(int nc
, const char *args
)
5280 while ((c
= *args
++)) {
5295 type
.t
= VT_CONSTANT
;
5301 type
.t
= VT_CONSTANT
;
5303 type
.t
|= char_type
.t
;
5315 gen_assign_cast(&type
);
5322 static void parse_atomic(int atok
)
5324 int size
, align
, arg
, t
, save
= 0;
5325 CType
*atom
, *atom_ptr
, ct
= {0};
5328 static const char *const templates
[] = {
5330 * Each entry consists of callback and function template.
5331 * The template represents argument types and return type.
5333 * ? void (return-only)
5336 * A read-only atomic
5337 * p pointer to memory
5344 /* keep in order of appearance in tcctok.h: */
5345 /* __atomic_store */ "alm.?",
5346 /* __atomic_load */ "Asm.v",
5347 /* __atomic_exchange */ "alsm.v",
5348 /* __atomic_compare_exchange */ "aplbmm.b",
5349 /* __atomic_fetch_add */ "avm.v",
5350 /* __atomic_fetch_sub */ "avm.v",
5351 /* __atomic_fetch_or */ "avm.v",
5352 /* __atomic_fetch_xor */ "avm.v",
5353 /* __atomic_fetch_and */ "avm.v",
5354 /* __atomic_fetch_nand */ "avm.v",
5355 /* __atomic_and_fetch */ "avm.v",
5356 /* __atomic_sub_fetch */ "avm.v",
5357 /* __atomic_or_fetch */ "avm.v",
5358 /* __atomic_xor_fetch */ "avm.v",
5359 /* __atomic_and_fetch */ "avm.v",
5360 /* __atomic_nand_fetch */ "avm.v"
5362 const char *template = templates
[(atok
- TOK___atomic_store
)];
5364 atom
= atom_ptr
= NULL
;
5365 size
= 0; /* pacify compiler */
5370 switch (template[arg
]) {
5373 atom_ptr
= &vtop
->type
;
5374 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5376 atom
= pointed_type(atom_ptr
);
5377 size
= type_size(atom
, &align
);
5379 || (size
& (size
- 1))
5380 || (atok
> TOK___atomic_compare_exchange
5381 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5382 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5383 expect("integral or integer-sized pointer target type");
5384 /* GCC does not care either: */
5385 /* if (!(atom->t & VT_ATOMIC))
5386 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5390 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5391 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5392 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5393 gen_assign_cast(atom_ptr
);
5396 gen_assign_cast(atom
);
5400 gen_assign_cast(atom
);
5409 gen_assign_cast(&int_type
);
5413 gen_assign_cast(&ct
);
5416 if ('.' == template[++arg
])
5423 switch (template[arg
+ 1]) {
5432 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5433 vpush_helper_func(tok_alloc_const(buf
));
5434 vrott(arg
- save
+ 1);
5435 gfunc_call(arg
- save
);
5438 PUT_R_RET(vtop
, ct
.t
);
5439 t
= ct
.t
& VT_BTYPE
;
5440 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5442 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5444 vtop
->type
.t
= VT_INT
;
5456 ST_FUNC
void unary(void)
5458 int n
, t
, align
, size
, r
;
5463 /* generate line number info */
5465 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5468 /* XXX: GCC 2.95.3 does not generate a table although it should be
5476 #ifdef TCC_TARGET_PE
5477 t
= VT_SHORT
|VT_UNSIGNED
;
5485 vsetc(&type
, VT_CONST
, &tokc
);
5489 t
= VT_INT
| VT_UNSIGNED
;
5495 t
= VT_LLONG
| VT_UNSIGNED
;
5504 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5505 t
= VT_DOUBLE
| VT_LONG
;
5511 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5514 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5516 case TOK___FUNCTION__
:
5518 goto tok_identifier
;
5522 cstr_reset(&tokcstr
);
5523 cstr_cat(&tokcstr
, funcname
, 0);
5524 tokc
.str
.size
= tokcstr
.size
;
5525 tokc
.str
.data
= tokcstr
.data
;
5528 #ifdef TCC_TARGET_PE
5529 t
= VT_SHORT
| VT_UNSIGNED
;
5536 /* string parsing */
5539 if (tcc_state
->warn_write_strings
& WARN_ON
)
5544 memset(&ad
, 0, sizeof(AttributeDef
));
5545 ad
.section
= rodata_section
;
5546 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5553 if (parse_btype(&type
, &ad
, 0)) {
5554 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5556 /* check ISOC99 compound literal */
5558 /* data is allocated locally by default */
5563 /* all except arrays are lvalues */
5564 if (!(type
.t
& VT_ARRAY
))
5566 memset(&ad
, 0, sizeof(AttributeDef
));
5567 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5568 } else if (t
== TOK_SOTYPE
) { /* from sizeof/alignof (...) */
5575 } else if (tok
== '{') {
5576 int saved_nocode_wanted
= nocode_wanted
;
5577 if (CONST_WANTED
&& !NOEVAL_WANTED
)
5579 if (0 == local_scope
)
5580 tcc_error("statement expression outside of function");
5581 /* save all registers */
5583 /* statement expression : we do not accept break/continue
5584 inside as GCC does. We do retain the nocode_wanted state,
5585 as statement expressions can't ever be entered from the
5586 outside, so any reactivation of code emission (from labels
5587 or loop heads) can be disabled again after the end of it. */
5589 /* If the statement expr can be entered, then we retain the current
5590 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5591 If it can't be entered then the state is that from before the
5592 statement expression. */
5593 if (saved_nocode_wanted
)
5594 nocode_wanted
= saved_nocode_wanted
;
5609 /* functions names must be treated as function pointers,
5610 except for unary '&' and sizeof. Since we consider that
5611 functions are not lvalues, we only have to handle it
5612 there and in function calls. */
5613 /* arrays can also be used although they are not lvalues */
5614 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5615 !(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
)))
5618 vtop
->sym
->a
.addrtaken
= 1;
5619 mk_pointer(&vtop
->type
);
5625 gen_test_zero(TOK_EQ
);
5636 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5637 tcc_error("pointer not accepted for unary plus");
5638 /* In order to force cast, we add zero, except for floating point
5639 where we really need an noop (otherwise -0.0 will be transformed
5641 if (!is_float(vtop
->type
.t
)) {
5654 expr_type(&type
, unary
);
5655 if (t
== TOK_SIZEOF
) {
5656 vpush_type_size(&type
, &align
);
5657 gen_cast_s(VT_SIZE_T
);
5659 type_size(&type
, &align
);
5661 if (vtop
[1].r
& VT_SYM
)
5662 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5663 if (s
&& s
->a
.aligned
)
5664 align
= 1 << (s
->a
.aligned
- 1);
5669 case TOK_builtin_expect
:
5670 /* __builtin_expect is a no-op for now */
5671 parse_builtin_params(0, "ee");
5674 case TOK_builtin_types_compatible_p
:
5675 parse_builtin_params(0, "tt");
5676 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5677 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5678 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5682 case TOK_builtin_choose_expr
:
5709 case TOK_builtin_constant_p
:
5710 parse_builtin_params(1, "e");
5712 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5713 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
)
5719 case TOK_builtin_frame_address
:
5720 case TOK_builtin_return_address
:
5726 level
= expr_const();
5728 tcc_error("%s only takes positive integers", get_tok_str(tok1
, 0));
5732 vset(&type
, VT_LOCAL
, 0); /* local frame */
5734 #ifdef TCC_TARGET_RISCV64
5738 mk_pointer(&vtop
->type
);
5739 indir(); /* -> parent frame */
5741 if (tok1
== TOK_builtin_return_address
) {
5742 // assume return address is just above frame pointer on stack
5743 #ifdef TCC_TARGET_ARM
5746 #elif defined TCC_TARGET_RISCV64
5753 mk_pointer(&vtop
->type
);
5758 #ifdef TCC_TARGET_RISCV64
5759 case TOK_builtin_va_start
:
5760 parse_builtin_params(0, "ee");
5761 r
= vtop
->r
& VT_VALMASK
;
5765 tcc_error("__builtin_va_start expects a local variable");
5770 #ifdef TCC_TARGET_X86_64
5771 #ifdef TCC_TARGET_PE
5772 case TOK_builtin_va_start
:
5773 parse_builtin_params(0, "ee");
5774 r
= vtop
->r
& VT_VALMASK
;
5778 tcc_error("__builtin_va_start expects a local variable");
5780 vtop
->type
= char_pointer_type
;
5785 case TOK_builtin_va_arg_types
:
5786 parse_builtin_params(0, "t");
5787 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5794 #ifdef TCC_TARGET_ARM64
5795 case TOK_builtin_va_start
: {
5796 parse_builtin_params(0, "ee");
5800 vtop
->type
.t
= VT_VOID
;
5803 case TOK_builtin_va_arg
: {
5804 parse_builtin_params(0, "et");
5812 case TOK___arm64_clear_cache
: {
5813 parse_builtin_params(0, "ee");
5816 vtop
->type
.t
= VT_VOID
;
5821 /* atomic operations */
5822 case TOK___atomic_store
:
5823 case TOK___atomic_load
:
5824 case TOK___atomic_exchange
:
5825 case TOK___atomic_compare_exchange
:
5826 case TOK___atomic_fetch_add
:
5827 case TOK___atomic_fetch_sub
:
5828 case TOK___atomic_fetch_or
:
5829 case TOK___atomic_fetch_xor
:
5830 case TOK___atomic_fetch_and
:
5831 case TOK___atomic_fetch_nand
:
5832 case TOK___atomic_add_fetch
:
5833 case TOK___atomic_sub_fetch
:
5834 case TOK___atomic_or_fetch
:
5835 case TOK___atomic_xor_fetch
:
5836 case TOK___atomic_and_fetch
:
5837 case TOK___atomic_nand_fetch
:
5841 /* pre operations */
5852 if (is_float(vtop
->type
.t
)) {
5862 goto tok_identifier
;
5864 /* allow to take the address of a label */
5865 if (tok
< TOK_UIDENT
)
5866 expect("label identifier");
5867 s
= label_find(tok
);
5869 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5871 if (s
->r
== LABEL_DECLARED
)
5872 s
->r
= LABEL_FORWARD
;
5874 if ((s
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5875 s
->type
.t
= VT_VOID
;
5876 mk_pointer(&s
->type
);
5877 s
->type
.t
|= VT_STATIC
;
5879 vpushsym(&s
->type
, s
);
5885 CType controlling_type
;
5886 int has_default
= 0;
5889 TokenString
*str
= NULL
;
5890 int saved_nocode_wanted
= nocode_wanted
;
5891 nocode_wanted
&= ~CONST_WANTED_MASK
;
5895 expr_type(&controlling_type
, expr_eq
);
5896 convert_parameter_type (&controlling_type
);
5898 nocode_wanted
= saved_nocode_wanted
;
5903 if (tok
== TOK_DEFAULT
) {
5905 tcc_error("too many 'default'");
5911 AttributeDef ad_tmp
;
5915 parse_btype(&cur_type
, &ad_tmp
, 0);
5916 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5917 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5919 tcc_error("type match twice");
5929 skip_or_save_block(&str
);
5931 skip_or_save_block(NULL
);
5938 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5939 tcc_error("type '%s' does not match any association", buf
);
5941 begin_macro(str
, 1);
5950 // special qnan , snan and infinity values
5955 vtop
->type
.t
= VT_FLOAT
;
5960 goto special_math_val
;
5963 goto special_math_val
;
5967 if (tok
< TOK_UIDENT
)
5968 tcc_error("expression expected before '%s'", get_tok_str(tok
, &tokc
));
5972 if (!s
|| IS_ASM_SYM(s
)) {
5973 const char *name
= get_tok_str(t
, NULL
);
5975 tcc_error("'%s' undeclared", name
);
5976 /* for simple function calls, we tolerate undeclared
5977 external reference to int() function */
5978 tcc_warning_c(warn_implicit_function_declaration
)(
5979 "implicit declaration of function '%s'", name
);
5980 s
= external_global_sym(t
, &func_old_type
);
5984 /* A symbol that has a register is a local register variable,
5985 which starts out as VT_LOCAL value. */
5986 if ((r
& VT_VALMASK
) < VT_CONST
)
5987 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5989 vset(&s
->type
, r
, s
->c
);
5990 /* Point to s as backpointer (even without r&VT_SYM).
5991 Will be used by at least the x86 inline asm parser for
5997 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5998 vtop
->c
.i
= s
->enum_val
;
6003 /* post operations */
6005 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6008 } else if (tok
== '.' || tok
== TOK_ARROW
) {
6009 int qualifiers
, cumofs
;
6011 if (tok
== TOK_ARROW
)
6013 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6015 /* expect pointer on structure */
6017 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6018 /* add field offset to pointer */
6020 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6023 /* change type to field type, and set to lvalue */
6024 vtop
->type
= s
->type
;
6025 vtop
->type
.t
|= qualifiers
;
6026 /* an array is never an lvalue */
6027 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6029 #ifdef CONFIG_TCC_BCHECK
6030 /* if bound checking, the referenced pointer must be checked */
6031 if (tcc_state
->do_bounds_check
)
6032 vtop
->r
|= VT_MUSTBOUND
;
6036 } else if (tok
== '[') {
6042 } else if (tok
== '(') {
6045 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6048 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6049 /* pointer test (no array accepted) */
6050 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6051 vtop
->type
= *pointed_type(&vtop
->type
);
6052 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6056 expect("function pointer");
6059 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6061 /* get return type */
6064 sa
= s
->next
; /* first parameter */
6065 nb_args
= regsize
= 0;
6067 /* compute first implicit argument if a structure is returned */
6068 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6069 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6070 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6071 &ret_align
, ®size
);
6072 if (ret_nregs
<= 0) {
6073 /* get some space for the returned structure */
6074 size
= type_size(&s
->type
, &align
);
6075 #ifdef TCC_TARGET_ARM64
6076 /* On arm64, a small struct is return in registers.
6077 It is much easier to write it to memory if we know
6078 that we are allowed to write some extra bytes, so
6079 round the allocated space up to a power of 2: */
6081 while (size
& (size
- 1))
6082 size
= (size
| (size
- 1)) + 1;
6084 loc
= (loc
- size
) & -align
;
6086 ret
.r
= VT_LOCAL
| VT_LVAL
;
6087 /* pass it as 'int' to avoid structure arg passing
6089 vseti(VT_LOCAL
, loc
);
6090 #ifdef CONFIG_TCC_BCHECK
6091 if (tcc_state
->do_bounds_check
)
6105 if (ret_nregs
> 0) {
6106 /* return in register */
6108 PUT_R_RET(&ret
, ret
.type
.t
);
6113 gfunc_param_typed(s
, sa
);
6123 tcc_error("too few arguments to function");
6125 gfunc_call(nb_args
);
6127 if (ret_nregs
< 0) {
6128 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6129 #ifdef TCC_TARGET_RISCV64
6130 arch_transfer_ret_regs(1);
6136 int rc
= reg_classes
[ret
.r
] & ~(RC_INT
| RC_FLOAT
);
6137 /* We assume that when a structure is returned in multiple
6138 registers, their classes are consecutive values of the
6141 for (r
= 0; r
< NB_REGS
; ++r
)
6142 if (reg_classes
[r
] & rc
)
6144 vsetc(&ret
.type
, r
, &ret
.c
);
6146 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6149 /* handle packed struct return */
6150 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6153 size
= type_size(&s
->type
, &align
);
6154 /* We're writing whole regs often, make sure there's enough
6155 space. Assume register size is power of 2. */
6156 size
= (size
+ regsize
- 1) & -regsize
;
6157 if (ret_align
> align
)
6159 loc
= (loc
- size
) & -align
;
6163 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6167 if (--ret_nregs
== 0)
6171 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6174 /* Promote char/short return values. This is matters only
6175 for calling function that were not compiled by TCC and
6176 only on some architectures. For those where it doesn't
6177 matter we expect things to be already promoted to int,
6179 t
= s
->type
.t
& VT_BTYPE
;
6180 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6182 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6184 vtop
->type
.t
= VT_INT
;
6188 if (s
->f
.func_noreturn
) {
6190 tcc_tcov_block_end(tcc_state
, -1);
6199 #ifndef precedence_parser /* original top-down parser */
6201 static void expr_prod(void)
6206 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6213 static void expr_sum(void)
6218 while ((t
= tok
) == '+' || t
== '-') {
6225 static void expr_shift(void)
6230 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6237 static void expr_cmp(void)
6242 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6243 t
== TOK_ULT
|| t
== TOK_UGE
) {
6250 static void expr_cmpeq(void)
6255 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6262 static void expr_and(void)
6265 while (tok
== '&') {
6272 static void expr_xor(void)
6275 while (tok
== '^') {
6282 static void expr_or(void)
6285 while (tok
== '|') {
6292 static void expr_landor(int op
);
6294 static void expr_land(void)
6297 if (tok
== TOK_LAND
)
6301 static void expr_lor(void)
6308 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6309 #else /* defined precedence_parser */
6310 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6311 # define expr_lor() unary(), expr_infix(1)
6313 static int precedence(int tok
)
6316 case TOK_LOR
: return 1;
6317 case TOK_LAND
: return 2;
6321 case TOK_EQ
: case TOK_NE
: return 6;
6322 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6323 case TOK_SHL
: case TOK_SAR
: return 8;
6324 case '+': case '-': return 9;
6325 case '*': case '/': case '%': return 10;
6327 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6332 static unsigned char prec
[256];
6333 static void init_prec(void)
6336 for (i
= 0; i
< 256; i
++)
6337 prec
[i
] = precedence(i
);
6339 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6341 static void expr_landor(int op
);
6343 static void expr_infix(int p
)
6346 while ((p2
= precedence(t
)) >= p
) {
6347 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6352 if (precedence(tok
) > p2
)
6361 /* Assuming vtop is a value used in a conditional context
6362 (i.e. compared with zero) return 0 if it's false, 1 if
6363 true and -1 if it can't be statically determined. */
6364 static int condition_3way(void)
6367 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6368 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6370 gen_cast_s(VT_BOOL
);
6377 static void expr_landor(int op
)
6379 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6381 c
= f
? i
: condition_3way();
6383 save_regs(1), cc
= 0;
6385 nocode_wanted
++, f
= 1;
6393 expr_landor_next(op
);
6405 static int is_cond_bool(SValue
*sv
)
6407 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6408 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6409 return (unsigned)sv
->c
.i
< 2;
6410 if (sv
->r
== VT_CMP
)
6415 static void expr_cond(void)
6417 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6424 c
= condition_3way();
6425 g
= (tok
== ':' && gnu_ext
);
6435 /* needed to avoid having different registers saved in
6447 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6448 mk_pointer(&vtop
->type
);
6449 sv
= *vtop
; /* save value to handle it later */
6450 vtop
--; /* no vpop so that FP stack is not flushed */
6467 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6468 mk_pointer(&vtop
->type
);
6470 /* cast operands to correct type according to ISOC rules */
6471 if (!combine_types(&type
, &sv
, vtop
, '?'))
6472 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6473 "type mismatch in conditional expression (have '%s' and '%s')");
6475 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6476 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6477 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6478 this code jumps directly to the if's then/else branches. */
6483 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6487 // tcc_warning("two conditions expr_cond");
6491 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6492 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6493 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6495 /* now we convert second operand */
6499 mk_pointer(&vtop
->type
);
6501 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6505 rc
= RC_TYPE(type
.t
);
6506 /* for long longs, we use fixed registers to avoid having
6507 to handle a complicated move */
6508 if (USING_TWO_WORDS(type
.t
))
6509 rc
= RC_RET(type
.t
);
6520 /* this is horrible, but we must also convert first
6526 mk_pointer(&vtop
->type
);
6528 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6534 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6544 static void expr_eq(void)
6549 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6557 gen_op(TOK_ASSIGN_OP(t
));
6563 ST_FUNC
void gexpr(void)
6571 } while (tok
== ',');
6573 /* convert array & function to pointer */
6574 convert_parameter_type(&vtop
->type
);
6576 /* make builtin_constant_p((1,2)) return 0 (like on gcc) */
6577 if ((vtop
->r
& VT_VALMASK
) == VT_CONST
&& nocode_wanted
&& !CONST_WANTED
)
6578 gv(RC_TYPE(vtop
->type
.t
));
6582 /* parse a constant expression and return value in vtop. */
6583 static void expr_const1(void)
6585 nocode_wanted
+= CONST_WANTED_BIT
;
6587 nocode_wanted
-= CONST_WANTED_BIT
;
6590 /* parse an integer constant and return its value. */
6591 static inline int64_t expr_const64(void)
6595 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
6596 expect("constant expression");
6602 /* parse an integer constant and return its value.
6603 Complain if it doesn't fit 32bit (signed or unsigned). */
6604 ST_FUNC
int expr_const(void)
6607 int64_t wc
= expr_const64();
6609 if (c
!= wc
&& (unsigned)c
!= wc
)
6610 tcc_error("constant exceeds 32 bit");
6614 /* ------------------------------------------------------------------------- */
6615 /* return from function */
6617 #ifndef TCC_TARGET_ARM64
6618 static void gfunc_return(CType
*func_type
)
6620 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6621 CType type
, ret_type
;
6622 int ret_align
, ret_nregs
, regsize
;
6623 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6624 &ret_align
, ®size
);
6625 if (ret_nregs
< 0) {
6626 #ifdef TCC_TARGET_RISCV64
6627 arch_transfer_ret_regs(0);
6629 } else if (0 == ret_nregs
) {
6630 /* if returning structure, must copy it to implicit
6631 first pointer arg location */
6634 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6637 /* copy structure value to pointer */
6640 /* returning structure packed into registers */
6641 int size
, addr
, align
, rc
, n
;
6642 size
= type_size(func_type
,&align
);
6643 if ((align
& (ret_align
- 1))
6644 && ((vtop
->r
& VT_VALMASK
) < VT_CONST
/* pointer to struct */
6645 || (vtop
->c
.i
& (ret_align
- 1))
6647 loc
= (loc
- size
) & -ret_align
;
6650 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6654 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6656 vtop
->type
= ret_type
;
6657 rc
= RC_RET(ret_type
.t
);
6658 //printf("struct return: n:%d t:%02x rc:%02x\n", ret_nregs, ret_type.t, rc);
6659 for (n
= ret_nregs
; --n
> 0;) {
6663 incr_offset(regsize
);
6664 /* We assume that when a structure is returned in multiple
6665 registers, their classes are consecutive values of the
6670 vtop
-= ret_nregs
- 1;
6673 gv(RC_RET(func_type
->t
));
6675 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6679 static void check_func_return(void)
6681 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6683 if (!strcmp (funcname
, "main")
6684 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6685 /* main returns 0 by default */
6687 gen_assign_cast(&func_vt
);
6688 gfunc_return(&func_vt
);
6690 tcc_warning("function might return no value: '%s'", funcname
);
6694 /* ------------------------------------------------------------------------- */
6697 static int case_cmpi(const void *pa
, const void *pb
)
6699 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6700 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6701 return a
< b
? -1 : a
> b
;
6704 static int case_cmpu(const void *pa
, const void *pb
)
6706 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6707 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6708 return a
< b
? -1 : a
> b
;
6711 static void gtst_addr(int t
, int a
)
6713 gsym_addr(gvtst(0, t
), a
);
6716 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6720 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6737 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6739 gcase(base
, len
/2, bsym
);
6743 base
+= e
; len
-= e
;
6753 if (p
->v1
== p
->v2
) {
6755 gtst_addr(0, p
->sym
);
6765 gtst_addr(0, p
->sym
);
6769 *bsym
= gjmp(*bsym
);
6772 static void end_switch(void)
6774 struct switch_t
*sw
= cur_switch
;
6775 dynarray_reset(&sw
->p
, &sw
->n
);
6776 cur_switch
= sw
->prev
;
6780 /* ------------------------------------------------------------------------- */
6781 /* __attribute__((cleanup(fn))) */
6783 static void try_call_scope_cleanup(Sym
*stop
)
6785 Sym
*cls
= cur_scope
->cl
.s
;
6787 for (; cls
!= stop
; cls
= cls
->ncl
) {
6788 Sym
*fs
= cls
->next
;
6789 Sym
*vs
= cls
->prev_tok
;
6791 vpushsym(&fs
->type
, fs
);
6792 vset(&vs
->type
, vs
->r
, vs
->c
);
6794 mk_pointer(&vtop
->type
);
6800 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6805 if (!cur_scope
->cl
.s
)
6808 /* search NCA of both cleanup chains given parents and initial depth */
6809 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6810 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6812 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6814 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6817 try_call_scope_cleanup(cc
);
6820 /* call 'func' for each __attribute__((cleanup(func))) */
6821 static void block_cleanup(struct scope
*o
)
6825 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6826 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6831 try_call_scope_cleanup(o
->cl
.s
);
6832 pcl
->jnext
= gjmp(0);
6834 goto remove_pending
;
6844 try_call_scope_cleanup(o
->cl
.s
);
6847 /* ------------------------------------------------------------------------- */
6850 static void vla_restore(int loc
)
6853 gen_vla_sp_restore(loc
);
6856 static void vla_leave(struct scope
*o
)
6858 struct scope
*c
= cur_scope
, *v
= NULL
;
6859 for (; c
!= o
&& c
; c
= c
->prev
)
6863 vla_restore(v
->vla
.locorig
);
6866 /* ------------------------------------------------------------------------- */
6869 static void new_scope(struct scope
*o
)
6871 /* copy and link previous scope */
6873 o
->prev
= cur_scope
;
6875 cur_scope
->vla
.num
= 0;
6877 /* record local declaration stack position */
6878 o
->lstk
= local_stack
;
6879 o
->llstk
= local_label_stack
;
6883 static void prev_scope(struct scope
*o
, int is_expr
)
6887 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6888 block_cleanup(o
->prev
);
6890 /* pop locally defined labels */
6891 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6893 /* In the is_expr case (a statement expression is finished here),
6894 vtop might refer to symbols on the local_stack. Either via the
6895 type or via vtop->sym. We can't pop those nor any that in turn
6896 might be referred to. To make it easier we don't roll back
6897 any symbols in that case; some upper level call to block() will
6898 do that. We do have to remove such symbols from the lookup
6899 tables, though. sym_pop will do that. */
6901 /* pop locally defined symbols */
6902 pop_local_syms(o
->lstk
, is_expr
);
6903 cur_scope
= o
->prev
;
6907 /* leave a scope via break/continue(/goto) */
6908 static void leave_scope(struct scope
*o
)
6912 try_call_scope_cleanup(o
->cl
.s
);
6916 /* short versiona for scopes with 'if/do/while/switch' which can
6917 declare only types (of struct/union/enum) */
6918 static void new_scope_s(struct scope
*o
)
6920 o
->lstk
= local_stack
;
6924 static void prev_scope_s(struct scope
*o
)
6926 sym_pop(&local_stack
, o
->lstk
, 0);
6930 /* ------------------------------------------------------------------------- */
6931 /* call block from 'for do while' loops */
6933 static void lblock(int *bsym
, int *csym
)
6935 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6936 int *b
= co
->bsym
, *c
= co
->csym
;
6950 static void block(int flags
)
6952 int a
, b
, c
, d
, e
, t
;
6956 if (flags
& STMT_EXPR
) {
6957 /* default return value is (void) */
6959 vtop
->type
.t
= VT_VOID
;
6964 /* If the token carries a value, next() might destroy it. Only with
6965 invalid code such as f(){"123"4;} */
6966 if (TOK_HAS_VALUE(t
))
6971 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6980 if (tok
== TOK_ELSE
) {
6985 gsym(d
); /* patch else jmp */
6991 } else if (t
== TOK_WHILE
) {
7005 } else if (t
== '{') {
7007 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
7010 /* handle local labels declarations */
7011 while (tok
== TOK_LABEL
) {
7014 if (tok
< TOK_UIDENT
)
7015 expect("label identifier");
7016 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7018 } while (tok
== ',');
7022 while (tok
!= '}') {
7025 if (flags
& STMT_EXPR
)
7027 block(flags
| STMT_COMPOUND
);
7031 prev_scope(&o
, flags
& STMT_EXPR
);
7033 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
7036 else if (!nocode_wanted
)
7037 check_func_return();
7039 } else if (t
== TOK_RETURN
) {
7040 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7044 gen_assign_cast(&func_vt
);
7046 if (vtop
->type
.t
!= VT_VOID
)
7047 tcc_warning("void function returns a value");
7051 tcc_warning("'return' with no value");
7054 leave_scope(root_scope
);
7056 gfunc_return(&func_vt
);
7058 /* jump unless last stmt in top-level block */
7059 if (tok
!= '}' || local_scope
!= 1)
7062 tcc_tcov_block_end (tcc_state
, -1);
7065 } else if (t
== TOK_BREAK
) {
7067 if (!cur_scope
->bsym
)
7068 tcc_error("cannot break");
7069 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7070 leave_scope(cur_switch
->scope
);
7072 leave_scope(loop_scope
);
7073 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7076 } else if (t
== TOK_CONTINUE
) {
7078 if (!cur_scope
->csym
)
7079 tcc_error("cannot continue");
7080 leave_scope(loop_scope
);
7081 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7084 } else if (t
== TOK_FOR
) {
7089 /* c99 for-loop init decl? */
7090 if (!decl(VT_JMP
)) {
7091 /* no, regular for-loop init expr */
7119 } else if (t
== TOK_DO
) {
7135 } else if (t
== TOK_SWITCH
) {
7136 struct switch_t
*sw
;
7138 sw
= tcc_mallocz(sizeof *sw
);
7140 sw
->scope
= cur_scope
;
7141 sw
->prev
= cur_switch
;
7142 sw
->nocode_wanted
= nocode_wanted
;
7149 sw
->sv
= *vtop
--; /* save switch value */
7151 b
= gjmp(0); /* jump to first case */
7153 a
= gjmp(a
); /* add implicit break */
7158 if (sw
->nocode_wanted
)
7160 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7161 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7163 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7164 for (b
= 1; b
< sw
->n
; b
++)
7165 if (sw
->sv
.type
.t
& VT_UNSIGNED
7166 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7167 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7168 tcc_error("duplicate case value");
7171 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7174 gsym_addr(d
, sw
->def_sym
);
7182 } else if (t
== TOK_CASE
) {
7186 cr
= tcc_malloc(sizeof(struct case_t
));
7187 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7188 cr
->v1
= cr
->v2
= expr_const64();
7189 if (gnu_ext
&& tok
== TOK_DOTS
) {
7191 cr
->v2
= expr_const64();
7192 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7193 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7194 tcc_warning("empty case range");
7196 /* case and default are unreachable from a switch under nocode_wanted */
7197 if (!cur_switch
->nocode_wanted
)
7200 goto block_after_label
;
7202 } else if (t
== TOK_DEFAULT
) {
7205 if (cur_switch
->def_sym
)
7206 tcc_error("too many 'default'");
7207 cur_switch
->def_sym
= cur_switch
->nocode_wanted
? 1 : gind();
7209 goto block_after_label
;
7211 } else if (t
== TOK_GOTO
) {
7212 vla_restore(cur_scope
->vla
.locorig
);
7213 if (tok
== '*' && gnu_ext
) {
7217 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7221 } else if (tok
>= TOK_UIDENT
) {
7222 s
= label_find(tok
);
7223 /* put forward definition if needed */
7225 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7226 else if (s
->r
== LABEL_DECLARED
)
7227 s
->r
= LABEL_FORWARD
;
7229 if (s
->r
& LABEL_FORWARD
) {
7230 /* start new goto chain for cleanups, linked via label->next */
7231 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7232 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7233 pending_gotos
->prev_tok
= s
;
7234 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7235 pending_gotos
->next
= s
;
7237 s
->jnext
= gjmp(s
->jnext
);
7239 try_call_cleanup_goto(s
->cleanupstate
);
7240 gjmp_addr(s
->jnext
);
7245 expect("label identifier");
7249 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7253 if (tok
== ':' && t
>= TOK_UIDENT
) {
7258 if (s
->r
== LABEL_DEFINED
)
7259 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7260 s
->r
= LABEL_DEFINED
;
7262 Sym
*pcl
; /* pending cleanup goto */
7263 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7265 sym_pop(&s
->next
, NULL
, 0);
7269 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7272 s
->cleanupstate
= cur_scope
->cl
.s
;
7276 /* Accept attributes after labels (e.g. 'unused') */
7277 AttributeDef ad_tmp
;
7278 parse_attribute(&ad_tmp
);
7281 tcc_tcov_reset_ind(tcc_state
);
7282 vla_restore(cur_scope
->vla
.loc
);
7285 if (0 == (flags
& STMT_COMPOUND
))
7287 /* C23: insert implicit null-statement whithin compound statement */
7289 /* we accept this, but it is a mistake */
7290 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7293 /* expression case */
7297 if (flags
& STMT_EXPR
) {
7310 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
7313 /* This skips over a stream of tokens containing balanced {} and ()
7314 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7315 with a '{'). If STR then allocates and stores the skipped tokens
7316 in *STR. This doesn't check if () and {} are nested correctly,
7317 i.e. "({)}" is accepted. */
7318 static void skip_or_save_block(TokenString
**str
)
7320 int braces
= tok
== '{';
7323 *str
= tok_str_alloc();
7335 if (str
|| level
> 0)
7336 tcc_error("unexpected end of file");
7341 tok_str_add_tok(*str
);
7343 if (t
== '{' || t
== '(' || t
== '[') {
7345 } else if (t
== '}' || t
== ')' || t
== ']') {
7347 if (level
== 0 && braces
&& t
== '}')
7352 tok_str_add(*str
, TOK_EOF
);
7355 #define EXPR_CONST 1
7358 static void parse_init_elem(int expr_type
)
7360 int saved_global_expr
;
7363 /* compound literals must be allocated globally in this case */
7364 saved_global_expr
= global_expr
;
7367 global_expr
= saved_global_expr
;
7368 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7369 (compound literals). */
7370 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7371 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7372 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7373 #ifdef TCC_TARGET_PE
7374 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7377 tcc_error("initializer element is not constant");
7386 static void init_assert(init_params
*p
, int offset
)
7388 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7389 : !nocode_wanted
&& offset
> p
->local_offset
)
7390 tcc_internal_error("initializer overflow");
7393 #define init_assert(sec, offset)
7396 /* put zeros for variable based init */
7397 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7399 init_assert(p
, c
+ size
);
7401 /* nothing to do because globals are already set to zero */
7403 vpush_helper_func(TOK_memset
);
7407 #if defined TCC_TARGET_ARM && defined TCC_ARM_EABI
7408 vswap(); /* using __aeabi_memset(void*, size_t, int) */
7415 #define DIF_SIZE_ONLY 2
7416 #define DIF_HAVE_ELEM 4
7419 /* delete relocations for specified range c ... c + size. Unfortunatly
7420 in very special cases, relocations may occur unordered */
7421 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7423 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7424 if (!sec
|| !sec
->reloc
)
7426 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7427 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7428 while (rel
< rel_end
) {
7429 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7430 sec
->reloc
->data_offset
-= sizeof *rel
;
7433 memcpy(rel2
, rel
, sizeof *rel
);
7440 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7442 if (ref
== p
->flex_array_ref
) {
7443 if (index
>= ref
->c
)
7445 } else if (ref
->c
< 0)
7446 tcc_error("flexible array has zero size in this context");
7449 /* t is the array or struct type. c is the array or struct
7450 address. cur_field is the pointer to the current
7451 field, for arrays the 'c' member contains the current start
7452 index. 'flags' is as in decl_initializer.
7453 'al' contains the already initialized length of the
7454 current container (starting at c). This returns the new length of that. */
7455 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7456 Sym
**cur_field
, int flags
, int al
)
7459 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7460 unsigned long corig
= c
;
7465 if (flags
& DIF_HAVE_ELEM
)
7468 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7475 /* NOTE: we only support ranges for last designator */
7476 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7478 if (!(type
->t
& VT_ARRAY
))
7479 expect("array type");
7481 index
= index_last
= expr_const();
7482 if (tok
== TOK_DOTS
&& gnu_ext
) {
7484 index_last
= expr_const();
7488 decl_design_flex(p
, s
, index_last
);
7489 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7490 tcc_error("index exceeds array bounds or range is empty");
7492 (*cur_field
)->c
= index_last
;
7493 type
= pointed_type(type
);
7494 elem_size
= type_size(type
, &align
);
7495 c
+= index
* elem_size
;
7496 nb_elems
= index_last
- index
+ 1;
7503 f
= find_field(type
, l
, &cumofs
);
7514 } else if (!gnu_ext
) {
7519 if (type
->t
& VT_ARRAY
) {
7520 index
= (*cur_field
)->c
;
7522 decl_design_flex(p
, s
, index
);
7524 tcc_error("too many initializers");
7525 type
= pointed_type(type
);
7526 elem_size
= type_size(type
, &align
);
7527 c
+= index
* elem_size
;
7530 /* Skip bitfield padding. Also with size 32 and 64. */
7531 while (f
&& (f
->v
& SYM_FIRST_ANOM
) &&
7532 is_integer_btype(f
->type
.t
& VT_BTYPE
))
7533 *cur_field
= f
= f
->next
;
7535 tcc_error("too many initializers");
7541 if (!elem_size
) /* for structs */
7542 elem_size
= type_size(type
, &align
);
7544 /* Using designators the same element can be initialized more
7545 than once. In that case we need to delete possibly already
7546 existing relocations. */
7547 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7548 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7549 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7552 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7554 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7558 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7559 /* make init_putv/vstore believe it were a struct */
7561 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7565 vpush_ref(type
, p
->sec
, c
, elem_size
);
7567 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7568 for (i
= 1; i
< nb_elems
; i
++) {
7570 init_putv(p
, type
, c
+ elem_size
* i
);
7575 c
+= nb_elems
* elem_size
;
7581 /* store a value or an expression directly in global data or in local array */
7582 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7588 Section
*sec
= p
->sec
;
7592 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7594 size
= type_size(type
, &align
);
7595 if (type
->t
& VT_BITFIELD
)
7596 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7597 init_assert(p
, c
+ size
);
7600 /* XXX: not portable */
7601 /* XXX: generate error if incorrect relocation */
7602 gen_assign_cast(&dtype
);
7603 bt
= type
->t
& VT_BTYPE
;
7605 if ((vtop
->r
& VT_SYM
)
7607 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7608 || (type
->t
& VT_BITFIELD
))
7609 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7611 tcc_error("initializer element is not computable at load time");
7613 if (NODATA_WANTED
) {
7618 ptr
= sec
->data
+ c
;
7621 /* XXX: make code faster ? */
7622 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7623 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7624 /* XXX This rejects compound literals like
7625 '(void *){ptr}'. The problem is that '&sym' is
7626 represented the same way, which would be ruled out
7627 by the SYM_FIRST_ANOM check above, but also '"string"'
7628 in 'char *p = "string"' is represented the same
7629 with the type being VT_PTR and the symbol being an
7630 anonymous one. That is, there's no difference in vtop
7631 between '(void *){x}' and '&(void *){x}'. Ignore
7632 pointer typed entities here. Hopefully no real code
7633 will ever use compound literals with scalar type. */
7634 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7635 /* These come from compound literals, memcpy stuff over. */
7639 esym
= elfsym(vtop
->sym
);
7640 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7641 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7643 /* We need to copy over all memory contents, and that
7644 includes relocations. Use the fact that relocs are
7645 created it order, so look from the end of relocs
7646 until we hit one before the copied region. */
7647 unsigned long relofs
= ssec
->reloc
->data_offset
;
7648 while (relofs
>= sizeof(*rel
)) {
7649 relofs
-= sizeof(*rel
);
7650 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7651 if (rel
->r_offset
>= esym
->st_value
+ size
)
7653 if (rel
->r_offset
< esym
->st_value
)
7655 put_elf_reloca(symtab_section
, sec
,
7656 c
+ rel
->r_offset
- esym
->st_value
,
7657 ELFW(R_TYPE
)(rel
->r_info
),
7658 ELFW(R_SYM
)(rel
->r_info
),
7668 if (type
->t
& VT_BITFIELD
) {
7669 int bit_pos
, bit_size
, bits
, n
;
7670 unsigned char *p
, v
, m
;
7671 bit_pos
= BIT_POS(vtop
->type
.t
);
7672 bit_size
= BIT_SIZE(vtop
->type
.t
);
7673 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7674 bit_pos
&= 7, bits
= 0;
7679 v
= val
>> bits
<< bit_pos
;
7680 m
= ((1 << n
) - 1) << bit_pos
;
7681 *p
= (*p
& ~m
) | (v
& m
);
7682 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7687 *(char *)ptr
= val
!= 0;
7693 write16le(ptr
, val
);
7696 write32le(ptr
, val
);
7699 write64le(ptr
, val
);
7702 #if defined TCC_IS_NATIVE_387
7703 /* Host and target platform may be different but both have x87.
7704 On windows, tcc does not use VT_LDOUBLE, except when it is a
7705 cross compiler. In this case a mingw gcc as host compiler
7706 comes here with 10-byte long doubles, while msvc or tcc won't.
7707 tcc itself can still translate by asm.
7708 In any case we avoid possibly random bytes 11 and 12.
7710 if (sizeof (long double) >= 10)
7711 memcpy(ptr
, &vtop
->c
.ld
, 10);
7713 else if (sizeof (long double) == sizeof (double))
7714 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7718 /* For other platforms it should work natively, but may not work
7719 for cross compilers */
7720 if (sizeof(long double) == LDOUBLE_SIZE
)
7721 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7722 else if (sizeof(double) == LDOUBLE_SIZE
)
7723 *(double*)ptr
= (double)vtop
->c
.ld
;
7724 else if (0 == memcmp(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
))
7725 ; /* nothing to do for 0.0 */
7726 #ifndef TCC_CROSS_TEST
7728 tcc_error("can't cross compile long double constants");
7733 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7736 if (vtop
->r
& VT_SYM
)
7737 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7739 write64le(ptr
, val
);
7742 write32le(ptr
, val
);
7746 write64le(ptr
, val
);
7750 if (vtop
->r
& VT_SYM
)
7751 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7752 write32le(ptr
, val
);
7756 //tcc_internal_error("unexpected type");
7762 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7769 /* 't' contains the type and storage info. 'c' is the offset of the
7770 object in section 'sec'. If 'sec' is NULL, it means stack based
7771 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7772 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7773 size only evaluation is wanted (only for arrays). */
7774 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7776 int len
, n
, no_oblock
, i
;
7782 /* generate line number info */
7783 if (debug_modes
&& !(flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7784 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
7786 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7787 /* In case of strings we have special handling for arrays, so
7788 don't consume them as initializer value (which would commit them
7789 to some anonymous symbol). */
7790 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7791 (!(flags
& DIF_SIZE_ONLY
)
7792 /* a struct may be initialized from a struct of same type, as in
7793 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7794 In that case we need to parse the element in order to check
7795 it for compatibility below */
7796 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7798 int ncw_prev
= nocode_wanted
;
7799 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7801 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7802 nocode_wanted
= ncw_prev
;
7803 flags
|= DIF_HAVE_ELEM
;
7806 if (type
->t
& VT_ARRAY
) {
7808 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7816 t1
= pointed_type(type
);
7817 size1
= type_size(t1
, &align1
);
7819 /* only parse strings here if correct type (otherwise: handle
7820 them as ((w)char *) expressions */
7821 if ((tok
== TOK_LSTR
&&
7822 #ifdef TCC_TARGET_PE
7823 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7825 (t1
->t
& VT_BTYPE
) == VT_INT
7827 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7829 cstr_reset(&initstr
);
7830 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7831 tcc_error("unhandled string literal merging");
7832 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7834 initstr
.size
-= size1
;
7836 len
+= tokc
.str
.size
;
7838 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7840 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7843 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7844 && tok
!= TOK_EOF
) {
7845 /* Not a lone literal but part of a bigger expression. */
7846 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7847 tokc
.str
.size
= initstr
.size
;
7848 tokc
.str
.data
= initstr
.data
;
7852 decl_design_flex(p
, s
, len
);
7853 if (!(flags
& DIF_SIZE_ONLY
)) {
7858 tcc_warning("initializer-string for array is too long");
7859 /* in order to go faster for common case (char
7860 string in global variable, we handle it
7862 if (p
->sec
&& size1
== 1) {
7863 init_assert(p
, c
+ nb
);
7865 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7869 /* only add trailing zero if enough storage (no
7870 warning in this case since it is standard) */
7871 if (flags
& DIF_CLEAR
)
7874 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7878 } else if (size1
== 1)
7879 ch
= ((unsigned char *)initstr
.data
)[i
];
7881 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7883 init_putv(p
, t1
, c
+ i
* size1
);
7894 /* zero memory once in advance */
7895 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7896 init_putz(p
, c
, n
*size1
);
7901 /* GNU extension: if the initializer is empty for a flex array,
7902 it's size is zero. We won't enter the loop, so set the size
7904 decl_design_flex(p
, s
, len
);
7905 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7906 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7907 flags
&= ~DIF_HAVE_ELEM
;
7908 if (type
->t
& VT_ARRAY
) {
7910 /* special test for multi dimensional arrays (may not
7911 be strictly correct if designators are used at the
7913 if (no_oblock
&& len
>= n
*size1
)
7916 if (s
->type
.t
== VT_UNION
)
7920 if (no_oblock
&& f
== NULL
)
7932 } else if ((flags
& DIF_HAVE_ELEM
)
7933 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7934 The source type might have VT_CONSTANT set, which is
7935 of course assignable to non-const elements. */
7936 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7939 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7941 if ((flags
& DIF_FIRST
) || tok
== '{') {
7951 } else if (tok
== '{') {
7952 if (flags
& DIF_HAVE_ELEM
)
7955 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7958 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
7959 /* If we supported only ISO C we wouldn't have to accept calling
7960 this on anything than an array if DIF_SIZE_ONLY (and even then
7961 only on the outermost level, so no recursion would be needed),
7962 because initializing a flex array member isn't supported.
7963 But GNU C supports it, so we need to recurse even into
7964 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7965 /* just skip expression */
7966 if (flags
& DIF_HAVE_ELEM
)
7969 skip_or_save_block(NULL
);
7972 if (!(flags
& DIF_HAVE_ELEM
)) {
7973 /* This should happen only when we haven't parsed
7974 the init element above for fear of committing a
7975 string constant to memory too early. */
7976 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7977 expect("string constant");
7978 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7980 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7981 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7983 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7987 init_putv(p
, type
, c
);
7991 /* parse an initializer for type 't' if 'has_init' is non zero, and
7992 allocate space in local or global data space ('r' is either
7993 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7994 variable 'v' of scope 'scope' is declared before initializers
7995 are parsed. If 'v' is zero, then a reference to the new object
7996 is put in the value stack. If 'has_init' is 2, a special parsing
7997 is done to handle string constants. */
7998 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7999 int has_init
, int v
, int global
)
8001 int size
, align
, addr
;
8002 TokenString
*init_str
= NULL
;
8005 Sym
*flexible_array
;
8007 int saved_nocode_wanted
= nocode_wanted
;
8008 #ifdef CONFIG_TCC_BCHECK
8009 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
8011 init_params p
= {0};
8013 /* Always allocate static or global variables */
8014 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
8015 nocode_wanted
|= DATA_ONLY_WANTED
;
8017 flexible_array
= NULL
;
8018 size
= type_size(type
, &align
);
8020 /* exactly one flexible array may be initialized, either the
8021 toplevel array or the last member of the toplevel struct */
8024 // error out except for top-level incomplete arrays
8025 // (arrays of incomplete types are handled in array parsing)
8026 if (!(type
->t
& VT_ARRAY
))
8027 tcc_error("initialization of incomplete type");
8029 /* If the base type itself was an array type of unspecified size
8030 (like in 'typedef int arr[]; arr x = {1};') then we will
8031 overwrite the unknown size by the real one for this decl.
8032 We need to unshare the ref symbol holding that size. */
8033 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
8034 p
.flex_array_ref
= type
->ref
;
8036 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8037 Sym
*field
= type
->ref
->next
;
8040 field
= field
->next
;
8041 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
8042 flexible_array
= field
;
8043 p
.flex_array_ref
= field
->type
.ref
;
8050 /* If unknown size, do a dry-run 1st pass */
8052 tcc_error("unknown type size");
8053 if (has_init
== 2) {
8054 /* only get strings */
8055 init_str
= tok_str_alloc();
8056 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8057 tok_str_add_tok(init_str
);
8060 tok_str_add(init_str
, TOK_EOF
);
8062 skip_or_save_block(&init_str
);
8066 begin_macro(init_str
, 1);
8068 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8069 /* prepare second initializer parsing */
8070 macro_ptr
= init_str
->str
;
8073 /* if still unknown size, error */
8074 size
= type_size(type
, &align
);
8076 tcc_error("unknown type size");
8078 /* If there's a flex member and it was used in the initializer
8080 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8081 size
+= flexible_array
->type
.ref
->c
8082 * pointed_size(&flexible_array
->type
);
8085 /* take into account specified alignment if bigger */
8086 if (ad
->a
.aligned
) {
8087 int speca
= 1 << (ad
->a
.aligned
- 1);
8090 } else if (ad
->a
.packed
) {
8094 if (!v
&& NODATA_WANTED
)
8095 size
= 0, align
= 1;
8097 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8099 #ifdef CONFIG_TCC_BCHECK
8101 /* add padding between stack variables for bound checking */
8105 loc
= (loc
- size
) & -align
;
8107 p
.local_offset
= addr
+ size
;
8108 #ifdef CONFIG_TCC_BCHECK
8110 /* add padding between stack variables for bound checking */
8115 /* local variable */
8116 #ifdef CONFIG_TCC_ASM
8117 if (ad
->asm_label
) {
8118 int reg
= asm_parse_regvar(ad
->asm_label
);
8120 r
= (r
& ~VT_VALMASK
) | reg
;
8123 sym
= sym_push(v
, type
, r
, addr
);
8124 if (ad
->cleanup_func
) {
8125 Sym
*cls
= sym_push2(&all_cleanups
,
8126 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8127 cls
->prev_tok
= sym
;
8128 cls
->next
= ad
->cleanup_func
;
8129 cls
->ncl
= cur_scope
->cl
.s
;
8130 cur_scope
->cl
.s
= cls
;
8135 /* push local reference */
8136 vset(type
, r
, addr
);
8141 /* see if the symbol was already defined */
8144 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
8145 && sym
->type
.ref
->c
> type
->ref
->c
) {
8146 /* flex array was already declared with explicit size
8148 int arr[] = { 1,2,3 }; */
8149 type
->ref
->c
= sym
->type
.ref
->c
;
8150 size
= type_size(type
, &align
);
8152 patch_storage(sym
, ad
, type
);
8153 /* we accept several definitions of the same global variable. */
8154 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8159 /* allocate symbol in corresponding section */
8163 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8164 tp
= &tp
->ref
->type
;
8165 if (tp
->t
& VT_CONSTANT
) {
8166 sec
= rodata_section
;
8167 } else if (has_init
) {
8169 /*if (tcc_state->g_debug & 4)
8170 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8171 } else if (tcc_state
->nocommon
)
8176 addr
= section_add(sec
, size
, align
);
8177 #ifdef CONFIG_TCC_BCHECK
8178 /* add padding if bound check */
8180 section_add(sec
, 1, 1);
8183 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8184 sec
= common_section
;
8189 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8190 patch_storage(sym
, ad
, NULL
);
8192 /* update symbol definition */
8193 put_extern_sym(sym
, sec
, addr
, size
);
8195 /* push global reference */
8196 vpush_ref(type
, sec
, addr
, size
);
8201 #ifdef CONFIG_TCC_BCHECK
8202 /* handles bounds now because the symbol must be defined
8203 before for the relocation */
8207 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8208 /* then add global bound info */
8209 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8210 bounds_ptr
[0] = 0; /* relocated */
8211 bounds_ptr
[1] = size
;
8216 if (type
->t
& VT_VLA
) {
8222 /* save before-VLA stack pointer if needed */
8223 if (cur_scope
->vla
.num
== 0) {
8224 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8225 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8227 gen_vla_sp_save(loc
-= PTR_SIZE
);
8228 cur_scope
->vla
.locorig
= loc
;
8232 vpush_type_size(type
, &a
);
8233 gen_vla_alloc(type
, a
);
8234 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8235 /* on _WIN64, because of the function args scratch area, the
8236 result of alloca differs from RSP and is returned in RAX. */
8237 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8239 gen_vla_sp_save(addr
);
8240 cur_scope
->vla
.loc
= addr
;
8241 cur_scope
->vla
.num
++;
8242 } else if (has_init
) {
8244 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8245 /* patch flexible array member size back to -1, */
8246 /* for possible subsequent similar declarations */
8248 flexible_array
->type
.ref
->c
= -1;
8252 /* restore parse state if needed */
8258 nocode_wanted
= saved_nocode_wanted
;
8261 /* generate vla code saved in post_type() */
/* Emits, at function-body entry, the code that evaluates a VLA
   parameter's array-size expression.  The size expression was saved as
   a token string (vla_array_str) by post_type(); it is replayed here
   via begin_macro() so it is parsed in the context of the function
   body, and the computed size is stored in a fresh stack slot whose
   offset is recorded in ref->c.  Recurses first so that nested
   function-pointer parameter types are handled too.
   NOTE(review): several interior lines of this routine are elided in
   this view (e.g. the braces and the end_macro/cleanup tail); comments
   describe only the statements visible here. */
8262 static void func_vla_arg_code(Sym
*arg
)
8265 TokenString
*vla_array_tok
= NULL
;
/* handle VLA sizes buried in the parameter's referenced type first */
8268 func_vla_arg_code(arg
->type
.ref
);
8270 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
/* reserve an int-sized local slot to hold the evaluated dimension */
8271 loc
-= type_size(&int_type
, &align
);
8273 arg
->type
.ref
->c
= loc
;
/* replay the saved size-expression tokens as a macro stream */
8276 vla_array_tok
= tok_str_alloc();
8277 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8278 begin_macro(vla_array_tok
, 1);
/* push element size, then store the result into the reserved slot */
8283 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8285 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
/* Walks the parameter list of function symbol 'sym' and generates the
   VLA size-evaluation code (via func_vla_arg_code) for every parameter
   that is a pointer to a VLA type — i.e. an array parameter declared
   with variable dimensions, which decayed to a pointer.
   NOTE(review): the function's opening brace and the declaration of
   the local 'arg' are elided in this view. */
8292 static void func_vla_arg(Sym
*sym
)
8296 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
/* parameters arrive as VT_PTR after array-to-pointer decay; the
   pointed-to type keeps the VT_VLA flag */
8297 if ((arg
->type
.t
& VT_BTYPE
) == VT_PTR
&& (arg
->type
.ref
->type
.t
& VT_VLA
))
8298 func_vla_arg_code(arg
->type
.ref
);
8301 /* parse a function defined by symbol 'sym' and generate its code in
8302 'cur_text_section' */
/* Sets up a fresh root scope, aligns the text section if requested,
   emits the function prologue/body/epilogue, patches the ELF symbol
   size afterwards, and finally poisons the per-function globals so a
   stray use after the function ends crashes instead of silently
   generating wrong code.
   NOTE(review): many interior lines (e.g. the actual body generation
   between 8338 and 8346) are elided in this view. */
8303 static void gen_function(Sym
*sym
)
/* new per-function scope chain; 'f' lives on this C stack frame */
8305 struct scope f
= { 0 };
8306 cur_scope
= root_scope
= &f
;
8309 cur_text_section
->sh_flags
|= SHF_EXECINSTR
;
8310 ind
= cur_text_section
->data_offset
;
8311 if (sym
->a
.aligned
) {
/* pad with nops up to the requested 2^(aligned-1) boundary */
8312 size_t newoff
= section_add(cur_text_section
, 0,
8313 1 << (sym
->a
.aligned
- 1));
8314 gen_fill_nops(newoff
- ind
);
8317 funcname
= get_tok_str(sym
->v
, NULL
);
8319 func_vt
= sym
->type
.ref
->type
;
8320 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8322 /* NOTE: we patch the symbol size later */
8323 put_extern_sym(sym
, cur_text_section
, ind
, 0);
/* constructor/destructor attributes: register in init/fini arrays */
8325 if (sym
->type
.ref
->f
.func_ctor
)
8326 add_array (tcc_state
, ".init_array", sym
->c
);
8327 if (sym
->type
.ref
->f
.func_dtor
)
8328 add_array (tcc_state
, ".fini_array", sym
->c
);
8330 /* put debug symbol */
8331 tcc_debug_funcstart(tcc_state
, sym
);
8333 /* push a dummy symbol to enable local sym storage */
8334 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8335 local_scope
= 1; /* for function parameters */
8337 tcc_debug_prolog_epilog(tcc_state
, 0);
8341 clear_temp_local_var_list();
8347 /* reset local stack */
8348 pop_local_syms(NULL
, 0);
8349 tcc_debug_prolog_epilog(tcc_state
, 1);
8352 /* end of function */
8353 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8355 /* patch symbol size */
8356 elfsym(sym
)->st_size
= ind
- func_ind
;
8358 cur_text_section
->data_offset
= ind
;
/* discard function-local labels and pending cleanups */
8360 label_pop(&global_label_stack
, NULL
, 0);
8361 sym_pop(&all_cleanups
, NULL
, 0);
8363 /* It's better to crash than to generate wrong code */
8364 cur_text_section
= NULL
;
8365 funcname
= ""; /* for safety */
8366 func_vt
.t
= VT_VOID
; /* for safety */
8367 func_var
= 0; /* for safety */
8368 ind
= 0; /* for safety */
/* back at file scope: only static data may be emitted from here on */
8370 nocode_wanted
= DATA_ONLY_WANTED
;
8373 /* do this after funcend debug info */
/* Emits code for every recorded inline function that was actually
   referenced (or forced external).  Compiling one inline function can
   reference another, so the scan is repeated until a whole pass
   generates nothing new.
   NOTE(review): interior lines are elided in this view (the do { ... }
   opener, the sym lookup feeding the 'sym' test at 8390, and the
   per-function compile/cleanup between 8395 and 8401). */
8377 static void gen_inline_functions(TCCState
*s
)
8380 int inline_generated
, i
;
8381 struct InlineFunc
*fn
;
/* dummy buffer so error messages have a file context */
8383 tcc_open_bf(s
, ":inline:", 0);
8384 /* iterate while inline function are referenced */
8386 inline_generated
= 0;
8387 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8388 fn
= s
->inline_fns
[i
];
/* sym->c != 0 means the symbol was referenced/emitted */
8390 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8391 /* the function was used or forced (and then not internal):
8392 generate its code and convert it to a normal function */
8394 tccpp_putfile(fn
->filename
);
/* replay the saved function-body tokens */
8395 begin_macro(fn
->func_str
, 1);
8397 cur_text_section
= text_section
;
8401 inline_generated
= 1;
/* repeat: generating one inline fn may have referenced another */
8404 } while (inline_generated
);
/* Releases the saved token strings of inline functions that were never
   emitted, then frees the inline-function array itself.
   NOTE(review): the opening brace, the declaration of 'i', and the
   guard selecting only *unused* functions are elided in this view. */
8408 static void free_inline_functions(TCCState
*s
)
8411 /* free tokens of unused inline functions */
8412 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8413 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8415 tok_str_free(fn
->func_str
);
/* frees each InlineFunc element and resets the array/count */
8417 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/* Parses a C11 _Static_assert(expr [, "message"]) and raises a compile
   error with the given (or a default) message when the constant
   expression is false.
   NOTE(review): the expression parsing and the success path are elided
   in this view; only the message selection and the error call remain. */
8420 static void do_Static_assert(void)
/* default message when the optional string argument is absent */
8428 msg
= "_Static_assert fail";
/* optional user message; adjacent string literals are concatenated */
8431 msg
= parse_mult_str("string constant")->data
;
8435 tcc_error("%s", msg
);
8439 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8440 or VT_CMP if parsing old style parameter list
8441 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
/* Top-level declaration parser: consumes one declaration statement
   (possibly a comma-separated list of declarators sharing one base
   type) and dispatches to function definition handling, typedef
   recording, old-style parameter redeclaration, or variable
   definition/initialization via decl_initializer_alloc().
   NOTE(review): this function has many interior lines elided in this
   view (loop framing, several closing braces, return paths); comments
   annotate only what is visible. */
8442 static int decl(int l
)
8444 int v
, has_init
, r
, oldint
;
8447 AttributeDef ad
, adbase
;
/* no base type parsed: handle the special non-declaration cases */
8452 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8455 /* skip redundant ';' if not in old parameter decl scope */
8456 if (tok
== ';' && l
!= VT_CMP
) {
8460 if (tok
== TOK_STATIC_ASSERT
) {
8466 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8467 /* global asm block */
8471 if (tok
>= TOK_UIDENT
) {
8472 /* special test for old K&R protos without explicit int
8473 type. Only accepted when defining global data */
8478 expect("declaration");
/* base type with no declarator, e.g. 'struct foo;' */
8484 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8486 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8487 tcc_warning("unnamed struct/union that defines no instances");
8491 if (IS_ENUM(btype
.t
)) {
8497 while (1) { /* iterate thru each declaration */
8500 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug aid: presumably gated by a verbosity flag elided here */
8504 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8505 printf("type = '%s'\n", buf
);
8508 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8509 if ((type
.t
& VT_STATIC
) && (l
!= VT_CONST
))
8510 tcc_error("function without file scope cannot be static");
8511 /* if old style function prototype, we accept a
8514 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
) {
8518 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8519 if (sym
->f
.func_alwinl
8520 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8521 == (VT_EXTERN
| VT_INLINE
))) {
8522 /* always_inline functions must be handled as if they
8523 don't generate multiple global defs, even if extern
8524 inline, i.e. GNU inline semantics for those. Rewrite
8525 them into static inline. */
8526 type
.t
&= ~VT_EXTERN
;
8527 type
.t
|= VT_STATIC
;
8530 /* always compile 'extern inline' */
8531 if (type
.t
& VT_EXTERN
)
8532 type
.t
&= ~VT_INLINE
;
8534 } else if (oldint
) {
8535 tcc_warning("type defaults to int");
/* GNU extension: asm("label") rename after the declarator */
8538 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8539 ad
.asm_label
= asm_label_instr();
8540 /* parse one last attribute list, after asm label */
8541 parse_attribute(&ad
);
8543 /* gcc does not allow __asm__("label") with function definition,
8550 #ifdef TCC_TARGET_PE
8551 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8552 if (type
.t
& VT_STATIC
)
8553 tcc_error("cannot have dll linkage with static");
8554 if (type
.t
& VT_TYPEDEF
) {
8555 tcc_warning("'%s' attribute ignored for typedef",
8556 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8557 (ad
.a
.dllexport
= 0, "dllexport"));
8558 } else if (ad
.a
.dllimport
) {
8559 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8562 type
.t
|= VT_EXTERN
;
/* here: function definition path (guard elided in this view) */
8568 tcc_error("cannot use local functions");
8569 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8570 expect("function definition");
8572 /* reject abstract declarators in function definition
8573 make old style params without decl have int type */
8575 while ((sym
= sym
->next
) != NULL
) {
8576 if (!(sym
->v
& ~SYM_FIELD
))
8577 expect("identifier");
8578 if (sym
->type
.t
== VT_VOID
)
8579 sym
->type
= int_type
;
8582 /* apply post-declaraton attributes */
8583 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8585 /* put function symbol */
8586 type
.t
&= ~VT_EXTERN
;
8587 sym
= external_sym(v
, &type
, 0, &ad
);
8589 /* static inline functions are just recorded as a kind
8590 of macro. Their code will be emitted at the end of
8591 the compilation unit only if they are used */
8592 if (sym
->type
.t
& VT_INLINE
) {
8593 struct InlineFunc
*fn
;
/* struct has a flexible filename tail; +strlen covers it
   (the terminating NUL is counted in sizeof *fn) */
8594 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8595 strcpy(fn
->filename
, file
->filename
);
8597 dynarray_add(&tcc_state
->inline_fns
,
8598 &tcc_state
->nb_inline_fns
, fn
);
/* save body tokens instead of compiling now */
8599 skip_or_save_block(&fn
->func_str
);
8601 /* compute text section */
8602 cur_text_section
= ad
.section
;
8603 if (!cur_text_section
)
8604 cur_text_section
= text_section
;
/* old-style (K&R) parameter declaration path */
8610 /* find parameter in function parameter list */
8611 for (sym
= func_vt
.ref
->next
; sym
; sym
= sym
->next
)
8612 if ((sym
->v
& ~SYM_FIELD
) == v
)
8614 tcc_error("declaration for parameter '%s' but no such parameter",
8615 get_tok_str(v
, NULL
));
8617 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8618 tcc_error("storage class specified for '%s'",
8619 get_tok_str(v
, NULL
));
/* VT_VOID marks a not-yet-declared old-style parameter */
8620 if (sym
->type
.t
!= VT_VOID
)
8621 tcc_error("redefinition of parameter '%s'",
8622 get_tok_str(v
, NULL
));
8623 convert_parameter_type(&type
);
8625 } else if (type
.t
& VT_TYPEDEF
) {
8626 /* save typedefed type */
8627 /* XXX: test storage specifiers ? */
8629 if (sym
&& sym
->sym_scope
== local_scope
) {
8630 if (!is_compatible_types(&sym
->type
, &type
)
8631 || !(sym
->type
.t
& VT_TYPEDEF
))
8632 tcc_error("incompatible redefinition of '%s'",
8633 get_tok_str(v
, NULL
));
8636 sym
= sym_push(v
, &type
, 0, 0);
8639 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8640 merge_funcattr(&sym
->type
.ref
->f
, &ad
.f
);
8642 tcc_debug_typedef (tcc_state
, sym
);
8643 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8644 && !(type
.t
& VT_EXTERN
)) {
8645 tcc_error("declaration of void object");
/* plain variable or extern declaration path */
8648 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8649 /* external function definition */
8650 /* specific case for func_call attribute */
8651 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8652 } else if (!(type
.t
& VT_ARRAY
)) {
8653 /* not lvalue if array */
8656 has_init
= (tok
== '=');
8657 if (has_init
&& (type
.t
& VT_VLA
))
8658 tcc_error("variable length array cannot be initialized");
8660 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8661 || (type
.t
& VT_BTYPE
) == VT_FUNC
8662 /* as with GCC, uninitialized global arrays with no size
8663 are considered extern: */
8664 || ((type
.t
& VT_ARRAY
) && !has_init
8665 && l
== VT_CONST
&& type
.ref
->c
< 0)
8667 /* external variable or function */
8668 type
.t
|= VT_EXTERN
;
8669 sym
= external_sym(v
, &type
, r
, &ad
);
8671 if (l
== VT_CONST
|| (type
.t
& VT_STATIC
))
8677 else if (l
== VT_CONST
)
8678 /* uninitialized global variables may be overridden */
8679 type
.t
|= VT_EXTERN
;
8680 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
== VT_CONST
);
8683 if (ad
.alias_target
&& l
== VT_CONST
) {
8684 /* Aliases need to be emitted when their target symbol
8685 is emitted, even if perhaps unreferenced.
8686 We only support the case where the base is already
8687 defined, otherwise we would need deferring to emit
8688 the aliases until the end of the compile unit. */
8689 Sym
*alias_target
= sym_find(ad
.alias_target
);
8690 ElfSym
*esym
= elfsym(alias_target
);
/* error when the target is not yet defined (guard elided here) */
8692 tcc_error("unsupported forward __alias__ attribute");
8693 put_extern_sym2(sym_find(v
), esym
->st_shndx
,
8694 esym
->st_value
, esym
->st_size
, 1);
8710 /* ------------------------------------------------------------------------- */
8713 /* ------------------------------------------------------------------------- */