2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind
; }
/* Emit an unconditional jump to a known address, then suppress code
   generation: everything after an unconditional jump is unreachable. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained onto 't', mark the following code as
   unreachable, and return the updated jump chain. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
77 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
78 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
79 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
81 static int last_line_num
, new_file
, func_ind
; /* debug info control */
82 ST_DATA
const char *funcname
;
83 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
84 static CString initstr
;
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
97 ST_DATA
struct switch_t
{
101 } **p
; int n
; /* list of case ranges */
102 int def_sym
; /* default symbol */
105 struct switch_t
*prev
;
107 } *cur_switch
; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
110 /*list of temporary local variables on the stack in current function. */
111 ST_DATA
struct temp_local_variable
{
112 int location
; //offset on stack. Svalue.c.i
115 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
116 short nb_temp_local_vars
;
118 static struct scope
{
120 struct { int loc
, num
; } vla
;
121 struct { Sym
*s
; int n
; } cl
;
124 } *cur_scope
, *loop_scope
, *root_scope
;
132 /********************************************************/
133 /* stab debug support */
135 static const struct {
138 } default_debug
[] = {
139 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
140 { VT_BYTE
, "char:t2=r2;0;127;" },
142 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
144 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
146 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
148 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
150 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
151 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
153 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
154 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
155 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
156 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
157 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
158 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
159 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
160 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
161 { VT_FLOAT
, "float:t14=r1;4;0;" },
162 { VT_DOUBLE
, "double:t15=r1;8;0;" },
163 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
164 { -1, "_Float32:t17=r1;4;0;" },
165 { -1, "_Float64:t18=r1;8;0;" },
166 { -1, "_Float128:t19=r1;16;0;" },
167 { -1, "_Float32x:t20=r1;8;0;" },
168 { -1, "_Float64x:t21=r1;16;0;" },
169 { -1, "_Decimal32:t22=r1;4;0;" },
170 { -1, "_Decimal64:t23=r1;8;0;" },
171 { -1, "_Decimal128:t24=r1;16;0;" },
172 /* if default char is unsigned */
173 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
175 { VT_BOOL
, "bool:t26=r26;0;255;" },
176 { VT_VOID
, "void:t27=27" },
179 static int debug_next_type
;
181 static struct debug_hash
{
186 static int n_debug_hash
;
188 static struct debug_info
{
199 struct debug_info
*child
, *next
, *last
, *parent
;
200 } *debug_info
, *debug_info_root
;
202 /********************************************************/
204 #define precedence_parser
205 static void init_prec(void);
207 /********************************************************/
208 #ifndef CONFIG_TCC_ASM
209 ST_FUNC
void asm_instr(void)
211 tcc_error("inline asm() not supported");
213 ST_FUNC
void asm_global_instr(void)
215 tcc_error("inline asm() not supported");
219 /* ------------------------------------------------------------------------- */
220 static void gen_cast(CType
*type
);
221 static void gen_cast_s(int t
);
222 static inline CType
*pointed_type(CType
*type
);
223 static int is_compatible_types(CType
*type1
, CType
*type2
);
224 static int parse_btype(CType
*type
, AttributeDef
*ad
);
225 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
226 static void parse_expr_type(CType
*type
);
227 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
228 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
229 static void block(int is_expr
);
230 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
231 static void decl(int l
);
232 static int decl0(int l
, int is_for_loop_init
, Sym
*);
233 static void expr_eq(void);
234 static void vla_runtime_type_size(CType
*type
, int *a
);
235 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
236 static inline int64_t expr_const64(void);
237 static void vpush64(int ty
, unsigned long long v
);
238 static void vpush(CType
*type
);
239 static int gvtst(int inv
, int t
);
240 static void gen_inline_functions(TCCState
*s
);
241 static void free_inline_functions(TCCState
*s
);
242 static void skip_or_save_block(TokenString
**str
);
243 static void gv_dup(void);
244 static int get_temp_local_var(int size
,int align
);
245 static void clear_temp_local_var_list();
246 static void cast_error(CType
*st
, CType
*dt
);
248 ST_INLN
int is_float(int t
)
250 int bt
= t
& VT_BTYPE
;
251 return bt
== VT_LDOUBLE
257 static inline int is_integer_btype(int bt
)
266 static int btype_size(int bt
)
268 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
272 bt
== VT_PTR
? PTR_SIZE
: 0;
275 /* returns function return register from type */
276 static int R_RET(int t
)
280 #ifdef TCC_TARGET_X86_64
281 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
283 #elif defined TCC_TARGET_RISCV64
284 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
290 /* returns 2nd function return register, if any */
291 static int R2_RET(int t
)
297 #elif defined TCC_TARGET_X86_64
302 #elif defined TCC_TARGET_RISCV64
309 /* returns true for two-word types */
310 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
312 /* put function return registers to stack value */
313 static void PUT_R_RET(SValue
*sv
, int t
)
315 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
318 /* returns function return register class for type t */
319 static int RC_RET(int t
)
321 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
324 /* returns generic register class for type t */
325 static int RC_TYPE(int t
)
329 #ifdef TCC_TARGET_X86_64
330 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
332 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
334 #elif defined TCC_TARGET_RISCV64
335 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
341 /* returns 2nd register class corresponding to t and rc */
342 static int RC2_TYPE(int t
, int rc
)
344 if (!USING_TWO_WORDS(t
))
359 /* we use our own 'finite' function to avoid potential problems with
360 non standard math libs */
361 /* XXX: endianness dependent */
362 ST_FUNC
int ieee_finite(double d
)
365 memcpy(p
, &d
, sizeof(double));
366 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
369 /* compiling intel long double natively */
370 #if (defined __i386__ || defined __x86_64__) \
371 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
372 # define TCC_IS_NATIVE_387
375 ST_FUNC
void test_lvalue(void)
377 if (!(vtop
->r
& VT_LVAL
))
381 ST_FUNC
void check_vstack(void)
383 if (vtop
!= vstack
- 1)
384 tcc_error("internal compiler error: vstack leak (%d)",
385 (int)(vtop
- vstack
+ 1));
388 /* ------------------------------------------------------------------------- */
389 /* vstack debugging aid */
392 void pv (const char *lbl
, int a
, int b
)
395 for (i
= a
; i
< a
+ b
; ++i
) {
396 SValue
*p
= &vtop
[-i
];
397 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
398 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
403 /* ------------------------------------------------------------------------- */
404 /* start of translation unit info */
405 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
411 /* file info: full path + filename */
412 section_sym
= put_elf_sym(symtab_section
, 0, 0,
413 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
414 text_section
->sh_num
, NULL
);
415 getcwd(buf
, sizeof(buf
));
417 normalize_slashes(buf
);
419 pstrcat(buf
, sizeof(buf
), "/");
420 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
421 text_section
->data_offset
, text_section
, section_sym
);
422 put_stabs_r(s1
, file
->prev
? file
->prev
->filename
: file
->filename
,
424 text_section
->data_offset
, text_section
, section_sym
);
425 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
426 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
428 new_file
= last_line_num
= 0;
430 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
434 /* we're currently 'including' the <command line> */
438 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
439 symbols can be safely used */
440 put_elf_sym(symtab_section
, 0, 0,
441 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
442 SHN_ABS
, file
->filename
);
445 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
446 Section
*sec
, int sym_index
)
452 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
453 sizeof(struct debug_sym
) *
454 (debug_info
->n_sym
+ 1));
455 s
= debug_info
->sym
+ debug_info
->n_sym
++;
458 s
->str
= tcc_strdup(str
);
460 s
->sym_index
= sym_index
;
463 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
465 put_stabs (s1
, str
, type
, 0, 0, value
);
468 static void tcc_debug_stabn(int type
, int value
)
470 if (type
== N_LBRAC
) {
471 struct debug_info
*info
=
472 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
475 info
->parent
= debug_info
;
477 if (debug_info
->child
) {
478 if (debug_info
->child
->last
)
479 debug_info
->child
->last
->next
= info
;
481 debug_info
->child
->next
= info
;
482 debug_info
->child
->last
= info
;
485 debug_info
->child
= info
;
488 debug_info_root
= info
;
492 debug_info
->end
= value
;
493 debug_info
= debug_info
->parent
;
497 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
506 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
507 if ((type
& VT_BTYPE
) != VT_BYTE
)
509 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
510 n
++, t
= t
->type
.ref
;
514 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
518 for (i
= 0; i
< n_debug_hash
; i
++) {
519 if (t
== debug_hash
[i
].type
) {
520 debug_type
= debug_hash
[i
].debug_type
;
524 if (debug_type
== -1) {
525 debug_type
= ++debug_next_type
;
526 debug_hash
= (struct debug_hash
*)
527 tcc_realloc (debug_hash
,
528 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
529 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
530 debug_hash
[n_debug_hash
++].type
= t
;
532 cstr_printf (&str
, "%s:T%d=%c%d",
533 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
534 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
536 IS_UNION (t
->type
.t
) ? 'u' : 's',
539 int pos
, size
, align
;
542 cstr_printf (&str
, "%s:",
543 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
544 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
545 tcc_get_debug_info (s1
, t
, &str
);
546 if (t
->type
.t
& VT_BITFIELD
) {
547 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
548 size
= BIT_SIZE(t
->type
.t
);
552 size
= type_size(&t
->type
, &align
) * 8;
554 cstr_printf (&str
, ",%d,%d;", pos
, size
);
556 cstr_printf (&str
, ";");
557 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
561 else if (IS_ENUM(type
)) {
562 Sym
*e
= t
= t
->type
.ref
;
564 debug_type
= ++debug_next_type
;
566 cstr_printf (&str
, "%s:T%d=e",
567 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
568 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
572 cstr_printf (&str
, "%s:",
573 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
574 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
575 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
578 cstr_printf (&str
, ";");
579 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
582 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
583 type
&= ~VT_STRUCT_MASK
;
585 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
587 if (default_debug
[debug_type
- 1].type
== type
)
589 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
593 cstr_printf (result
, "%d=", ++debug_next_type
);
596 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
597 if ((type
& VT_BTYPE
) != VT_BYTE
)
600 cstr_printf (result
, "%d=*", ++debug_next_type
);
601 else if (type
== (VT_PTR
| VT_ARRAY
))
602 cstr_printf (result
, "%d=ar1;0;%d;",
603 ++debug_next_type
, t
->type
.ref
->c
- 1);
604 else if (type
== VT_FUNC
) {
605 cstr_printf (result
, "%d=f", ++debug_next_type
);
606 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
613 cstr_printf (result
, "%d", debug_type
);
616 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
620 struct debug_info
*next
= cur
->next
;
622 for (i
= 0; i
< cur
->n_sym
; i
++) {
623 struct debug_sym
*s
= &cur
->sym
[i
];
626 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
627 s
->sec
, s
->sym_index
);
629 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
633 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
634 tcc_debug_finish (s1
, cur
->child
);
635 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
641 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
644 cstr_new (&debug_str
);
645 for (; s
!= e
; s
= s
->prev
) {
646 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
648 cstr_reset (&debug_str
);
649 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
650 tcc_get_debug_info(s1
, s
, &debug_str
);
651 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
653 cstr_free (&debug_str
);
656 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
)
658 Section
*s
= s1
->sections
[sh_num
];
662 cstr_printf (&str
, "%s:%c",
663 get_tok_str(sym
->v
, NULL
),
664 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
666 tcc_get_debug_info(s1
, sym
, &str
);
667 if (sym_bind
== STB_GLOBAL
)
668 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
670 tcc_debug_stabs(s1
, str
.data
,
671 (sym
->type
.t
& VT_STATIC
) && data_section
== s
672 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
676 static void tcc_debug_typedef(TCCState
*s1
, Sym
*sym
)
681 cstr_printf (&str
, "%s:t",
682 (sym
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
683 ? "" : get_tok_str(sym
->v
& ~SYM_FIELD
, NULL
));
684 tcc_get_debug_info(s1
, sym
, &str
);
685 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
689 /* put end of translation unit info */
690 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
694 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
695 text_section
->data_offset
, text_section
, section_sym
);
696 tcc_free(debug_hash
);
699 static BufferedFile
* put_new_file(TCCState
*s1
)
701 BufferedFile
*f
= file
;
702 /* use upper file if from inline ":asm:" */
703 if (f
->filename
[0] == ':')
706 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
707 new_file
= last_line_num
= 0;
712 /* generate line number info */
713 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
717 || cur_text_section
!= text_section
718 || !(f
= put_new_file(s1
))
719 || last_line_num
== f
->line_num
)
721 if (func_ind
!= -1) {
722 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
724 /* from tcc_assemble */
725 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
727 last_line_num
= f
->line_num
;
730 /* put function symbol */
731 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
737 debug_info_root
= NULL
;
739 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
740 if (!(f
= put_new_file(s1
)))
742 cstr_new (&debug_str
);
743 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
744 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
745 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
746 cstr_free (&debug_str
);
751 /* put function size */
752 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
756 tcc_debug_stabn(N_RBRAC
, size
);
757 tcc_debug_finish (s1
, debug_info_root
);
760 /* put alternative filename */
761 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
763 if (0 == strcmp(file
->filename
, filename
))
765 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
769 /* begin of #include */
770 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
774 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
778 /* end of #include */
779 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
783 put_stabn(s1
, N_EINCL
, 0, 0, 0);
787 /* ------------------------------------------------------------------------- */
788 /* initialize vstack and types. This must be done also for tcc -E */
789 ST_FUNC
void tccgen_init(TCCState
*s1
)
792 memset(vtop
, 0, sizeof *vtop
);
794 /* define some often used types */
797 char_type
.t
= VT_BYTE
;
798 if (s1
->char_is_unsigned
)
799 char_type
.t
|= VT_UNSIGNED
;
800 char_pointer_type
= char_type
;
801 mk_pointer(&char_pointer_type
);
803 func_old_type
.t
= VT_FUNC
;
804 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
805 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
806 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
807 #ifdef precedence_parser
813 ST_FUNC
int tccgen_compile(TCCState
*s1
)
815 cur_text_section
= NULL
;
817 anon_sym
= SYM_FIRST_ANOM
;
820 nocode_wanted
= 0x80000000;
824 #ifdef TCC_TARGET_ARM
828 printf("%s: **** new file\n", file
->filename
);
830 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
833 gen_inline_functions(s1
);
835 /* end of translation unit info */
840 ST_FUNC
void tccgen_finish(TCCState
*s1
)
843 free_inline_functions(s1
);
844 sym_pop(&global_stack
, NULL
, 0);
845 sym_pop(&local_stack
, NULL
, 0);
846 /* free preprocessor macros */
849 dynarray_reset(&sym_pools
, &nb_sym_pools
);
850 sym_free_first
= NULL
;
853 /* ------------------------------------------------------------------------- */
854 ST_FUNC ElfSym
*elfsym(Sym
*s
)
858 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
861 /* apply storage attributes to Elf symbol */
862 ST_FUNC
void update_storage(Sym
*sym
)
865 int sym_bind
, old_sym_bind
;
871 if (sym
->a
.visibility
)
872 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
875 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
876 sym_bind
= STB_LOCAL
;
877 else if (sym
->a
.weak
)
880 sym_bind
= STB_GLOBAL
;
881 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
882 if (sym_bind
!= old_sym_bind
) {
883 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
887 if (sym
->a
.dllimport
)
888 esym
->st_other
|= ST_PE_IMPORT
;
889 if (sym
->a
.dllexport
)
890 esym
->st_other
|= ST_PE_EXPORT
;
894 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
895 get_tok_str(sym
->v
, NULL
),
896 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
904 /* ------------------------------------------------------------------------- */
905 /* update sym->c so that it points to an external symbol in section
906 'section' with value 'value' */
908 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
909 addr_t value
, unsigned long size
,
910 int can_add_underscore
)
912 int sym_type
, sym_bind
, info
, other
, t
;
918 name
= get_tok_str(sym
->v
, NULL
);
920 if ((t
& VT_BTYPE
) == VT_FUNC
) {
922 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
923 sym_type
= STT_NOTYPE
;
924 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
927 sym_type
= STT_OBJECT
;
929 if (t
& (VT_STATIC
| VT_INLINE
))
930 sym_bind
= STB_LOCAL
;
932 sym_bind
= STB_GLOBAL
;
936 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
937 Sym
*ref
= sym
->type
.ref
;
938 if (ref
->a
.nodecorate
) {
939 can_add_underscore
= 0;
941 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
942 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
944 other
|= ST_PE_STDCALL
;
945 can_add_underscore
= 0;
950 if (sym
->asm_label
) {
951 name
= get_tok_str(sym
->asm_label
, NULL
);
952 can_add_underscore
= 0;
955 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
957 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
961 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
962 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
964 if (tcc_state
->do_debug
965 && sym_type
!= STT_FUNC
966 && sym
->v
< SYM_FIRST_ANOM
)
967 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
);
971 esym
->st_value
= value
;
972 esym
->st_size
= size
;
973 esym
->st_shndx
= sh_num
;
978 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
979 addr_t value
, unsigned long size
)
981 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
982 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
985 /* add a new relocation entry to symbol 'sym' in section 's' */
986 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
991 if (nocode_wanted
&& s
== cur_text_section
)
996 put_extern_sym(sym
, NULL
, 0, 0);
1000 /* now we can add ELF relocation info */
1001 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
1005 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
1007 greloca(s
, sym
, offset
, type
, 0);
1011 /* ------------------------------------------------------------------------- */
1012 /* symbol allocator */
1013 static Sym
*__sym_malloc(void)
1015 Sym
*sym_pool
, *sym
, *last_sym
;
1018 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
1019 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1021 last_sym
= sym_free_first
;
1023 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1024 sym
->next
= last_sym
;
1028 sym_free_first
= last_sym
;
1032 static inline Sym
*sym_malloc(void)
1036 sym
= sym_free_first
;
1038 sym
= __sym_malloc();
1039 sym_free_first
= sym
->next
;
1042 sym
= tcc_malloc(sizeof(Sym
));
1047 ST_INLN
void sym_free(Sym
*sym
)
1050 sym
->next
= sym_free_first
;
1051 sym_free_first
= sym
;
1057 /* push, without hashing */
1058 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1063 memset(s
, 0, sizeof *s
);
1073 /* find a symbol and return its associated structure. 's' is the top
1074 of the symbol stack */
1075 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1080 else if (s
->v
== -1)
1087 /* structure lookup */
1088 ST_INLN Sym
*struct_find(int v
)
1091 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1093 return table_ident
[v
]->sym_struct
;
1096 /* find an identifier */
1097 ST_INLN Sym
*sym_find(int v
)
1100 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1102 return table_ident
[v
]->sym_identifier
;
1105 static int sym_scope(Sym
*s
)
1107 if (IS_ENUM_VAL (s
->type
.t
))
1108 return s
->type
.ref
->sym_scope
;
1110 return s
->sym_scope
;
1113 /* push a given symbol on the symbol stack */
1114 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1123 s
= sym_push2(ps
, v
, type
->t
, c
);
1124 s
->type
.ref
= type
->ref
;
1126 /* don't record fields or anonymous symbols */
1128 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1129 /* record symbol in token array */
1130 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1132 ps
= &ts
->sym_struct
;
1134 ps
= &ts
->sym_identifier
;
1137 s
->sym_scope
= local_scope
;
1138 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1139 tcc_error("redeclaration of '%s'",
1140 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1145 /* push a global identifier */
1146 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1149 s
= sym_push2(&global_stack
, v
, t
, c
);
1150 s
->r
= VT_CONST
| VT_SYM
;
1151 /* don't record anonymous symbol */
1152 if (v
< SYM_FIRST_ANOM
) {
1153 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1154 /* modify the top most local identifier, so that sym_identifier will
1155 point to 's' when popped; happens when called from inline asm */
1156 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1157 ps
= &(*ps
)->prev_tok
;
1164 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1165 pop them yet from the list, but do remove them from the token array. */
1166 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1176 /* remove symbol in token array */
1178 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1179 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1181 ps
= &ts
->sym_struct
;
1183 ps
= &ts
->sym_identifier
;
1194 /* ------------------------------------------------------------------------- */
1195 static void vcheck_cmp(void)
1197 /* cannot let cpu flags if other instruction are generated. Also
1198 avoid leaving VT_JMP anywhere except on the top of the stack
1199 because it would complicate the code generator.
1201 Don't do this when nocode_wanted. vtop might come from
1202 !nocode_wanted regions (see 88_codeopt.c) and transforming
1203 it to a register without actually generating code is wrong
1204 as their value might still be used for real. All values
1205 we push under nocode_wanted will eventually be popped
1206 again, so that the VT_CMP/VT_JMP value will be in vtop
1207 when code is unsuppressed again. */
1209 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1213 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1215 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1216 tcc_error("memory full (vstack)");
1221 vtop
->r2
= VT_CONST
;
1226 ST_FUNC
void vswap(void)
1236 /* pop stack value */
1237 ST_FUNC
void vpop(void)
1240 v
= vtop
->r
& VT_VALMASK
;
1241 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1242 /* for x86, we need to pop the FP stack */
1243 if (v
== TREG_ST0
) {
1244 o(0xd8dd); /* fstp %st(0) */
1248 /* need to put correct jump if && or || without test */
1255 /* push constant of type "type" with useless value */
1256 static void vpush(CType
*type
)
1258 vset(type
, VT_CONST
, 0);
1261 /* push arbitrary 64bit constant */
1262 static void vpush64(int ty
, unsigned long long v
)
1269 vsetc(&ctype
, VT_CONST
, &cval
);
1272 /* push integer constant */
1273 ST_FUNC
void vpushi(int v
)
1278 /* push a pointer sized constant */
1279 static void vpushs(addr_t v
)
1281 vpush64(VT_SIZE_T
, v
);
1284 /* push long long constant */
1285 static inline void vpushll(long long v
)
1287 vpush64(VT_LLONG
, v
);
1290 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1294 vsetc(type
, r
, &cval
);
1297 static void vseti(int r
, int v
)
1305 ST_FUNC
void vpushv(SValue
*v
)
1307 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1308 tcc_error("memory full (vstack)");
1313 static void vdup(void)
1318 /* rotate n first stack elements to the bottom
1319 I1 ... In -> I2 ... In I1 [top is right]
1321 ST_FUNC
void vrotb(int n
)
1328 for(i
=-n
+1;i
!=0;i
++)
1329 vtop
[i
] = vtop
[i
+1];
1333 /* rotate the n elements before entry e towards the top
1334 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1336 ST_FUNC
void vrote(SValue
*e
, int n
)
1343 for(i
= 0;i
< n
- 1; i
++)
1348 /* rotate n first stack elements to the top
1349 I1 ... In -> In I1 ... I(n-1) [top is right]
1351 ST_FUNC
void vrott(int n
)
1356 /* ------------------------------------------------------------------------- */
1357 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1359 /* called from generators to set the result from relational ops */
1360 ST_FUNC
void vset_VT_CMP(int op
)
1368 /* called once before asking generators to load VT_CMP to a register */
1369 static void vset_VT_JMP(void)
1371 int op
= vtop
->cmp_op
;
1373 if (vtop
->jtrue
|| vtop
->jfalse
) {
1374 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1375 int inv
= op
& (op
< 2); /* small optimization */
1376 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1378 /* otherwise convert flags (rsp. 0/1) to register */
1380 if (op
< 2) /* doesn't seem to happen */
1385 /* Set CPU Flags, doesn't yet jump */
1386 static void gvtst_set(int inv
, int t
)
1390 if (vtop
->r
!= VT_CMP
) {
1393 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1394 vset_VT_CMP(vtop
->c
.i
!= 0);
1397 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1398 *p
= gjmp_append(*p
, t
);
1401 /* Generate value test
1403 * Generate a test for any value (jump, comparison and integers) */
1404 static int gvtst(int inv
, int t
)
1409 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1411 x
= u
, u
= t
, t
= x
;
1414 /* jump to the wanted target */
1416 t
= gjmp_cond(op
^ inv
, t
);
1419 /* resolve complementary jumps to here */
1426 /* generate a zero or nozero test */
1427 static void gen_test_zero(int op
)
1429 if (vtop
->r
== VT_CMP
) {
1433 vtop
->jfalse
= vtop
->jtrue
;
1443 /* ------------------------------------------------------------------------- */
1444 /* push a symbol value of TYPE */
1445 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1449 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1453 /* Return a static symbol pointing to a section */
1454 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1460 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1461 sym
->type
.t
|= VT_STATIC
;
1462 put_extern_sym(sym
, sec
, offset
, size
);
1466 /* push a reference to a section offset by adding a dummy symbol */
1467 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1469 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1472 /* define a new external reference to a symbol 'v' of type 'u' */
1473 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1479 /* push forward reference */
1480 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1481 s
->type
.ref
= type
->ref
;
1482 } else if (IS_ASM_SYM(s
)) {
1483 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1484 s
->type
.ref
= type
->ref
;
1490 /* create an external reference with no specific type similar to asm labels.
1491 This avoids type conflicts if the symbol is used from C too */
1492 ST_FUNC Sym
*external_helper_sym(int v
)
1494 CType ct
= { VT_ASM_FUNC
, NULL
};
1495 return external_global_sym(v
, &ct
);
1498 /* push a reference to an helper function (such as memmove) */
1499 ST_FUNC
void vpush_helper_func(int v
)
1501 vpushsym(&func_old_type
, external_helper_sym(v
));
1504 /* Merge symbol attributes. */
1505 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1507 if (sa1
->aligned
&& !sa
->aligned
)
1508 sa
->aligned
= sa1
->aligned
;
1509 sa
->packed
|= sa1
->packed
;
1510 sa
->weak
|= sa1
->weak
;
1511 if (sa1
->visibility
!= STV_DEFAULT
) {
1512 int vis
= sa
->visibility
;
1513 if (vis
== STV_DEFAULT
1514 || vis
> sa1
->visibility
)
1515 vis
= sa1
->visibility
;
1516 sa
->visibility
= vis
;
1518 sa
->dllexport
|= sa1
->dllexport
;
1519 sa
->nodecorate
|= sa1
->nodecorate
;
1520 sa
->dllimport
|= sa1
->dllimport
;
1523 /* Merge function attributes. */
1524 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1526 if (fa1
->func_call
&& !fa
->func_call
)
1527 fa
->func_call
= fa1
->func_call
;
1528 if (fa1
->func_type
&& !fa
->func_type
)
1529 fa
->func_type
= fa1
->func_type
;
1530 if (fa1
->func_args
&& !fa
->func_args
)
1531 fa
->func_args
= fa1
->func_args
;
1532 if (fa1
->func_noreturn
)
1533 fa
->func_noreturn
= 1;
1540 /* Merge attributes. */
1541 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1543 merge_symattr(&ad
->a
, &ad1
->a
);
1544 merge_funcattr(&ad
->f
, &ad1
->f
);
1547 ad
->section
= ad1
->section
;
1548 if (ad1
->alias_target
)
1549 ad
->alias_target
= ad1
->alias_target
;
1551 ad
->asm_label
= ad1
->asm_label
;
1553 ad
->attr_mode
= ad1
->attr_mode
;
1556 /* Merge some type attributes. */
1557 static void patch_type(Sym
*sym
, CType
*type
)
1559 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1560 if (!(sym
->type
.t
& VT_EXTERN
))
1561 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1562 sym
->type
.t
&= ~VT_EXTERN
;
1565 if (IS_ASM_SYM(sym
)) {
1566 /* stay static if both are static */
1567 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1568 sym
->type
.ref
= type
->ref
;
1571 if (!is_compatible_types(&sym
->type
, type
)) {
1572 tcc_error("incompatible types for redefinition of '%s'",
1573 get_tok_str(sym
->v
, NULL
));
1575 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1576 int static_proto
= sym
->type
.t
& VT_STATIC
;
1577 /* warn if static follows non-static function declaration */
1578 if ((type
->t
& VT_STATIC
) && !static_proto
1579 /* XXX this test for inline shouldn't be here. Until we
1580 implement gnu-inline mode again it silences a warning for
1581 mingw caused by our workarounds. */
1582 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1583 tcc_warning("static storage ignored for redefinition of '%s'",
1584 get_tok_str(sym
->v
, NULL
));
1586 /* set 'inline' if both agree or if one has static */
1587 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1588 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1589 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1590 static_proto
|= VT_INLINE
;
1593 if (0 == (type
->t
& VT_EXTERN
)) {
1594 struct FuncAttr f
= sym
->type
.ref
->f
;
1595 /* put complete type, use static from prototype */
1596 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1597 sym
->type
.ref
= type
->ref
;
1598 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1600 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1603 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1604 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1605 sym
->type
.ref
= type
->ref
;
1609 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1610 /* set array size if it was omitted in extern declaration */
1611 sym
->type
.ref
->c
= type
->ref
->c
;
1613 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1614 tcc_warning("storage mismatch for redefinition of '%s'",
1615 get_tok_str(sym
->v
, NULL
));
1619 /* Merge some storage attributes. */
1620 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1623 patch_type(sym
, type
);
1625 #ifdef TCC_TARGET_PE
1626 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1627 tcc_error("incompatible dll linkage for redefinition of '%s'",
1628 get_tok_str(sym
->v
, NULL
));
1630 merge_symattr(&sym
->a
, &ad
->a
);
1632 sym
->asm_label
= ad
->asm_label
;
1633 update_storage(sym
);
1636 /* copy sym to other stack */
1637 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1640 s
= sym_malloc(), *s
= *s0
;
1641 s
->prev
= *ps
, *ps
= s
;
1642 if (s
->v
< SYM_FIRST_ANOM
) {
1643 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1644 s
->prev_tok
= *ps
, *ps
= s
;
1649 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1650 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1652 int bt
= s
->type
.t
& VT_BTYPE
;
1653 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1654 Sym
**sp
= &s
->type
.ref
;
1655 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1656 Sym
*s2
= sym_copy(s
, ps
);
1657 sp
= &(*sp
= s2
)->next
;
1658 sym_copy_ref(s2
, ps
);
1663 /* define a new external reference to a symbol 'v' */
1664 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1668 /* look for global symbol */
1670 while (s
&& s
->sym_scope
)
1674 /* push forward reference */
1675 s
= global_identifier_push(v
, type
->t
, 0);
1678 s
->asm_label
= ad
->asm_label
;
1679 s
->type
.ref
= type
->ref
;
1680 /* copy type to the global stack */
1682 sym_copy_ref(s
, &global_stack
);
1684 patch_storage(s
, ad
, type
);
1686 /* push variables on local_stack if any */
1687 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1688 s
= sym_copy(s
, &local_stack
);
1692 /* save registers up to (vtop - n) stack entry */
1693 ST_FUNC
void save_regs(int n
)
1696 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1700 /* save r to the memory stack, and mark it as being free */
1701 ST_FUNC
void save_reg(int r
)
1703 save_reg_upstack(r
, 0);
1706 /* save r to the memory stack, and mark it as being free,
1707 if seen up to (vtop - n) stack entry */
1708 ST_FUNC
void save_reg_upstack(int r
, int n
)
1710 int l
, size
, align
, bt
;
1713 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1718 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1719 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1720 /* must save value on stack if not already done */
1722 bt
= p
->type
.t
& VT_BTYPE
;
1725 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1728 size
= type_size(&sv
.type
, &align
);
1729 l
= get_temp_local_var(size
,align
);
1730 sv
.r
= VT_LOCAL
| VT_LVAL
;
1732 store(p
->r
& VT_VALMASK
, &sv
);
1733 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1734 /* x86 specific: need to pop fp register ST0 if saved */
1735 if (r
== TREG_ST0
) {
1736 o(0xd8dd); /* fstp %st(0) */
1739 /* special long long case */
1740 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1745 /* mark that stack entry as being saved on the stack */
1746 if (p
->r
& VT_LVAL
) {
1747 /* also clear the bounded flag because the
1748 relocation address of the function was stored in
1750 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1752 p
->r
= VT_LVAL
| VT_LOCAL
;
1760 #ifdef TCC_TARGET_ARM
1761 /* find a register of class 'rc2' with at most one reference on stack.
1762 * If none, call get_reg(rc) */
1763 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1768 for(r
=0;r
<NB_REGS
;r
++) {
1769 if (reg_classes
[r
] & rc2
) {
1772 for(p
= vstack
; p
<= vtop
; p
++) {
1773 if ((p
->r
& VT_VALMASK
) == r
||
1785 /* find a free register of class 'rc'. If none, save one register */
1786 ST_FUNC
int get_reg(int rc
)
1791 /* find a free register */
1792 for(r
=0;r
<NB_REGS
;r
++) {
1793 if (reg_classes
[r
] & rc
) {
1796 for(p
=vstack
;p
<=vtop
;p
++) {
1797 if ((p
->r
& VT_VALMASK
) == r
||
1806 /* no register left : free the first one on the stack (VERY
1807 IMPORTANT to start from the bottom to ensure that we don't
1808 spill registers used in gen_opi()) */
1809 for(p
=vstack
;p
<=vtop
;p
++) {
1810 /* look at second register (if long long) */
1812 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1814 r
= p
->r
& VT_VALMASK
;
1815 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1821 /* Should never comes here */
1825 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1826 static int get_temp_local_var(int size
,int align
){
1828 struct temp_local_variable
*temp_var
;
1835 for(i
=0;i
<nb_temp_local_vars
;i
++){
1836 temp_var
=&arr_temp_local_vars
[i
];
1837 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1840 /*check if temp_var is free*/
1842 for(p
=vstack
;p
<=vtop
;p
++) {
1844 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1845 if(p
->c
.i
==temp_var
->location
){
1852 found_var
=temp_var
->location
;
1858 loc
= (loc
- size
) & -align
;
1859 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1860 temp_var
=&arr_temp_local_vars
[i
];
1861 temp_var
->location
=loc
;
1862 temp_var
->size
=size
;
1863 temp_var
->align
=align
;
1864 nb_temp_local_vars
++;
1871 static void clear_temp_local_var_list(){
1872 nb_temp_local_vars
=0;
1875 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1877 static void move_reg(int r
, int s
, int t
)
1891 /* get address of vtop (vtop MUST BE an lvalue) */
1892 ST_FUNC
void gaddrof(void)
1894 vtop
->r
&= ~VT_LVAL
;
1895 /* tricky: if saved lvalue, then we can go back to lvalue */
1896 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1897 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1900 #ifdef CONFIG_TCC_BCHECK
1901 /* generate a bounded pointer addition */
1902 static void gen_bounded_ptr_add(void)
1904 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1909 vpush_helper_func(TOK___bound_ptr_add
);
1914 /* returned pointer is in REG_IRET */
1915 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1918 /* relocation offset of the bounding function call point */
1919 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1922 /* patch pointer addition in vtop so that pointer dereferencing is
1924 static void gen_bounded_ptr_deref(void)
1934 size
= type_size(&vtop
->type
, &align
);
1936 case 1: func
= TOK___bound_ptr_indir1
; break;
1937 case 2: func
= TOK___bound_ptr_indir2
; break;
1938 case 4: func
= TOK___bound_ptr_indir4
; break;
1939 case 8: func
= TOK___bound_ptr_indir8
; break;
1940 case 12: func
= TOK___bound_ptr_indir12
; break;
1941 case 16: func
= TOK___bound_ptr_indir16
; break;
1943 /* may happen with struct member access */
1946 sym
= external_helper_sym(func
);
1948 put_extern_sym(sym
, NULL
, 0, 0);
1949 /* patch relocation */
1950 /* XXX: find a better solution ? */
1951 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1952 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1955 /* generate lvalue bound code */
1956 static void gbound(void)
1960 vtop
->r
&= ~VT_MUSTBOUND
;
1961 /* if lvalue, then use checking code before dereferencing */
1962 if (vtop
->r
& VT_LVAL
) {
1963 /* if not VT_BOUNDED value, then make one */
1964 if (!(vtop
->r
& VT_BOUNDED
)) {
1965 /* must save type because we must set it to int to get pointer */
1967 vtop
->type
.t
= VT_PTR
;
1970 gen_bounded_ptr_add();
1974 /* then check for dereferencing */
1975 gen_bounded_ptr_deref();
1979 /* we need to call __bound_ptr_add before we start to load function
1980 args into registers */
1981 ST_FUNC
void gbound_args(int nb_args
)
1986 for (i
= 1; i
<= nb_args
; ++i
)
1987 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1993 sv
= vtop
- nb_args
;
1994 if (sv
->r
& VT_SYM
) {
1998 #ifndef TCC_TARGET_PE
1999 || v
== TOK_sigsetjmp
2000 || v
== TOK___sigsetjmp
2003 vpush_helper_func(TOK___bound_setjmp
);
2006 func_bound_add_epilog
= 1;
2008 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2009 if (v
== TOK_alloca
)
2010 func_bound_add_epilog
= 1;
2013 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
2014 sv
->sym
->asm_label
= TOK___bound_longjmp
;
2019 /* Add bounds for local symbols from S to E (via ->prev) */
2020 static void add_local_bounds(Sym
*s
, Sym
*e
)
2022 for (; s
!= e
; s
= s
->prev
) {
2023 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
2025 /* Add arrays/structs/unions because we always take address */
2026 if ((s
->type
.t
& VT_ARRAY
)
2027 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
2028 || s
->a
.addrtaken
) {
2029 /* add local bound info */
2030 int align
, size
= type_size(&s
->type
, &align
);
2031 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
2032 2 * sizeof(addr_t
));
2033 bounds_ptr
[0] = s
->c
;
2034 bounds_ptr
[1] = size
;
2040 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2041 static void pop_local_syms(Sym
**ptop
, Sym
*b
, int keep
, int ellipsis
)
2043 #ifdef CONFIG_TCC_BCHECK
2044 if (tcc_state
->do_bounds_check
&& !ellipsis
&& !keep
)
2045 add_local_bounds(*ptop
, b
);
2047 if (tcc_state
->do_debug
)
2048 tcc_add_debug_info (tcc_state
, !local_scope
, *ptop
, b
);
2049 sym_pop(ptop
, b
, keep
);
2052 static void incr_bf_adr(int o
)
2054 vtop
->type
= char_pointer_type
;
2058 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
2062 /* single-byte load mode for packed or otherwise unaligned bitfields */
2063 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
2066 save_reg_upstack(vtop
->r
, 1);
2067 vpush64(type
->t
& VT_BTYPE
, 0); // B X
2068 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2077 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
2079 vpushi((1 << n
) - 1), gen_op('&');
2082 vpushi(bits
), gen_op(TOK_SHL
);
2085 bits
+= n
, bit_size
-= n
, o
= 1;
2088 if (!(type
->t
& VT_UNSIGNED
)) {
2089 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2090 vpushi(n
), gen_op(TOK_SHL
);
2091 vpushi(n
), gen_op(TOK_SAR
);
2095 /* single-byte store mode for packed or otherwise unaligned bitfields */
2096 static void store_packed_bf(int bit_pos
, int bit_size
)
2098 int bits
, n
, o
, m
, c
;
2100 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2102 save_reg_upstack(vtop
->r
, 1);
2103 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2105 incr_bf_adr(o
); // X B
2107 c
? vdup() : gv_dup(); // B V X
2110 vpushi(bits
), gen_op(TOK_SHR
);
2112 vpushi(bit_pos
), gen_op(TOK_SHL
);
2117 m
= ((1 << n
) - 1) << bit_pos
;
2118 vpushi(m
), gen_op('&'); // X B V1
2119 vpushv(vtop
-1); // X B V1 B
2120 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2121 gen_op('&'); // X B V1 B1
2122 gen_op('|'); // X B V2
2124 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2125 vstore(), vpop(); // X B
2126 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2131 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2134 if (0 == sv
->type
.ref
)
2136 t
= sv
->type
.ref
->auxtype
;
2137 if (t
!= -1 && t
!= VT_STRUCT
) {
2138 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
2144 /* store vtop a register belonging to class 'rc'. lvalues are
2145 converted to values. Cannot be used if cannot be converted to
2146 register value (such as structures). */
2147 ST_FUNC
int gv(int rc
)
2149 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2150 int bit_pos
, bit_size
, size
, align
;
2152 /* NOTE: get_reg can modify vstack[] */
2153 if (vtop
->type
.t
& VT_BITFIELD
) {
2156 bit_pos
= BIT_POS(vtop
->type
.t
);
2157 bit_size
= BIT_SIZE(vtop
->type
.t
);
2158 /* remove bit field info to avoid loops */
2159 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2162 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2163 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2164 type
.t
|= VT_UNSIGNED
;
2166 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2168 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2173 if (r
== VT_STRUCT
) {
2174 load_packed_bf(&type
, bit_pos
, bit_size
);
2176 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2177 /* cast to int to propagate signedness in following ops */
2179 /* generate shifts */
2180 vpushi(bits
- (bit_pos
+ bit_size
));
2182 vpushi(bits
- bit_size
);
2183 /* NOTE: transformed to SHR if unsigned */
2188 if (is_float(vtop
->type
.t
) &&
2189 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2190 /* CPUs usually cannot use float constants, so we store them
2191 generically in data segment */
2192 init_params p
= { data_section
};
2193 unsigned long offset
;
2194 size
= type_size(&vtop
->type
, &align
);
2196 size
= 0, align
= 1;
2197 offset
= section_add(p
.sec
, size
, align
);
2198 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
2200 init_putv(&p
, &vtop
->type
, offset
);
2203 #ifdef CONFIG_TCC_BCHECK
2204 if (vtop
->r
& VT_MUSTBOUND
)
2208 bt
= vtop
->type
.t
& VT_BTYPE
;
2210 #ifdef TCC_TARGET_RISCV64
2212 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2215 rc2
= RC2_TYPE(bt
, rc
);
2217 /* need to reload if:
2219 - lvalue (need to dereference pointer)
2220 - already a register, but not in the right class */
2221 r
= vtop
->r
& VT_VALMASK
;
2222 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2223 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2225 if (!r_ok
|| !r2_ok
) {
2229 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2230 int original_type
= vtop
->type
.t
;
2232 /* two register type load :
2233 expand to two words temporarily */
2234 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2236 unsigned long long ll
= vtop
->c
.i
;
2237 vtop
->c
.i
= ll
; /* first word */
2239 vtop
->r
= r
; /* save register value */
2240 vpushi(ll
>> 32); /* second word */
2241 } else if (vtop
->r
& VT_LVAL
) {
2242 /* We do not want to modifier the long long pointer here.
2243 So we save any other instances down the stack */
2244 save_reg_upstack(vtop
->r
, 1);
2245 /* load from memory */
2246 vtop
->type
.t
= load_type
;
2249 vtop
[-1].r
= r
; /* save register value */
2250 /* increment pointer to get second word */
2251 vtop
->type
.t
= VT_PTRDIFF_T
;
2256 vtop
->type
.t
= load_type
;
2258 /* move registers */
2261 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2264 vtop
[-1].r
= r
; /* save register value */
2265 vtop
->r
= vtop
[-1].r2
;
2267 /* Allocate second register. Here we rely on the fact that
2268 get_reg() tries first to free r2 of an SValue. */
2272 /* write second register */
2275 vtop
->type
.t
= original_type
;
2277 if (vtop
->r
== VT_CMP
)
2279 /* one register type load */
2284 #ifdef TCC_TARGET_C67
2285 /* uses register pairs for doubles */
2286 if (bt
== VT_DOUBLE
)
2293 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2294 ST_FUNC
void gv2(int rc1
, int rc2
)
2296 /* generate more generic register first. But VT_JMP or VT_CMP
2297 values must be generated first in all cases to avoid possible
2299 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2304 /* test if reload is needed for first register */
2305 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2315 /* test if reload is needed for first register */
2316 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2323 /* expand 64bit on stack in two ints */
2324 ST_FUNC
void lexpand(void)
2327 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2328 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2329 if (v
== VT_CONST
) {
2332 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2338 vtop
[0].r
= vtop
[-1].r2
;
2339 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2341 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2346 /* build a long long from two ints */
2347 static void lbuild(int t
)
2349 gv2(RC_INT
, RC_INT
);
2350 vtop
[-1].r2
= vtop
[0].r
;
2351 vtop
[-1].type
.t
= t
;
2356 /* convert stack entry to register and duplicate its value in another
2358 static void gv_dup(void)
2364 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2365 if (t
& VT_BITFIELD
) {
2375 /* stack: H L L1 H1 */
2385 /* duplicate value */
2395 /* generate CPU independent (unsigned) long long operations */
2396 static void gen_opl(int op
)
2398 int t
, a
, b
, op1
, c
, i
;
2400 unsigned short reg_iret
= REG_IRET
;
2401 unsigned short reg_lret
= REG_IRE2
;
2407 func
= TOK___divdi3
;
2410 func
= TOK___udivdi3
;
2413 func
= TOK___moddi3
;
2416 func
= TOK___umoddi3
;
2423 /* call generic long long function */
2424 vpush_helper_func(func
);
2429 vtop
->r2
= reg_lret
;
2437 //pv("gen_opl A",0,2);
2443 /* stack: L1 H1 L2 H2 */
2448 vtop
[-2] = vtop
[-3];
2451 /* stack: H1 H2 L1 L2 */
2452 //pv("gen_opl B",0,4);
2458 /* stack: H1 H2 L1 L2 ML MH */
2461 /* stack: ML MH H1 H2 L1 L2 */
2465 /* stack: ML MH H1 L2 H2 L1 */
2470 /* stack: ML MH M1 M2 */
2473 } else if (op
== '+' || op
== '-') {
2474 /* XXX: add non carry method too (for MIPS or alpha) */
2480 /* stack: H1 H2 (L1 op L2) */
2483 gen_op(op1
+ 1); /* TOK_xxxC2 */
2486 /* stack: H1 H2 (L1 op L2) */
2489 /* stack: (L1 op L2) H1 H2 */
2491 /* stack: (L1 op L2) (H1 op H2) */
2499 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2500 t
= vtop
[-1].type
.t
;
2504 /* stack: L H shift */
2506 /* constant: simpler */
2507 /* NOTE: all comments are for SHL. the other cases are
2508 done by swapping words */
2519 if (op
!= TOK_SAR
) {
2552 /* XXX: should provide a faster fallback on x86 ? */
2555 func
= TOK___ashrdi3
;
2558 func
= TOK___lshrdi3
;
2561 func
= TOK___ashldi3
;
2567 /* compare operations */
2573 /* stack: L1 H1 L2 H2 */
2575 vtop
[-1] = vtop
[-2];
2577 /* stack: L1 L2 H1 H2 */
2581 /* when values are equal, we need to compare low words. since
2582 the jump is inverted, we invert the test too. */
2585 else if (op1
== TOK_GT
)
2587 else if (op1
== TOK_ULT
)
2589 else if (op1
== TOK_UGT
)
2599 /* generate non equal test */
2601 vset_VT_CMP(TOK_NE
);
2605 /* compare low. Always unsigned */
2609 else if (op1
== TOK_LE
)
2611 else if (op1
== TOK_GT
)
2613 else if (op1
== TOK_GE
)
2616 #if 0//def TCC_TARGET_I386
2617 if (op
== TOK_NE
) { gsym(b
); break; }
2618 if (op
== TOK_EQ
) { gsym(a
); break; }
2627 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
2629 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
2630 return (a
^ b
) >> 63 ? -x
: x
;
2633 static int gen_opic_lt(uint64_t a
, uint64_t b
)
2635 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
2638 /* handle integer constant optimizations and various machine
2640 static void gen_opic(int op
)
2642 SValue
*v1
= vtop
- 1;
2644 int t1
= v1
->type
.t
& VT_BTYPE
;
2645 int t2
= v2
->type
.t
& VT_BTYPE
;
2646 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2647 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2648 uint64_t l1
= c1
? v1
->c
.i
: 0;
2649 uint64_t l2
= c2
? v2
->c
.i
: 0;
2650 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2652 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2653 l1
= ((uint32_t)l1
|
2654 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2655 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2656 l2
= ((uint32_t)l2
|
2657 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2661 case '+': l1
+= l2
; break;
2662 case '-': l1
-= l2
; break;
2663 case '&': l1
&= l2
; break;
2664 case '^': l1
^= l2
; break;
2665 case '|': l1
|= l2
; break;
2666 case '*': l1
*= l2
; break;
2673 /* if division by zero, generate explicit division */
2675 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2676 tcc_error("division by zero in constant");
2680 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2681 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2682 case TOK_UDIV
: l1
= l1
/ l2
; break;
2683 case TOK_UMOD
: l1
= l1
% l2
; break;
2686 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2687 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2689 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2692 case TOK_ULT
: l1
= l1
< l2
; break;
2693 case TOK_UGE
: l1
= l1
>= l2
; break;
2694 case TOK_EQ
: l1
= l1
== l2
; break;
2695 case TOK_NE
: l1
= l1
!= l2
; break;
2696 case TOK_ULE
: l1
= l1
<= l2
; break;
2697 case TOK_UGT
: l1
= l1
> l2
; break;
2698 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2699 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2700 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2701 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2703 case TOK_LAND
: l1
= l1
&& l2
; break;
2704 case TOK_LOR
: l1
= l1
|| l2
; break;
2708 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2709 l1
= ((uint32_t)l1
|
2710 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2714 /* if commutative ops, put c2 as constant */
2715 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2716 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2718 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2719 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2721 if (!const_wanted
&&
2723 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2724 (l1
== -1 && op
== TOK_SAR
))) {
2725 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2727 } else if (!const_wanted
&&
2728 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2730 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2731 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2732 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2737 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2740 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2741 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2744 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2745 /* filter out NOP operations like x*1, x-0, x&-1... */
2747 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2748 /* try to use shifts instead of muls or divs */
2749 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2758 else if (op
== TOK_PDIV
)
2764 } else if (c2
&& (op
== '+' || op
== '-') &&
2765 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2766 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2767 /* symbol + constant case */
2771 /* The backends can't always deal with addends to symbols
2772 larger than +-1<<31. Don't construct such. */
2779 /* call low level op generator */
2780 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2781 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2789 /* generate a floating point operation with constant propagation */
2790 static void gen_opif(int op
)
2794 #if defined _MSC_VER && defined __x86_64__
2795 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2802 /* currently, we cannot do computations with forward symbols */
2803 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2804 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2806 if (v1
->type
.t
== VT_FLOAT
) {
2809 } else if (v1
->type
.t
== VT_DOUBLE
) {
2817 /* NOTE: we only do constant propagation if finite number (not
2818 NaN or infinity) (ANSI spec) */
2819 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2823 case '+': f1
+= f2
; break;
2824 case '-': f1
-= f2
; break;
2825 case '*': f1
*= f2
; break;
2828 /* If not in initializer we need to potentially generate
2829 FP exceptions at runtime, otherwise we want to fold. */
2835 /* XXX: also handles tests ? */
2839 /* XXX: overflow test ? */
2840 if (v1
->type
.t
== VT_FLOAT
) {
2842 } else if (v1
->type
.t
== VT_DOUBLE
) {
2854 /* print a type. If 'varstr' is not NULL, then the variable is also
2855 printed in the type */
2857 /* XXX: add array and function pointers */
2858 static void type_to_str(char *buf
, int buf_size
,
2859 CType
*type
, const char *varstr
)
2871 pstrcat(buf
, buf_size
, "extern ");
2873 pstrcat(buf
, buf_size
, "static ");
2875 pstrcat(buf
, buf_size
, "typedef ");
2877 pstrcat(buf
, buf_size
, "inline ");
2878 if (t
& VT_VOLATILE
)
2879 pstrcat(buf
, buf_size
, "volatile ");
2880 if (t
& VT_CONSTANT
)
2881 pstrcat(buf
, buf_size
, "const ");
2883 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2884 || ((t
& VT_UNSIGNED
)
2885 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2888 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2890 buf_size
-= strlen(buf
);
2926 tstr
= "long double";
2928 pstrcat(buf
, buf_size
, tstr
);
2935 pstrcat(buf
, buf_size
, tstr
);
2936 v
= type
->ref
->v
& ~SYM_STRUCT
;
2937 if (v
>= SYM_FIRST_ANOM
)
2938 pstrcat(buf
, buf_size
, "<anonymous>");
2940 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2945 if (varstr
&& '*' == *varstr
) {
2946 pstrcat(buf1
, sizeof(buf1
), "(");
2947 pstrcat(buf1
, sizeof(buf1
), varstr
);
2948 pstrcat(buf1
, sizeof(buf1
), ")");
2950 pstrcat(buf1
, buf_size
, "(");
2952 while (sa
!= NULL
) {
2954 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2955 pstrcat(buf1
, sizeof(buf1
), buf2
);
2958 pstrcat(buf1
, sizeof(buf1
), ", ");
2960 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2961 pstrcat(buf1
, sizeof(buf1
), ", ...");
2962 pstrcat(buf1
, sizeof(buf1
), ")");
2963 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2968 if (varstr
&& '*' == *varstr
)
2969 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2971 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2972 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2975 pstrcpy(buf1
, sizeof(buf1
), "*");
2976 if (t
& VT_CONSTANT
)
2977 pstrcat(buf1
, buf_size
, "const ");
2978 if (t
& VT_VOLATILE
)
2979 pstrcat(buf1
, buf_size
, "volatile ");
2981 pstrcat(buf1
, sizeof(buf1
), varstr
);
2982 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2986 pstrcat(buf
, buf_size
, " ");
2987 pstrcat(buf
, buf_size
, varstr
);
2992 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2994 char buf1
[256], buf2
[256];
2995 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2996 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2997 tcc_error(fmt
, buf1
, buf2
);
3000 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
3002 char buf1
[256], buf2
[256];
3003 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3004 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3005 tcc_warning(fmt
, buf1
, buf2
);
3008 static int pointed_size(CType
*type
)
3011 return type_size(pointed_type(type
), &align
);
3014 static void vla_runtime_pointed_size(CType
*type
)
3017 vla_runtime_type_size(pointed_type(type
), &align
);
3020 static inline int is_null_pointer(SValue
*p
)
3022 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
3024 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
3025 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
3026 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
3027 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
3028 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
3029 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3033 /* compare function types. OLD functions match any new functions */
3034 static int is_compatible_func(CType
*type1
, CType
*type2
)
3040 if (s1
->f
.func_call
!= s2
->f
.func_call
)
3042 if (s1
->f
.func_type
!= s2
->f
.func_type
3043 && s1
->f
.func_type
!= FUNC_OLD
3044 && s2
->f
.func_type
!= FUNC_OLD
)
3047 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3049 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
3060 /* return true if type1 and type2 are the same. If unqualified is
3061 true, qualifiers on the types are ignored.
3063 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3067 t1
= type1
->t
& VT_TYPE
;
3068 t2
= type2
->t
& VT_TYPE
;
3070 /* strip qualifiers before comparing */
3071 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3072 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3075 /* Default Vs explicit signedness only matters for char */
3076 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3080 /* XXX: bitfields ? */
3085 && !(type1
->ref
->c
< 0
3086 || type2
->ref
->c
< 0
3087 || type1
->ref
->c
== type2
->ref
->c
))
3090 /* test more complicated cases */
3091 bt1
= t1
& VT_BTYPE
;
3092 if (bt1
== VT_PTR
) {
3093 type1
= pointed_type(type1
);
3094 type2
= pointed_type(type2
);
3095 return is_compatible_types(type1
, type2
);
3096 } else if (bt1
== VT_STRUCT
) {
3097 return (type1
->ref
== type2
->ref
);
3098 } else if (bt1
== VT_FUNC
) {
3099 return is_compatible_func(type1
, type2
);
3100 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
3101 /* If both are enums then they must be the same, if only one is then
3102 t1 and t2 must be equal, which was checked above already. */
3103 return type1
->ref
== type2
->ref
;
3109 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3110 type is stored in DEST if non-null (except for pointer plus/minus) . */
3111 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3113 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3114 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
3120 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3121 ret
= op
== '?' ? 1 : 0;
3122 /* NOTE: as an extension, we accept void on only one side */
3124 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3125 if (op
== '+') ; /* Handled in caller */
3126 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3127 /* If one is a null ptr constant the result type is the other. */
3128 else if (is_null_pointer (op2
)) type
= *type1
;
3129 else if (is_null_pointer (op1
)) type
= *type2
;
3130 else if (bt1
!= bt2
) {
3131 /* accept comparison or cond-expr between pointer and integer
3133 if ((op
== '?' || TOK_ISCOND(op
))
3134 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3135 tcc_warning("pointer/integer mismatch in %s",
3136 op
== '?' ? "conditional expression" : "comparison");
3137 else if (op
!= '-' || !is_integer_btype(bt2
))
3139 type
= *(bt1
== VT_PTR
? type1
: type2
);
3141 CType
*pt1
= pointed_type(type1
);
3142 CType
*pt2
= pointed_type(type2
);
3143 int pbt1
= pt1
->t
& VT_BTYPE
;
3144 int pbt2
= pt2
->t
& VT_BTYPE
;
3145 int newquals
, copied
= 0;
3146 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3147 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3148 if (op
!= '?' && !TOK_ISCOND(op
))
3151 type_incompatibility_warning(type1
, type2
,
3153 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3154 : "pointer type mismatch in comparison('%s' and '%s')");
3157 /* pointers to void get preferred, otherwise the
3158 pointed to types minus qualifs should be compatible */
3159 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3160 /* combine qualifs */
3161 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3162 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3165 /* copy the pointer target symbol */
3166 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3169 pointed_type(&type
)->t
|= newquals
;
3171 /* pointers to incomplete arrays get converted to
3172 pointers to completed ones if possible */
3173 if (pt1
->t
& VT_ARRAY
3174 && pt2
->t
& VT_ARRAY
3175 && pointed_type(&type
)->ref
->c
< 0
3176 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3179 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3181 pointed_type(&type
)->ref
=
3182 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3183 0, pointed_type(&type
)->ref
->c
);
3184 pointed_type(&type
)->ref
->c
=
3185 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3191 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3192 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3195 } else if (is_float(bt1
) || is_float(bt2
)) {
3196 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3197 type
.t
= VT_LDOUBLE
;
3198 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3203 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3204 /* cast to biggest op */
3205 type
.t
= VT_LLONG
| VT_LONG
;
3206 if (bt1
== VT_LLONG
)
3208 if (bt2
== VT_LLONG
)
3210 /* convert to unsigned if it does not fit in a long long */
3211 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3212 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3213 type
.t
|= VT_UNSIGNED
;
3215 /* integer operations */
3216 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3217 /* convert to unsigned if it does not fit in an integer */
3218 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3219 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3220 type
.t
|= VT_UNSIGNED
;
3227 /* generic gen_op: handles types problems */
3228 ST_FUNC
void gen_op(int op
)
3230 int u
, t1
, t2
, bt1
, bt2
, t
;
3231 CType type1
, combtype
;
3234 t1
= vtop
[-1].type
.t
;
3235 t2
= vtop
[0].type
.t
;
3236 bt1
= t1
& VT_BTYPE
;
3237 bt2
= t2
& VT_BTYPE
;
3239 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3240 if (bt2
== VT_FUNC
) {
3241 mk_pointer(&vtop
->type
);
3244 if (bt1
== VT_FUNC
) {
3246 mk_pointer(&vtop
->type
);
3251 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3252 tcc_error_noabort("invalid operand types for binary operation");
3254 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3255 /* at least one operand is a pointer */
3256 /* relational op: must be both pointers */
3259 /* if both pointers, then it must be the '-' op */
3260 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3262 tcc_error("cannot use pointers here");
3263 if (vtop
[-1].type
.t
& VT_VLA
) {
3264 vla_runtime_pointed_size(&vtop
[-1].type
);
3266 vpushi(pointed_size(&vtop
[-1].type
));
3270 vtop
->type
.t
= VT_PTRDIFF_T
;
3274 /* exactly one pointer : must be '+' or '-'. */
3275 if (op
!= '-' && op
!= '+')
3276 tcc_error("cannot use pointers here");
3277 /* Put pointer as first operand */
3278 if (bt2
== VT_PTR
) {
3280 t
= t1
, t1
= t2
, t2
= t
;
3283 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3284 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3287 type1
= vtop
[-1].type
;
3288 if (vtop
[-1].type
.t
& VT_VLA
)
3289 vla_runtime_pointed_size(&vtop
[-1].type
);
3291 u
= pointed_size(&vtop
[-1].type
);
3293 tcc_error("unknown array element size");
3297 /* XXX: cast to int ? (long long case) */
3302 #ifdef CONFIG_TCC_BCHECK
3303 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3304 /* if bounded pointers, we generate a special code to
3311 gen_bounded_ptr_add();
3317 type1
.t
&= ~VT_ARRAY
;
3318 /* put again type if gen_opic() swaped operands */
3322 /* floats can only be used for a few operations */
3323 if (is_float(combtype
.t
)
3324 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3326 tcc_error("invalid operands for binary operation");
3327 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3328 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3329 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3331 t
|= (VT_LONG
& t1
);
3335 t
= t2
= combtype
.t
;
3336 /* XXX: currently, some unsigned operations are explicit, so
3337 we modify them here */
3338 if (t
& VT_UNSIGNED
) {
3345 else if (op
== TOK_LT
)
3347 else if (op
== TOK_GT
)
3349 else if (op
== TOK_LE
)
3351 else if (op
== TOK_GE
)
3357 /* special case for shifts and long long: we keep the shift as
3359 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3366 if (TOK_ISCOND(op
)) {
3367 /* relational op: the result is an int */
3368 vtop
->type
.t
= VT_INT
;
3373 // Make sure that we have converted to an rvalue:
3374 if (vtop
->r
& VT_LVAL
)
3375 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3378 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3379 #define gen_cvt_itof1 gen_cvt_itof
3381 /* generic itof for unsigned long long case */
3382 static void gen_cvt_itof1(int t
)
3384 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3385 (VT_LLONG
| VT_UNSIGNED
)) {
3388 vpush_helper_func(TOK___floatundisf
);
3389 #if LDOUBLE_SIZE != 8
3390 else if (t
== VT_LDOUBLE
)
3391 vpush_helper_func(TOK___floatundixf
);
3394 vpush_helper_func(TOK___floatundidf
);
3405 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3406 #define gen_cvt_ftoi1 gen_cvt_ftoi
3408 /* generic ftoi for unsigned long long case */
3409 static void gen_cvt_ftoi1(int t
)
3412 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3413 /* not handled natively */
3414 st
= vtop
->type
.t
& VT_BTYPE
;
3416 vpush_helper_func(TOK___fixunssfdi
);
3417 #if LDOUBLE_SIZE != 8
3418 else if (st
== VT_LDOUBLE
)
3419 vpush_helper_func(TOK___fixunsxfdi
);
3422 vpush_helper_func(TOK___fixunsdfdi
);
3433 /* special delayed cast for char/short */
3434 static void force_charshort_cast(void)
3436 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3437 int dbt
= vtop
->type
.t
;
3438 vtop
->r
&= ~VT_MUSTCAST
;
3440 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3444 static void gen_cast_s(int t
)
3452 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3453 static void gen_cast(CType
*type
)
3455 int sbt
, dbt
, sf
, df
, c
;
3456 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3458 /* special delayed cast for char/short */
3459 if (vtop
->r
& VT_MUSTCAST
)
3460 force_charshort_cast();
3462 /* bitfields first get cast to ints */
3463 if (vtop
->type
.t
& VT_BITFIELD
)
3466 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3467 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3475 dbt_bt
= dbt
& VT_BTYPE
;
3476 sbt_bt
= sbt
& VT_BTYPE
;
3478 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3479 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3480 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3483 /* constant case: we can do it now */
3484 /* XXX: in ISOC, cannot do it if error in convert */
3485 if (sbt
== VT_FLOAT
)
3486 vtop
->c
.ld
= vtop
->c
.f
;
3487 else if (sbt
== VT_DOUBLE
)
3488 vtop
->c
.ld
= vtop
->c
.d
;
3491 if (sbt_bt
== VT_LLONG
) {
3492 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3493 vtop
->c
.ld
= vtop
->c
.i
;
3495 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3497 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3498 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3500 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3503 if (dbt
== VT_FLOAT
)
3504 vtop
->c
.f
= (float)vtop
->c
.ld
;
3505 else if (dbt
== VT_DOUBLE
)
3506 vtop
->c
.d
= (double)vtop
->c
.ld
;
3507 } else if (sf
&& dbt
== VT_BOOL
) {
3508 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3511 vtop
->c
.i
= vtop
->c
.ld
;
3512 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3514 else if (sbt
& VT_UNSIGNED
)
3515 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3517 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3519 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3521 else if (dbt
== VT_BOOL
)
3522 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3524 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3525 dbt_bt
== VT_SHORT
? 0xffff :
3528 if (!(dbt
& VT_UNSIGNED
))
3529 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3534 } else if (dbt
== VT_BOOL
3535 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3536 == (VT_CONST
| VT_SYM
)) {
3537 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3543 /* cannot generate code for global or static initializers */
3544 if (STATIC_DATA_WANTED
)
3547 /* non constant case: generate code */
3548 if (dbt
== VT_BOOL
) {
3549 gen_test_zero(TOK_NE
);
3555 /* convert from fp to fp */
3558 /* convert int to fp */
3561 /* convert fp to int */
3563 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3566 goto again
; /* may need char/short cast */
3571 ds
= btype_size(dbt_bt
);
3572 ss
= btype_size(sbt_bt
);
3573 if (ds
== 0 || ss
== 0) {
3574 if (dbt_bt
== VT_VOID
)
3576 cast_error(&vtop
->type
, type
);
3578 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3579 tcc_error("cast to incomplete type");
3581 /* same size and no sign conversion needed */
3582 if (ds
== ss
&& ds
>= 4)
3584 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3585 tcc_warning("cast between pointer and integer of different size");
3586 if (sbt_bt
== VT_PTR
) {
3587 /* put integer type to allow logical operations below */
3588 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3592 /* processor allows { int a = 0, b = *(char*)&a; }
3593 That means that if we cast to less width, we can just
3594 change the type and read it still later. */
3595 #define ALLOW_SUBTYPE_ACCESS 1
3597 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3598 /* value still in memory */
3602 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3604 goto done
; /* no 64bit envolved */
3612 /* generate high word */
3613 if (sbt
& VT_UNSIGNED
) {
3622 } else if (ss
== 8) {
3623 /* from long long: just take low order word */
3631 /* need to convert from 32bit to 64bit */
3632 if (sbt
& VT_UNSIGNED
) {
3633 #if defined(TCC_TARGET_RISCV64)
3634 /* RISC-V keeps 32bit vals in registers sign-extended.
3635 So here we need a zero-extension. */
3644 ss
= ds
, ds
= 4, dbt
= sbt
;
3645 } else if (ss
== 8) {
3646 /* RISC-V keeps 32bit vals in registers sign-extended.
3647 So here we need a sign-extension for signed types and
3648 zero-extension. for unsigned types. */
3649 #if !defined(TCC_TARGET_RISCV64)
3650 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3659 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3665 bits
= (ss
- ds
) * 8;
3666 /* for unsigned, gen_op will convert SAR to SHR */
3667 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3670 vpushi(bits
- trunc
);
3677 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3680 /* return type size as known at compile time. Put alignment at 'a' */
3681 ST_FUNC
int type_size(CType
*type
, int *a
)
3686 bt
= type
->t
& VT_BTYPE
;
3687 if (bt
== VT_STRUCT
) {
3692 } else if (bt
== VT_PTR
) {
3693 if (type
->t
& VT_ARRAY
) {
3697 ts
= type_size(&s
->type
, a
);
3699 if (ts
< 0 && s
->c
< 0)
3707 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3708 return -1; /* incomplete enum */
3709 } else if (bt
== VT_LDOUBLE
) {
3711 return LDOUBLE_SIZE
;
3712 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3713 #ifdef TCC_TARGET_I386
3714 #ifdef TCC_TARGET_PE
3719 #elif defined(TCC_TARGET_ARM)
3729 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3732 } else if (bt
== VT_SHORT
) {
3735 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3739 /* char, void, function, _Bool */
3745 /* push type size as known at runtime time on top of value stack. Put
3747 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3749 if (type
->t
& VT_VLA
) {
3750 type_size(&type
->ref
->type
, a
);
3751 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3753 vpushi(type_size(type
, a
));
3757 /* return the pointed type of t */
3758 static inline CType
*pointed_type(CType
*type
)
3760 return &type
->ref
->type
;
3763 /* modify type so that its it is a pointer to type. */
3764 ST_FUNC
void mk_pointer(CType
*type
)
3767 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3768 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3772 /* return true if type1 and type2 are exactly the same (including
3775 static int is_compatible_types(CType
*type1
, CType
*type2
)
3777 return compare_types(type1
,type2
,0);
3780 /* return true if type1 and type2 are the same (ignoring qualifiers).
3782 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3784 return compare_types(type1
,type2
,1);
3787 static void cast_error(CType
*st
, CType
*dt
)
3789 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3792 /* verify type compatibility to store vtop in 'dt' type */
3793 static void verify_assign_cast(CType
*dt
)
3795 CType
*st
, *type1
, *type2
;
3796 int dbt
, sbt
, qualwarn
, lvl
;
3798 st
= &vtop
->type
; /* source type */
3799 dbt
= dt
->t
& VT_BTYPE
;
3800 sbt
= st
->t
& VT_BTYPE
;
3801 if (dt
->t
& VT_CONSTANT
)
3802 tcc_warning("assignment of read-only location");
3806 tcc_error("assignment to void expression");
3809 /* special cases for pointers */
3810 /* '0' can also be a pointer */
3811 if (is_null_pointer(vtop
))
3813 /* accept implicit pointer to integer cast with warning */
3814 if (is_integer_btype(sbt
)) {
3815 tcc_warning("assignment makes pointer from integer without a cast");
3818 type1
= pointed_type(dt
);
3820 type2
= pointed_type(st
);
3821 else if (sbt
== VT_FUNC
)
3822 type2
= st
; /* a function is implicitly a function pointer */
3825 if (is_compatible_types(type1
, type2
))
3827 for (qualwarn
= lvl
= 0;; ++lvl
) {
3828 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3829 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3831 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3832 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3833 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3835 type1
= pointed_type(type1
);
3836 type2
= pointed_type(type2
);
3838 if (!is_compatible_unqualified_types(type1
, type2
)) {
3839 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3840 /* void * can match anything */
3841 } else if (dbt
== sbt
3842 && is_integer_btype(sbt
& VT_BTYPE
)
3843 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3844 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3845 /* Like GCC don't warn by default for merely changes
3846 in pointer target signedness. Do warn for different
3847 base types, though, in particular for unsigned enums
3848 and signed int targets. */
3850 tcc_warning("assignment from incompatible pointer type");
3855 tcc_warning("assignment discards qualifiers from pointer target type");
3861 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3862 tcc_warning("assignment makes integer from pointer without a cast");
3863 } else if (sbt
== VT_STRUCT
) {
3864 goto case_VT_STRUCT
;
3866 /* XXX: more tests */
3870 if (!is_compatible_unqualified_types(dt
, st
)) {
3878 static void gen_assign_cast(CType
*dt
)
3880 verify_assign_cast(dt
);
3884 /* store vtop in lvalue pushed on stack */
3885 ST_FUNC
void vstore(void)
3887 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3889 ft
= vtop
[-1].type
.t
;
3890 sbt
= vtop
->type
.t
& VT_BTYPE
;
3891 dbt
= ft
& VT_BTYPE
;
3893 verify_assign_cast(&vtop
[-1].type
);
3895 if (sbt
== VT_STRUCT
) {
3896 /* if structure, only generate pointer */
3897 /* structure assignment : generate memcpy */
3898 /* XXX: optimize if small size */
3899 size
= type_size(&vtop
->type
, &align
);
3903 #ifdef CONFIG_TCC_BCHECK
3904 if (vtop
->r
& VT_MUSTBOUND
)
3905 gbound(); /* check would be wrong after gaddrof() */
3907 vtop
->type
.t
= VT_PTR
;
3910 /* address of memcpy() */
3913 vpush_helper_func(TOK_memmove8
);
3914 else if(!(align
& 3))
3915 vpush_helper_func(TOK_memmove4
);
3918 /* Use memmove, rather than memcpy, as dest and src may be same: */
3919 vpush_helper_func(TOK_memmove
);
3924 #ifdef CONFIG_TCC_BCHECK
3925 if (vtop
->r
& VT_MUSTBOUND
)
3928 vtop
->type
.t
= VT_PTR
;
3933 /* leave source on stack */
3935 } else if (ft
& VT_BITFIELD
) {
3936 /* bitfield store handling */
3938 /* save lvalue as expression result (example: s.b = s.a = n;) */
3939 vdup(), vtop
[-1] = vtop
[-2];
3941 bit_pos
= BIT_POS(ft
);
3942 bit_size
= BIT_SIZE(ft
);
3943 /* remove bit field info to avoid loops */
3944 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3946 if (dbt
== VT_BOOL
) {
3947 gen_cast(&vtop
[-1].type
);
3948 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3950 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3951 if (dbt
!= VT_BOOL
) {
3952 gen_cast(&vtop
[-1].type
);
3953 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3955 if (r
== VT_STRUCT
) {
3956 store_packed_bf(bit_pos
, bit_size
);
3958 unsigned long long mask
= (1ULL << bit_size
) - 1;
3959 if (dbt
!= VT_BOOL
) {
3961 if (dbt
== VT_LLONG
)
3964 vpushi((unsigned)mask
);
3971 /* duplicate destination */
3974 /* load destination, mask and or with source */
3975 if (dbt
== VT_LLONG
)
3976 vpushll(~(mask
<< bit_pos
));
3978 vpushi(~((unsigned)mask
<< bit_pos
));
3983 /* ... and discard */
3986 } else if (dbt
== VT_VOID
) {
3989 /* optimize char/short casts */
3991 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3992 && is_integer_btype(sbt
)
3994 if ((vtop
->r
& VT_MUSTCAST
)
3995 && btype_size(dbt
) > btype_size(sbt
)
3997 force_charshort_cast();
4000 gen_cast(&vtop
[-1].type
);
4003 #ifdef CONFIG_TCC_BCHECK
4004 /* bound check case */
4005 if (vtop
[-1].r
& VT_MUSTBOUND
) {
4011 gv(RC_TYPE(dbt
)); /* generate value */
4014 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
4015 //tcc_warning("deley cast %x -> %x", sbt, dbt);
4016 vtop
->type
.t
= ft
& VT_TYPE
;
4019 /* if lvalue was saved on stack, must read it */
4020 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
4022 r
= get_reg(RC_INT
);
4023 sv
.type
.t
= VT_PTRDIFF_T
;
4024 sv
.r
= VT_LOCAL
| VT_LVAL
;
4025 sv
.c
.i
= vtop
[-1].c
.i
;
4027 vtop
[-1].r
= r
| VT_LVAL
;
4030 r
= vtop
->r
& VT_VALMASK
;
4031 /* two word case handling :
4032 store second register at word + 4 (or +8 for x86-64) */
4033 if (USING_TWO_WORDS(dbt
)) {
4034 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
4035 vtop
[-1].type
.t
= load_type
;
4038 /* convert to int to increment easily */
4039 vtop
->type
.t
= VT_PTRDIFF_T
;
4045 vtop
[-1].type
.t
= load_type
;
4046 /* XXX: it works because r2 is spilled last ! */
4047 store(vtop
->r2
, vtop
- 1);
4053 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
4057 /* post defines POST/PRE add. c is the token ++ or -- */
4058 ST_FUNC
void inc(int post
, int c
)
4061 vdup(); /* save lvalue */
4063 gv_dup(); /* duplicate value */
4068 vpushi(c
- TOK_MID
);
4070 vstore(); /* store value */
4072 vpop(); /* if post op, return saved value */
4075 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
4077 /* read the string */
4081 while (tok
== TOK_STR
) {
4082 /* XXX: add \0 handling too ? */
4083 cstr_cat(astr
, tokc
.str
.data
, -1);
4086 cstr_ccat(astr
, '\0');
4089 /* If I is >= 1 and a power of two, returns log2(i)+1.
4090 If I is 0 returns 0. */
4091 ST_FUNC
int exact_log2p1(int i
)
4096 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
4107 /* Parse __attribute__((...)) GNUC extension. */
4108 static void parse_attribute(AttributeDef
*ad
)
4114 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4119 while (tok
!= ')') {
4120 if (tok
< TOK_IDENT
)
4121 expect("attribute name");
4133 tcc_warning("implicit declaration of function '%s'",
4134 get_tok_str(tok
, &tokc
));
4135 s
= external_global_sym(tok
, &func_old_type
);
4136 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4137 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4138 ad
->cleanup_func
= s
;
4143 case TOK_CONSTRUCTOR1
:
4144 case TOK_CONSTRUCTOR2
:
4145 ad
->f
.func_ctor
= 1;
4147 case TOK_DESTRUCTOR1
:
4148 case TOK_DESTRUCTOR2
:
4149 ad
->f
.func_dtor
= 1;
4151 case TOK_ALWAYS_INLINE1
:
4152 case TOK_ALWAYS_INLINE2
:
4153 ad
->f
.func_alwinl
= 1;
4158 parse_mult_str(&astr
, "section name");
4159 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4166 parse_mult_str(&astr
, "alias(\"target\")");
4167 ad
->alias_target
= /* save string as token, for later */
4168 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
4172 case TOK_VISIBILITY1
:
4173 case TOK_VISIBILITY2
:
4175 parse_mult_str(&astr
,
4176 "visibility(\"default|hidden|internal|protected\")");
4177 if (!strcmp (astr
.data
, "default"))
4178 ad
->a
.visibility
= STV_DEFAULT
;
4179 else if (!strcmp (astr
.data
, "hidden"))
4180 ad
->a
.visibility
= STV_HIDDEN
;
4181 else if (!strcmp (astr
.data
, "internal"))
4182 ad
->a
.visibility
= STV_INTERNAL
;
4183 else if (!strcmp (astr
.data
, "protected"))
4184 ad
->a
.visibility
= STV_PROTECTED
;
4186 expect("visibility(\"default|hidden|internal|protected\")");
4195 if (n
<= 0 || (n
& (n
- 1)) != 0)
4196 tcc_error("alignment must be a positive power of two");
4201 ad
->a
.aligned
= exact_log2p1(n
);
4202 if (n
!= 1 << (ad
->a
.aligned
- 1))
4203 tcc_error("alignment of %d is larger than implemented", n
);
4215 /* currently, no need to handle it because tcc does not
4216 track unused objects */
4220 ad
->f
.func_noreturn
= 1;
4225 ad
->f
.func_call
= FUNC_CDECL
;
4230 ad
->f
.func_call
= FUNC_STDCALL
;
4232 #ifdef TCC_TARGET_I386
4242 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4248 ad
->f
.func_call
= FUNC_FASTCALLW
;
4255 ad
->attr_mode
= VT_LLONG
+ 1;
4258 ad
->attr_mode
= VT_BYTE
+ 1;
4261 ad
->attr_mode
= VT_SHORT
+ 1;
4265 ad
->attr_mode
= VT_INT
+ 1;
4268 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4275 ad
->a
.dllexport
= 1;
4277 case TOK_NODECORATE
:
4278 ad
->a
.nodecorate
= 1;
4281 ad
->a
.dllimport
= 1;
4284 if (tcc_state
->warn_unsupported
)
4285 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
4286 /* skip parameters */
4288 int parenthesis
= 0;
4292 else if (tok
== ')')
4295 } while (parenthesis
&& tok
!= -1);
4308 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4312 while ((s
= s
->next
) != NULL
) {
4313 if ((s
->v
& SYM_FIELD
) &&
4314 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4315 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4316 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
4328 static void check_fields (CType
*type
, int check
)
4332 while ((s
= s
->next
) != NULL
) {
4333 int v
= s
->v
& ~SYM_FIELD
;
4334 if (v
< SYM_FIRST_ANOM
) {
4335 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4336 if (check
&& (ts
->tok
& SYM_FIELD
))
4337 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
4338 ts
->tok
^= SYM_FIELD
;
4339 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
4340 check_fields (&s
->type
, check
);
4344 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4346 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4347 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4348 int pcc
= !tcc_state
->ms_bitfields
;
4349 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4356 prevbt
= VT_STRUCT
; /* make it never match */
4361 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4362 if (f
->type
.t
& VT_BITFIELD
)
4363 bit_size
= BIT_SIZE(f
->type
.t
);
4366 size
= type_size(&f
->type
, &align
);
4367 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4370 if (pcc
&& bit_size
== 0) {
4371 /* in pcc mode, packing does not affect zero-width bitfields */
4374 /* in pcc mode, attribute packed overrides if set. */
4375 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4378 /* pragma pack overrides align if lesser and packs bitfields always */
4381 if (pragma_pack
< align
)
4382 align
= pragma_pack
;
4383 /* in pcc mode pragma pack also overrides individual align */
4384 if (pcc
&& pragma_pack
< a
)
4388 /* some individual align was specified */
4392 if (type
->ref
->type
.t
== VT_UNION
) {
4393 if (pcc
&& bit_size
>= 0)
4394 size
= (bit_size
+ 7) >> 3;
4399 } else if (bit_size
< 0) {
4401 c
+= (bit_pos
+ 7) >> 3;
4402 c
= (c
+ align
- 1) & -align
;
4411 /* A bit-field. Layout is more complicated. There are two
4412 options: PCC (GCC) compatible and MS compatible */
4414 /* In PCC layout a bit-field is placed adjacent to the
4415 preceding bit-fields, except if:
4417 - an individual alignment was given
4418 - it would overflow its base type container and
4419 there is no packing */
4420 if (bit_size
== 0) {
4422 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4424 } else if (f
->a
.aligned
) {
4426 } else if (!packed
) {
4428 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4429 if (ofs
> size
/ align
)
4433 /* in pcc mode, long long bitfields have type int if they fit */
4434 if (size
== 8 && bit_size
<= 32)
4435 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4437 while (bit_pos
>= align
* 8)
4438 c
+= align
, bit_pos
-= align
* 8;
4441 /* In PCC layout named bit-fields influence the alignment
4442 of the containing struct using the base types alignment,
4443 except for packed fields (which here have correct align). */
4444 if (f
->v
& SYM_FIRST_ANOM
4445 // && bit_size // ??? gcc on ARM/rpi does that
4450 bt
= f
->type
.t
& VT_BTYPE
;
4451 if ((bit_pos
+ bit_size
> size
* 8)
4452 || (bit_size
> 0) == (bt
!= prevbt
)
4454 c
= (c
+ align
- 1) & -align
;
4457 /* In MS bitfield mode a bit-field run always uses
4458 at least as many bits as the underlying type.
4459 To start a new run it's also required that this
4460 or the last bit-field had non-zero width. */
4461 if (bit_size
|| prev_bit_size
)
4464 /* In MS layout the records alignment is normally
4465 influenced by the field, except for a zero-width
4466 field at the start of a run (but by further zero-width
4467 fields it is again). */
4468 if (bit_size
== 0 && prevbt
!= bt
)
4471 prev_bit_size
= bit_size
;
4474 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4475 | (bit_pos
<< VT_STRUCT_SHIFT
);
4476 bit_pos
+= bit_size
;
4478 if (align
> maxalign
)
4482 printf("set field %s offset %-2d size %-2d align %-2d",
4483 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4484 if (f
->type
.t
& VT_BITFIELD
) {
4485 printf(" pos %-2d bits %-2d",
4498 c
+= (bit_pos
+ 7) >> 3;
4500 /* store size and alignment */
4501 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4505 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4506 /* can happen if individual align for some member was given. In
4507 this case MSVC ignores maxalign when aligning the size */
4512 c
= (c
+ a
- 1) & -a
;
4516 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4519 /* check whether we can access bitfields by their type */
4520 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4524 if (0 == (f
->type
.t
& VT_BITFIELD
))
4528 bit_size
= BIT_SIZE(f
->type
.t
);
4531 bit_pos
= BIT_POS(f
->type
.t
);
4532 size
= type_size(&f
->type
, &align
);
4533 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
4536 /* try to access the field using a different type */
4537 c0
= -1, s
= align
= 1;
4540 px
= f
->c
* 8 + bit_pos
;
4541 cx
= (px
>> 3) & -align
;
4542 px
= px
- (cx
<< 3);
4545 s
= (px
+ bit_size
+ 7) >> 3;
4555 s
= type_size(&t
, &align
);
4559 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
4560 /* update offset and bit position */
4563 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4564 | (bit_pos
<< VT_STRUCT_SHIFT
);
4568 printf("FIX field %s offset %-2d size %-2d align %-2d "
4569 "pos %-2d bits %-2d\n",
4570 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4571 cx
, s
, align
, px
, bit_size
);
4574 /* fall back to load/store single-byte wise */
4575 f
->auxtype
= VT_STRUCT
;
4577 printf("FIX field %s : load byte-wise\n",
4578 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4584 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4585 static void struct_decl(CType
*type
, int u
)
4587 int v
, c
, size
, align
, flexible
;
4588 int bit_size
, bsize
, bt
;
4590 AttributeDef ad
, ad1
;
4593 memset(&ad
, 0, sizeof ad
);
4595 parse_attribute(&ad
);
4599 /* struct already defined ? return it */
4601 expect("struct/union/enum name");
4603 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4606 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4608 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4613 /* Record the original enum/struct/union token. */
4614 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4616 /* we put an undefined size for struct/union */
4617 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4618 s
->r
= 0; /* default alignment is zero as gcc */
4620 type
->t
= s
->type
.t
;
4626 tcc_error("struct/union/enum already defined");
4628 /* cannot be empty */
4629 /* non empty enums are not allowed */
4632 long long ll
= 0, pl
= 0, nl
= 0;
4635 /* enum symbols have static storage */
4636 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4640 expect("identifier");
4642 if (ss
&& !local_stack
)
4643 tcc_error("redefinition of enumerator '%s'",
4644 get_tok_str(v
, NULL
));
4648 ll
= expr_const64();
4650 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4652 *ps
= ss
, ps
= &ss
->next
;
4661 /* NOTE: we accept a trailing comma */
4666 /* set integral type of the enum */
4669 if (pl
!= (unsigned)pl
)
4670 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4672 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4673 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4674 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4676 /* set type for enum members */
4677 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4679 if (ll
== (int)ll
) /* default is int if it fits */
4681 if (t
.t
& VT_UNSIGNED
) {
4682 ss
->type
.t
|= VT_UNSIGNED
;
4683 if (ll
== (unsigned)ll
)
4686 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4687 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4692 while (tok
!= '}') {
4693 if (!parse_btype(&btype
, &ad1
)) {
4699 tcc_error("flexible array member '%s' not at the end of struct",
4700 get_tok_str(v
, NULL
));
4706 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4708 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4709 expect("identifier");
4711 int v
= btype
.ref
->v
;
4712 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4713 if (tcc_state
->ms_extensions
== 0)
4714 expect("identifier");
4718 if (type_size(&type1
, &align
) < 0) {
4719 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4722 tcc_error("field '%s' has incomplete type",
4723 get_tok_str(v
, NULL
));
4725 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4726 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4727 (type1
.t
& VT_STORAGE
))
4728 tcc_error("invalid type for '%s'",
4729 get_tok_str(v
, NULL
));
4733 bit_size
= expr_const();
4734 /* XXX: handle v = 0 case for messages */
4736 tcc_error("negative width in bit-field '%s'",
4737 get_tok_str(v
, NULL
));
4738 if (v
&& bit_size
== 0)
4739 tcc_error("zero width for bit-field '%s'",
4740 get_tok_str(v
, NULL
));
4741 parse_attribute(&ad1
);
4743 size
= type_size(&type1
, &align
);
4744 if (bit_size
>= 0) {
4745 bt
= type1
.t
& VT_BTYPE
;
4751 tcc_error("bitfields must have scalar type");
4753 if (bit_size
> bsize
) {
4754 tcc_error("width of '%s' exceeds its type",
4755 get_tok_str(v
, NULL
));
4756 } else if (bit_size
== bsize
4757 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4758 /* no need for bit fields */
4760 } else if (bit_size
== 64) {
4761 tcc_error("field width 64 not implemented");
4763 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4765 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4768 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4769 /* Remember we've seen a real field to check
4770 for placement of flexible array member. */
4773 /* If member is a struct or bit-field, enforce
4774 placing into the struct (as anonymous). */
4776 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4781 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4786 if (tok
== ';' || tok
== TOK_EOF
)
4793 parse_attribute(&ad
);
4794 if (ad
.cleanup_func
) {
4795 tcc_warning("attribute '__cleanup__' ignored on type");
4797 check_fields(type
, 1);
4798 check_fields(type
, 0);
4799 struct_layout(type
, &ad
);
/* sym_to_attr: fold a symbol's stored attributes into an AttributeDef.
   NOTE(review): this chunk is extraction-garbled (one logical line split
   across several physical lines, braces elided); code kept byte-identical. */
4804 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
/* merge the symbol's generic attributes (alignment, packed, ...) into ad->a */
4806 merge_symattr(&ad
->a
, &s
->a
);
/* merge the symbol's function attributes (calling conv, noreturn, ...) into ad->f */
4807 merge_funcattr(&ad
->f
, &s
->f
);
4810 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4811 are added to the element type, copied because it could be a typedef. */
4812 static void parse_btype_qualify(CType
*type
, int qualifiers
)
/* walk down nested array types; qualifiers apply to the element type */
4814 while (type
->t
& VT_ARRAY
) {
/* copy the element type symbol (it may be shared via a typedef) before
   qualifying it, preserving the element count in ->c */
4815 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
)
4816 type
= &type
->ref
->type
;
/* finally OR the const/volatile bits into the (element) type */
4818 type
->t
|= qualifiers
;
4821 /* return 0 if no type declaration. otherwise, return the basic type
4824 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4826 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4830 memset(ad
, 0, sizeof(AttributeDef
));
4840 /* currently, we really ignore extension */
4850 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4851 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4852 tmbt
: tcc_error("too many basic types");
4855 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4860 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4877 memset(&ad1
, 0, sizeof(AttributeDef
));
4878 if (parse_btype(&type1
, &ad1
)) {
4879 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4881 n
= 1 << (ad1
.a
.aligned
- 1);
4883 type_size(&type1
, &n
);
4886 if (n
<= 0 || (n
& (n
- 1)) != 0)
4887 tcc_error("alignment must be a positive power of two");
4890 ad
->a
.aligned
= exact_log2p1(n
);
4894 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4895 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4896 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4897 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4904 #ifdef TCC_TARGET_ARM64
4906 /* GCC's __uint128_t appears in some Linux header files. Make it a
4907 synonym for long double to get the size and alignment right. */
4918 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4919 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4927 struct_decl(&type1
, VT_ENUM
);
4930 type
->ref
= type1
.ref
;
4933 struct_decl(&type1
, VT_STRUCT
);
4936 struct_decl(&type1
, VT_UNION
);
4939 /* type modifiers */
4944 parse_btype_qualify(type
, VT_CONSTANT
);
4952 parse_btype_qualify(type
, VT_VOLATILE
);
4959 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4960 tcc_error("signed and unsigned modifier");
4973 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4974 tcc_error("signed and unsigned modifier");
4975 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4991 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4992 tcc_error("multiple storage classes");
5004 ad
->f
.func_noreturn
= 1;
5006 /* GNUC attribute */
5007 case TOK_ATTRIBUTE1
:
5008 case TOK_ATTRIBUTE2
:
5009 parse_attribute(ad
);
5010 if (ad
->attr_mode
) {
5011 u
= ad
->attr_mode
-1;
5012 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
5020 parse_expr_type(&type1
);
5021 /* remove all storage modifiers except typedef */
5022 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
5024 sym_to_attr(ad
, type1
.ref
);
5030 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
5034 if (tok
== ':' && !in_generic
) {
5035 /* ignore if it's a label */
5040 t
&= ~(VT_BTYPE
|VT_LONG
);
5041 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
5042 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
5043 type
->ref
= s
->type
.ref
;
5045 parse_btype_qualify(type
, t
);
5047 /* get attributes from typedef */
5056 if (tcc_state
->char_is_unsigned
) {
5057 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
5060 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5061 bt
= t
& (VT_BTYPE
|VT_LONG
);
5063 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
5064 #if defined TCC_TARGET_PE || (defined _WIN32 && defined _MSC_VER)
5065 if (bt
== VT_LDOUBLE
)
5066 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
5072 /* convert a function parameter type (array to pointer and function to
5073 function pointer) */
5074 static inline void convert_parameter_type(CType
*pt
)
5076 /* remove const and volatile qualifiers (XXX: const could be used
5077 to indicate a const function parameter */
5078 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5079 /* array must be transformed to pointer according to ANSI C */
/* NOTE(review): lines elided here — the array->pointer adjustment itself
   is not visible in this extraction; only the function->pointer test is */
5081 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* parse_asm_str: read one-or-more concatenated string literals for an asm
   statement into *astr; errors with "string constant" if none is present */
5086 ST_FUNC
void parse_asm_str(CString
*astr
)
5089 parse_mult_str(astr
, "string constant");
5092 /* Parse an asm label and return the token */
5093 static int asm_label_instr(void)
5099 parse_asm_str(&astr
);
/* debug trace of the parsed alias string (presumably under a verbosity
   flag elided from this extraction — TODO confirm) */
5102 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
/* intern the label text (minus the trailing NUL) as a token */
5104 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5109 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5111 int n
, l
, t1
, arg_size
, align
, unused_align
;
5112 Sym
**plast
, *s
, *first
;
5117 /* function type, or recursive declarator (return if so) */
5119 if (td
&& !(td
& TYPE_ABSTRACT
))
5123 else if (parse_btype(&pt
, &ad1
))
5126 merge_attr (ad
, &ad1
);
5135 /* read param name and compute offset */
5136 if (l
!= FUNC_OLD
) {
5137 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5139 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
5140 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5141 tcc_error("parameter declared as void");
5145 expect("identifier");
5146 pt
.t
= VT_VOID
; /* invalid type */
5150 convert_parameter_type(&pt
);
5151 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5152 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
5158 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5163 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5164 tcc_error("invalid type");
5167 /* if no parameters, then old type prototype */
5170 /* NOTE: const is ignored in returned type as it has a special
5171 meaning in gcc / C++ */
5172 type
->t
&= ~VT_CONSTANT
;
5173 /* some ancient pre-K&R C allows a function to return an array
5174 and the array brackets to be put after the arguments, such
5175 that "int c()[]" means something like "int[] c()" */
5178 skip(']'); /* only handle simple "[]" */
5181 /* we push a anonymous symbol which will contain the function prototype */
5182 ad
->f
.func_args
= arg_size
;
5183 ad
->f
.func_type
= l
;
5184 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5190 } else if (tok
== '[') {
5191 int saved_nocode_wanted
= nocode_wanted
;
5192 /* array definition */
5195 /* XXX The optional type-quals and static should only be accepted
5196 in parameter decls. The '*' as well, and then even only
5197 in prototypes (not function defs). */
5199 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5214 if (!local_stack
|| (storage
& VT_STATIC
))
5215 vpushi(expr_const());
5217 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5218 length must always be evaluated, even under nocode_wanted,
5219 so that its size slot is initialized (e.g. under sizeof
5224 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5227 tcc_error("invalid array size");
5229 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5230 tcc_error("size of variable length array should be an integer");
5236 /* parse next post type */
5237 post_type(type
, ad
, storage
, 0);
5239 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5240 tcc_error("declaration of an array of functions");
5241 if ((type
->t
& VT_BTYPE
) == VT_VOID
5242 || type_size(type
, &unused_align
) < 0)
5243 tcc_error("declaration of an array of incomplete type elements");
5245 t1
|= type
->t
& VT_VLA
;
5249 tcc_error("need explicit inner array size in VLAs");
5250 loc
-= type_size(&int_type
, &align
);
5254 vla_runtime_type_size(type
, &align
);
5256 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5262 nocode_wanted
= saved_nocode_wanted
;
5264 /* we push an anonymous symbol which will contain the array
5266 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5267 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5273 /* Parse a type declarator (except basic type), and return the type
5274 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5275 expected. 'type' should contain the basic type. 'ad' is the
5276 attribute definition of the basic type. It can be modified by
5277 type_decl(). If this (possibly abstract) declarator is a pointer chain
5278 it returns the innermost pointed to type (equals *type, but is a different
5279 pointer), otherwise returns type itself, that's used for recursive calls. */
5280 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5283 int qualifiers
, storage
;
5285 /* recursive type, remove storage bits first, apply them later again */
5286 storage
= type
->t
& VT_STORAGE
;
5287 type
->t
&= ~VT_STORAGE
;
5290 while (tok
== '*') {
5298 qualifiers
|= VT_CONSTANT
;
5303 qualifiers
|= VT_VOLATILE
;
5309 /* XXX: clarify attribute handling */
5310 case TOK_ATTRIBUTE1
:
5311 case TOK_ATTRIBUTE2
:
5312 parse_attribute(ad
);
5316 type
->t
|= qualifiers
;
5318 /* innermost pointed to type is the one for the first derivation */
5319 ret
= pointed_type(type
);
5323 /* This is possibly a parameter type list for abstract declarators
5324 ('int ()'), use post_type for testing this. */
5325 if (!post_type(type
, ad
, 0, td
)) {
5326 /* It's not, so it's a nested declarator, and the post operations
5327 apply to the innermost pointed to type (if any). */
5328 /* XXX: this is not correct to modify 'ad' at this point, but
5329 the syntax is not clear */
5330 parse_attribute(ad
);
5331 post
= type_decl(type
, ad
, v
, td
);
5335 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5336 /* type identifier */
5341 if (!(td
& TYPE_ABSTRACT
))
5342 expect("identifier");
5345 post_type(post
, ad
, storage
, 0);
5346 parse_attribute(ad
);
5351 /* indirection with full error checking and bound check */
5352 ST_FUNC
void indir(void)
/* operand must be a pointer; a bare function type is tolerated (elided
   branch presumably returns early — TODO confirm against full source) */
5354 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5355 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
/* load the pointer value if it is still an lvalue */
5359 if (vtop
->r
& VT_LVAL
)
/* replace the pointer type by the pointed-to type */
5361 vtop
->type
= *pointed_type(&vtop
->type
);
5362 /* Arrays and functions are never lvalues */
5363 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5364 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5366 /* if bound checking, the referenced pointer must be checked */
5367 #ifdef CONFIG_TCC_BCHECK
5368 if (tcc_state
->do_bounds_check
)
5369 vtop
->r
|= VT_MUSTBOUND
;
5374 /* pass a parameter to a function and do type checking and casting.
     'arg' is the declared parameter (NULL past the prototype's end). */
5375 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5380 func_type
= func
->f
.func_type
;
/* unprototyped call, or variadic call past the named parameters:
   apply the default argument promotions instead of a declared type */
5381 if (func_type
== FUNC_OLD
||
5382 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5383 /* default casting : only need to convert float to double */
5384 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5385 gen_cast_s(VT_DOUBLE
);
/* a bit-field value is widened to its underlying (possibly unsigned)
   base type before being passed */
5386 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5387 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5388 type
.ref
= vtop
->type
.ref
;
/* pending char/short truncation must be materialized */
5390 } else if (vtop
->r
& VT_MUSTCAST
) {
5391 force_charshort_cast();
/* prototyped call: more actual arguments than declared parameters */
5393 } else if (arg
== NULL
) {
5394 tcc_error("too many arguments to function");
/* prototyped path (partially elided): cast argument to declared type */
5397 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5398 gen_assign_cast(&type
);
5402 /* parse an expression and return its type without any side effect. */
5403 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5412 /* parse an expression of the form '(type)' or '(expr)' and return its
     type (cast-or-expression disambiguation) */
5414 static void parse_expr_type(CType
*type
)
/* if a basic type starts here it is a type-name: finish the declarator */
5420 if (parse_btype(type
, &ad
)) {
5421 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* otherwise evaluate as a full expression and take its type */
5423 expr_type(type
, gexpr
);
/* parse_type: parse a mandatory type-name (error branch elided) */
5428 static void parse_type(CType
*type
)
5433 if (!parse_btype(type
, &ad
)) {
5436 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5439 /* parse_builtin_params: parse a builtin's argument list driven by the
     format string 'args' (each char selects one argument kind);
     'nc' presumably suppresses code generation — TODO confirm */
static void parse_builtin_params(int nc
, const char *args
)
5448 while ((c
= *args
++)) {
5463 type
.t
= VT_CONSTANT
;
5469 type
.t
= VT_CONSTANT
;
5471 type
.t
|= char_type
.t
;
5483 gen_assign_cast(&type
);
5490 ST_FUNC
void unary(void)
5492 int n
, t
, align
, size
, r
, sizeof_caller
;
5497 /* generate line number info */
5498 if (tcc_state
->do_debug
)
5499 tcc_debug_line(tcc_state
);
5501 sizeof_caller
= in_sizeof
;
5504 /* XXX: GCC 2.95.3 does not generate a table although it should be
5512 #ifdef TCC_TARGET_PE
5513 t
= VT_SHORT
|VT_UNSIGNED
;
5521 vsetc(&type
, VT_CONST
, &tokc
);
5525 t
= VT_INT
| VT_UNSIGNED
;
5531 t
= VT_LLONG
| VT_UNSIGNED
;
5543 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5546 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5548 case TOK___FUNCTION__
:
5550 goto tok_identifier
;
5556 /* special function name identifier */
5557 len
= strlen(funcname
) + 1;
5558 /* generate char[len] type */
5563 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5564 if (!NODATA_WANTED
) {
5565 ptr
= section_ptr_add(data_section
, len
);
5566 memcpy(ptr
, funcname
, len
);
5572 #ifdef TCC_TARGET_PE
5573 t
= VT_SHORT
| VT_UNSIGNED
;
5579 /* string parsing */
5581 if (tcc_state
->char_is_unsigned
)
5582 t
= VT_BYTE
| VT_UNSIGNED
;
5584 if (tcc_state
->warn_write_strings
)
5589 memset(&ad
, 0, sizeof(AttributeDef
));
5590 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5595 if (parse_btype(&type
, &ad
)) {
5596 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5598 /* check ISOC99 compound literal */
5600 /* data is allocated locally by default */
5605 /* all except arrays are lvalues */
5606 if (!(type
.t
& VT_ARRAY
))
5608 memset(&ad
, 0, sizeof(AttributeDef
));
5609 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5611 if (sizeof_caller
) {
5618 } else if (tok
== '{') {
5619 int saved_nocode_wanted
= nocode_wanted
;
5620 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5622 if (0 == local_scope
)
5623 tcc_error("statement expression outside of function");
5624 /* save all registers */
5626 /* statement expression : we do not accept break/continue
5627 inside as GCC does. We do retain the nocode_wanted state,
5628 as statement expressions can't ever be entered from the
5629 outside, so any reactivation of code emission (from labels
5630 or loop heads) can be disabled again after the end of it. */
5632 nocode_wanted
= saved_nocode_wanted
;
5647 /* functions names must be treated as function pointers,
5648 except for unary '&' and sizeof. Since we consider that
5649 functions are not lvalues, we only have to handle it
5650 there and in function calls. */
5651 /* arrays can also be used although they are not lvalues */
5652 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5653 !(vtop
->type
.t
& VT_ARRAY
))
5656 vtop
->sym
->a
.addrtaken
= 1;
5657 mk_pointer(&vtop
->type
);
5663 gen_test_zero(TOK_EQ
);
5674 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5675 tcc_error("pointer not accepted for unary plus");
5676 /* In order to force cast, we add zero, except for floating point
5677 where we really need an noop (otherwise -0.0 will be transformed
5679 if (!is_float(vtop
->type
.t
)) {
5691 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5693 if (vtop
[1].r
& VT_SYM
)
5694 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5695 size
= type_size(&type
, &align
);
5696 if (s
&& s
->a
.aligned
)
5697 align
= 1 << (s
->a
.aligned
- 1);
5698 if (t
== TOK_SIZEOF
) {
5699 if (!(type
.t
& VT_VLA
)) {
5701 tcc_error("sizeof applied to an incomplete type");
5704 vla_runtime_type_size(&type
, &align
);
5709 vtop
->type
.t
|= VT_UNSIGNED
;
5712 case TOK_builtin_expect
:
5713 /* __builtin_expect is a no-op for now */
5714 parse_builtin_params(0, "ee");
5717 case TOK_builtin_types_compatible_p
:
5718 parse_builtin_params(0, "tt");
5719 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5720 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5721 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5725 case TOK_builtin_choose_expr
:
5752 case TOK_builtin_constant_p
:
5753 parse_builtin_params(1, "e");
5754 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5755 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5759 case TOK_builtin_frame_address
:
5760 case TOK_builtin_return_address
:
5766 if (tok
!= TOK_CINT
) {
5767 tcc_error("%s only takes positive integers",
5768 tok1
== TOK_builtin_return_address
?
5769 "__builtin_return_address" :
5770 "__builtin_frame_address");
5772 level
= (uint32_t)tokc
.i
;
5777 vset(&type
, VT_LOCAL
, 0); /* local frame */
5779 #ifdef TCC_TARGET_RISCV64
5783 mk_pointer(&vtop
->type
);
5784 indir(); /* -> parent frame */
5786 if (tok1
== TOK_builtin_return_address
) {
5787 // assume return address is just above frame pointer on stack
5788 #ifdef TCC_TARGET_ARM
5791 #elif defined TCC_TARGET_RISCV64
5798 mk_pointer(&vtop
->type
);
5803 #ifdef TCC_TARGET_RISCV64
5804 case TOK_builtin_va_start
:
5805 parse_builtin_params(0, "ee");
5806 r
= vtop
->r
& VT_VALMASK
;
5810 tcc_error("__builtin_va_start expects a local variable");
5815 #ifdef TCC_TARGET_X86_64
5816 #ifdef TCC_TARGET_PE
5817 case TOK_builtin_va_start
:
5818 parse_builtin_params(0, "ee");
5819 r
= vtop
->r
& VT_VALMASK
;
5823 tcc_error("__builtin_va_start expects a local variable");
5825 vtop
->type
= char_pointer_type
;
5830 case TOK_builtin_va_arg_types
:
5831 parse_builtin_params(0, "t");
5832 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5839 #ifdef TCC_TARGET_ARM64
5840 case TOK_builtin_va_start
: {
5841 parse_builtin_params(0, "ee");
5845 vtop
->type
.t
= VT_VOID
;
5848 case TOK_builtin_va_arg
: {
5849 parse_builtin_params(0, "et");
5857 case TOK___arm64_clear_cache
: {
5858 parse_builtin_params(0, "ee");
5861 vtop
->type
.t
= VT_VOID
;
5866 /* pre operations */
5877 t
= vtop
->type
.t
& VT_BTYPE
;
5879 /* In IEEE negate(x) isn't subtract(0,x), but rather
5883 vtop
->c
.f
= -1.0 * 0.0;
5884 else if (t
== VT_DOUBLE
)
5885 vtop
->c
.d
= -1.0 * 0.0;
5887 vtop
->c
.ld
= -1.0 * 0.0;
5895 goto tok_identifier
;
5897 /* allow to take the address of a label */
5898 if (tok
< TOK_UIDENT
)
5899 expect("label identifier");
5900 s
= label_find(tok
);
5902 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5904 if (s
->r
== LABEL_DECLARED
)
5905 s
->r
= LABEL_FORWARD
;
5908 s
->type
.t
= VT_VOID
;
5909 mk_pointer(&s
->type
);
5910 s
->type
.t
|= VT_STATIC
;
5912 vpushsym(&s
->type
, s
);
5918 CType controlling_type
;
5919 int has_default
= 0;
5922 TokenString
*str
= NULL
;
5923 int saved_const_wanted
= const_wanted
;
5928 expr_type(&controlling_type
, expr_eq
);
5929 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5930 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5931 mk_pointer(&controlling_type
);
5932 const_wanted
= saved_const_wanted
;
5936 if (tok
== TOK_DEFAULT
) {
5938 tcc_error("too many 'default'");
5944 AttributeDef ad_tmp
;
5949 parse_btype(&cur_type
, &ad_tmp
);
5952 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5953 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5955 tcc_error("type match twice");
5965 skip_or_save_block(&str
);
5967 skip_or_save_block(NULL
);
5974 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5975 tcc_error("type '%s' does not match any association", buf
);
5977 begin_macro(str
, 1);
5986 // special qnan , snan and infinity values
5991 vtop
->type
.t
= VT_FLOAT
;
5996 goto special_math_val
;
5999 goto special_math_val
;
6006 expect("identifier");
6008 if (!s
|| IS_ASM_SYM(s
)) {
6009 const char *name
= get_tok_str(t
, NULL
);
6011 tcc_error("'%s' undeclared", name
);
6012 /* for simple function calls, we tolerate undeclared
6013 external reference to int() function */
6014 if (tcc_state
->warn_implicit_function_declaration
6015 #ifdef TCC_TARGET_PE
6016 /* people must be warned about using undeclared WINAPI functions
6017 (which usually start with uppercase letter) */
6018 || (name
[0] >= 'A' && name
[0] <= 'Z')
6021 tcc_warning("implicit declaration of function '%s'", name
);
6022 s
= external_global_sym(t
, &func_old_type
);
6026 /* A symbol that has a register is a local register variable,
6027 which starts out as VT_LOCAL value. */
6028 if ((r
& VT_VALMASK
) < VT_CONST
)
6029 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
6031 vset(&s
->type
, r
, s
->c
);
6032 /* Point to s as backpointer (even without r&VT_SYM).
6033 Will be used by at least the x86 inline asm parser for
6039 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
6040 vtop
->c
.i
= s
->enum_val
;
6045 /* post operations */
6047 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6050 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
6051 int qualifiers
, cumofs
= 0;
6053 if (tok
== TOK_ARROW
)
6055 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6058 /* expect pointer on structure */
6059 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
6060 expect("struct or union");
6061 if (tok
== TOK_CDOUBLE
)
6062 expect("field name");
6064 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
6065 expect("field name");
6066 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6068 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
6069 /* add field offset to pointer */
6070 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6071 vpushi(cumofs
+ s
->c
);
6073 /* change type to field type, and set to lvalue */
6074 vtop
->type
= s
->type
;
6075 vtop
->type
.t
|= qualifiers
;
6076 /* an array is never an lvalue */
6077 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6079 #ifdef CONFIG_TCC_BCHECK
6080 /* if bound checking, the referenced pointer must be checked */
6081 if (tcc_state
->do_bounds_check
)
6082 vtop
->r
|= VT_MUSTBOUND
;
6086 } else if (tok
== '[') {
6092 } else if (tok
== '(') {
6095 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6098 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6099 /* pointer test (no array accepted) */
6100 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6101 vtop
->type
= *pointed_type(&vtop
->type
);
6102 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6106 expect("function pointer");
6109 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6111 /* get return type */
6114 sa
= s
->next
; /* first parameter */
6115 nb_args
= regsize
= 0;
6117 /* compute first implicit argument if a structure is returned */
6118 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6119 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6120 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6121 &ret_align
, ®size
);
6122 if (ret_nregs
<= 0) {
6123 /* get some space for the returned structure */
6124 size
= type_size(&s
->type
, &align
);
6125 #ifdef TCC_TARGET_ARM64
6126 /* On arm64, a small struct is return in registers.
6127 It is much easier to write it to memory if we know
6128 that we are allowed to write some extra bytes, so
6129 round the allocated space up to a power of 2: */
6131 while (size
& (size
- 1))
6132 size
= (size
| (size
- 1)) + 1;
6134 loc
= (loc
- size
) & -align
;
6136 ret
.r
= VT_LOCAL
| VT_LVAL
;
6137 /* pass it as 'int' to avoid structure arg passing
6139 vseti(VT_LOCAL
, loc
);
6140 #ifdef CONFIG_TCC_BCHECK
6141 if (tcc_state
->do_bounds_check
)
6155 if (ret_nregs
> 0) {
6156 /* return in register */
6158 PUT_R_RET(&ret
, ret
.type
.t
);
6163 gfunc_param_typed(s
, sa
);
6173 tcc_error("too few arguments to function");
6175 gfunc_call(nb_args
);
6177 if (ret_nregs
< 0) {
6178 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6179 #ifdef TCC_TARGET_RISCV64
6180 arch_transfer_ret_regs(1);
6184 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6185 vsetc(&ret
.type
, r
, &ret
.c
);
6186 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6189 /* handle packed struct return */
6190 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6193 size
= type_size(&s
->type
, &align
);
6194 /* We're writing whole regs often, make sure there's enough
6195 space. Assume register size is power of 2. */
6196 if (regsize
> align
)
6198 loc
= (loc
- size
) & -align
;
6202 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6206 if (--ret_nregs
== 0)
6210 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6213 /* Promote char/short return values. This is matters only
6214 for calling function that were not compiled by TCC and
6215 only on some architectures. For those where it doesn't
6216 matter we expect things to be already promoted to int,
6218 t
= s
->type
.t
& VT_BTYPE
;
6219 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6221 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6223 vtop
->type
.t
= VT_INT
;
6227 if (s
->f
.func_noreturn
)
6235 #ifndef precedence_parser /* original top-down parser */
/* Classic recursive-descent ladder: each function parses one binary
   precedence level and loops on its operators (loop bodies elided). */
6237 static void expr_prod(void)
6242 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6249 static void expr_sum(void)
6254 while ((t
= tok
) == '+' || t
== '-') {
6261 static void expr_shift(void)
6266 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6273 static void expr_cmp(void)
/* relational level: the signed TOK_ULE..TOK_GT range plus the explicit
   unsigned comparisons */
6278 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6279 t
== TOK_ULT
|| t
== TOK_UGE
) {
6286 static void expr_cmpeq(void)
6291 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6298 static void expr_and(void)
6301 while (tok
== '&') {
6308 static void expr_xor(void)
6311 while (tok
== '^') {
6318 static void expr_or(void)
6321 while (tok
== '|') {
6328 static void expr_landor(int op
);
6330 static void expr_land(void)
6333 if (tok
== TOK_LAND
)
6337 static void expr_lor(void)
/* expr_landor_next: descend one level below the given logical operator */
6344 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6345 #else /* defined precedence_parser */
/* Alternative operator-precedence parser selected at build time */
6346 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6347 # define expr_lor() unary(), expr_infix(1)
/* precedence: map a binary-operator token to its binding strength
   (1 = weakest '||', 10 = strongest multiplicative; 0 = not binary) */
6349 static int precedence(int tok
)
6352 case TOK_LOR
: return 1;
6353 case TOK_LAND
: return 2;
6357 case TOK_EQ
: case TOK_NE
: return 6;
6358 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6359 case TOK_SHL
: case TOK_SAR
: return 8;
6360 case '+': case '-': return 9;
6361 case '*': case '/': case '%': return 10;
/* the whole TOK_ULE..TOK_GT range shares the relational level */
6363 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
/* memoized precedence table for tokens < 256, filled once at startup */
6368 static unsigned char prec
[256];
6369 static void init_prec(void)
6372 for (i
= 0; i
< 256; i
++)
6373 prec
[i
] = precedence(i
);
6375 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6377 static void expr_landor(int op
);
/* expr_infix: precedence-climbing loop; consumes operators binding at
   least as tightly as 'p' (interior largely elided) */
6379 static void expr_infix(int p
)
6382 while ((p2
= precedence(t
)) >= p
) {
6383 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
/* right operand binds tighter: recurse before applying 't' */
6388 if (precedence(tok
) > p2
)
6397 /* Assuming vtop is a value used in a conditional context
6398 (i.e. compared with zero) return 0 if it's false, 1 if
6399 true and -1 if it can't be statically determined. */
6400 static int condition_3way(void)
/* only a plain constant (and not a weak symbol, whose address may be
   null at run time) can be decided statically */
6403 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6404 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6406 gen_cast_s(VT_BOOL
);
/* expr_landor: generate code for a chain of '&&' or '||' (op selects
   which), short-circuiting statically-decided operands; interior
   control flow largely elided in this extraction */
6413 static void expr_landor(int op
)
/* i is the operator's neutral/absorbing selector: 1 for '&&', 0 for '||' */
6415 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
/* once short-circuited (f set), further operands keep the fixed value i */
6417 c
= f
? i
: condition_3way();
6419 save_regs(1), cc
= 0;
/* suppress code generation for the dead remainder of the chain */
6421 nocode_wanted
++, f
= 1;
6429 expr_landor_next(op
);
/* is_cond_bool: true if sv is already a boolean-valued condition —
   either a constant 0/1 or a pending comparison result (VT_CMP) */
6441 static int is_cond_bool(SValue
*sv
)
6443 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6444 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6445 return (unsigned)sv
->c
.i
< 2;
6446 if (sv
->r
== VT_CMP
)
6451 static void expr_cond(void)
6453 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6461 c
= condition_3way();
6462 g
= (tok
== ':' && gnu_ext
);
6472 /* needed to avoid having different registers saved in
6479 ncw_prev
= nocode_wanted
;
6485 if (c
< 0 && vtop
->r
== VT_CMP
) {
6492 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6493 mk_pointer(&vtop
->type
);
6494 sv
= *vtop
; /* save value to handle it later */
6495 vtop
--; /* no vpop so that FP stack is not flushed */
6505 nocode_wanted
= ncw_prev
;
6511 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6512 if (sv
.r
== VT_CMP
) {
6523 nocode_wanted
= ncw_prev
;
6524 // tcc_warning("two conditions expr_cond");
6528 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6529 mk_pointer(&vtop
->type
);
6531 /* cast operands to correct type according to ISOC rules */
6532 if (!combine_types(&type
, &sv
, vtop
, '?'))
6533 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6534 "type mismatch in conditional expression (have '%s' and '%s')");
6535 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6536 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6537 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6539 /* now we convert second operand */
6543 mk_pointer(&vtop
->type
);
6545 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6549 rc
= RC_TYPE(type
.t
);
6550 /* for long longs, we use fixed registers to avoid having
6551 to handle a complicated move */
6552 if (USING_TWO_WORDS(type
.t
))
6553 rc
= RC_RET(type
.t
);
6561 nocode_wanted
= ncw_prev
;
6563 /* this is horrible, but we must also convert first
6569 mk_pointer(&vtop
->type
);
6571 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6577 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
/* expr_eq: parse an assignment-expression (conditional expression
   optionally followed by '=' or a compound-assignment operator) */
6587 static void expr_eq(void)
6592 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
/* compound assignment: apply the underlying binary op before storing */
6600 gen_op(TOK_ASSIGN_OP(t
));
/* gexpr: parse a full (comma) expression — body elided */
6606 ST_FUNC
void gexpr(void)
6617 /* parse a constant expression and return value in vtop. */
6618 static void expr_const1(void)
/* temporarily mark evaluation as unevaluated/constant context; the
   matching decrement restores the previous nocode_wanted state */
6621 nocode_wanted
+= unevalmask
+ 1;
6623 nocode_wanted
-= unevalmask
+ 1;
6627 /* parse an integer constant and return its value. */
6628 static inline int64_t expr_const64(void)
/* result must be a plain compile-time constant (no lvalue, no symbol) */
6632 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6633 expect("constant expression");
6639 /* parse an integer constant and return its value.
6640 Complain if it doesn't fit 32bit (signed or unsigned). */
6641 ST_FUNC
int expr_const(void)
6644 int64_t wc
= expr_const64();
/* accept values representable as either int32 or uint32 */
6646 if (c
!= wc
&& (unsigned)c
!= wc
)
6647 tcc_error("constant exceeds 32 bit");
6651 /* ------------------------------------------------------------------------- */
6652 /* return from function */
6654 #ifndef TCC_TARGET_ARM64
6655 static void gfunc_return(CType
*func_type
)
6657 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6658 CType type
, ret_type
;
6659 int ret_align
, ret_nregs
, regsize
;
6660 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6661 &ret_align
, ®size
);
6662 if (ret_nregs
< 0) {
6663 #ifdef TCC_TARGET_RISCV64
6664 arch_transfer_ret_regs(0);
6666 } else if (0 == ret_nregs
) {
6667 /* if returning structure, must copy it to implicit
6668 first pointer arg location */
6671 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6674 /* copy structure value to pointer */
6677 /* returning structure packed into registers */
6678 int size
, addr
, align
, rc
;
6679 size
= type_size(func_type
,&align
);
6680 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6681 (vtop
->c
.i
& (ret_align
-1)))
6682 && (align
& (ret_align
-1))) {
6683 loc
= (loc
- size
) & -ret_align
;
6686 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6690 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6692 vtop
->type
= ret_type
;
6693 rc
= RC_RET(ret_type
.t
);
6701 if (--ret_nregs
== 0)
6703 /* We assume that when a structure is returned in multiple
6704 registers, their classes are consecutive values of the
6707 vtop
->c
.i
+= regsize
;
6712 gv(RC_RET(func_type
->t
));
6714 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6718 static void check_func_return(void)
6720 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6722 if (!strcmp (funcname
, "main")
6723 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6724 /* main returns 0 by default */
6726 gen_assign_cast(&func_vt
);
6727 gfunc_return(&func_vt
);
6729 tcc_warning("function might return no value: '%s'", funcname
);
6733 /* ------------------------------------------------------------------------- */
6736 static int case_cmpi(const void *pa
, const void *pb
)
6738 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6739 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6740 return a
< b
? -1 : a
> b
;
6743 static int case_cmpu(const void *pa
, const void *pb
)
6745 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6746 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6747 return a
< b
? -1 : a
> b
;
6750 static void gtst_addr(int t
, int a
)
6752 gsym_addr(gvtst(0, t
), a
);
6755 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6759 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6776 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6778 gcase(base
, len
/2, bsym
);
6782 base
+= e
; len
-= e
;
6792 if (p
->v1
== p
->v2
) {
6794 gtst_addr(0, p
->sym
);
6804 gtst_addr(0, p
->sym
);
6808 *bsym
= gjmp(*bsym
);
6811 /* ------------------------------------------------------------------------- */
6812 /* __attribute__((cleanup(fn))) */
6814 static void try_call_scope_cleanup(Sym
*stop
)
6816 Sym
*cls
= cur_scope
->cl
.s
;
6818 for (; cls
!= stop
; cls
= cls
->ncl
) {
6819 Sym
*fs
= cls
->next
;
6820 Sym
*vs
= cls
->prev_tok
;
6822 vpushsym(&fs
->type
, fs
);
6823 vset(&vs
->type
, vs
->r
, vs
->c
);
6825 mk_pointer(&vtop
->type
);
6831 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6836 if (!cur_scope
->cl
.s
)
6839 /* search NCA of both cleanup chains given parents and initial depth */
6840 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6841 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6843 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6845 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6848 try_call_scope_cleanup(cc
);
6851 /* call 'func' for each __attribute__((cleanup(func))) */
6852 static void block_cleanup(struct scope
*o
)
6856 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6857 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6862 try_call_scope_cleanup(o
->cl
.s
);
6863 pcl
->jnext
= gjmp(0);
6865 goto remove_pending
;
6875 try_call_scope_cleanup(o
->cl
.s
);
6878 /* ------------------------------------------------------------------------- */
6881 static void vla_restore(int loc
)
6884 gen_vla_sp_restore(loc
);
6887 static void vla_leave(struct scope
*o
)
6889 if (o
->vla
.num
< cur_scope
->vla
.num
)
6890 vla_restore(o
->vla
.loc
);
6893 /* ------------------------------------------------------------------------- */
6896 void new_scope(struct scope
*o
)
6898 /* copy and link previous scope */
6900 o
->prev
= cur_scope
;
6903 /* record local declaration stack position */
6904 o
->lstk
= local_stack
;
6905 o
->llstk
= local_label_stack
;
6909 if (tcc_state
->do_debug
)
6910 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
6913 void prev_scope(struct scope
*o
, int is_expr
)
6917 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6918 block_cleanup(o
->prev
);
6920 /* pop locally defined labels */
6921 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6923 /* In the is_expr case (a statement expression is finished here),
6924 vtop might refer to symbols on the local_stack. Either via the
6925 type or via vtop->sym. We can't pop those nor any that in turn
6926 might be referred to. To make it easier we don't roll back
6927 any symbols in that case; some upper level call to block() will
6928 do that. We do have to remove such symbols from the lookup
6929 tables, though. sym_pop will do that. */
6931 /* pop locally defined symbols */
6932 pop_local_syms(&local_stack
, o
->lstk
, is_expr
, 0);
6933 cur_scope
= o
->prev
;
6936 if (tcc_state
->do_debug
)
6937 tcc_debug_stabn(N_RBRAC
, ind
- func_ind
);
6940 /* leave a scope via break/continue(/goto) */
6941 void leave_scope(struct scope
*o
)
6945 try_call_scope_cleanup(o
->cl
.s
);
6949 /* ------------------------------------------------------------------------- */
6950 /* call block from 'for do while' loops */
6952 static void lblock(int *bsym
, int *csym
)
6954 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6955 int *b
= co
->bsym
, *c
= co
->csym
;
6969 static void block(int is_expr
)
6971 int a
, b
, c
, d
, e
, t
;
6976 /* default return value is (void) */
6978 vtop
->type
.t
= VT_VOID
;
6983 /* If the token carries a value, next() might destroy it. Only with
6984 invalid code such as f(){"123"4;} */
6985 if (TOK_HAS_VALUE(t
))
6995 if (tok
== TOK_ELSE
) {
7000 gsym(d
); /* patch else jmp */
7005 } else if (t
== TOK_WHILE
) {
7017 } else if (t
== '{') {
7020 /* handle local labels declarations */
7021 while (tok
== TOK_LABEL
) {
7024 if (tok
< TOK_UIDENT
)
7025 expect("label identifier");
7026 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7028 } while (tok
== ',');
7032 while (tok
!= '}') {
7041 prev_scope(&o
, is_expr
);
7044 else if (!nocode_wanted
)
7045 check_func_return();
7047 } else if (t
== TOK_RETURN
) {
7048 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7052 gen_assign_cast(&func_vt
);
7054 if (vtop
->type
.t
!= VT_VOID
)
7055 tcc_warning("void function returns a value");
7059 tcc_warning("'return' with no value");
7062 leave_scope(root_scope
);
7064 gfunc_return(&func_vt
);
7066 /* jump unless last stmt in top-level block */
7067 if (tok
!= '}' || local_scope
!= 1)
7071 } else if (t
== TOK_BREAK
) {
7073 if (!cur_scope
->bsym
)
7074 tcc_error("cannot break");
7075 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7076 leave_scope(cur_switch
->scope
);
7078 leave_scope(loop_scope
);
7079 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7082 } else if (t
== TOK_CONTINUE
) {
7084 if (!cur_scope
->csym
)
7085 tcc_error("cannot continue");
7086 leave_scope(loop_scope
);
7087 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7090 } else if (t
== TOK_FOR
) {
7095 /* c99 for-loop init decl? */
7096 if (!decl0(VT_LOCAL
, 1, NULL
)) {
7097 /* no, regular for-loop init expr */
7125 } else if (t
== TOK_DO
) {
7139 } else if (t
== TOK_SWITCH
) {
7140 struct switch_t
*sw
;
7142 sw
= tcc_mallocz(sizeof *sw
);
7144 sw
->scope
= cur_scope
;
7145 sw
->prev
= cur_switch
;
7151 sw
->sv
= *vtop
--; /* save switch value */
7154 b
= gjmp(0); /* jump to first case */
7156 a
= gjmp(a
); /* add implicit break */
7160 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7161 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7163 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7165 for (b
= 1; b
< sw
->n
; b
++)
7166 if (sw
->sv
.type
.t
& VT_UNSIGNED
7167 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7168 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7169 tcc_error("duplicate case value");
7173 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7176 gsym_addr(d
, sw
->def_sym
);
7182 dynarray_reset(&sw
->p
, &sw
->n
);
7183 cur_switch
= sw
->prev
;
7186 } else if (t
== TOK_CASE
) {
7187 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7190 cr
->v1
= cr
->v2
= expr_const64();
7191 if (gnu_ext
&& tok
== TOK_DOTS
) {
7193 cr
->v2
= expr_const64();
7194 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7195 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7196 tcc_warning("empty case range");
7199 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7202 goto block_after_label
;
7204 } else if (t
== TOK_DEFAULT
) {
7207 if (cur_switch
->def_sym
)
7208 tcc_error("too many 'default'");
7209 cur_switch
->def_sym
= gind();
7212 goto block_after_label
;
7214 } else if (t
== TOK_GOTO
) {
7215 vla_restore(root_scope
->vla
.loc
);
7216 if (tok
== '*' && gnu_ext
) {
7220 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7224 } else if (tok
>= TOK_UIDENT
) {
7225 s
= label_find(tok
);
7226 /* put forward definition if needed */
7228 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7229 else if (s
->r
== LABEL_DECLARED
)
7230 s
->r
= LABEL_FORWARD
;
7232 if (s
->r
& LABEL_FORWARD
) {
7233 /* start new goto chain for cleanups, linked via label->next */
7234 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7235 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7236 pending_gotos
->prev_tok
= s
;
7237 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7238 pending_gotos
->next
= s
;
7240 s
->jnext
= gjmp(s
->jnext
);
7242 try_call_cleanup_goto(s
->cleanupstate
);
7243 gjmp_addr(s
->jnext
);
7248 expect("label identifier");
7252 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7256 if (tok
== ':' && t
>= TOK_UIDENT
) {
7261 if (s
->r
== LABEL_DEFINED
)
7262 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7263 s
->r
= LABEL_DEFINED
;
7265 Sym
*pcl
; /* pending cleanup goto */
7266 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7268 sym_pop(&s
->next
, NULL
, 0);
7272 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7275 s
->cleanupstate
= cur_scope
->cl
.s
;
7278 vla_restore(cur_scope
->vla
.loc
);
7279 /* we accept this, but it is a mistake */
7281 tcc_warning("deprecated use of label at end of compound statement");
7287 /* expression case */
7304 /* This skips over a stream of tokens containing balanced {} and ()
7305 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7306 with a '{'). If STR then allocates and stores the skipped tokens
7307 in *STR. This doesn't check if () and {} are nested correctly,
7308 i.e. "({)}" is accepted. */
7309 static void skip_or_save_block(TokenString
**str
)
7311 int braces
= tok
== '{';
7314 *str
= tok_str_alloc();
7316 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7318 if (tok
== TOK_EOF
) {
7319 if (str
|| level
> 0)
7320 tcc_error("unexpected end of file");
7325 tok_str_add_tok(*str
);
7328 if (t
== '{' || t
== '(') {
7330 } else if (t
== '}' || t
== ')') {
7332 if (level
== 0 && braces
&& t
== '}')
7337 tok_str_add(*str
, -1);
7338 tok_str_add(*str
, 0);
7342 #define EXPR_CONST 1
7345 static void parse_init_elem(int expr_type
)
7347 int saved_global_expr
;
7350 /* compound literals must be allocated globally in this case */
7351 saved_global_expr
= global_expr
;
7354 global_expr
= saved_global_expr
;
7355 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7356 (compound literals). */
7357 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7358 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7359 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7360 #ifdef TCC_TARGET_PE
7361 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7364 tcc_error("initializer element is not constant");
7373 static void init_assert(init_params
*p
, int offset
)
7375 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7376 : !nocode_wanted
&& offset
> p
->local_offset
)
7377 tcc_internal_error("initializer overflow");
7380 #define init_assert(sec, offset)
7383 /* put zeros for variable based init */
7384 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7386 init_assert(p
, c
+ size
);
7388 /* nothing to do because globals are already set to zero */
7390 vpush_helper_func(TOK_memset
);
7392 #ifdef TCC_TARGET_ARM
7404 #define DIF_SIZE_ONLY 2
7405 #define DIF_HAVE_ELEM 4
7408 /* delete relocations for specified range c ... c + size. Unfortunatly
7409 in very special cases, relocations may occur unordered */
7410 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7412 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7413 if (!sec
|| !sec
->reloc
)
7415 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7416 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7417 while (rel
< rel_end
) {
7418 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7419 sec
->reloc
->data_offset
-= sizeof *rel
;
7422 memcpy(rel2
, rel
, sizeof *rel
);
7429 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7431 if (ref
== p
->flex_array_ref
) {
7432 if (index
>= ref
->c
)
7434 } else if (ref
->c
< 0)
7435 tcc_error("flexible array has zero size in this context");
7438 /* t is the array or struct type. c is the array or struct
7439 address. cur_field is the pointer to the current
7440 field, for arrays the 'c' member contains the current start
7441 index. 'flags' is as in decl_initializer.
7442 'al' contains the already initialized length of the
7443 current container (starting at c). This returns the new length of that. */
7444 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7445 Sym
**cur_field
, int flags
, int al
)
7448 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7449 unsigned long corig
= c
;
7454 if (flags
& DIF_HAVE_ELEM
)
7457 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7464 /* NOTE: we only support ranges for last designator */
7465 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7467 if (!(type
->t
& VT_ARRAY
))
7468 expect("array type");
7470 index
= index_last
= expr_const();
7471 if (tok
== TOK_DOTS
&& gnu_ext
) {
7473 index_last
= expr_const();
7477 decl_design_flex(p
, s
, index_last
);
7478 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7479 tcc_error("index exceeds array bounds or range is empty");
7481 (*cur_field
)->c
= index_last
;
7482 type
= pointed_type(type
);
7483 elem_size
= type_size(type
, &align
);
7484 c
+= index
* elem_size
;
7485 nb_elems
= index_last
- index
+ 1;
7492 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7493 expect("struct/union type");
7495 f
= find_field(type
, l
, &cumofs
);
7508 } else if (!gnu_ext
) {
7513 if (type
->t
& VT_ARRAY
) {
7514 index
= (*cur_field
)->c
;
7516 decl_design_flex(p
, s
, index
);
7518 tcc_error("too many initializers");
7519 type
= pointed_type(type
);
7520 elem_size
= type_size(type
, &align
);
7521 c
+= index
* elem_size
;
7524 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7525 *cur_field
= f
= f
->next
;
7527 tcc_error("too many initializers");
7533 if (!elem_size
) /* for structs */
7534 elem_size
= type_size(type
, &align
);
7536 /* Using designators the same element can be initialized more
7537 than once. In that case we need to delete possibly already
7538 existing relocations. */
7539 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7540 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7541 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7544 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7546 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7550 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7551 /* make init_putv/vstore believe it were a struct */
7553 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7557 vpush_ref(type
, p
->sec
, c
, elem_size
);
7559 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7560 for (i
= 1; i
< nb_elems
; i
++) {
7562 init_putv(p
, type
, c
+ elem_size
* i
);
7567 c
+= nb_elems
* elem_size
;
7573 /* store a value or an expression directly in global data or in local array */
7574 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7580 Section
*sec
= p
->sec
;
7583 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7585 size
= type_size(type
, &align
);
7586 if (type
->t
& VT_BITFIELD
)
7587 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7588 init_assert(p
, c
+ size
);
7591 /* XXX: not portable */
7592 /* XXX: generate error if incorrect relocation */
7593 gen_assign_cast(&dtype
);
7594 bt
= type
->t
& VT_BTYPE
;
7596 if ((vtop
->r
& VT_SYM
)
7599 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7600 || (type
->t
& VT_BITFIELD
))
7601 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7603 tcc_error("initializer element is not computable at load time");
7605 if (NODATA_WANTED
) {
7610 ptr
= sec
->data
+ c
;
7612 /* XXX: make code faster ? */
7613 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7614 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7615 /* XXX This rejects compound literals like
7616 '(void *){ptr}'. The problem is that '&sym' is
7617 represented the same way, which would be ruled out
7618 by the SYM_FIRST_ANOM check above, but also '"string"'
7619 in 'char *p = "string"' is represented the same
7620 with the type being VT_PTR and the symbol being an
7621 anonymous one. That is, there's no difference in vtop
7622 between '(void *){x}' and '&(void *){x}'. Ignore
7623 pointer typed entities here. Hopefully no real code
7624 will ever use compound literals with scalar type. */
7625 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7626 /* These come from compound literals, memcpy stuff over. */
7630 esym
= elfsym(vtop
->sym
);
7631 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7632 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7634 /* We need to copy over all memory contents, and that
7635 includes relocations. Use the fact that relocs are
7636 created it order, so look from the end of relocs
7637 until we hit one before the copied region. */
7638 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7639 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7640 while (num_relocs
--) {
7642 if (rel
->r_offset
>= esym
->st_value
+ size
)
7644 if (rel
->r_offset
< esym
->st_value
)
7646 put_elf_reloca(symtab_section
, sec
,
7647 c
+ rel
->r_offset
- esym
->st_value
,
7648 ELFW(R_TYPE
)(rel
->r_info
),
7649 ELFW(R_SYM
)(rel
->r_info
),
7659 if (type
->t
& VT_BITFIELD
) {
7660 int bit_pos
, bit_size
, bits
, n
;
7661 unsigned char *p
, v
, m
;
7662 bit_pos
= BIT_POS(vtop
->type
.t
);
7663 bit_size
= BIT_SIZE(vtop
->type
.t
);
7664 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7665 bit_pos
&= 7, bits
= 0;
7670 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7671 m
= ((1 << n
) - 1) << bit_pos
;
7672 *p
= (*p
& ~m
) | (v
& m
);
7673 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7677 /* XXX: when cross-compiling we assume that each type has the
7678 same representation on host and target, which is likely to
7679 be wrong in the case of long double */
7681 vtop
->c
.i
= vtop
->c
.i
!= 0;
7683 *(char *)ptr
= vtop
->c
.i
;
7686 *(short *)ptr
= vtop
->c
.i
;
7689 *(float*)ptr
= vtop
->c
.f
;
7692 *(double *)ptr
= vtop
->c
.d
;
7695 #if defined TCC_IS_NATIVE_387
7696 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7697 memcpy(ptr
, &vtop
->c
.ld
, 10);
7699 else if (sizeof (long double) == sizeof (double))
7700 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7702 else if (vtop
->c
.ld
== 0.0)
7706 if (sizeof(long double) == LDOUBLE_SIZE
)
7707 *(long double*)ptr
= vtop
->c
.ld
;
7708 else if (sizeof(double) == LDOUBLE_SIZE
)
7709 *(double *)ptr
= (double)vtop
->c
.ld
;
7711 tcc_error("can't cross compile long double constants");
7715 *(long long *)ptr
= vtop
->c
.i
;
7722 addr_t val
= vtop
->c
.i
;
7724 if (vtop
->r
& VT_SYM
)
7725 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7727 *(addr_t
*)ptr
= val
;
7729 if (vtop
->r
& VT_SYM
)
7730 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7731 *(addr_t
*)ptr
= val
;
7737 int val
= vtop
->c
.i
;
7739 if (vtop
->r
& VT_SYM
)
7740 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7744 if (vtop
->r
& VT_SYM
)
7745 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7754 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7761 /* 't' contains the type and storage info. 'c' is the offset of the
7762 object in section 'sec'. If 'sec' is NULL, it means stack based
7763 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7764 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7765 size only evaluation is wanted (only for arrays). */
7766 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7768 int len
, n
, no_oblock
, i
;
7774 /* generate line number info */
7775 if (!p
->sec
&& tcc_state
->do_debug
)
7776 tcc_debug_line(tcc_state
);
7778 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7779 /* In case of strings we have special handling for arrays, so
7780 don't consume them as initializer value (which would commit them
7781 to some anonymous symbol). */
7782 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7783 !(flags
& DIF_SIZE_ONLY
)) {
7784 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7785 flags
|= DIF_HAVE_ELEM
;
7788 if ((flags
& DIF_HAVE_ELEM
) &&
7789 !(type
->t
& VT_ARRAY
) &&
7790 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7791 The source type might have VT_CONSTANT set, which is
7792 of course assignable to non-const elements. */
7793 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7796 } else if (type
->t
& VT_ARRAY
) {
7798 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7806 t1
= pointed_type(type
);
7807 size1
= type_size(t1
, &align1
);
7809 /* only parse strings here if correct type (otherwise: handle
7810 them as ((w)char *) expressions */
7811 if ((tok
== TOK_LSTR
&&
7812 #ifdef TCC_TARGET_PE
7813 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7815 (t1
->t
& VT_BTYPE
) == VT_INT
7817 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7819 cstr_reset(&initstr
);
7820 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7821 tcc_error("unhandled string literal merging");
7822 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7824 initstr
.size
-= size1
;
7826 len
+= tokc
.str
.size
;
7828 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7830 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7833 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7834 && tok
!= TOK_EOF
) {
7835 /* Not a lone literal but part of a bigger expression. */
7836 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7837 tokc
.str
.size
= initstr
.size
;
7838 tokc
.str
.data
= initstr
.data
;
7842 if (!(flags
& DIF_SIZE_ONLY
)) {
7847 tcc_warning("initializer-string for array is too long");
7848 /* in order to go faster for common case (char
7849 string in global variable, we handle it
7851 if (p
->sec
&& size1
== 1) {
7852 init_assert(p
, c
+ nb
);
7854 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7858 /* only add trailing zero if enough storage (no
7859 warning in this case since it is standard) */
7860 if (flags
& DIF_CLEAR
)
7863 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7867 } else if (size1
== 1)
7868 ch
= ((unsigned char *)initstr
.data
)[i
];
7870 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7872 init_putv(p
, t1
, c
+ i
* size1
);
7876 decl_design_flex(p
, s
, len
);
7885 /* zero memory once in advance */
7886 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7887 init_putz(p
, c
, n
*size1
);
7892 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7893 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7894 flags
&= ~DIF_HAVE_ELEM
;
7895 if (type
->t
& VT_ARRAY
) {
7897 /* special test for multi dimensional arrays (may not
7898 be strictly correct if designators are used at the
7900 if (no_oblock
&& len
>= n
*size1
)
7903 if (s
->type
.t
== VT_UNION
)
7907 if (no_oblock
&& f
== NULL
)
7918 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7920 if ((flags
& DIF_FIRST
) || tok
== '{') {
7929 } else if (tok
== '{') {
7930 if (flags
& DIF_HAVE_ELEM
)
7933 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7935 } else if ((flags
& DIF_SIZE_ONLY
)) {
7936 /* If we supported only ISO C we wouldn't have to accept calling
7937 this on anything than an array if DIF_SIZE_ONLY (and even then
7938 only on the outermost level, so no recursion would be needed),
7939 because initializing a flex array member isn't supported.
7940 But GNU C supports it, so we need to recurse even into
7941 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7942 /* just skip expression */
7943 skip_or_save_block(NULL
);
7945 if (!(flags
& DIF_HAVE_ELEM
)) {
7946 /* This should happen only when we haven't parsed
7947 the init element above for fear of committing a
7948 string constant to memory too early. */
7949 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7950 expect("string constant");
7951 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7954 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7955 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7957 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7961 init_putv(p
, type
, c
);
7965 /* parse an initializer for type 't' if 'has_init' is non zero, and
7966 allocate space in local or global data space ('r' is either
7967 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7968 variable 'v' of scope 'scope' is declared before initializers
7969 are parsed. If 'v' is zero, then a reference to the new object
7970 is put in the value stack. If 'has_init' is 2, a special parsing
7971 is done to handle string constants. */
7972 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7973 int has_init
, int v
, int scope
)
7975 int size
, align
, addr
;
7976 TokenString
*init_str
= NULL
;
7979 Sym
*flexible_array
;
7981 int saved_nocode_wanted
= nocode_wanted
;
7982 #ifdef CONFIG_TCC_BCHECK
7983 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7985 init_params p
= {0};
7987 /* Always allocate static or global variables */
7988 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7989 nocode_wanted
|= 0x80000000;
7991 flexible_array
= NULL
;
7992 size
= type_size(type
, &align
);
7994 /* exactly one flexible array may be initialized, either the
7995 toplevel array or the last member of the toplevel struct */
7998 /* If the base type itself was an array type of unspecified size
7999 (like in 'typedef int arr[]; arr x = {1};') then we will
8000 overwrite the unknown size by the real one for this decl.
8001 We need to unshare the ref symbol holding that size. */
8002 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
8003 p
.flex_array_ref
= type
->ref
;
8005 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8006 Sym
*field
= type
->ref
->next
;
8009 field
= field
->next
;
8010 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
8011 flexible_array
= field
;
8012 p
.flex_array_ref
= field
->type
.ref
;
8019 /* If unknown size, do a dry-run 1st pass */
8021 tcc_error("unknown type size");
8022 if (has_init
== 2) {
8023 /* only get strings */
8024 init_str
= tok_str_alloc();
8025 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8026 tok_str_add_tok(init_str
);
8029 tok_str_add(init_str
, -1);
8030 tok_str_add(init_str
, 0);
8032 skip_or_save_block(&init_str
);
8036 begin_macro(init_str
, 1);
8038 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8039 /* prepare second initializer parsing */
8040 macro_ptr
= init_str
->str
;
8043 /* if still unknown size, error */
8044 size
= type_size(type
, &align
);
8046 tcc_error("unknown type size");
8048 /* If there's a flex member and it was used in the initializer
8050 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8051 size
+= flexible_array
->type
.ref
->c
8052 * pointed_size(&flexible_array
->type
);
8055 /* take into account specified alignment if bigger */
8056 if (ad
->a
.aligned
) {
8057 int speca
= 1 << (ad
->a
.aligned
- 1);
8060 } else if (ad
->a
.packed
) {
8064 if (!v
&& NODATA_WANTED
)
8065 size
= 0, align
= 1;
8067 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8069 #ifdef CONFIG_TCC_BCHECK
8071 /* add padding between stack variables for bound checking */
8075 loc
= (loc
- size
) & -align
;
8077 p
.local_offset
= addr
+ size
;
8078 #ifdef CONFIG_TCC_BCHECK
8080 /* add padding between stack variables for bound checking */
8085 /* local variable */
8086 #ifdef CONFIG_TCC_ASM
8087 if (ad
->asm_label
) {
8088 int reg
= asm_parse_regvar(ad
->asm_label
);
8090 r
= (r
& ~VT_VALMASK
) | reg
;
8093 sym
= sym_push(v
, type
, r
, addr
);
8094 if (ad
->cleanup_func
) {
8095 Sym
*cls
= sym_push2(&all_cleanups
,
8096 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8097 cls
->prev_tok
= sym
;
8098 cls
->next
= ad
->cleanup_func
;
8099 cls
->ncl
= cur_scope
->cl
.s
;
8100 cur_scope
->cl
.s
= cls
;
8105 /* push local reference */
8106 vset(type
, r
, addr
);
8109 if (v
&& scope
== VT_CONST
) {
8110 /* see if the symbol was already defined */
8113 patch_storage(sym
, ad
, type
);
8114 /* we accept several definitions of the same global variable. */
8115 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8120 /* allocate symbol in corresponding section */
8123 if (type
->t
& VT_CONSTANT
)
8124 sec
= data_ro_section
;
8127 else if (tcc_state
->nocommon
)
8132 addr
= section_add(sec
, size
, align
);
8133 #ifdef CONFIG_TCC_BCHECK
8134 /* add padding if bound check */
8136 section_add(sec
, 1, 1);
8139 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8140 sec
= common_section
;
8145 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8146 patch_storage(sym
, ad
, NULL
);
8148 /* update symbol definition */
8149 put_extern_sym(sym
, sec
, addr
, size
);
8151 /* push global reference */
8152 vpush_ref(type
, sec
, addr
, size
);
8157 #ifdef CONFIG_TCC_BCHECK
8158 /* handles bounds now because the symbol must be defined
8159 before for the relocation */
8163 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8164 /* then add global bound info */
8165 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8166 bounds_ptr
[0] = 0; /* relocated */
8167 bounds_ptr
[1] = size
;
8172 if (type
->t
& VT_VLA
) {
8178 /* save current stack pointer */
8179 if (root_scope
->vla
.loc
== 0) {
8180 struct scope
*v
= cur_scope
;
8181 gen_vla_sp_save(loc
-= PTR_SIZE
);
8182 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
8185 vla_runtime_type_size(type
, &a
);
8186 gen_vla_alloc(type
, a
);
8187 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8188 /* on _WIN64, because of the function args scratch area, the
8189 result of alloca differs from RSP and is returned in RAX. */
8190 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8192 gen_vla_sp_save(addr
);
8193 cur_scope
->vla
.loc
= addr
;
8194 cur_scope
->vla
.num
++;
8195 } else if (has_init
) {
8197 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8198 /* patch flexible array member size back to -1, */
8199 /* for possible subsequent similar declarations */
8201 flexible_array
->type
.ref
->c
= -1;
8205 /* restore parse state if needed */
8211 nocode_wanted
= saved_nocode_wanted
;
8214 /* parse a function defined by symbol 'sym' and generate its code in
8215 'cur_text_section' */
/* NOTE(review): this text appears to be a lossy extraction — several
   interior statements of the original function (prolog/epilog emission,
   the body-parsing call, closing braces) seem to have been dropped;
   the comments below describe only what is visible here. */
8216 static void gen_function(Sym
*sym
)
/* Per-function scope record, zero-initialized; becomes both the current
   and the root scope for the duration of this function. */
8218 struct scope f
= { 0 };
8219 cur_scope
= root_scope
= &f
;
/* Start emitting at the current end of the text section. */
8221 ind
= cur_text_section
->data_offset
;
/* Honor an explicit alignment attribute by padding with nops. */
8222 if (sym
->a
.aligned
) {
8223 size_t newoff
= section_add(cur_text_section
, 0,
8224 1 << (sym
->a
.aligned
- 1));
8225 gen_fill_nops(newoff
- ind
);
8227 /* NOTE: we patch the symbol size later */
8228 put_extern_sym(sym
, cur_text_section
, ind
, 0);
/* Register constructor/destructor functions in the usual ELF
   init/fini arrays when the attribute flags are set. */
8229 if (sym
->type
.ref
->f
.func_ctor
)
8230 add_array (tcc_state
, ".init_array", sym
->c
);
8231 if (sym
->type
.ref
->f
.func_dtor
)
8232 add_array (tcc_state
, ".fini_array", sym
->c
);
/* Record the function name and its return type / variadic-ness in the
   compiler's per-function globals. */
8234 funcname
= get_tok_str(sym
->v
, NULL
);
8236 func_vt
= sym
->type
.ref
->type
;
8237 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8239 /* put debug symbol */
8240 tcc_debug_funcstart(tcc_state
, sym
);
8241 /* push a dummy symbol to enable local sym storage */
8242 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8243 local_scope
= 1; /* for function parameters */
8247 clear_temp_local_var_list();
/* NOTE(review): the call that parses/generates the function body is
   presumably between here and the cleanup below — not visible in this
   extraction; confirm against the original file. */
8251 /* reset local stack */
8252 pop_local_syms(&local_stack
, NULL
, 0, func_var
);
/* Commit the final code offset back to the section. */
8254 cur_text_section
->data_offset
= ind
;
8256 label_pop(&global_label_stack
, NULL
, 0);
8257 sym_pop(&all_cleanups
, NULL
, 0);
8258 /* patch symbol size */
8259 elfsym(sym
)->st_size
= ind
- func_ind
;
8260 /* end of function */
8261 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8262 /* It's better to crash than to generate wrong code */
8263 cur_text_section
= NULL
;
8264 funcname
= ""; /* for safety */
8265 func_vt
.t
= VT_VOID
; /* for safety */
8266 func_var
= 0; /* for safety */
8267 ind
= 0; /* for safety */
/* Disable code generation until the next function starts (matches the
   STATIC_DATA_WANTED bit defined at the top of the file). */
8268 nocode_wanted
= 0x80000000;
8270 /* do this after funcend debug info */
/* Emit code for all referenced 'static inline' functions recorded during
   parsing. Repeats until a full pass generates nothing new, since
   generating one inline function may reference another.
   NOTE(review): extraction appears to have dropped interior lines (the
   'do {' opener, the sym lookup, and the call that actually compiles the
   function body); comments reflect only the visible statements. */
8274 static void gen_inline_functions(TCCState
*s
)
8277 int inline_generated
, i
;
8278 struct InlineFunc
*fn
;
/* Pseudo input file so diagnostics have a name while replaying tokens. */
8280 tcc_open_bf(s
, ":inline:", 0);
8281 /* iterate while inline function are referenced */
8283 inline_generated
= 0;
8284 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8285 fn
= s
->inline_fns
[i
];
/* Generate when the symbol was used (sym->c set) or is no longer
   marked inline (forced external). */
8287 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8288 /* the function was used or forced (and then not internal):
8289 generate its code and convert it to a normal function */
8291 tcc_debug_putfile(s
, fn
->filename
);
/* Replay the saved token string of the function body as macro input. */
8292 begin_macro(fn
->func_str
, 1);
8294 cur_text_section
= text_section
;
/* Remember that this pass produced code, forcing another pass. */
8298 inline_generated
= 1;
8301 } while (inline_generated
);
/* Release the saved token strings of inline functions that were never
   emitted, then free the InlineFunc array itself.
   NOTE(review): the guard that selects which entries get tok_str_free
   is not visible in this extraction — confirm against the original. */
8305 static void free_inline_functions(TCCState
*s
)
8308 /* free tokens of unused inline functions */
8309 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8310 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8312 tok_str_free(fn
->func_str
);
/* dynarray_reset frees every element plus the array and zeroes the
   count, leaving the state ready for reuse. */
8314 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8317 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8318 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): this is the top-level declaration parser. The extraction
   has dropped many interior lines (braces, loop bodies, several calls);
   the comments added below only annotate the statements that are
   visible and should be re-checked against the original file. */
8319 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8321 int v
, has_init
, r
, oldint
;
8324 AttributeDef ad
, adbase
;
/* _Static_assert handling: evaluate the constant, error with either the
   default or the user-supplied message string. */
8327 if (tok
== TOK_STATIC_ASSERT
) {
8337 tcc_error("_Static_assert fail");
8339 goto static_assert_out
;
8343 parse_mult_str(&error_str
, "string constant");
8345 tcc_error("%s", (char *)error_str
.data
);
8346 cstr_free(&error_str
);
/* No base type parsed: handle empty declarations, global asm blocks,
   and old-style K&R declarations that default to int. */
8354 if (!parse_btype(&btype
, &adbase
)) {
8355 if (is_for_loop_init
)
8357 /* skip redundant ';' if not in old parameter decl scope */
8358 if (tok
== ';' && l
!= VT_CMP
) {
8364 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8365 /* global asm block */
8369 if (tok
>= TOK_UIDENT
) {
8370 /* special test for old K&R protos without explicit int
8371 type. Only accepted when defining global data */
8376 expect("declaration");
/* A bare struct/union tag with no declarator defines no object;
   warn when the tag is anonymous (nothing can ever reference it). */
8382 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8384 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8385 tcc_warning("unnamed struct/union that defines no instances");
8389 if (IS_ENUM(btype
.t
)) {
8395 while (1) { /* iterate thru each declaration */
8398 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* Debug dump of the parsed declarator (presumably guarded by a
   verbosity/debug condition not visible here). */
8402 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8403 printf("type = '%s'\n", buf
);
8406 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8407 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8408 tcc_error("function without file scope cannot be static");
8409 /* if old style function prototype, we accept a
/* Recursive call parses the old-style parameter declaration list
   (l == VT_CMP marks that mode, see the function header comment). */
8412 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8413 decl0(VT_CMP
, 0, sym
);
8414 #ifdef TCC_TARGET_MACHO
8415 if (sym
->f
.func_alwinl
8416 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8417 == (VT_EXTERN
| VT_INLINE
))) {
8418 /* always_inline functions must be handled as if they
8419 don't generate multiple global defs, even if extern
8420 inline, i.e. GNU inline semantics for those. Rewrite
8421 them into static inline. */
8422 type
.t
&= ~VT_EXTERN
;
8423 type
.t
|= VT_STATIC
;
8426 /* always compile 'extern inline' */
8427 if (type
.t
& VT_EXTERN
)
8428 type
.t
&= ~VT_INLINE
;
8430 } else if (oldint
) {
8431 tcc_warning("type defaults to int");
/* GNU extension: asm("label") renames the symbol; a final attribute
   list may follow it. */
8434 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8435 ad
.asm_label
= asm_label_instr();
8436 /* parse one last attribute list, after asm label */
8437 parse_attribute(&ad
);
8439 /* gcc does not allow __asm__("label") with function definition,
8446 #ifdef TCC_TARGET_PE
/* PE (Windows) dllimport/dllexport sanity checks: incompatible with
   static, ignored on typedefs, and dllimport implies extern. */
8447 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8448 if (type
.t
& VT_STATIC
)
8449 tcc_error("cannot have dll linkage with static");
8450 if (type
.t
& VT_TYPEDEF
) {
8451 tcc_warning("'%s' attribute ignored for typedef",
8452 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8453 (ad
.a
.dllexport
= 0, "dllexport"));
8454 } else if (ad
.a
.dllimport
) {
8455 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8458 type
.t
|= VT_EXTERN
;
/* Function definition path (presumably reached when a '{' follows;
   the guard is not visible in this extraction). */
8464 tcc_error("cannot use local functions");
8465 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8466 expect("function definition");
8468 /* reject abstract declarators in function definition
8469 make old style params without decl have int type */
8471 while ((sym
= sym
->next
) != NULL
) {
8472 if (!(sym
->v
& ~SYM_FIELD
))
8473 expect("identifier");
8474 if (sym
->type
.t
== VT_VOID
)
8475 sym
->type
= int_type
;
8478 /* apply post-declaraton attributes */
8479 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8481 /* put function symbol */
/* Clearing VT_EXTERN here: a definition overrides a previous
   extern declaration of the same function. */
8482 type
.t
&= ~VT_EXTERN
;
8483 sym
= external_sym(v
, &type
, 0, &ad
);
8485 /* static inline functions are just recorded as a kind
8486 of macro. Their code will be emitted at the end of
8487 the compilation unit only if they are used */
8488 if (sym
->type
.t
& VT_INLINE
) {
8489 struct InlineFunc
*fn
;
/* InlineFunc uses a flexible trailing buffer for the filename,
   hence sizeof *fn + strlen(...). */
8490 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8491 strcpy(fn
->filename
, file
->filename
);
/* Save the body's tokens instead of compiling now; replayed later
   by gen_inline_functions if the function is referenced. */
8493 skip_or_save_block(&fn
->func_str
);
8494 dynarray_add(&tcc_state
->inline_fns
,
8495 &tcc_state
->nb_inline_fns
, fn
);
8497 /* compute text section */
8498 cur_text_section
= ad
.section
;
8499 if (!cur_text_section
)
8500 cur_text_section
= text_section
;
8506 /* find parameter in function parameter list */
/* Old-style parameter declaration: the name must match one of the
   identifiers in func_sym's parameter list. */
8507 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8508 if ((sym
->v
& ~SYM_FIELD
) == v
)
8510 tcc_error("declaration for parameter '%s' but no such parameter",
8511 get_tok_str(v
, NULL
));
8513 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8514 tcc_error("storage class specified for '%s'",
8515 get_tok_str(v
, NULL
));
/* VT_VOID marks a parameter not yet declared; a non-void type means
   this parameter was already given a declaration. */
8516 if (sym
->type
.t
!= VT_VOID
)
8517 tcc_error("redefinition of parameter '%s'",
8518 get_tok_str(v
, NULL
));
8519 convert_parameter_type(&type
);
8521 } else if (type
.t
& VT_TYPEDEF
) {
8522 /* save typedefed type */
8523 /* XXX: test storage specifiers ? */
/* A typedef may only be redefined in the same scope if the types
   are compatible and the old symbol was also a typedef. */
8525 if (sym
&& sym
->sym_scope
== local_scope
) {
8526 if (!is_compatible_types(&sym
->type
, &type
)
8527 || !(sym
->type
.t
& VT_TYPEDEF
))
8528 tcc_error("incompatible redefinition of '%s'",
8529 get_tok_str(v
, NULL
));
8532 sym
= sym_push(v
, &type
, 0, 0);
8536 if (tcc_state
->do_debug
)
8537 tcc_debug_typedef (tcc_state
, sym
);
8538 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8539 && !(type
.t
& VT_EXTERN
)) {
8540 tcc_error("declaration of void object");
8543 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8544 /* external function definition */
8545 /* specific case for func_call attribute */
8547 } else if (!(type
.t
& VT_ARRAY
)) {
8548 /* not lvalue if array */
8551 has_init
= (tok
== '=');
8552 if (has_init
&& (type
.t
& VT_VLA
))
8553 tcc_error("variable length array cannot be initialized");
/* Decide whether this is only an external reference (no storage
   allocated here) versus a real definition. */
8554 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8555 || (type
.t
& VT_BTYPE
) == VT_FUNC
8556 /* as with GCC, uninitialized global arrays with no size
8557 are considered extern: */
8558 || ((type
.t
& VT_ARRAY
) && !has_init
8559 && l
== VT_CONST
&& type
.ref
->c
< 0)
8561 /* external variable or function */
8562 type
.t
|= VT_EXTERN
;
8563 sym
= external_sym(v
, &type
, r
, &ad
);
8564 if (ad
.alias_target
) {
8565 /* Aliases need to be emitted when their target
8566 symbol is emitted, even if perhaps unreferenced.
8567 We only support the case where the base is
8568 already defined, otherwise we would need
8569 deferring to emit the aliases until the end of
8570 the compile unit. */
8571 Sym
*alias_target
= sym_find(ad
.alias_target
);
8572 ElfSym
*esym
= elfsym(alias_target
);
/* esym validity check presumably precedes this error — not
   visible in this extraction. */
8574 tcc_error("unsupported forward __alias__ attribute");
8575 put_extern_sym2(sym
, esym
->st_shndx
,
8576 esym
->st_value
, esym
->st_size
, 1);
8579 if (type
.t
& VT_STATIC
)
8585 else if (l
== VT_CONST
)
8586 /* uninitialized global variables may be overridden */
8587 type
.t
|= VT_EXTERN
;
/* Allocate storage and parse the initializer (the large helper whose
   tail appears earlier in this file). */
8588 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
8592 if (is_for_loop_init
)
/* External entry point for declaration parsing at storage level 'l'.
   NOTE(review): the body was dropped by the extraction — presumably a
   single forwarding call to decl0(l, 0, NULL); confirm against the
   original file. */
8604 static void decl(int l
)
8609 /* ------------------------------------------------------------------------- */
8612 /* ------------------------------------------------------------------------- */