; RUN: llc -mtriple=arm64ec-pc-windows-msvc < %s | FileCheck %s
; RUN: llc -mtriple=arm64ec-pc-windows-msvc -filetype=obj -o %t.o < %s
; RUN: llvm-objdump -t %t.o | FileCheck --check-prefix=SYM %s

define dso_local ptr @func() hybrid_patchable nounwind {
; SYM: [ 8](sec  4)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 #func$hp_target
; CHECK-LABEL:     .def    "#func$hp_target";
; CHECK:           .section        .text,"xr",discard,"#func$hp_target"
; CHECK-NEXT:      .globl  "#func$hp_target"               // -- Begin function #func$hp_target
; CHECK-NEXT:      .p2align        2
; CHECK-NEXT:  "#func$hp_target":                      // @"#func$hp_target"
; CHECK-NEXT:      // %bb.0:
; CHECK-NEXT:      adrp x0, func
; CHECK-NEXT:      add x0, x0, :lo12:func
; CHECK-NEXT:      ret
  ret ptr @func
}

define void @has_varargs(...) hybrid_patchable nounwind {
; SYM: [11](sec  5)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 #has_varargs$hp_target
; CHECK-LABEL:     .def "#has_varargs$hp_target";
; CHECK:           .section .text,"xr",discard,"#has_varargs$hp_target"
; CHECK-NEXT:      .globl  "#has_varargs$hp_target"        // -- Begin function #has_varargs$hp_target
; CHECK-NEXT:      .p2align 2
; CHECK-NEXT:  "#has_varargs$hp_target":               // @"#has_varargs$hp_target"
; CHECK-NEXT:  // %bb.0:
; CHECK-NEXT:      sub     sp, sp, #32
; CHECK-NEXT:      stp     x0, x1, [x4, #-32]
; CHECK-NEXT:      stp     x2, x3, [x4, #-16]
; CHECK-NEXT:      add     sp, sp, #32
; CHECK-NEXT:      ret
  ret void
}

35 define void @has_sret(ptr sret([100 x i8])) hybrid_patchable nounwind {
36 ; SYM: [14](sec  6)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 #has_sret$hp_target
37 ; CHECK-LABEL:     .def    "#has_sret$hp_target";
38 ; CHECK:           .section        .text,"xr",discard,"#has_sret$hp_target"
39 ; CHECK-NEXT:      .globl  "#has_sret$hp_target"           // -- Begin function #has_sret$hp_target
40 ; CHECK-NEXT:      .p2align        2
41 ; CHECK-NEXT:  "#has_sret$hp_target":                  // @"#has_sret$hp_target"
42 ; CHECK-NEXT:  // %bb.0:
43 ; CHECK-NEXT:      ret
44   ret void
define dllexport void @exp() hybrid_patchable nounwind {
; CHECK-LABEL:     .def    "#exp$hp_target";
; CHECK:           .section        .text,"xr",discard,"#exp$hp_target"
; CHECK-NEXT:      .globl  "#exp$hp_target"                // -- Begin function #exp$hp_target
; CHECK-NEXT:      .p2align        2
; CHECK-NEXT:  "#exp$hp_target":                       // @"#exp$hp_target"
; CHECK-NEXT:  // %bb.0:
; CHECK-NEXT:      ret
  ret void
}

; The hybrid_patchable attribute is ignored on internal functions.
define internal i32 @static_func() hybrid_patchable nounwind {
; CHECK-LABEL:     .def    static_func;
; CHECK:       static_func:                            // @static_func
; CHECK-NEXT:      // %bb.0:
; CHECK-NEXT:      mov     w0, #2                          // =0x2
; CHECK-NEXT:      ret
  ret i32 2
}

define dso_local void @caller() nounwind {
; CHECK-LABEL:     .def    "#caller";
; CHECK:           .section        .text,"xr",discard,"#caller"
; CHECK-NEXT:      .globl  "#caller"                       // -- Begin function #caller
; CHECK-NEXT:      .p2align        2
; CHECK-NEXT:  "#caller":                              // @"#caller"
; CHECK-NEXT:      .weak_anti_dep  caller
; CHECK-NEXT:  .set caller, "#caller"{{$}}
; CHECK-NEXT:  // %bb.0:
; CHECK-NEXT:      str     x30, [sp, #-16]!                // 8-byte Folded Spill
; CHECK-NEXT:      bl      "#func"
; CHECK-NEXT:      bl      static_func
; CHECK-NEXT:      adrp    x8, __os_arm64x_check_icall
; CHECK-NEXT:      adrp    x11, func
; CHECK-NEXT:      add     x11, x11, :lo12:func
; CHECK-NEXT:      ldr     x8, [x8, :lo12:__os_arm64x_check_icall]
; CHECK-NEXT:      adrp    x10, ($iexit_thunk$cdecl$v$v)
; CHECK-NEXT:      add     x10, x10, :lo12:($iexit_thunk$cdecl$v$v)
; CHECK-NEXT:      str     x11, [sp, #8]
; CHECK-NEXT:      blr     x8
; CHECK-NEXT:      blr     x11
; CHECK-NEXT:      ldr     x30, [sp], #16                  // 8-byte Folded Reload
; CHECK-NEXT:      ret
  %1 = call i32 @func()
  %2 = call i32 @static_func()
  %3 = alloca ptr, align 8
  store ptr @func, ptr %3, align 8
  %4 = load ptr, ptr %3, align 8
  call void %4()
  ret void
}

; CHECK-LABEL:      .def    "#func$hybpatch_thunk";
; CHECK:            .section        .wowthk$aa,"xr",discard,"#func$hybpatch_thunk"
; CHECK-NEXT:       .globl  "#func$hybpatch_thunk"          // -- Begin function #func$hybpatch_thunk
; CHECK-NEXT:       .p2align        2
; CHECK-NEXT:   "#func$hybpatch_thunk":                 // @"#func$hybpatch_thunk"
; CHECK-NEXT:   .seh_proc "#func$hybpatch_thunk"
; CHECK-NEXT:   // %bb.0:
; CHECK-NEXT:       str     x30, [sp, #-16]!                // 8-byte Folded Spill
; CHECK-NEXT:       .seh_save_reg_x x30, 16
; CHECK-NEXT:       .seh_endprologue
; CHECK-NEXT:       adrp    x8, __os_arm64x_dispatch_call
; CHECK-NEXT:       adrp    x11, func
; CHECK-NEXT:       add     x11, x11, :lo12:func
; CHECK-NEXT:       ldr     x8, [x8, :lo12:__os_arm64x_dispatch_call]
; CHECK-NEXT:       adrp    x10, ($iexit_thunk$cdecl$i8$v)
; CHECK-NEXT:       add     x10, x10, :lo12:($iexit_thunk$cdecl$i8$v)
; CHECK-NEXT:       adrp    x9, "#func$hp_target"
; CHECK-NEXT:       add     x9, x9, :lo12:"#func$hp_target"
; CHECK-NEXT:       blr     x8
; CHECK-NEXT:       .seh_startepilogue
; CHECK-NEXT:       ldr     x30, [sp], #16                  // 8-byte Folded Reload
; CHECK-NEXT:       .seh_save_reg_x x30, 16
; CHECK-NEXT:       .seh_endepilogue
; CHECK-NEXT:       br      x11
; CHECK-NEXT:       .seh_endfunclet
; CHECK-NEXT:       .seh_endproc

; CHECK-LABEL:      .def    "#has_varargs$hybpatch_thunk";
; CHECK:            .section        .wowthk$aa,"xr",discard,"#has_varargs$hybpatch_thunk"
; CHECK-NEXT:       .globl  "#has_varargs$hybpatch_thunk"   // -- Begin function #has_varargs$hybpatch_thunk
; CHECK-NEXT:       .p2align        2
; CHECK-NEXT:   "#has_varargs$hybpatch_thunk":          // @"#has_varargs$hybpatch_thunk"
; CHECK-NEXT:   .seh_proc "#has_varargs$hybpatch_thunk"
; CHECK-NEXT:   // %bb.0:
; CHECK-NEXT:       str     x30, [sp, #-16]!                // 8-byte Folded Spill
; CHECK-NEXT:       .seh_save_reg_x x30, 16
; CHECK-NEXT:       .seh_endprologue
; CHECK-NEXT:       adrp    x8, __os_arm64x_dispatch_call
; CHECK-NEXT:       adrp    x11, has_varargs
; CHECK-NEXT:       add     x11, x11, :lo12:has_varargs
; CHECK-NEXT:       ldr     x8, [x8, :lo12:__os_arm64x_dispatch_call]
; CHECK-NEXT:       adrp    x10, ($iexit_thunk$cdecl$v$varargs)
; CHECK-NEXT:       add     x10, x10, :lo12:($iexit_thunk$cdecl$v$varargs)
; CHECK-NEXT:       adrp    x9, "#has_varargs$hp_target"
; CHECK-NEXT:       add     x9, x9, :lo12:"#has_varargs$hp_target"
; CHECK-NEXT:       blr     x8
; CHECK-NEXT:       .seh_startepilogue
; CHECK-NEXT:       ldr     x30, [sp], #16                  // 8-byte Folded Reload
; CHECK-NEXT:       .seh_save_reg_x x30, 16
; CHECK-NEXT:       .seh_endepilogue
; CHECK-NEXT:       br      x11
; CHECK-NEXT:       .seh_endfunclet
; CHECK-NEXT:       .seh_endproc

; CHECK-LABEL:     .def    "#has_sret$hybpatch_thunk";
; CHECK:           .section        .wowthk$aa,"xr",discard,"#has_sret$hybpatch_thunk"
; CHECK-NEXT:      .globl  "#has_sret$hybpatch_thunk"      // -- Begin function #has_sret$hybpatch_thunk
; CHECK-NEXT:      .p2align        2
; CHECK-NEXT:  "#has_sret$hybpatch_thunk":             // @"#has_sret$hybpatch_thunk"
; CHECK-NEXT:  .seh_proc "#has_sret$hybpatch_thunk"
; CHECK-NEXT:  // %bb.0:
; CHECK-NEXT:      str     x30, [sp, #-16]!                // 8-byte Folded Spill
; CHECK-NEXT:      .seh_save_reg_x x30, 16
; CHECK-NEXT:      .seh_endprologue
; CHECK-NEXT:      adrp    x9, __os_arm64x_dispatch_call
; CHECK-NEXT:      adrp    x11, has_sret
; CHECK-NEXT:      add     x11, x11, :lo12:has_sret
; CHECK-NEXT:      ldr     x12, [x9, :lo12:__os_arm64x_dispatch_call]
; CHECK-NEXT:      adrp    x10, ($iexit_thunk$cdecl$m100$v)
; CHECK-NEXT:      add     x10, x10, :lo12:($iexit_thunk$cdecl$m100$v)
; CHECK-NEXT:      adrp    x9, "#has_sret$hp_target"
; CHECK-NEXT:      add     x9, x9, :lo12:"#has_sret$hp_target"
; CHECK-NEXT:      blr     x12
; CHECK-NEXT:      .seh_startepilogue
; CHECK-NEXT:      ldr     x30, [sp], #16                  // 8-byte Folded Reload
; CHECK-NEXT:      .seh_save_reg_x x30, 16
; CHECK-NEXT:      .seh_endepilogue
; CHECK-NEXT:      br      x11
; CHECK-NEXT:      .seh_endfunclet
; CHECK-NEXT:      .seh_endproc

; CHECK-LABEL:     .def    "#exp$hybpatch_thunk";
; CHECK:           .section        .wowthk$aa,"xr",discard,"#exp$hybpatch_thunk"
; CHECK-NEXT:      .globl  "#exp$hybpatch_thunk"           // -- Begin function #exp$hybpatch_thunk
; CHECK-NEXT:      .p2align        2
; CHECK-NEXT:  "#exp$hybpatch_thunk":                  // @"#exp$hybpatch_thunk"
; CHECK-NEXT:  .seh_proc "#exp$hybpatch_thunk"
; CHECK-NEXT:  // %bb.0:
; CHECK-NEXT:      str     x30, [sp, #-16]!                // 8-byte Folded Spill
; CHECK-NEXT:      .seh_save_reg_x x30, 16
; CHECK-NEXT:      .seh_endprologue
; CHECK-NEXT:      adrp    x8, __os_arm64x_dispatch_call
; CHECK-NEXT:      adrp    x11, exp
; CHECK-NEXT:      add     x11, x11, :lo12:exp
; CHECK-NEXT:      ldr     x8, [x8, :lo12:__os_arm64x_dispatch_call]
; CHECK-NEXT:      adrp    x10, ($iexit_thunk$cdecl$v$v)
; CHECK-NEXT:      add     x10, x10, :lo12:($iexit_thunk$cdecl$v$v)
; CHECK-NEXT:      adrp    x9, "#exp$hp_target"
; CHECK-NEXT:      add     x9, x9, :lo12:"#exp$hp_target"
; CHECK-NEXT:      blr     x8
; CHECK-NEXT:      .seh_startepilogue
; CHECK-NEXT:      ldr     x30, [sp], #16                  // 8-byte Folded Reload
; CHECK-NEXT:      .seh_save_reg_x x30, 16
; CHECK-NEXT:      .seh_endepilogue
; CHECK-NEXT:      br      x11
; CHECK-NEXT:      .seh_endfunclet
; CHECK-NEXT:      .seh_endproc

; Verify the hybrid bitmap
; CHECK-LABEL:     .section        .hybmp$x,"yi"
; CHECK-NEXT:      .symidx "#func$hp_target"
; CHECK-NEXT:      .symidx $ientry_thunk$cdecl$i8$v
; CHECK-NEXT:      .word   1
; CHECK-NEXT:      .symidx "#has_varargs$hp_target"
; CHECK-NEXT:      .symidx $ientry_thunk$cdecl$v$varargs
; CHECK-NEXT:      .word   1
; CHECK-NEXT:      .symidx "#has_sret$hp_target"
; CHECK-NEXT:      .symidx $ientry_thunk$cdecl$m100$v
; CHECK-NEXT:      .word   1
; CHECK-NEXT:      .symidx "#exp$hp_target"
; CHECK-NEXT:      .symidx $ientry_thunk$cdecl$v$v
; CHECK-NEXT:      .word   1
; CHECK-NEXT:      .symidx "#caller"
; CHECK-NEXT:      .symidx $ientry_thunk$cdecl$v$v
; CHECK-NEXT:      .word   1
; CHECK-NEXT:      .symidx func
; CHECK-NEXT:      .symidx $iexit_thunk$cdecl$i8$v
; CHECK-NEXT:      .word   4
; CHECK-NEXT:      .symidx "#func$hybpatch_thunk"
; CHECK-NEXT:      .symidx func
; CHECK-NEXT:      .word   0
; CHECK-NEXT:      .symidx "#has_varargs$hybpatch_thunk"
; CHECK-NEXT:      .symidx has_varargs
; CHECK-NEXT:      .word   0
; CHECK-NEXT:      .symidx "#has_sret$hybpatch_thunk"
; CHECK-NEXT:      .symidx has_sret
; CHECK-NEXT:      .word   0
; CHECK-NEXT:      .symidx "#exp$hybpatch_thunk"
; CHECK-NEXT:      .symidx exp
; CHECK-NEXT:      .word   0
; CHECK-NEXT:      .section        .drectve,"yni"
; CHECK-NEXT:      .ascii  " /EXPORT:exp"

; CHECK-NEXT:      .def    "EXP+#func";
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:      .def    func;
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:      .weak  func
; CHECK-NEXT:  .set func, "EXP+#func"{{$}}
; CHECK-NEXT:      .weak  "#func"
; CHECK-NEXT:      .def    "#func";
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:  .set "#func", "#func$hybpatch_thunk"{{$}}
; CHECK-NEXT:      .def    "EXP+#has_varargs";
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:      .def    has_varargs;
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:      .weak   has_varargs
; CHECK-NEXT:  .set has_varargs, "EXP+#has_varargs"
; CHECK-NEXT:      .weak   "#has_varargs"
; CHECK-NEXT:      .def    "#has_varargs";
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:  .set "#has_varargs", "#has_varargs$hybpatch_thunk"
; CHECK-NEXT:      .def    "EXP+#has_sret";
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:      .def    has_sret;
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:      .weak   has_sret
; CHECK-NEXT:  .set has_sret, "EXP+#has_sret"
; CHECK-NEXT:      .weak   "#has_sret"
; CHECK-NEXT:      .def    "#has_sret";
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:  .set "#has_sret", "#has_sret$hybpatch_thunk"
; CHECK-NEXT:      .def    "EXP+#exp";
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:      .def    exp;
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:      .weak   exp
; CHECK-NEXT:  .set exp, "EXP+#exp"
; CHECK-NEXT:      .weak   "#exp"
; CHECK-NEXT:      .def    "#exp";
; CHECK-NEXT:      .scl    2;
; CHECK-NEXT:      .type   32;
; CHECK-NEXT:      .endef
; CHECK-NEXT:  .set "#exp", "#exp$hybpatch_thunk"

; SYM:      [53](sec 15)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 #func$hybpatch_thunk
; SYM:      [58](sec 16)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 #has_varargs$hybpatch_thunk
; SYM:      [68](sec 18)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 #has_sret$hybpatch_thunk
; SYM:      [78](sec 20)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 #exp$hybpatch_thunk
; SYM:      [110](sec  0)(fl 0x00)(ty   0)(scl  69) (nx 1) 0x00000000 func
; SYM-NEXT: AUX indx 112 srch 3
; SYM-NEXT: [112](sec  0)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 EXP+#func
; SYM:      [116](sec  0)(fl 0x00)(ty   0)(scl  69) (nx 1) 0x00000000 #func
; SYM-NEXT: AUX indx 53 srch 3
; SYM:      [122](sec  0)(fl 0x00)(ty   0)(scl  69) (nx 1) 0x00000000 has_varargs
; SYM-NEXT: AUX indx 124 srch 3
; SYM-NEXT: [124](sec  0)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 EXP+#has_varargs
; SYM-NEXT: [125](sec  0)(fl 0x00)(ty   0)(scl  69) (nx 1) 0x00000000 has_sret
; SYM-NEXT: AUX indx 127 srch 3
; SYM-NEXT: [127](sec  0)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 EXP+#has_sret
; SYM-NEXT: [128](sec  0)(fl 0x00)(ty   0)(scl  69) (nx 1) 0x00000000 exp
; SYM-NEXT: AUX indx 130 srch 3
; SYM-NEXT: [130](sec  0)(fl 0x00)(ty  20)(scl   2) (nx 0) 0x00000000 EXP+#exp
; SYM-NEXT: [131](sec  0)(fl 0x00)(ty   0)(scl  69) (nx 1) 0x00000000 #has_varargs
; SYM-NEXT: AUX indx 58 srch 3
; SYM-NEXT: [133](sec  0)(fl 0x00)(ty   0)(scl  69) (nx 1) 0x00000000 #has_sret
; SYM-NEXT: AUX indx 68 srch 3
; SYM-NEXT: [135](sec  0)(fl 0x00)(ty   0)(scl  69) (nx 1) 0x00000000 #exp
; SYM-NEXT: AUX indx 78 srch 3