; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 4
; RUN: opt < %s -msan-check-access-address=0 -S -passes=msan 2>&1 | FileCheck %s
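; This test exercises MemorySanitizer shadow propagation for variadic calls on
; x86_64. On the caller side the shadow of every argument is spilled to
; __msan_param_tls and __msan_va_arg_tls (register-save slots first, stack-passed
; arguments from offset 176), and the size of the overflow area is recorded in
; __msan_va_arg_overflow_size_tls. On the callee side, va_start is followed by a
; copy of that TLS snapshot over the shadow of the va_list's register-save and
; overflow areas.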
target datalayout = "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-i128:128-f80:128-n8:16:32:64-S128"
target triple = "x86_64-unknown-linux-gnu"

%struct.Double4 = type { [4 x double] }
%struct.LongDouble2 = type { [2 x x86_fp80] }
%struct.LongDouble4 = type { [4 x x86_fp80] }
%struct.IntInt = type { i32, i32 }
%struct.Int64Int64 = type { i64, i64 }
%struct.DoubleDouble = type { double, double }
%struct.DoubleFloat = type { double, float }
%struct.__va_list_tag = type { i32, i32, ptr, ptr }
define linkonce_odr dso_local void @_Z4testIcEvT_(i8 noundef signext %arg) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testIcEvT_(
; CHECK-SAME: i8 noundef signext [[ARG:%.*]]) #[[ATTR0:[0-9]+]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i8, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARG_ADDR:%.*]] = alloca i8, align 1
; CHECK-NEXT: [[TMP1:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP2:%.*]] = xor i64 [[TMP1]], 87960930222080
; CHECK-NEXT: [[TMP3:%.*]] = inttoptr i64 [[TMP2]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 1 [[TMP3]], i8 -1, i64 1, i1 false)
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: store i8 [[TMP0]], ptr [[TMP6]], align 1
; CHECK-NEXT: store i8 [[ARG]], ptr [[ARG_ADDR]], align 1
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG_ADDR]])
; CHECK-NEXT: [[TMP7:%.*]] = load i8, ptr [[ARG_ADDR]], align 1
; CHECK-NEXT: [[TMP8:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP9:%.*]] = xor i64 [[TMP8]], 87960930222080
; CHECK-NEXT: [[TMP10:%.*]] = inttoptr i64 [[TMP9]] to ptr
; CHECK-NEXT: [[_MSLD:%.*]] = load i8, ptr [[TMP10]], align 1
; CHECK-NEXT: [[_MSPROP:%.*]] = sext i8 [[_MSLD]] to i32
; CHECK-NEXT: [[CONV:%.*]] = sext i8 [[TMP7]] to i32
; CHECK-NEXT: store i8 [[_MSLD]], ptr @__msan_param_tls, align 8
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: store i32 [[_MSPROP]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i32 [[_MSPROP]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i64 0, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (i8, i32, ...) @_Z5test2IcEvT_iz(i8 noundef signext [[TMP7]], i32 noundef 1, i32 noundef [[CONV]])
; CHECK-NEXT: ret void
%arg.addr = alloca i8, align 1
store i8 %arg, ptr %arg.addr, align 1
call void @_Z3usePv(ptr noundef nonnull %arg.addr)
%0 = load i8, ptr %arg.addr, align 1
%conv = sext i8 %0 to i32
call void (i8, i32, ...) @_Z5test2IcEvT_iz(i8 noundef signext %0, i32 noundef 1, i32 noundef %conv)
ret void
}
define linkonce_odr dso_local void @_Z4testIiEvT_(i32 noundef %arg) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testIiEvT_(
; CHECK-SAME: i32 noundef [[ARG:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i32, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARG_ADDR:%.*]] = alloca i32, align 4
; CHECK-NEXT: [[TMP1:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP2:%.*]] = xor i64 [[TMP1]], 87960930222080
; CHECK-NEXT: [[TMP3:%.*]] = inttoptr i64 [[TMP2]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 4 [[TMP3]], i8 -1, i64 4, i1 false)
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: store i32 [[TMP0]], ptr [[TMP6]], align 4
; CHECK-NEXT: store i32 [[ARG]], ptr [[ARG_ADDR]], align 4
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG_ADDR]])
; CHECK-NEXT: [[TMP7:%.*]] = load i32, ptr [[ARG_ADDR]], align 4
; CHECK-NEXT: [[TMP8:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP9:%.*]] = xor i64 [[TMP8]], 87960930222080
; CHECK-NEXT: [[TMP10:%.*]] = inttoptr i64 [[TMP9]] to ptr
; CHECK-NEXT: [[_MSLD:%.*]] = load i32, ptr [[TMP10]], align 4
; CHECK-NEXT: store i32 [[_MSLD]], ptr @__msan_param_tls, align 8
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: store i32 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i32 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i64 0, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (i32, i32, ...) @_Z5test2IiEvT_iz(i32 noundef [[TMP7]], i32 noundef 1, i32 noundef [[TMP7]])
; CHECK-NEXT: ret void
%arg.addr = alloca i32, align 4
store i32 %arg, ptr %arg.addr, align 4
call void @_Z3usePv(ptr noundef nonnull %arg.addr)
%0 = load i32, ptr %arg.addr, align 4
call void (i32, i32, ...) @_Z5test2IiEvT_iz(i32 noundef %0, i32 noundef 1, i32 noundef %0)
ret void
}
define linkonce_odr dso_local void @_Z4testIfEvT_(float noundef %arg) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testIfEvT_(
; CHECK-SAME: float noundef [[ARG:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i32, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARG_ADDR:%.*]] = alloca float, align 4
; CHECK-NEXT: [[TMP1:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP2:%.*]] = xor i64 [[TMP1]], 87960930222080
; CHECK-NEXT: [[TMP3:%.*]] = inttoptr i64 [[TMP2]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 4 [[TMP3]], i8 -1, i64 4, i1 false)
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: store i32 [[TMP0]], ptr [[TMP6]], align 4
; CHECK-NEXT: store float [[ARG]], ptr [[ARG_ADDR]], align 4
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG_ADDR]])
; CHECK-NEXT: [[TMP7:%.*]] = load float, ptr [[ARG_ADDR]], align 4
; CHECK-NEXT: [[TMP8:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP9:%.*]] = xor i64 [[TMP8]], 87960930222080
; CHECK-NEXT: [[TMP10:%.*]] = inttoptr i64 [[TMP9]] to ptr
; CHECK-NEXT: [[_MSLD:%.*]] = load i32, ptr [[TMP10]], align 4
; CHECK-NEXT: [[TMP11:%.*]] = zext i32 [[_MSLD]] to i64
; CHECK-NEXT: [[CONV:%.*]] = fpext float [[TMP7]] to double
; CHECK-NEXT: store i32 [[_MSLD]], ptr @__msan_param_tls, align 8
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: store i64 [[TMP11]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i64 [[TMP11]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 64) to ptr), align 8
; CHECK-NEXT: store i64 0, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (float, i32, ...) @_Z5test2IfEvT_iz(float noundef [[TMP7]], i32 noundef 1, double noundef [[CONV]])
; CHECK-NEXT: ret void
%arg.addr = alloca float, align 4
store float %arg, ptr %arg.addr, align 4
call void @_Z3usePv(ptr noundef nonnull %arg.addr)
%0 = load float, ptr %arg.addr, align 4
%conv = fpext float %0 to double
call void (float, i32, ...) @_Z5test2IfEvT_iz(float noundef %0, i32 noundef 1, double noundef %conv)
ret void
}
define linkonce_odr dso_local void @_Z4testIdEvT_(double noundef %arg) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testIdEvT_(
; CHECK-SAME: double noundef [[ARG:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARG_ADDR:%.*]] = alloca double, align 8
; CHECK-NEXT: [[TMP1:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP2:%.*]] = xor i64 [[TMP1]], 87960930222080
; CHECK-NEXT: [[TMP3:%.*]] = inttoptr i64 [[TMP2]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP3]], i8 -1, i64 8, i1 false)
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: store i64 [[TMP0]], ptr [[TMP6]], align 8
; CHECK-NEXT: store double [[ARG]], ptr [[ARG_ADDR]], align 8
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG_ADDR]])
; CHECK-NEXT: [[TMP7:%.*]] = load double, ptr [[ARG_ADDR]], align 8
; CHECK-NEXT: [[TMP8:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP9:%.*]] = xor i64 [[TMP8]], 87960930222080
; CHECK-NEXT: [[TMP10:%.*]] = inttoptr i64 [[TMP9]] to ptr
; CHECK-NEXT: [[_MSLD:%.*]] = load i64, ptr [[TMP10]], align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr @__msan_param_tls, align 8
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 64) to ptr), align 8
; CHECK-NEXT: store i64 0, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (double, i32, ...) @_Z5test2IdEvT_iz(double noundef [[TMP7]], i32 noundef 1, double noundef [[TMP7]])
; CHECK-NEXT: ret void
%arg.addr = alloca double, align 8
store double %arg, ptr %arg.addr, align 8
call void @_Z3usePv(ptr noundef nonnull %arg.addr)
%0 = load double, ptr %arg.addr, align 8
call void (double, i32, ...) @_Z5test2IdEvT_iz(double noundef %0, i32 noundef 1, double noundef %0)
ret void
}
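; x86_fp80 is passed on the stack, so the shadow of the variadic long double goes
; into the overflow area of __msan_va_arg_tls (offset 176) and the reported
; overflow size is 16.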
define linkonce_odr dso_local void @_Z4testIeEvT_(x86_fp80 noundef %arg) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testIeEvT_(
; CHECK-SAME: x86_fp80 noundef [[ARG:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i80, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARG_ADDR:%.*]] = alloca x86_fp80, align 16
; CHECK-NEXT: [[TMP1:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP2:%.*]] = xor i64 [[TMP1]], 87960930222080
; CHECK-NEXT: [[TMP3:%.*]] = inttoptr i64 [[TMP2]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP3]], i8 -1, i64 16, i1 false)
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: store i80 [[TMP0]], ptr [[TMP6]], align 16
; CHECK-NEXT: store x86_fp80 [[ARG]], ptr [[ARG_ADDR]], align 16
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG_ADDR]])
; CHECK-NEXT: [[TMP7:%.*]] = load x86_fp80, ptr [[ARG_ADDR]], align 16
; CHECK-NEXT: [[TMP8:%.*]] = ptrtoint ptr [[ARG_ADDR]] to i64
; CHECK-NEXT: [[TMP9:%.*]] = xor i64 [[TMP8]], 87960930222080
; CHECK-NEXT: [[TMP10:%.*]] = inttoptr i64 [[TMP9]] to ptr
; CHECK-NEXT: [[_MSLD:%.*]] = load i80, ptr [[TMP10]], align 16
; CHECK-NEXT: store i80 [[_MSLD]], ptr @__msan_param_tls, align 8
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i80 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 24) to ptr), align 8
; CHECK-NEXT: store i80 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 176) to ptr), align 8
; CHECK-NEXT: store i64 16, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (x86_fp80, i32, ...) @_Z5test2IeEvT_iz(x86_fp80 noundef [[TMP7]], i32 noundef 1, x86_fp80 noundef [[TMP7]])
; CHECK-NEXT: ret void
%arg.addr = alloca x86_fp80, align 16
store x86_fp80 %arg, ptr %arg.addr, align 16
call void @_Z3usePv(ptr noundef nonnull %arg.addr)
%0 = load x86_fp80, ptr %arg.addr, align 16
call void (x86_fp80, i32, ...) @_Z5test2IeEvT_iz(x86_fp80 noundef %0, i32 noundef 1, x86_fp80 noundef %0)
ret void
}
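; IntInt is coerced to a single i64 and passed in a general-purpose register; the
; shadow of the variadic copy lands in the third GP slot of __msan_va_arg_tls
; (offset 16).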
define linkonce_odr dso_local void @_Z4testI6IntIntEvT_(i64 %arg.coerce) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testI6IntIntEvT_(
; CHECK-SAME: i64 [[ARG_COERCE:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARG:%.*]] = alloca [[STRUCT_INTINT:%.*]], align 8
; CHECK-NEXT: [[TMP1:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP2:%.*]] = xor i64 [[TMP1]], 87960930222080
; CHECK-NEXT: [[TMP3:%.*]] = inttoptr i64 [[TMP2]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP3]], i8 -1, i64 8, i1 false)
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: store i64 [[TMP0]], ptr [[TMP6]], align 8
; CHECK-NEXT: store i64 [[ARG_COERCE]], ptr [[ARG]], align 8
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG]])
; CHECK-NEXT: [[AGG_TMP_SROA_0_0_COPYLOAD:%.*]] = load i64, ptr [[ARG]], align 8
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: [[_MSLD:%.*]] = load i64, ptr [[TMP9]], align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr @__msan_param_tls, align 8
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i64 0, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (i64, i32, ...) @_Z5test2I6IntIntEvT_iz(i64 [[AGG_TMP_SROA_0_0_COPYLOAD]], i32 noundef 1, i64 [[AGG_TMP_SROA_0_0_COPYLOAD]])
; CHECK-NEXT: ret void
%arg = alloca %struct.IntInt, align 8
store i64 %arg.coerce, ptr %arg, align 8
call void @_Z3usePv(ptr noundef nonnull %arg)
%agg.tmp.sroa.0.0.copyload = load i64, ptr %arg, align 8
call void (i64, i32, ...) @_Z5test2I6IntIntEvT_iz(i64 %agg.tmp.sroa.0.0.copyload, i32 noundef 1, i64 %agg.tmp.sroa.0.0.copyload)
ret void
}
define linkonce_odr dso_local void @_Z4testI10Int64Int64EvT_(i64 %arg.coerce0, i64 %arg.coerce1) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testI10Int64Int64EvT_(
; CHECK-SAME: i64 [[ARG_COERCE0:%.*]], i64 [[ARG_COERCE1:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_param_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = load i64, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARG:%.*]] = alloca [[STRUCT_INT64INT64:%.*]], align 8
; CHECK-NEXT: [[TMP2:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP3:%.*]] = xor i64 [[TMP2]], 87960930222080
; CHECK-NEXT: [[TMP4:%.*]] = inttoptr i64 [[TMP3]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP4]], i8 -1, i64 16, i1 false)
; CHECK-NEXT: [[TMP5:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP6:%.*]] = xor i64 [[TMP5]], 87960930222080
; CHECK-NEXT: [[TMP7:%.*]] = inttoptr i64 [[TMP6]] to ptr
; CHECK-NEXT: store i64 [[TMP0]], ptr [[TMP7]], align 8
; CHECK-NEXT: store i64 [[ARG_COERCE0]], ptr [[ARG]], align 8
; CHECK-NEXT: [[TMP8:%.*]] = getelementptr inbounds { i64, i64 }, ptr [[ARG]], i64 0, i32 1
; CHECK-NEXT: [[TMP9:%.*]] = ptrtoint ptr [[TMP8]] to i64
; CHECK-NEXT: [[TMP10:%.*]] = xor i64 [[TMP9]], 87960930222080
; CHECK-NEXT: [[TMP11:%.*]] = inttoptr i64 [[TMP10]] to ptr
; CHECK-NEXT: store i64 [[TMP1]], ptr [[TMP11]], align 8
; CHECK-NEXT: store i64 [[ARG_COERCE1]], ptr [[TMP8]], align 8
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG]])
; CHECK-NEXT: [[AGG_TMP_SROA_0_0_COPYLOAD:%.*]] = load i64, ptr [[ARG]], align 8
; CHECK-NEXT: [[TMP12:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP13:%.*]] = xor i64 [[TMP12]], 87960930222080
; CHECK-NEXT: [[TMP14:%.*]] = inttoptr i64 [[TMP13]] to ptr
; CHECK-NEXT: [[_MSLD:%.*]] = load i64, ptr [[TMP14]], align 8
; CHECK-NEXT: [[AGG_TMP_SROA_2_0_COPYLOAD:%.*]] = load i64, ptr [[TMP8]], align 8
; CHECK-NEXT: [[TMP15:%.*]] = ptrtoint ptr [[TMP8]] to i64
; CHECK-NEXT: [[TMP16:%.*]] = xor i64 [[TMP15]], 87960930222080
; CHECK-NEXT: [[TMP17:%.*]] = inttoptr i64 [[TMP16]] to ptr
; CHECK-NEXT: [[_MSLD1:%.*]] = load i64, ptr [[TMP17]], align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr @__msan_param_tls, align 8
; CHECK-NEXT: store i64 [[_MSLD1]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 24) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD1]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 32) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 24) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD1]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 32) to ptr), align 8
; CHECK-NEXT: store i64 0, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (i64, i64, i32, ...) @_Z5test2I10Int64Int64EvT_iz(i64 [[AGG_TMP_SROA_0_0_COPYLOAD]], i64 [[AGG_TMP_SROA_2_0_COPYLOAD]], i32 noundef 1, i64 [[AGG_TMP_SROA_0_0_COPYLOAD]], i64 [[AGG_TMP_SROA_2_0_COPYLOAD]])
; CHECK-NEXT: ret void
%arg = alloca %struct.Int64Int64, align 8
store i64 %arg.coerce0, ptr %arg, align 8
%0 = getelementptr inbounds { i64, i64 }, ptr %arg, i64 0, i32 1
store i64 %arg.coerce1, ptr %0, align 8
call void @_Z3usePv(ptr noundef nonnull %arg)
%agg.tmp.sroa.0.0.copyload = load i64, ptr %arg, align 8
%agg.tmp.sroa.2.0.copyload = load i64, ptr %0, align 8
call void (i64, i64, i32, ...) @_Z5test2I10Int64Int64EvT_iz(i64 %agg.tmp.sroa.0.0.copyload, i64 %agg.tmp.sroa.2.0.copyload, i32 noundef 1, i64 %agg.tmp.sroa.0.0.copyload, i64 %agg.tmp.sroa.2.0.copyload)
ret void
}
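; DoubleDouble is split into two doubles passed in XMM registers; their shadows go
; to the FP part of the register-save area in __msan_va_arg_tls (offsets 80 and 96).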
define linkonce_odr dso_local void @_Z4testI12DoubleDoubleEvT_(double %arg.coerce0, double %arg.coerce1) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testI12DoubleDoubleEvT_(
; CHECK-SAME: double [[ARG_COERCE0:%.*]], double [[ARG_COERCE1:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_param_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = load i64, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARG:%.*]] = alloca [[STRUCT_DOUBLEDOUBLE:%.*]], align 8
; CHECK-NEXT: [[TMP2:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP3:%.*]] = xor i64 [[TMP2]], 87960930222080
; CHECK-NEXT: [[TMP4:%.*]] = inttoptr i64 [[TMP3]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP4]], i8 -1, i64 16, i1 false)
; CHECK-NEXT: [[TMP5:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP6:%.*]] = xor i64 [[TMP5]], 87960930222080
; CHECK-NEXT: [[TMP7:%.*]] = inttoptr i64 [[TMP6]] to ptr
; CHECK-NEXT: store i64 [[TMP0]], ptr [[TMP7]], align 8
; CHECK-NEXT: store double [[ARG_COERCE0]], ptr [[ARG]], align 8
; CHECK-NEXT: [[TMP8:%.*]] = getelementptr inbounds { double, double }, ptr [[ARG]], i64 0, i32 1
; CHECK-NEXT: [[TMP9:%.*]] = ptrtoint ptr [[TMP8]] to i64
; CHECK-NEXT: [[TMP10:%.*]] = xor i64 [[TMP9]], 87960930222080
; CHECK-NEXT: [[TMP11:%.*]] = inttoptr i64 [[TMP10]] to ptr
; CHECK-NEXT: store i64 [[TMP1]], ptr [[TMP11]], align 8
; CHECK-NEXT: store double [[ARG_COERCE1]], ptr [[TMP8]], align 8
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG]])
; CHECK-NEXT: [[AGG_TMP_SROA_0_0_COPYLOAD:%.*]] = load double, ptr [[ARG]], align 8
; CHECK-NEXT: [[TMP12:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP13:%.*]] = xor i64 [[TMP12]], 87960930222080
; CHECK-NEXT: [[TMP14:%.*]] = inttoptr i64 [[TMP13]] to ptr
; CHECK-NEXT: [[_MSLD:%.*]] = load i64, ptr [[TMP14]], align 8
; CHECK-NEXT: [[AGG_TMP_SROA_2_0_COPYLOAD:%.*]] = load double, ptr [[TMP8]], align 8
; CHECK-NEXT: [[TMP15:%.*]] = ptrtoint ptr [[TMP8]] to i64
; CHECK-NEXT: [[TMP16:%.*]] = xor i64 [[TMP15]], 87960930222080
; CHECK-NEXT: [[TMP17:%.*]] = inttoptr i64 [[TMP16]] to ptr
; CHECK-NEXT: [[_MSLD1:%.*]] = load i64, ptr [[TMP17]], align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr @__msan_param_tls, align 8
; CHECK-NEXT: store i64 [[_MSLD1]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 24) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD1]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 32) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 80) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD1]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 96) to ptr), align 8
; CHECK-NEXT: store i64 0, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (double, double, i32, ...) @_Z5test2I12DoubleDoubleEvT_iz(double [[AGG_TMP_SROA_0_0_COPYLOAD]], double [[AGG_TMP_SROA_2_0_COPYLOAD]], i32 noundef 1, double [[AGG_TMP_SROA_0_0_COPYLOAD]], double [[AGG_TMP_SROA_2_0_COPYLOAD]])
; CHECK-NEXT: ret void
%arg = alloca %struct.DoubleDouble, align 8
store double %arg.coerce0, ptr %arg, align 8
%0 = getelementptr inbounds { double, double }, ptr %arg, i64 0, i32 1
store double %arg.coerce1, ptr %0, align 8
call void @_Z3usePv(ptr noundef nonnull %arg)
%agg.tmp.sroa.0.0.copyload = load double, ptr %arg, align 8
%agg.tmp.sroa.2.0.copyload = load double, ptr %0, align 8
call void (double, double, i32, ...) @_Z5test2I12DoubleDoubleEvT_iz(double %agg.tmp.sroa.0.0.copyload, double %agg.tmp.sroa.2.0.copyload, i32 noundef 1, double %agg.tmp.sroa.0.0.copyload, double %agg.tmp.sroa.2.0.copyload)
ret void
}
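; Double4 is passed byval, so the whole 32-byte shadow of the argument is copied
; with memcpy into both the parameter TLS and the overflow area of
; __msan_va_arg_tls (offset 176), and __msan_va_arg_overflow_size_tls is set to 32.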
define linkonce_odr dso_local void @_Z4testI7Double4EvT_(ptr noundef byval(%struct.Double4) align 8 %arg) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testI7Double4EvT_(
; CHECK-SAME: ptr noundef byval([[STRUCT_DOUBLE4:%.*]]) align 8 [[ARG:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP1:%.*]] = xor i64 [[TMP0]], 87960930222080
; CHECK-NEXT: [[TMP2:%.*]] = inttoptr i64 [[TMP1]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_param_tls, i64 32, i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG]])
; CHECK-NEXT: [[TMP3:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP4:%.*]] = xor i64 [[TMP3]], 87960930222080
; CHECK-NEXT: [[TMP5:%.*]] = inttoptr i64 [[TMP4]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 @__msan_param_tls, ptr align 8 [[TMP5]], i64 32, i1 false)
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 32) to ptr), align 8
; CHECK-NEXT: [[TMP6:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP7:%.*]] = xor i64 [[TMP6]], 87960930222080
; CHECK-NEXT: [[TMP8:%.*]] = inttoptr i64 [[TMP7]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 40) to ptr), ptr align 8 [[TMP8]], i64 32, i1 false)
; CHECK-NEXT: [[TMP9:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP10:%.*]] = xor i64 [[TMP9]], 87960930222080
; CHECK-NEXT: [[TMP11:%.*]] = inttoptr i64 [[TMP10]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 176) to ptr), ptr align 8 [[TMP11]], i64 32, i1 false)
; CHECK-NEXT: store i64 32, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (ptr, i32, ...) @_Z5test2I7Double4EvT_iz(ptr noundef nonnull byval([[STRUCT_DOUBLE4]]) align 8 [[ARG]], i32 noundef 1, ptr noundef nonnull byval([[STRUCT_DOUBLE4]]) align 8 [[ARG]])
; CHECK-NEXT: ret void
call void @_Z3usePv(ptr noundef nonnull %arg)
call void (ptr, i32, ...) @_Z5test2I7Double4EvT_iz(ptr noundef nonnull byval(%struct.Double4) align 8 %arg, i32 noundef 1, ptr noundef nonnull byval(%struct.Double4) align 8 %arg)
ret void
}
declare void @llvm.memset.p0.i64(ptr nocapture writeonly, i8, i64, i1 immarg) #2
define linkonce_odr dso_local void @_Z4testI11DoubleFloatEvT_(double %arg.coerce0, float %arg.coerce1) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testI11DoubleFloatEvT_(
; CHECK-SAME: double [[ARG_COERCE0:%.*]], float [[ARG_COERCE1:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_param_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = load i32, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARG:%.*]] = alloca [[STRUCT_DOUBLEFLOAT:%.*]], align 8
; CHECK-NEXT: [[TMP2:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP3:%.*]] = xor i64 [[TMP2]], 87960930222080
; CHECK-NEXT: [[TMP4:%.*]] = inttoptr i64 [[TMP3]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP4]], i8 -1, i64 16, i1 false)
; CHECK-NEXT: [[TMP5:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP6:%.*]] = xor i64 [[TMP5]], 87960930222080
; CHECK-NEXT: [[TMP7:%.*]] = inttoptr i64 [[TMP6]] to ptr
; CHECK-NEXT: store i64 [[TMP0]], ptr [[TMP7]], align 8
; CHECK-NEXT: store double [[ARG_COERCE0]], ptr [[ARG]], align 8
; CHECK-NEXT: [[TMP8:%.*]] = getelementptr inbounds { double, float }, ptr [[ARG]], i64 0, i32 1
; CHECK-NEXT: [[TMP9:%.*]] = ptrtoint ptr [[TMP8]] to i64
; CHECK-NEXT: [[TMP10:%.*]] = xor i64 [[TMP9]], 87960930222080
; CHECK-NEXT: [[TMP11:%.*]] = inttoptr i64 [[TMP10]] to ptr
; CHECK-NEXT: store i32 [[TMP1]], ptr [[TMP11]], align 8
; CHECK-NEXT: store float [[ARG_COERCE1]], ptr [[TMP8]], align 8
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG]])
; CHECK-NEXT: [[AGG_TMP_SROA_0_0_COPYLOAD:%.*]] = load double, ptr [[ARG]], align 8
; CHECK-NEXT: [[TMP12:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP13:%.*]] = xor i64 [[TMP12]], 87960930222080
; CHECK-NEXT: [[TMP14:%.*]] = inttoptr i64 [[TMP13]] to ptr
; CHECK-NEXT: [[_MSLD:%.*]] = load i64, ptr [[TMP14]], align 8
; CHECK-NEXT: [[AGG_TMP_SROA_2_0_COPYLOAD:%.*]] = load float, ptr [[TMP8]], align 8
; CHECK-NEXT: [[TMP15:%.*]] = ptrtoint ptr [[TMP8]] to i64
; CHECK-NEXT: [[TMP16:%.*]] = xor i64 [[TMP15]], 87960930222080
; CHECK-NEXT: [[TMP17:%.*]] = inttoptr i64 [[TMP16]] to ptr
; CHECK-NEXT: [[_MSLD1:%.*]] = load i32, ptr [[TMP17]], align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr @__msan_param_tls, align 8
; CHECK-NEXT: store i32 [[_MSLD1]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 8) to ptr), align 8
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 16) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 24) to ptr), align 8
; CHECK-NEXT: store i32 [[_MSLD1]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 32) to ptr), align 8
; CHECK-NEXT: store i64 [[_MSLD]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 80) to ptr), align 8
; CHECK-NEXT: store i32 [[_MSLD1]], ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 96) to ptr), align 8
; CHECK-NEXT: store i64 0, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (double, float, i32, ...) @_Z5test2I11DoubleFloatEvT_iz(double [[AGG_TMP_SROA_0_0_COPYLOAD]], float [[AGG_TMP_SROA_2_0_COPYLOAD]], i32 noundef 1, double [[AGG_TMP_SROA_0_0_COPYLOAD]], float [[AGG_TMP_SROA_2_0_COPYLOAD]])
; CHECK-NEXT: ret void
%arg = alloca %struct.DoubleFloat, align 8
store double %arg.coerce0, ptr %arg, align 8
%0 = getelementptr inbounds { double, float }, ptr %arg, i64 0, i32 1
store float %arg.coerce1, ptr %0, align 8
call void @_Z3usePv(ptr noundef nonnull %arg)
%agg.tmp.sroa.0.0.copyload = load double, ptr %arg, align 8
%agg.tmp.sroa.2.0.copyload = load float, ptr %0, align 8
call void (double, float, i32, ...) @_Z5test2I11DoubleFloatEvT_iz(double %agg.tmp.sroa.0.0.copyload, float %agg.tmp.sroa.2.0.copyload, i32 noundef 1, double %agg.tmp.sroa.0.0.copyload, float %agg.tmp.sroa.2.0.copyload)
ret void
}
define linkonce_odr dso_local void @_Z4testI11LongDouble2EvT_(ptr noundef byval(%struct.LongDouble2) align 16 %arg) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testI11LongDouble2EvT_(
; CHECK-SAME: ptr noundef byval([[STRUCT_LONGDOUBLE2:%.*]]) align 16 [[ARG:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP1:%.*]] = xor i64 [[TMP0]], 87960930222080
; CHECK-NEXT: [[TMP2:%.*]] = inttoptr i64 [[TMP1]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_param_tls, i64 32, i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG]])
; CHECK-NEXT: [[TMP3:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP4:%.*]] = xor i64 [[TMP3]], 87960930222080
; CHECK-NEXT: [[TMP5:%.*]] = inttoptr i64 [[TMP4]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 @__msan_param_tls, ptr align 8 [[TMP5]], i64 32, i1 false)
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 32) to ptr), align 8
; CHECK-NEXT: [[TMP6:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP7:%.*]] = xor i64 [[TMP6]], 87960930222080
; CHECK-NEXT: [[TMP8:%.*]] = inttoptr i64 [[TMP7]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 40) to ptr), ptr align 8 [[TMP8]], i64 32, i1 false)
; CHECK-NEXT: [[TMP9:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP10:%.*]] = xor i64 [[TMP9]], 87960930222080
; CHECK-NEXT: [[TMP11:%.*]] = inttoptr i64 [[TMP10]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 176) to ptr), ptr align 8 [[TMP11]], i64 32, i1 false)
; CHECK-NEXT: store i64 32, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (ptr, i32, ...) @_Z5test2I11LongDouble2EvT_iz(ptr noundef nonnull byval([[STRUCT_LONGDOUBLE2]]) align 16 [[ARG]], i32 noundef 1, ptr noundef nonnull byval([[STRUCT_LONGDOUBLE2]]) align 16 [[ARG]])
; CHECK-NEXT: ret void
call void @_Z3usePv(ptr noundef nonnull %arg)
call void (ptr, i32, ...) @_Z5test2I11LongDouble2EvT_iz(ptr noundef nonnull byval(%struct.LongDouble2) align 16 %arg, i32 noundef 1, ptr noundef nonnull byval(%struct.LongDouble2) align 16 %arg)
ret void
}
define linkonce_odr dso_local void @_Z4testI11LongDouble4EvT_(ptr noundef byval(%struct.LongDouble4) align 16 %arg) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4testI11LongDouble4EvT_(
; CHECK-SAME: ptr noundef byval([[STRUCT_LONGDOUBLE4:%.*]]) align 16 [[ARG:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP1:%.*]] = xor i64 [[TMP0]], 87960930222080
; CHECK-NEXT: [[TMP2:%.*]] = inttoptr i64 [[TMP1]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_param_tls, i64 64, i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG]])
; CHECK-NEXT: [[TMP3:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP4:%.*]] = xor i64 [[TMP3]], 87960930222080
; CHECK-NEXT: [[TMP5:%.*]] = inttoptr i64 [[TMP4]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 @__msan_param_tls, ptr align 8 [[TMP5]], i64 64, i1 false)
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 64) to ptr), align 8
; CHECK-NEXT: [[TMP6:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP7:%.*]] = xor i64 [[TMP6]], 87960930222080
; CHECK-NEXT: [[TMP8:%.*]] = inttoptr i64 [[TMP7]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 72) to ptr), ptr align 8 [[TMP8]], i64 64, i1 false)
; CHECK-NEXT: [[TMP9:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP10:%.*]] = xor i64 [[TMP9]], 87960930222080
; CHECK-NEXT: [[TMP11:%.*]] = inttoptr i64 [[TMP10]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 176) to ptr), ptr align 8 [[TMP11]], i64 64, i1 false)
; CHECK-NEXT: store i64 64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (ptr, i32, ...) @_Z5test2I11LongDouble4EvT_iz(ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], i32 noundef 1, ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]])
; CHECK-NEXT: ret void
call void @_Z3usePv(ptr noundef nonnull %arg)
call void (ptr, i32, ...) @_Z5test2I11LongDouble4EvT_iz(ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, i32 noundef 1, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg)
ret void
}
declare void @_Z3usePv(ptr noundef) local_unnamed_addr #3
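; The variadic callees below allocate a private copy of the va_arg TLS shadow
; (176 bytes of register-save area plus the reported overflow size, with the copy
; capped at 800 bytes), and after va_start they overwrite the shadow of the
; va_list's reg_save_area and overflow_arg_area with it.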
define linkonce_odr dso_local void @_Z5test2IcEvT_iz(i8 noundef signext %t, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2IcEvT_iz(
; CHECK-SAME: i8 noundef signext [[T:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
%args = alloca [1 x %struct.__va_list_tag], align 16
call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
call void @llvm.va_start(ptr nonnull %args)
call void @_Z3usePv(ptr noundef nonnull %args)
call void @llvm.va_end(ptr nonnull %args)
call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
ret void
}
declare void @llvm.lifetime.start.p0(i64 immarg, ptr nocapture) #4

declare void @llvm.va_start(ptr) #5

declare void @llvm.va_end(ptr) #5

declare void @llvm.lifetime.end.p0(i64 immarg, ptr nocapture) #4
define linkonce_odr dso_local void @_Z5test2IiEvT_iz(i32 noundef %t, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2IiEvT_iz(
; CHECK-SAME: i32 noundef [[T:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
%args = alloca [1 x %struct.__va_list_tag], align 16
call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
call void @llvm.va_start(ptr nonnull %args)
call void @_Z3usePv(ptr noundef nonnull %args)
call void @llvm.va_end(ptr nonnull %args)
call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
ret void
}
define linkonce_odr dso_local void @_Z5test2IfEvT_iz(float noundef %t, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2IfEvT_iz(
; CHECK-SAME: float noundef [[T:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
%args = alloca [1 x %struct.__va_list_tag], align 16
call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
call void @llvm.va_start(ptr nonnull %args)
call void @_Z3usePv(ptr noundef nonnull %args)
call void @llvm.va_end(ptr nonnull %args)
call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
ret void
}
define linkonce_odr dso_local void @_Z5test2IdEvT_iz(double noundef %t, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2IdEvT_iz(
; CHECK-SAME: double noundef [[T:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
%args = alloca [1 x %struct.__va_list_tag], align 16
call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
call void @llvm.va_start(ptr nonnull %args)
call void @_Z3usePv(ptr noundef nonnull %args)
call void @llvm.va_end(ptr nonnull %args)
call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
ret void
}
define linkonce_odr dso_local void @_Z5test2IeEvT_iz(x86_fp80 noundef %t, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2IeEvT_iz(
; CHECK-SAME: x86_fp80 noundef [[T:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
%args = alloca [1 x %struct.__va_list_tag], align 16
call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
call void @llvm.va_start(ptr nonnull %args)
call void @_Z3usePv(ptr noundef nonnull %args)
call void @llvm.va_end(ptr nonnull %args)
call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
ret void
}
define linkonce_odr dso_local void @_Z5test2I6IntIntEvT_iz(i64 %t.coerce, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2I6IntIntEvT_iz(
; CHECK-SAME: i64 [[T_COERCE:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
;
entry:
  %args = alloca [1 x %struct.__va_list_tag], align 16
  call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
  call void @llvm.va_start(ptr nonnull %args)
  call void @_Z3usePv(ptr noundef nonnull %args)
  call void @llvm.va_end(ptr nonnull %args)
  call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
  ret void
}
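
; Int64Int64 is split into two i64 register arguments (%t.coerce0, %t.coerce1);
; the body checks are identical to the IntInt case.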
define linkonce_odr dso_local void @_Z5test2I10Int64Int64EvT_iz(i64 %t.coerce0, i64 %t.coerce1, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2I10Int64Int64EvT_iz(
; CHECK-SAME: i64 [[T_COERCE0:%.*]], i64 [[T_COERCE1:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
;
entry:
  %args = alloca [1 x %struct.__va_list_tag], align 16
  call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
  call void @llvm.va_start(ptr nonnull %args)
  call void @_Z3usePv(ptr noundef nonnull %args)
  call void @llvm.va_end(ptr nonnull %args)
  call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
  ret void
}
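
; DoubleDouble is passed as two double arguments (SSE registers under the
; x86-64 SysV ABI); the callee-side checks are unchanged.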
define linkonce_odr dso_local void @_Z5test2I12DoubleDoubleEvT_iz(double %t.coerce0, double %t.coerce1, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2I12DoubleDoubleEvT_iz(
; CHECK-SAME: double [[T_COERCE0:%.*]], double [[T_COERCE1:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
;
entry:
  %args = alloca [1 x %struct.__va_list_tag], align 16
  call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
  call void @llvm.va_start(ptr nonnull %args)
  call void @_Z3usePv(ptr noundef nonnull %args)
  call void @llvm.va_end(ptr nonnull %args)
  call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
  ret void
}
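
; Double4 is too large to be passed in registers and arrives byval with 8-byte
; alignment; only the function signature differs from the previous cases.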
define linkonce_odr dso_local void @_Z5test2I7Double4EvT_iz(ptr noundef byval(%struct.Double4) align 8 %t, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2I7Double4EvT_iz(
; CHECK-SAME: ptr noundef byval([[STRUCT_DOUBLE4:%.*]]) align 8 [[T:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
;
entry:
  %args = alloca [1 x %struct.__va_list_tag], align 16
  call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
  call void @llvm.va_start(ptr nonnull %args)
  call void @_Z3usePv(ptr noundef nonnull %args)
  call void @llvm.va_end(ptr nonnull %args)
  call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
  ret void
}
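
; DoubleFloat is passed as a double plus a float argument.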
define linkonce_odr dso_local void @_Z5test2I11DoubleFloatEvT_iz(double %t.coerce0, float %t.coerce1, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2I11DoubleFloatEvT_iz(
; CHECK-SAME: double [[T_COERCE0:%.*]], float [[T_COERCE1:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: entry:
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
;
entry:
  %args = alloca [1 x %struct.__va_list_tag], align 16
  call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
  call void @llvm.va_start(ptr nonnull %args)
  call void @_Z3usePv(ptr noundef nonnull %args)
  call void @llvm.va_end(ptr nonnull %args)
  call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
  ret void
}
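
; LongDouble2 contains x86_fp80 members, so it is passed in memory byval with
; 16-byte alignment.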
define linkonce_odr dso_local void @_Z5test2I11LongDouble2EvT_iz(ptr noundef byval(%struct.LongDouble2) align 16 %t, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2I11LongDouble2EvT_iz(
; CHECK-SAME: ptr noundef byval([[STRUCT_LONGDOUBLE2:%.*]]) align 16 [[T:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: entry:
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
;
entry:
  %args = alloca [1 x %struct.__va_list_tag], align 16
  call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
  call void @llvm.va_start(ptr nonnull %args)
  call void @_Z3usePv(ptr noundef nonnull %args)
  call void @llvm.va_end(ptr nonnull %args)
  call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
  ret void
}
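
; LongDouble4 (4 x x86_fp80, 64 bytes) is likewise passed byval; this is the
; callee exercised by the caller-side test that follows.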
define linkonce_odr dso_local void @_Z5test2I11LongDouble4EvT_iz(ptr noundef byval(%struct.LongDouble4) align 16 %t, i32 noundef %n, ...) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z5test2I11LongDouble4EvT_iz(
; CHECK-SAME: ptr noundef byval([[STRUCT_LONGDOUBLE4:%.*]]) align 16 [[T:%.*]], i32 noundef [[N:%.*]], ...) #[[ATTR0]] {
; CHECK-NEXT: entry:
; CHECK-NEXT: [[TMP0:%.*]] = load i64, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: [[TMP1:%.*]] = add i64 176, [[TMP0]]
; CHECK-NEXT: [[TMP2:%.*]] = alloca i8, i64 [[TMP1]], align 8
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP2]], i8 0, i64 [[TMP1]], i1 false)
; CHECK-NEXT: [[TMP3:%.*]] = call i64 @llvm.umin.i64(i64 [[TMP1]], i64 800)
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_va_arg_tls, i64 [[TMP3]], i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: [[ARGS:%.*]] = alloca [1 x %struct.__va_list_tag], align 16
; CHECK-NEXT: call void @llvm.lifetime.start.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP4:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP5:%.*]] = xor i64 [[TMP4]], 87960930222080
; CHECK-NEXT: [[TMP6:%.*]] = inttoptr i64 [[TMP5]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 16 [[TMP6]], i8 -1, i64 24, i1 false)
; CHECK-NEXT: [[TMP7:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP8:%.*]] = xor i64 [[TMP7]], 87960930222080
; CHECK-NEXT: [[TMP9:%.*]] = inttoptr i64 [[TMP8]] to ptr
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 8 [[TMP9]], i8 0, i64 24, i1 false)
; CHECK-NEXT: call void @llvm.va_start(ptr nonnull [[ARGS]])
; CHECK-NEXT: [[TMP10:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP11:%.*]] = add i64 [[TMP10]], 16
; CHECK-NEXT: [[TMP12:%.*]] = inttoptr i64 [[TMP11]] to ptr
; CHECK-NEXT: [[TMP13:%.*]] = load ptr, ptr [[TMP12]], align 8
; CHECK-NEXT: [[TMP14:%.*]] = ptrtoint ptr [[TMP13]] to i64
; CHECK-NEXT: [[TMP15:%.*]] = xor i64 [[TMP14]], 87960930222080
; CHECK-NEXT: [[TMP16:%.*]] = inttoptr i64 [[TMP15]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP16]], ptr align 16 [[TMP2]], i64 176, i1 false)
; CHECK-NEXT: [[TMP17:%.*]] = ptrtoint ptr [[ARGS]] to i64
; CHECK-NEXT: [[TMP18:%.*]] = add i64 [[TMP17]], 8
; CHECK-NEXT: [[TMP19:%.*]] = inttoptr i64 [[TMP18]] to ptr
; CHECK-NEXT: [[TMP20:%.*]] = load ptr, ptr [[TMP19]], align 8
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[TMP20]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: [[TMP24:%.*]] = getelementptr i8, ptr [[TMP2]], i32 176
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 16 [[TMP23]], ptr align 16 [[TMP24]], i64 [[TMP0]], i1 false)
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.va_end(ptr nonnull [[ARGS]])
; CHECK-NEXT: call void @llvm.lifetime.end.p0(i64 24, ptr nonnull [[ARGS]])
; CHECK-NEXT: ret void
;
entry:
  %args = alloca [1 x %struct.__va_list_tag], align 16
  call void @llvm.lifetime.start.p0(i64 24, ptr nonnull %args) #6
  call void @llvm.va_start(ptr nonnull %args)
  call void @_Z3usePv(ptr noundef nonnull %args)
  call void @llvm.va_end(ptr nonnull %args)
  call void @llvm.lifetime.end.p0(i64 24, ptr nonnull %args) #6
  ret void
}
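
; Caller-side test: test3 forwards its byval LongDouble4 argument twenty times
; as variadic arguments to test2. The 64-byte shadow of %arg is copied from its
; shadow memory into @__msan_param_tls and, for the variadic arguments, also
; into @__msan_va_arg_tls starting at offset 176; copies that would run past
; the 800-byte TLS buffers are dropped (the last partial slot at offset 752 is
; zero-filled with a 48-byte memset), and the total overflow-area size of
; 20 * 64 = 1280 bytes is stored to @__msan_va_arg_overflow_size_tls.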
define linkonce_odr dso_local void @_Z4test3I11LongDouble4EvT_(ptr noundef byval(%struct.LongDouble4) align 16 %arg) sanitize_memory {
; CHECK-LABEL: define linkonce_odr dso_local void @_Z4test3I11LongDouble4EvT_(
; CHECK-SAME: ptr noundef byval([[STRUCT_LONGDOUBLE4:%.*]]) align 16 [[ARG:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: entry:
; CHECK-NEXT: [[TMP0:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP1:%.*]] = xor i64 [[TMP0]], 87960930222080
; CHECK-NEXT: [[TMP2:%.*]] = inttoptr i64 [[TMP1]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[TMP2]], ptr align 8 @__msan_param_tls, i64 64, i1 false)
; CHECK-NEXT: call void @llvm.donothing()
; CHECK-NEXT: store i64 0, ptr @__msan_param_tls, align 8
; CHECK-NEXT: call void @_Z3usePv(ptr noundef nonnull [[ARG]])
; CHECK-NEXT: [[TMP3:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP4:%.*]] = xor i64 [[TMP3]], 87960930222080
; CHECK-NEXT: [[TMP5:%.*]] = inttoptr i64 [[TMP4]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 @__msan_param_tls, ptr align 8 [[TMP5]], i64 64, i1 false)
; CHECK-NEXT: store i32 0, ptr inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 64) to ptr), align 8
; CHECK-NEXT: [[TMP6:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP7:%.*]] = xor i64 [[TMP6]], 87960930222080
; CHECK-NEXT: [[TMP8:%.*]] = inttoptr i64 [[TMP7]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 72) to ptr), ptr align 8 [[TMP8]], i64 64, i1 false)
; CHECK-NEXT: [[TMP9:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP10:%.*]] = xor i64 [[TMP9]], 87960930222080
; CHECK-NEXT: [[TMP11:%.*]] = inttoptr i64 [[TMP10]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 136) to ptr), ptr align 8 [[TMP11]], i64 64, i1 false)
; CHECK-NEXT: [[TMP12:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP13:%.*]] = xor i64 [[TMP12]], 87960930222080
; CHECK-NEXT: [[TMP14:%.*]] = inttoptr i64 [[TMP13]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 200) to ptr), ptr align 8 [[TMP14]], i64 64, i1 false)
; CHECK-NEXT: [[TMP15:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP16:%.*]] = xor i64 [[TMP15]], 87960930222080
; CHECK-NEXT: [[TMP17:%.*]] = inttoptr i64 [[TMP16]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 264) to ptr), ptr align 8 [[TMP17]], i64 64, i1 false)
; CHECK-NEXT: [[TMP18:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP19:%.*]] = xor i64 [[TMP18]], 87960930222080
; CHECK-NEXT: [[TMP20:%.*]] = inttoptr i64 [[TMP19]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 328) to ptr), ptr align 8 [[TMP20]], i64 64, i1 false)
; CHECK-NEXT: [[TMP21:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP22:%.*]] = xor i64 [[TMP21]], 87960930222080
; CHECK-NEXT: [[TMP23:%.*]] = inttoptr i64 [[TMP22]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 392) to ptr), ptr align 8 [[TMP23]], i64 64, i1 false)
; CHECK-NEXT: [[TMP24:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP25:%.*]] = xor i64 [[TMP24]], 87960930222080
; CHECK-NEXT: [[TMP26:%.*]] = inttoptr i64 [[TMP25]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 456) to ptr), ptr align 8 [[TMP26]], i64 64, i1 false)
; CHECK-NEXT: [[TMP27:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP28:%.*]] = xor i64 [[TMP27]], 87960930222080
; CHECK-NEXT: [[TMP29:%.*]] = inttoptr i64 [[TMP28]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 520) to ptr), ptr align 8 [[TMP29]], i64 64, i1 false)
; CHECK-NEXT: [[TMP30:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP31:%.*]] = xor i64 [[TMP30]], 87960930222080
; CHECK-NEXT: [[TMP32:%.*]] = inttoptr i64 [[TMP31]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 584) to ptr), ptr align 8 [[TMP32]], i64 64, i1 false)
; CHECK-NEXT: [[TMP33:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP34:%.*]] = xor i64 [[TMP33]], 87960930222080
; CHECK-NEXT: [[TMP35:%.*]] = inttoptr i64 [[TMP34]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 648) to ptr), ptr align 8 [[TMP35]], i64 64, i1 false)
; CHECK-NEXT: [[TMP36:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP37:%.*]] = xor i64 [[TMP36]], 87960930222080
; CHECK-NEXT: [[TMP38:%.*]] = inttoptr i64 [[TMP37]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_param_tls to i64), i64 712) to ptr), ptr align 8 [[TMP38]], i64 64, i1 false)
; CHECK-NEXT: [[TMP39:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP40:%.*]] = xor i64 [[TMP39]], 87960930222080
; CHECK-NEXT: [[TMP41:%.*]] = inttoptr i64 [[TMP40]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 176) to ptr), ptr align 8 [[TMP41]], i64 64, i1 false)
; CHECK-NEXT: [[TMP42:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP43:%.*]] = xor i64 [[TMP42]], 87960930222080
; CHECK-NEXT: [[TMP44:%.*]] = inttoptr i64 [[TMP43]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 240) to ptr), ptr align 8 [[TMP44]], i64 64, i1 false)
; CHECK-NEXT: [[TMP45:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP46:%.*]] = xor i64 [[TMP45]], 87960930222080
; CHECK-NEXT: [[TMP47:%.*]] = inttoptr i64 [[TMP46]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 304) to ptr), ptr align 8 [[TMP47]], i64 64, i1 false)
; CHECK-NEXT: [[TMP48:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP49:%.*]] = xor i64 [[TMP48]], 87960930222080
; CHECK-NEXT: [[TMP50:%.*]] = inttoptr i64 [[TMP49]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 368) to ptr), ptr align 8 [[TMP50]], i64 64, i1 false)
; CHECK-NEXT: [[TMP51:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP52:%.*]] = xor i64 [[TMP51]], 87960930222080
; CHECK-NEXT: [[TMP53:%.*]] = inttoptr i64 [[TMP52]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 432) to ptr), ptr align 8 [[TMP53]], i64 64, i1 false)
; CHECK-NEXT: [[TMP54:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP55:%.*]] = xor i64 [[TMP54]], 87960930222080
; CHECK-NEXT: [[TMP56:%.*]] = inttoptr i64 [[TMP55]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 496) to ptr), ptr align 8 [[TMP56]], i64 64, i1 false)
; CHECK-NEXT: [[TMP57:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP58:%.*]] = xor i64 [[TMP57]], 87960930222080
; CHECK-NEXT: [[TMP59:%.*]] = inttoptr i64 [[TMP58]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 560) to ptr), ptr align 8 [[TMP59]], i64 64, i1 false)
; CHECK-NEXT: [[TMP60:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP61:%.*]] = xor i64 [[TMP60]], 87960930222080
; CHECK-NEXT: [[TMP62:%.*]] = inttoptr i64 [[TMP61]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 624) to ptr), ptr align 8 [[TMP62]], i64 64, i1 false)
; CHECK-NEXT: [[TMP63:%.*]] = ptrtoint ptr [[ARG]] to i64
; CHECK-NEXT: [[TMP64:%.*]] = xor i64 [[TMP63]], 87960930222080
; CHECK-NEXT: [[TMP65:%.*]] = inttoptr i64 [[TMP64]] to ptr
; CHECK-NEXT: call void @llvm.memcpy.p0.p0.i64(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 688) to ptr), ptr align 8 [[TMP65]], i64 64, i1 false)
; CHECK-NEXT: call void @llvm.memset.p0.i32(ptr align 8 inttoptr (i64 add (i64 ptrtoint (ptr @__msan_va_arg_tls to i64), i64 752) to ptr), i8 0, i32 48, i1 false)
; CHECK-NEXT: store i64 1280, ptr @__msan_va_arg_overflow_size_tls, align 8
; CHECK-NEXT: call void (ptr, i32, ...) @_Z5test2I11LongDouble4EvT_iz(ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], i32 noundef 20, ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]], ptr noundef nonnull byval([[STRUCT_LONGDOUBLE4]]) align 16 [[ARG]])
; CHECK-NEXT: ret void
;
entry:
  call void @_Z3usePv(ptr noundef nonnull %arg)
  call void (ptr, i32, ...) @_Z5test2I11LongDouble4EvT_iz(ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, i32 noundef 20, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg, ptr noundef nonnull byval(%struct.LongDouble4) align 16 %arg)