1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=riscv32 -mattr=+m,+v \
3 ; RUN: | FileCheck %s --check-prefixes=RV32-BOTH,RV32
4 ; RUN: llc < %s -mtriple=riscv64 -mattr=+m,+v \
5 ; RUN: | FileCheck %s --check-prefixes=RV64-BOTH,RV64
6 ; RUN: llc < %s -mtriple=riscv32 -mattr=+m,+v,+fast-unaligned-access \
7 ; RUN: | FileCheck %s --check-prefixes=RV32-BOTH,RV32-FAST
8 ; RUN: llc < %s -mtriple=riscv64 -mattr=+m,+v,+fast-unaligned-access \
9 ; RUN: | FileCheck %s --check-prefixes=RV64-BOTH,RV64-FAST
10 %struct.x = type { i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8 }
12 declare void @llvm.memset.p0.i64(ptr nocapture, i8, i64, i1) nounwind
13 declare void @llvm.memset.inline.p0.i64(ptr nocapture, i8, i64, i1) nounwind
15 ; /////////////////////////////////////////////////////////////////////////////
define void @memset_1(ptr %a, i8 %value) nounwind {
; RV32-BOTH-LABEL: memset_1:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: sb a1, 0(a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: memset_1:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: sb a1, 0(a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 %value, i64 1, i1 0)
  ret void
}
define void @memset_2(ptr %a, i8 %value) nounwind {
; RV32-LABEL: memset_2:
; RV32: # %bb.0:
; RV32-NEXT: sb a1, 1(a0)
; RV32-NEXT: sb a1, 0(a0)
; RV32-NEXT: ret
;
; RV64-LABEL: memset_2:
; RV64: # %bb.0:
; RV64-NEXT: sb a1, 1(a0)
; RV64-NEXT: sb a1, 0(a0)
; RV64-NEXT: ret
;
; RV32-FAST-LABEL: memset_2:
; RV32-FAST: # %bb.0:
; RV32-FAST-NEXT: andi a2, a1, 255
; RV32-FAST-NEXT: slli a1, a1, 8
; RV32-FAST-NEXT: or a1, a1, a2
; RV32-FAST-NEXT: sh a1, 0(a0)
; RV32-FAST-NEXT: ret
;
; RV64-FAST-LABEL: memset_2:
; RV64-FAST: # %bb.0:
; RV64-FAST-NEXT: andi a2, a1, 255
; RV64-FAST-NEXT: slli a1, a1, 8
; RV64-FAST-NEXT: or a1, a1, a2
; RV64-FAST-NEXT: sh a1, 0(a0)
; RV64-FAST-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 %value, i64 2, i1 0)
  ret void
}
define void @memset_4(ptr %a, i8 %value) nounwind {
; RV32-LABEL: memset_4:
; RV32: # %bb.0:
; RV32-NEXT: sb a1, 3(a0)
; RV32-NEXT: sb a1, 2(a0)
; RV32-NEXT: sb a1, 1(a0)
; RV32-NEXT: sb a1, 0(a0)
; RV32-NEXT: ret
;
; RV64-LABEL: memset_4:
; RV64: # %bb.0:
; RV64-NEXT: sb a1, 3(a0)
; RV64-NEXT: sb a1, 2(a0)
; RV64-NEXT: sb a1, 1(a0)
; RV64-NEXT: sb a1, 0(a0)
; RV64-NEXT: ret
;
; RV32-FAST-LABEL: memset_4:
; RV32-FAST: # %bb.0:
; RV32-FAST-NEXT: andi a1, a1, 255
; RV32-FAST-NEXT: lui a2, 4112
; RV32-FAST-NEXT: addi a2, a2, 257
; RV32-FAST-NEXT: mul a1, a1, a2
; RV32-FAST-NEXT: sw a1, 0(a0)
; RV32-FAST-NEXT: ret
;
; RV64-FAST-LABEL: memset_4:
; RV64-FAST: # %bb.0:
; RV64-FAST-NEXT: slli a1, a1, 56
; RV64-FAST-NEXT: lui a2, 65793
; RV64-FAST-NEXT: slli a2, a2, 4
; RV64-FAST-NEXT: addi a2, a2, 256
; RV64-FAST-NEXT: mulhu a1, a1, a2
; RV64-FAST-NEXT: sw a1, 0(a0)
; RV64-FAST-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 %value, i64 4, i1 0)
  ret void
}
define void @memset_8(ptr %a, i8 %value) nounwind {
; RV32-LABEL: memset_8:
; RV32: # %bb.0:
; RV32-NEXT: sb a1, 7(a0)
; RV32-NEXT: sb a1, 6(a0)
; RV32-NEXT: sb a1, 5(a0)
; RV32-NEXT: sb a1, 4(a0)
; RV32-NEXT: sb a1, 3(a0)
; RV32-NEXT: sb a1, 2(a0)
; RV32-NEXT: sb a1, 1(a0)
; RV32-NEXT: sb a1, 0(a0)
; RV32-NEXT: ret
;
; RV64-LABEL: memset_8:
; RV64: # %bb.0:
; RV64-NEXT: sb a1, 7(a0)
; RV64-NEXT: sb a1, 6(a0)
; RV64-NEXT: sb a1, 5(a0)
; RV64-NEXT: sb a1, 4(a0)
; RV64-NEXT: sb a1, 3(a0)
; RV64-NEXT: sb a1, 2(a0)
; RV64-NEXT: sb a1, 1(a0)
; RV64-NEXT: sb a1, 0(a0)
; RV64-NEXT: ret
;
; RV32-FAST-LABEL: memset_8:
; RV32-FAST: # %bb.0:
; RV32-FAST-NEXT: andi a1, a1, 255
; RV32-FAST-NEXT: lui a2, 4112
; RV32-FAST-NEXT: addi a2, a2, 257
; RV32-FAST-NEXT: mul a1, a1, a2
; RV32-FAST-NEXT: sw a1, 4(a0)
; RV32-FAST-NEXT: sw a1, 0(a0)
; RV32-FAST-NEXT: ret
;
; RV64-FAST-LABEL: memset_8:
; RV64-FAST: # %bb.0:
; RV64-FAST-NEXT: andi a1, a1, 255
; RV64-FAST-NEXT: lui a2, 4112
; RV64-FAST-NEXT: addiw a2, a2, 257
; RV64-FAST-NEXT: slli a3, a2, 32
; RV64-FAST-NEXT: add a2, a2, a3
; RV64-FAST-NEXT: mul a1, a1, a2
; RV64-FAST-NEXT: sd a1, 0(a0)
; RV64-FAST-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 %value, i64 8, i1 0)
  ret void
}
define void @memset_16(ptr %a, i8 %value) nounwind {
; RV32-BOTH-LABEL: memset_16:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV32-BOTH-NEXT: vmv.v.x v8, a1
; RV32-BOTH-NEXT: vse8.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: memset_16:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV64-BOTH-NEXT: vmv.v.x v8, a1
; RV64-BOTH-NEXT: vse8.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 %value, i64 16, i1 0)
  ret void
}
define void @memset_32(ptr %a, i8 %value) nounwind {
; RV32-BOTH-LABEL: memset_32:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: addi a2, a0, 16
; RV32-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV32-BOTH-NEXT: vmv.v.x v8, a1
; RV32-BOTH-NEXT: vse8.v v8, (a2)
; RV32-BOTH-NEXT: vse8.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: memset_32:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: addi a2, a0, 16
; RV64-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV64-BOTH-NEXT: vmv.v.x v8, a1
; RV64-BOTH-NEXT: vse8.v v8, (a2)
; RV64-BOTH-NEXT: vse8.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 %value, i64 32, i1 0)
  ret void
}
define void @memset_64(ptr %a, i8 %value) nounwind {
; RV32-BOTH-LABEL: memset_64:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: addi a2, a0, 48
; RV32-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV32-BOTH-NEXT: vmv.v.x v8, a1
; RV32-BOTH-NEXT: vse8.v v8, (a2)
; RV32-BOTH-NEXT: addi a1, a0, 32
; RV32-BOTH-NEXT: vse8.v v8, (a1)
; RV32-BOTH-NEXT: addi a1, a0, 16
; RV32-BOTH-NEXT: vse8.v v8, (a1)
; RV32-BOTH-NEXT: vse8.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: memset_64:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: addi a2, a0, 48
; RV64-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV64-BOTH-NEXT: vmv.v.x v8, a1
; RV64-BOTH-NEXT: vse8.v v8, (a2)
; RV64-BOTH-NEXT: addi a1, a0, 32
; RV64-BOTH-NEXT: vse8.v v8, (a1)
; RV64-BOTH-NEXT: addi a1, a0, 16
; RV64-BOTH-NEXT: vse8.v v8, (a1)
; RV64-BOTH-NEXT: vse8.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 %value, i64 64, i1 0)
  ret void
}
221 ; /////////////////////////////////////////////////////////////////////////////
define void @aligned_memset_2(ptr align 2 %a, i8 %value) nounwind {
; RV32-BOTH-LABEL: aligned_memset_2:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: andi a2, a1, 255
; RV32-BOTH-NEXT: slli a1, a1, 8
; RV32-BOTH-NEXT: or a1, a1, a2
; RV32-BOTH-NEXT: sh a1, 0(a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_memset_2:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: andi a2, a1, 255
; RV64-BOTH-NEXT: slli a1, a1, 8
; RV64-BOTH-NEXT: or a1, a1, a2
; RV64-BOTH-NEXT: sh a1, 0(a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 2 %a, i8 %value, i64 2, i1 0)
  ret void
}
define void @aligned_memset_4(ptr align 4 %a, i8 %value) nounwind {
; RV32-BOTH-LABEL: aligned_memset_4:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: andi a1, a1, 255
; RV32-BOTH-NEXT: lui a2, 4112
; RV32-BOTH-NEXT: addi a2, a2, 257
; RV32-BOTH-NEXT: mul a1, a1, a2
; RV32-BOTH-NEXT: sw a1, 0(a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_memset_4:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: slli a1, a1, 56
; RV64-BOTH-NEXT: lui a2, 65793
; RV64-BOTH-NEXT: slli a2, a2, 4
; RV64-BOTH-NEXT: addi a2, a2, 256
; RV64-BOTH-NEXT: mulhu a1, a1, a2
; RV64-BOTH-NEXT: sw a1, 0(a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 4 %a, i8 %value, i64 4, i1 0)
  ret void
}
define void @aligned_memset_8(ptr align 8 %a, i8 %value) nounwind {
; RV32-BOTH-LABEL: aligned_memset_8:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: andi a1, a1, 255
; RV32-BOTH-NEXT: lui a2, 4112
; RV32-BOTH-NEXT: addi a2, a2, 257
; RV32-BOTH-NEXT: mul a1, a1, a2
; RV32-BOTH-NEXT: sw a1, 4(a0)
; RV32-BOTH-NEXT: sw a1, 0(a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_memset_8:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: andi a1, a1, 255
; RV64-BOTH-NEXT: lui a2, 4112
; RV64-BOTH-NEXT: addiw a2, a2, 257
; RV64-BOTH-NEXT: slli a3, a2, 32
; RV64-BOTH-NEXT: add a2, a2, a3
; RV64-BOTH-NEXT: mul a1, a1, a2
; RV64-BOTH-NEXT: sd a1, 0(a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 8 %a, i8 %value, i64 8, i1 0)
  ret void
}
define void @aligned_memset_16(ptr align 16 %a, i8 %value) nounwind {
; RV32-BOTH-LABEL: aligned_memset_16:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV32-BOTH-NEXT: vmv.v.x v8, a1
; RV32-BOTH-NEXT: vse8.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_memset_16:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV64-BOTH-NEXT: vmv.v.x v8, a1
; RV64-BOTH-NEXT: vse8.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 16 %a, i8 %value, i64 16, i1 0)
  ret void
}
define void @aligned_memset_32(ptr align 32 %a, i8 %value) nounwind {
; RV32-BOTH-LABEL: aligned_memset_32:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: addi a2, a0, 16
; RV32-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV32-BOTH-NEXT: vmv.v.x v8, a1
; RV32-BOTH-NEXT: vse8.v v8, (a2)
; RV32-BOTH-NEXT: vse8.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_memset_32:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: addi a2, a0, 16
; RV64-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV64-BOTH-NEXT: vmv.v.x v8, a1
; RV64-BOTH-NEXT: vse8.v v8, (a2)
; RV64-BOTH-NEXT: vse8.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 32 %a, i8 %value, i64 32, i1 0)
  ret void
}
define void @aligned_memset_64(ptr align 64 %a, i8 %value) nounwind {
; RV32-BOTH-LABEL: aligned_memset_64:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: addi a2, a0, 48
; RV32-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV32-BOTH-NEXT: vmv.v.x v8, a1
; RV32-BOTH-NEXT: vse8.v v8, (a2)
; RV32-BOTH-NEXT: addi a1, a0, 32
; RV32-BOTH-NEXT: vse8.v v8, (a1)
; RV32-BOTH-NEXT: addi a1, a0, 16
; RV32-BOTH-NEXT: vse8.v v8, (a1)
; RV32-BOTH-NEXT: vse8.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_memset_64:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: addi a2, a0, 48
; RV64-BOTH-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV64-BOTH-NEXT: vmv.v.x v8, a1
; RV64-BOTH-NEXT: vse8.v v8, (a2)
; RV64-BOTH-NEXT: addi a1, a0, 32
; RV64-BOTH-NEXT: vse8.v v8, (a1)
; RV64-BOTH-NEXT: addi a1, a0, 16
; RV64-BOTH-NEXT: vse8.v v8, (a1)
; RV64-BOTH-NEXT: vse8.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 64 %a, i8 %value, i64 64, i1 0)
  ret void
}
361 ; /////////////////////////////////////////////////////////////////////////////
define void @bzero_1(ptr %a) nounwind {
; RV32-BOTH-LABEL: bzero_1:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: sb zero, 0(a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: bzero_1:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: sb zero, 0(a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 0, i64 1, i1 0)
  ret void
}
define void @bzero_2(ptr %a) nounwind {
; RV32-LABEL: bzero_2:
; RV32: # %bb.0:
; RV32-NEXT: sb zero, 1(a0)
; RV32-NEXT: sb zero, 0(a0)
; RV32-NEXT: ret
;
; RV64-LABEL: bzero_2:
; RV64: # %bb.0:
; RV64-NEXT: sb zero, 1(a0)
; RV64-NEXT: sb zero, 0(a0)
; RV64-NEXT: ret
;
; RV32-FAST-LABEL: bzero_2:
; RV32-FAST: # %bb.0:
; RV32-FAST-NEXT: sh zero, 0(a0)
; RV32-FAST-NEXT: ret
;
; RV64-FAST-LABEL: bzero_2:
; RV64-FAST: # %bb.0:
; RV64-FAST-NEXT: sh zero, 0(a0)
; RV64-FAST-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 0, i64 2, i1 0)
  ret void
}
define void @bzero_4(ptr %a) nounwind {
; RV32-LABEL: bzero_4:
; RV32: # %bb.0:
; RV32-NEXT: sb zero, 3(a0)
; RV32-NEXT: sb zero, 2(a0)
; RV32-NEXT: sb zero, 1(a0)
; RV32-NEXT: sb zero, 0(a0)
; RV32-NEXT: ret
;
; RV64-LABEL: bzero_4:
; RV64: # %bb.0:
; RV64-NEXT: sb zero, 3(a0)
; RV64-NEXT: sb zero, 2(a0)
; RV64-NEXT: sb zero, 1(a0)
; RV64-NEXT: sb zero, 0(a0)
; RV64-NEXT: ret
;
; RV32-FAST-LABEL: bzero_4:
; RV32-FAST: # %bb.0:
; RV32-FAST-NEXT: sw zero, 0(a0)
; RV32-FAST-NEXT: ret
;
; RV64-FAST-LABEL: bzero_4:
; RV64-FAST: # %bb.0:
; RV64-FAST-NEXT: sw zero, 0(a0)
; RV64-FAST-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 0, i64 4, i1 0)
  ret void
}
define void @bzero_8(ptr %a) nounwind {
; RV32-LABEL: bzero_8:
; RV32: # %bb.0:
; RV32-NEXT: sb zero, 7(a0)
; RV32-NEXT: sb zero, 6(a0)
; RV32-NEXT: sb zero, 5(a0)
; RV32-NEXT: sb zero, 4(a0)
; RV32-NEXT: sb zero, 3(a0)
; RV32-NEXT: sb zero, 2(a0)
; RV32-NEXT: sb zero, 1(a0)
; RV32-NEXT: sb zero, 0(a0)
; RV32-NEXT: ret
;
; RV64-LABEL: bzero_8:
; RV64: # %bb.0:
; RV64-NEXT: sb zero, 7(a0)
; RV64-NEXT: sb zero, 6(a0)
; RV64-NEXT: sb zero, 5(a0)
; RV64-NEXT: sb zero, 4(a0)
; RV64-NEXT: sb zero, 3(a0)
; RV64-NEXT: sb zero, 2(a0)
; RV64-NEXT: sb zero, 1(a0)
; RV64-NEXT: sb zero, 0(a0)
; RV64-NEXT: ret
;
; RV32-FAST-LABEL: bzero_8:
; RV32-FAST: # %bb.0:
; RV32-FAST-NEXT: sw zero, 4(a0)
; RV32-FAST-NEXT: sw zero, 0(a0)
; RV32-FAST-NEXT: ret
;
; RV64-FAST-LABEL: bzero_8:
; RV64-FAST: # %bb.0:
; RV64-FAST-NEXT: sd zero, 0(a0)
; RV64-FAST-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 0, i64 8, i1 0)
  ret void
}
define void @bzero_16(ptr %a) nounwind {
; RV32-LABEL: bzero_16:
; RV32: # %bb.0:
; RV32-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV32-NEXT: vmv.v.i v8, 0
; RV32-NEXT: vse8.v v8, (a0)
; RV32-NEXT: ret
;
; RV64-LABEL: bzero_16:
; RV64: # %bb.0:
; RV64-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV64-NEXT: vmv.v.i v8, 0
; RV64-NEXT: vse8.v v8, (a0)
; RV64-NEXT: ret
;
; RV32-FAST-LABEL: bzero_16:
; RV32-FAST: # %bb.0:
; RV32-FAST-NEXT: vsetivli zero, 2, e64, m1, ta, ma
; RV32-FAST-NEXT: vmv.v.i v8, 0
; RV32-FAST-NEXT: vse64.v v8, (a0)
; RV32-FAST-NEXT: ret
;
; RV64-FAST-LABEL: bzero_16:
; RV64-FAST: # %bb.0:
; RV64-FAST-NEXT: vsetivli zero, 2, e64, m1, ta, ma
; RV64-FAST-NEXT: vmv.v.i v8, 0
; RV64-FAST-NEXT: vse64.v v8, (a0)
; RV64-FAST-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 0, i64 16, i1 0)
  ret void
}
define void @bzero_32(ptr %a) nounwind {
; RV32-LABEL: bzero_32:
; RV32: # %bb.0:
; RV32-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV32-NEXT: vmv.v.i v8, 0
; RV32-NEXT: vse8.v v8, (a0)
; RV32-NEXT: addi a0, a0, 16
; RV32-NEXT: vse8.v v8, (a0)
; RV32-NEXT: ret
;
; RV64-LABEL: bzero_32:
; RV64: # %bb.0:
; RV64-NEXT: vsetivli zero, 16, e8, m1, ta, ma
; RV64-NEXT: vmv.v.i v8, 0
; RV64-NEXT: vse8.v v8, (a0)
; RV64-NEXT: addi a0, a0, 16
; RV64-NEXT: vse8.v v8, (a0)
; RV64-NEXT: ret
;
; RV32-FAST-LABEL: bzero_32:
; RV32-FAST: # %bb.0:
; RV32-FAST-NEXT: vsetivli zero, 2, e64, m1, ta, ma
; RV32-FAST-NEXT: vmv.v.i v8, 0
; RV32-FAST-NEXT: vse64.v v8, (a0)
; RV32-FAST-NEXT: addi a0, a0, 16
; RV32-FAST-NEXT: vse64.v v8, (a0)
; RV32-FAST-NEXT: ret
;
; RV64-FAST-LABEL: bzero_32:
; RV64-FAST: # %bb.0:
; RV64-FAST-NEXT: vsetivli zero, 2, e64, m1, ta, ma
; RV64-FAST-NEXT: vmv.v.i v8, 0
; RV64-FAST-NEXT: vse64.v v8, (a0)
; RV64-FAST-NEXT: addi a0, a0, 16
; RV64-FAST-NEXT: vse64.v v8, (a0)
; RV64-FAST-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 0, i64 32, i1 0)
  ret void
}
define void @bzero_64(ptr %a) nounwind {
; RV32-LABEL: bzero_64:
; RV32: # %bb.0:
; RV32-NEXT: li a1, 64
; RV32-NEXT: vsetvli zero, a1, e8, m4, ta, ma
; RV32-NEXT: vmv.v.i v8, 0
; RV32-NEXT: vse8.v v8, (a0)
; RV32-NEXT: ret
;
; RV64-LABEL: bzero_64:
; RV64: # %bb.0:
; RV64-NEXT: li a1, 64
; RV64-NEXT: vsetvli zero, a1, e8, m4, ta, ma
; RV64-NEXT: vmv.v.i v8, 0
; RV64-NEXT: vse8.v v8, (a0)
; RV64-NEXT: ret
;
; RV32-FAST-LABEL: bzero_64:
; RV32-FAST: # %bb.0:
; RV32-FAST-NEXT: vsetivli zero, 8, e64, m4, ta, ma
; RV32-FAST-NEXT: vmv.v.i v8, 0
; RV32-FAST-NEXT: vse64.v v8, (a0)
; RV32-FAST-NEXT: ret
;
; RV64-FAST-LABEL: bzero_64:
; RV64-FAST: # %bb.0:
; RV64-FAST-NEXT: vsetivli zero, 8, e64, m4, ta, ma
; RV64-FAST-NEXT: vmv.v.i v8, 0
; RV64-FAST-NEXT: vse64.v v8, (a0)
; RV64-FAST-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr %a, i8 0, i64 64, i1 0)
  ret void
}
578 ; /////////////////////////////////////////////////////////////////////////////
define void @aligned_bzero_2(ptr %a) nounwind {
; RV32-BOTH-LABEL: aligned_bzero_2:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: sh zero, 0(a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_bzero_2:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: sh zero, 0(a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 2 %a, i8 0, i64 2, i1 0)
  ret void
}
define void @aligned_bzero_4(ptr %a) nounwind {
; RV32-BOTH-LABEL: aligned_bzero_4:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: sw zero, 0(a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_bzero_4:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: sw zero, 0(a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 4 %a, i8 0, i64 4, i1 0)
  ret void
}
define void @aligned_bzero_8(ptr %a) nounwind {
; RV32-BOTH-LABEL: aligned_bzero_8:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: sw zero, 4(a0)
; RV32-BOTH-NEXT: sw zero, 0(a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_bzero_8:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: sd zero, 0(a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 8 %a, i8 0, i64 8, i1 0)
  ret void
}
define void @aligned_bzero_16(ptr %a) nounwind {
; RV32-BOTH-LABEL: aligned_bzero_16:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: vsetivli zero, 2, e64, m1, ta, ma
; RV32-BOTH-NEXT: vmv.v.i v8, 0
; RV32-BOTH-NEXT: vse64.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_bzero_16:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: vsetivli zero, 2, e64, m1, ta, ma
; RV64-BOTH-NEXT: vmv.v.i v8, 0
; RV64-BOTH-NEXT: vse64.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 16 %a, i8 0, i64 16, i1 0)
  ret void
}
define void @aligned_bzero_32(ptr %a) nounwind {
; RV32-BOTH-LABEL: aligned_bzero_32:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: vsetivli zero, 2, e64, m1, ta, ma
; RV32-BOTH-NEXT: vmv.v.i v8, 0
; RV32-BOTH-NEXT: vse64.v v8, (a0)
; RV32-BOTH-NEXT: addi a0, a0, 16
; RV32-BOTH-NEXT: vse64.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_bzero_32:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: vsetivli zero, 2, e64, m1, ta, ma
; RV64-BOTH-NEXT: vmv.v.i v8, 0
; RV64-BOTH-NEXT: vse64.v v8, (a0)
; RV64-BOTH-NEXT: addi a0, a0, 16
; RV64-BOTH-NEXT: vse64.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 32 %a, i8 0, i64 32, i1 0)
  ret void
}
define void @aligned_bzero_64(ptr %a) nounwind {
; RV32-BOTH-LABEL: aligned_bzero_64:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: vsetivli zero, 8, e64, m4, ta, ma
; RV32-BOTH-NEXT: vmv.v.i v8, 0
; RV32-BOTH-NEXT: vse64.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_bzero_64:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: vsetivli zero, 8, e64, m4, ta, ma
; RV64-BOTH-NEXT: vmv.v.i v8, 0
; RV64-BOTH-NEXT: vse64.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 64 %a, i8 0, i64 64, i1 0)
  ret void
}
define void @aligned_bzero_66(ptr %a) nounwind {
; RV32-BOTH-LABEL: aligned_bzero_66:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: sh zero, 64(a0)
; RV32-BOTH-NEXT: vsetivli zero, 8, e64, m4, ta, ma
; RV32-BOTH-NEXT: vmv.v.i v8, 0
; RV32-BOTH-NEXT: vse64.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_bzero_66:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: sh zero, 64(a0)
; RV64-BOTH-NEXT: vsetivli zero, 8, e64, m4, ta, ma
; RV64-BOTH-NEXT: vmv.v.i v8, 0
; RV64-BOTH-NEXT: vse64.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 64 %a, i8 0, i64 66, i1 0)
  ret void
}
define void @aligned_bzero_96(ptr %a) nounwind {
; RV32-BOTH-LABEL: aligned_bzero_96:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: vsetivli zero, 8, e64, m4, ta, ma
; RV32-BOTH-NEXT: vmv.v.i v8, 0
; RV32-BOTH-NEXT: vse64.v v8, (a0)
; RV32-BOTH-NEXT: addi a1, a0, 80
; RV32-BOTH-NEXT: vsetivli zero, 2, e64, m1, ta, ma
; RV32-BOTH-NEXT: vmv.v.i v8, 0
; RV32-BOTH-NEXT: vse64.v v8, (a1)
; RV32-BOTH-NEXT: addi a0, a0, 64
; RV32-BOTH-NEXT: vse64.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_bzero_96:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: vsetivli zero, 8, e64, m4, ta, ma
; RV64-BOTH-NEXT: vmv.v.i v8, 0
; RV64-BOTH-NEXT: vse64.v v8, (a0)
; RV64-BOTH-NEXT: addi a1, a0, 80
; RV64-BOTH-NEXT: vsetivli zero, 2, e64, m1, ta, ma
; RV64-BOTH-NEXT: vmv.v.i v8, 0
; RV64-BOTH-NEXT: vse64.v v8, (a1)
; RV64-BOTH-NEXT: addi a0, a0, 64
; RV64-BOTH-NEXT: vse64.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 64 %a, i8 0, i64 96, i1 0)
  ret void
}
define void @aligned_bzero_128(ptr %a) nounwind {
; RV32-BOTH-LABEL: aligned_bzero_128:
; RV32-BOTH: # %bb.0:
; RV32-BOTH-NEXT: vsetivli zero, 16, e64, m8, ta, ma
; RV32-BOTH-NEXT: vmv.v.i v8, 0
; RV32-BOTH-NEXT: vse64.v v8, (a0)
; RV32-BOTH-NEXT: ret
;
; RV64-BOTH-LABEL: aligned_bzero_128:
; RV64-BOTH: # %bb.0:
; RV64-BOTH-NEXT: vsetivli zero, 16, e64, m8, ta, ma
; RV64-BOTH-NEXT: vmv.v.i v8, 0
; RV64-BOTH-NEXT: vse64.v v8, (a0)
; RV64-BOTH-NEXT: ret
  tail call void @llvm.memset.inline.p0.i64(ptr align 64 %a, i8 0, i64 128, i1 0)
  ret void
}
750 define void @aligned_bzero_256(ptr %a) nounwind {
751 ; RV32-BOTH-LABEL: aligned_bzero_256:
752 ; RV32-BOTH: # %bb.0:
753 ; RV32-BOTH-NEXT: vsetivli zero, 16, e64, m8, ta, ma
754 ; RV32-BOTH-NEXT: vmv.v.i v8, 0
755 ; RV32-BOTH-NEXT: vse64.v v8, (a0)
756 ; RV32-BOTH-NEXT: addi a0, a0, 128
757 ; RV32-BOTH-NEXT: vse64.v v8, (a0)
758 ; RV32-BOTH-NEXT: ret
760 ; RV64-BOTH-LABEL: aligned_bzero_256:
761 ; RV64-BOTH: # %bb.0:
762 ; RV64-BOTH-NEXT: vsetivli zero, 16, e64, m8, ta, ma
763 ; RV64-BOTH-NEXT: vmv.v.i v8, 0
764 ; RV64-BOTH-NEXT: vse64.v v8, (a0)
765 ; RV64-BOTH-NEXT: addi a0, a0, 128
766 ; RV64-BOTH-NEXT: vse64.v v8, (a0)
767 ; RV64-BOTH-NEXT: ret
768 tail call void @llvm.memset.inline.p0.i64(ptr align 64 %a, i8 0, i64 256, i1 0)