# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -march=aarch64 -run-pass=legalizer -verify-machineinstrs %s -o - | FileCheck %s
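# Check that a memcpy intrinsic with a tail-call flag of 0 is legalized to a
# plain libcall: a BL to &memcpy bracketed by ADJCALLSTACKDOWN/ADJCALLSTACKUP,
# with the s32 length zero-extended to s64 for $x2.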
---
name: test_memcpy
tracksRegLiveness: true
body: |
  bb.1:
    liveins: $w2, $x0, $x1

    ; CHECK-LABEL: name: test_memcpy
    ; CHECK: liveins: $w2, $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: BL &memcpy, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s32) = COPY $w2
    %3:_(s64) = G_ZEXT %2(s32)
    G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.memcpy), %0(p0), %1(p0), %3(s64), 0
    RET_ReallyLR

...
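# When the tail-call flag (the last intrinsic operand) is 1 and the block just
# returns afterwards, the memcpy libcall should be emitted as a tail call
# (TCRETURNdi &memcpy) with no ADJCALLSTACKDOWN/ADJCALLSTACKUP around it.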
---
name: test_memcpy_tail
tracksRegLiveness: true
body: |
  bb.1:
    liveins: $w2, $x0, $x1

    ; CHECK-LABEL: name: test_memcpy_tail
    ; CHECK: liveins: $w2, $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: TCRETURNdi &memcpy, 0, csr_aarch64_aapcs, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s32) = COPY $w2
    %3:_(s64) = G_ZEXT %2(s32)
    G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.memcpy), %0(p0), %1(p0), %3(s64), 1
    RET_ReallyLR

...
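# memmove with a tail-call flag of 0 follows the same lowering as memcpy: a BL
# to &memmove with the length zero-extended to 64 bits and passed in $x2.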
---
name: test_memmove
tracksRegLiveness: true
body: |
  bb.1:
    liveins: $w2, $x0, $x1

    ; CHECK-LABEL: name: test_memmove
    ; CHECK: liveins: $w2, $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: BL &memmove, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s32) = COPY $w2
    %3:_(s64) = G_ZEXT %2(s32)
    G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.memmove), %0(p0), %1(p0), %3(s64), 0
    RET_ReallyLR

...
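# For memset the s8 value operand is passed as a 32-bit argument in $w1
# (memset takes an int as its value argument), and the zero-extended length
# goes in $x2.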
---
name: test_memset
tracksRegLiveness: true
body: |
  bb.1:
    liveins: $w1, $w2, $x0

    ; CHECK-LABEL: name: test_memset
    ; CHECK: liveins: $w1, $w2, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $w1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
    ; CHECK: $w1 = COPY [[COPY3]](s32)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: BL &memset, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $w1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(s32) = COPY $w1
    %2:_(s32) = COPY $w2
    %3:_(s8) = G_TRUNC %1(s32)
    %4:_(s64) = G_ZEXT %2(s32)
    G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.memset), %0(p0), %3(s8), %4(s64), 0
    RET_ReallyLR

...
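# Even though the tail-call flag is 1 here, the zero-extended length (%3) is
# also returned in $x0 after the call, so the memcpy must be lowered to a
# normal BL rather than a tail call.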
---
name: no_tail_call
tracksRegLiveness: true
body: |
  bb.1:
    liveins: $w2, $x0, $x1

    ; CHECK-LABEL: name: no_tail_call
    ; CHECK: liveins: $w2, $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: BL &memcpy, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[ZEXT]](s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s32) = COPY $w2
    %3:_(s64) = G_ZEXT %2(s32)
    G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.memcpy), %0(p0), %1(p0), %3(s64), 1
    $x0 = COPY %3(s64)
    RET_ReallyLR implicit $x0

...
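# The block already ends in a tail call to memset, so the memcpy libcall must
# not be tail-called as well; it should be lowered to a regular BL.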
---
name: dont_tc_twice
tracksRegLiveness: true
body: |
  bb.1:
    liveins: $w2, $x0, $x1
    ; CHECK-LABEL: name: dont_tc_twice
    ; CHECK: liveins: $w2, $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: $x0 = COPY [[COPY]](p0)
    ; CHECK: $x1 = COPY [[COPY1]](p0)
    ; CHECK: $x2 = COPY [[ZEXT]](s64)
    ; CHECK: BL &memcpy, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: TCRETURNdi &memset, 0, csr_aarch64_aapcs, implicit $sp
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s32) = COPY $w2
    %4:_(s1) = G_CONSTANT i1 false
    %3:_(s64) = G_ZEXT %2(s32)
    G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.memcpy), %0(p0), %1(p0), %3(s64), 1
    TCRETURNdi &memset, 0, csr_aarch64_aapcs, implicit $sp

...