; NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
; RUN: llc -mtriple=aarch64-unknown-unknown -global-isel -global-isel-abort=1 -verify-machineinstrs -stop-after=irtranslator %s -o - | FileCheck %s
; Non-tail, non-volatile memcpy lowers to G_MEMCPY with tail-call flag 0.
define void @copy(ptr %dst, ptr %src) {
; CHECK-LABEL: name: copy
; CHECK: liveins: $x0, $x1
; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
; CHECK: G_MEMCPY [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64), 0 :: (store (s8) into %ir.dst), (load (s8) from %ir.src)
; CHECK: RET_ReallyLR
  call void @llvm.memcpy.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 false)
  ret void
}
; llvm.memcpy.inline lowers to G_MEMCPY_INLINE (no tail-call operand).
define void @inline_copy(ptr %dst, ptr %src) {
; CHECK-LABEL: name: inline_copy
; CHECK: liveins: $x0, $x1
; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
; CHECK: G_MEMCPY_INLINE [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64) :: (store (s8) into %ir.dst), (load (s8) from %ir.src)
; CHECK: RET_ReallyLR
  call void @llvm.memcpy.inline.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 false)
  ret void
}
; Volatile memcpy: the G_MEMCPY memory operands carry the volatile flag.
define void @copy_volatile(ptr %dst, ptr %src) {
; CHECK-LABEL: name: copy_volatile
; CHECK: liveins: $x0, $x1
; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
; CHECK: G_MEMCPY [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64), 0 :: (volatile store (s8) into %ir.dst), (volatile load (s8) from %ir.src)
; CHECK: RET_ReallyLR
  call void @llvm.memcpy.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 true)
  ret void
}
; Volatile memcpy.inline: G_MEMCPY_INLINE with volatile memory operands.
define void @inline_copy_volatile(ptr %dst, ptr %src) {
; CHECK-LABEL: name: inline_copy_volatile
; CHECK: liveins: $x0, $x1
; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
; CHECK: G_MEMCPY_INLINE [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64) :: (volatile store (s8) into %ir.dst), (volatile load (s8) from %ir.src)
; CHECK: RET_ReallyLR
  call void @llvm.memcpy.inline.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 true)
  ret void
}
; Tail-marked memcpy: G_MEMCPY's tail-call operand becomes 1.
define void @tail_copy(ptr %dst, ptr %src) {
; CHECK-LABEL: name: tail_copy
; CHECK: liveins: $x0, $x1
; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
; CHECK: G_MEMCPY [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64), 1 :: (store (s8) into %ir.dst), (load (s8) from %ir.src)
; CHECK: RET_ReallyLR
  tail call void @llvm.memcpy.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 false)
  ret void
}
; Tail-marked memcpy.inline: still G_MEMCPY_INLINE — the tail marker has no
; operand on the inline form.
define void @tail_inline_copy(ptr %dst, ptr %src) {
; CHECK-LABEL: name: tail_inline_copy
; CHECK: liveins: $x0, $x1
; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
; CHECK: G_MEMCPY_INLINE [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64) :: (store (s8) into %ir.dst), (load (s8) from %ir.src)
; CHECK: RET_ReallyLR
  tail call void @llvm.memcpy.inline.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 false)
  ret void
}
; Tail + volatile memcpy: tail-call operand 1 and volatile memory operands.
define void @tail_copy_volatile(ptr %dst, ptr %src) {
; CHECK-LABEL: name: tail_copy_volatile
; CHECK: liveins: $x0, $x1
; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
; CHECK: G_MEMCPY [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64), 1 :: (volatile store (s8) into %ir.dst), (volatile load (s8) from %ir.src)
; CHECK: RET_ReallyLR
  tail call void @llvm.memcpy.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 true)
  ret void
}
; Tail + volatile memcpy.inline: G_MEMCPY_INLINE with volatile memory operands.
define void @tail_inline_copy_volatile(ptr %dst, ptr %src) {
; CHECK-LABEL: name: tail_inline_copy_volatile
; CHECK: liveins: $x0, $x1
; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[C]](s32)
; CHECK: G_MEMCPY_INLINE [[COPY]](p0), [[COPY1]](p0), [[ZEXT]](s64) :: (volatile store (s8) into %ir.dst), (volatile load (s8) from %ir.src)
; CHECK: RET_ReallyLR
  tail call void @llvm.memcpy.inline.p0.p0.i32(ptr %dst, ptr %src, i32 4, i1 true)
  ret void
}
; Intrinsic declarations for the i32-length memcpy and memcpy.inline forms.
declare void @llvm.memcpy.p0.p0.i32(ptr nocapture writeonly, ptr nocapture readonly, i32, i1) nounwind
declare void @llvm.memcpy.inline.p0.p0.i32(ptr nocapture writeonly, ptr nocapture readonly, i32, i1) nounwind