# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-- -run-pass=instruction-select -verify-machineinstrs %s -o - | FileCheck %s
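#
# Check that calls to the @llvm.aarch64.stxr intrinsic are selected to the
# store-exclusive instruction matching the store width: STXRB, STXRH,
# STXRW, or STXRX.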
--- |
  define void @test_store_i8(i32, i8 %val, i8* %addr) { ret void }
  define void @test_store_i16(i32, i16 %val, i16* %addr) { ret void }
  define void @test_store_i32(i32, i32 %val, i32* %addr) { ret void }
  define void @test_store_i64(i32, i64 %val, i64* %addr) { ret void }
...
---
name:            test_store_i8
legalized:       true
regBankSelected: true
tracksRegLiveness: true
machineFunctionInfo: {}
body:             |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_i8
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: [[DEF:%[0-9]+]]:gpr64all = IMPLICIT_DEF
    ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:gpr64 = INSERT_SUBREG [[DEF]], [[COPY]], %subreg.sub_32
    ; CHECK: [[COPY2:%[0-9]+]]:gpr32 = COPY [[INSERT_SUBREG]].sub_32
    ; CHECK: early-clobber %5:gpr32 = STXRB [[COPY2]], [[COPY1]] :: (volatile store (s8) into %ir.addr)
    ; CHECK: $w0 = COPY %5
    ; CHECK: RET_ReallyLR implicit $w0
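    ; The generic MIR below masks the value to 8 bits in a 64-bit register;
    ; STXRB takes a 32-bit data register, so the selected code above routes
    ; the value through its sub_32 subregister.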
    %3:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %6:gpr(s64) = G_CONSTANT i64 255
    %7:gpr(s64) = G_ANYEXT %3(s32)
    %4:gpr(s64) = G_AND %7, %6
    %5:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stxr), %4(s64), %2(p0) :: (volatile store (s8) into %ir.addr)
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0

...
---
name:            test_store_i16
legalized:       true
regBankSelected: true
tracksRegLiveness: true
machineFunctionInfo: {}
body:             |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_i16
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: [[DEF:%[0-9]+]]:gpr64all = IMPLICIT_DEF
    ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:gpr64 = INSERT_SUBREG [[DEF]], [[COPY]], %subreg.sub_32
    ; CHECK: [[COPY2:%[0-9]+]]:gpr32 = COPY [[INSERT_SUBREG]].sub_32
    ; CHECK: early-clobber %5:gpr32 = STXRH [[COPY2]], [[COPY1]] :: (volatile store (s16) into %ir.addr)
    ; CHECK: $w0 = COPY %5
    ; CHECK: RET_ReallyLR implicit $w0
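    ; Same pattern with a 16-bit mask: the intrinsic is selected to STXRH,
    ; again routing the 64-bit value through sub_32.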
    %3:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %6:gpr(s64) = G_CONSTANT i64 65535
    %7:gpr(s64) = G_ANYEXT %3(s32)
    %4:gpr(s64) = G_AND %7, %6
    %5:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stxr), %4(s64), %2(p0) :: (volatile store (s16) into %ir.addr)
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0

...
---
name:            test_store_i32
legalized:       true
regBankSelected: true
tracksRegLiveness: true
machineFunctionInfo: {}
body:             |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_i32
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: early-clobber %3:gpr32 = STXRW [[COPY]], [[COPY1]] :: (volatile store (s32) into %ir.addr)
    ; CHECK: $w0 = COPY %3
    ; CHECK: RET_ReallyLR implicit $w0
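    ; The G_ZEXT feeding the intrinsic leaves no trace in the selected code:
    ; STXRW stores only the low 32 bits, so the original 32-bit copy of $w1
    ; is used directly.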
    %1:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %3:gpr(s64) = G_ZEXT %1(s32)
    %4:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stxr), %3(s64), %2(p0) :: (volatile store (s32) into %ir.addr)
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0

...
---
name:            test_store_i64
legalized:       true
regBankSelected: true
tracksRegLiveness: true
machineFunctionInfo: {}
body:             |
  bb.0:
    liveins: $w0, $x1, $x2

    ; CHECK-LABEL: name: test_store_i64
    ; CHECK: liveins: $w0, $x1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr64 = COPY $x1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: early-clobber %2:gpr32 = STXRX [[COPY]], [[COPY1]] :: (volatile store (s64) into %ir.addr)
    ; CHECK: $w0 = COPY %2
    ; CHECK: RET_ReallyLR implicit $w0
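    ; A 64-bit value needs no masking or subregister copy; the intrinsic is
    ; selected straight to STXRX.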
    %1:gpr(s64) = COPY $x1
    %2:gpr(p0) = COPY $x2
    %3:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stxr), %1(s64), %2(p0) :: (volatile store (s64) into %ir.addr)
    $w0 = COPY %3(s32)
    RET_ReallyLR implicit $w0

...