# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-unknown-unknown -run-pass=instruction-select -verify-machineinstrs %s -o - | FileCheck %s
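#
# Test that G_INTRINSIC_W_SIDE_EFFECTS calls to @llvm.aarch64.stlxr are
# selected to the width-appropriate store-release exclusive instruction
# (STLXRX, STLXRW, STLXRH, or STLXRB), with the status result returned in $w0.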
--- |
  ; The original bodies of the first two functions are not shown here; trivial
  ; placeholder bodies keep the module parsing. Only the function signatures
  ; (and the %addr argument referenced by the %ir.addr memory operands below)
  ; matter, since the MIR bodies are what actually get selected.
  define i32 @test_store_release_i64(i32 %a, i64* %addr) { ret i32 %a }
  define i32 @test_store_release_i32(i32 %a, i64* %addr) { ret i32 %a }
  define void @test_store_release_i8(i32, i8 %val, i8* %addr) { ret void }
  define void @test_store_release_i16(i32, i16 %val, i16* %addr) { ret void }
...
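# 64-bit case: the s64 value and p0 address should feed STLXRX directly.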
---
name:            test_store_release_i64
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0, $x1, $x2

    ; CHECK-LABEL: name: test_store_release_i64
    ; CHECK: liveins: $w0, $x1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr64 = COPY $x1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: early-clobber %2:gpr32 = STLXRX [[COPY]], [[COPY1]] :: (volatile store 8 into %ir.addr)
    ; CHECK: $w0 = COPY %2
    ; CHECK: RET_ReallyLR implicit $w0
    %1:gpr(s64) = COPY $x1
    %2:gpr(p0) = COPY $x2
    %3:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %1(s64), %2(p0) :: (volatile store 8 into %ir.addr)
    $w0 = COPY %3(s32)
    RET_ReallyLR implicit $w0

...
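# 32-bit case: the value reaches the intrinsic through a G_ZEXT to s64;
# selection should use the original 32-bit register with STLXRW rather than
# the extended value.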
---
name:            test_store_release_i32
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i32
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: early-clobber %3:gpr32 = STLXRW [[COPY]], [[COPY1]] :: (volatile store 4 into %ir.addr)
    ; CHECK: $w0 = COPY %3
    ; CHECK: RET_ReallyLR implicit $w0
    %1:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %3:gpr(s64) = G_ZEXT %1(s32)
    %4:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %3(s64), %2(p0) :: (volatile store 4 into %ir.addr)
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0

...
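# 8-bit case: the value arrives as an s64 masked with 255 (G_ANYEXT + G_AND);
# the low 32 bits are recovered with a sub_32 subregister copy and fed to
# STLXRB.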
---
name:            test_store_release_i8
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i8
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: [[DEF:%[0-9]+]]:gpr64all = IMPLICIT_DEF
    ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:gpr64 = INSERT_SUBREG [[DEF]], [[COPY]], %subreg.sub_32
    ; CHECK: [[COPY2:%[0-9]+]]:gpr32 = COPY [[INSERT_SUBREG]].sub_32
    ; CHECK: early-clobber %5:gpr32 = STLXRB [[COPY2]], [[COPY1]] :: (volatile store 1 into %ir.addr)
    ; CHECK: $w0 = COPY %5
    ; CHECK: RET_ReallyLR implicit $w0
    %3:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %6:gpr(s64) = G_CONSTANT i64 255
    %7:gpr(s64) = G_ANYEXT %3(s32)
    %4:gpr(s64) = G_AND %7, %6
    %5:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %4(s64), %2(p0) :: (volatile store 1 into %ir.addr)
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0

...
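# 16-bit case: same shape as the 8-bit test, but masked with 65535 and
# selected to STLXRH.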
---
name:            test_store_release_i16
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0, $w1, $x2

    ; CHECK-LABEL: name: test_store_release_i16
    ; CHECK: liveins: $w0, $w1, $x2
    ; CHECK: [[COPY:%[0-9]+]]:gpr32 = COPY $w1
    ; CHECK: [[COPY1:%[0-9]+]]:gpr64sp = COPY $x2
    ; CHECK: [[DEF:%[0-9]+]]:gpr64all = IMPLICIT_DEF
    ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:gpr64 = INSERT_SUBREG [[DEF]], [[COPY]], %subreg.sub_32
    ; CHECK: [[COPY2:%[0-9]+]]:gpr32 = COPY [[INSERT_SUBREG]].sub_32
    ; CHECK: early-clobber %5:gpr32 = STLXRH [[COPY2]], [[COPY1]] :: (volatile store 2 into %ir.addr)
    ; CHECK: $w0 = COPY %5
    ; CHECK: RET_ReallyLR implicit $w0
    %3:gpr(s32) = COPY $w1
    %2:gpr(p0) = COPY $x2
    %6:gpr(s64) = G_CONSTANT i64 65535
    %7:gpr(s64) = G_ANYEXT %3(s32)
    %4:gpr(s64) = G_AND %7, %6
    %5:gpr(s32) = G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.stlxr), %4(s64), %2(p0) :: (volatile store 2 into %ir.addr)
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...