1 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2 # RUN: llc -mtriple aarch64 -run-pass=aarch64-prelegalizer-combiner -verify-machineinstrs %s -o - | FileCheck %s
# Positive case: a simple (s16) G_LOAD followed by G_SEXT_INREG to 8 bits is
# combined into a narrower G_SEXTLOAD of (s8), keeping the original align 2.
4 name: sextload_from_inreg
6 tracksRegLiveness: true
13 ; CHECK-LABEL: name: sextload_from_inreg
15 ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
16 ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s16) = G_SEXTLOAD [[COPY]](p0) :: (load (s8), align 2)
17 ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[SEXTLOAD]](s16)
18 ; CHECK: $w0 = COPY [[ANYEXT]](s32)
19 ; CHECK: RET_ReallyLR implicit $w0
21 %1:_(s16) = G_LOAD %0(p0) :: (load (s16))
22 %2:_(s16) = G_SEXT_INREG %1, 8
23 %3:_(s32) = G_ANYEXT %2(s16)
25 RET_ReallyLR implicit $w0
# Positive case with an intervening G_STORE: the combine must rewrite the load
# in place (load -> sextload) rather than sink a use past the store, which the
# CHECK ordering (SEXTLOAD before G_STORE) verifies.
29 name: sextload_from_inreg_across_store
31 tracksRegLiveness: true
37 ; Check that the extend gets folded into the load, not the other way around, which
38 ; could cause mem dependence violations.
39 ; CHECK-LABEL: name: sextload_from_inreg_across_store
41 ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
42 ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s16) = G_SEXTLOAD [[COPY]](p0) :: (load (s8), align 2)
43 ; CHECK: G_STORE [[COPY]](p0), [[COPY]](p0) :: (store (p0))
44 ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[SEXTLOAD]](s16)
45 ; CHECK: $w0 = COPY [[ANYEXT]](s32)
46 ; CHECK: RET_ReallyLR implicit $w0
48 %1:_(s16) = G_LOAD %0(p0) :: (load (s16))
49 G_STORE %0(p0), %0(p0) :: (store (p0))
50 %2:_(s16) = G_SEXT_INREG %1, 8
51 %3:_(s32) = G_ANYEXT %2(s16)
53 RET_ReallyLR implicit $w0
# Negative case: a 24-bit G_SEXT_INREG width is not a byte-sized power of two,
# so no G_SEXTLOAD is formed — the CHECK lines require the original G_LOAD and
# G_SEXT_INREG to remain.
59 tracksRegLiveness: true
66 ; CHECK-LABEL: name: non_pow_2_inreg
68 ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
69 ; CHECK: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p0) :: (load (s32))
70 ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[LOAD]], 24
71 ; CHECK: $w0 = COPY [[SEXT_INREG]](s32)
72 ; CHECK: RET_ReallyLR implicit $w0
74 %1:_(s32) = G_LOAD %0(p0) :: (load (s32))
75 %2:_(s32) = G_SEXT_INREG %1, 24
77 RET_ReallyLR implicit $w0
# Negative case: an acquire (atomic) load must not be narrowed into a
# G_SEXTLOAD — the CHECK lines require the atomic G_LOAD and the
# G_SEXT_INREG to survive unchanged.
83 tracksRegLiveness: true
90 ; CHECK-LABEL: name: atomic
92 ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
93 ; CHECK: [[LOAD:%[0-9]+]]:_(s16) = G_LOAD [[COPY]](p0) :: (load acquire (s16))
94 ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s16) = G_SEXT_INREG [[LOAD]], 8
95 ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[SEXT_INREG]](s16)
96 ; CHECK: $w0 = COPY [[ANYEXT]](s32)
97 ; CHECK: RET_ReallyLR implicit $w0
99 %1:_(s16) = G_LOAD %0(p0) :: (load acquire (s16))
100 %2:_(s16) = G_SEXT_INREG %1, 8
101 %3:_(s32) = G_ANYEXT %2(s16)
103 RET_ReallyLR implicit $w0
# Negative case: a volatile load must not be narrowed into a G_SEXTLOAD —
# the CHECK lines require the volatile G_LOAD and the G_SEXT_INREG to
# survive unchanged.
109 tracksRegLiveness: true
116 ; CHECK-LABEL: name: volatile
117 ; CHECK: liveins: $x0
118 ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
119 ; CHECK: [[LOAD:%[0-9]+]]:_(s16) = G_LOAD [[COPY]](p0) :: (volatile load (s16))
120 ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s16) = G_SEXT_INREG [[LOAD]], 8
121 ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[SEXT_INREG]](s16)
122 ; CHECK: $w0 = COPY [[ANYEXT]](s32)
123 ; CHECK: RET_ReallyLR implicit $w0
125 %1:_(s16) = G_LOAD %0(p0) :: (volatile load (s16))
126 %2:_(s16) = G_SEXT_INREG %1, 8
127 %3:_(s32) = G_ANYEXT %2(s16)
129 RET_ReallyLR implicit $w0