; RUN: llc -mtriple=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,SICIVI,FUNC %s
; RUN: llc -mtriple=amdgcn -mcpu=tonga -mattr=-flat-for-global,-enable-ds128 -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,SICIVI,FUNC %s
; RUN: llc -mtriple=amdgcn -mcpu=gfx900 -mattr=-flat-for-global,-enable-ds128 -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,FUNC %s
; RUN: llc -mtriple=amdgcn -mcpu=gfx908 -mattr=-flat-for-global,-enable-ds128 -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,FUNC %s
; RUN: llc -mtriple=r600 -mcpu=redwood < %s | FileCheck --check-prefixes=EG,FUNC %s

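; GCN covers all amdgcn runs above, SICIVI only the pre-GFX9 ones, and EG the
; r600 Evergreen (redwood) run.
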
; Testing for ds_read/write_b128
; RUN: llc -mtriple=amdgcn -mcpu=tahiti -mattr=+enable-ds128 < %s | FileCheck -check-prefixes=SI,FUNC %s
; RUN: llc -mtriple=amdgcn -mcpu=tonga -mattr=+enable-ds128 < %s | FileCheck -check-prefixes=CIVI,FUNC %s
; RUN: llc -mtriple=amdgcn -mcpu=gfx900 -mattr=+enable-ds128 < %s | FileCheck -check-prefixes=CIVI,FUNC %s
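; 96- and 128-bit DS operations only exist on CI and later, so even with
; +enable-ds128 the tahiti (SI) run is expected not to select them.
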
; FUNC-LABEL: {{^}}local_load_i32:
; SICIVI: s_mov_b32 m0, -1
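; DS instructions on SI/CI/VI implicitly use m0 as the LDS size limit, so it is
; initialized to -1 before the first local access; GFX9 does not need this,
; hence the SICIVI-only check.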
define amdgpu_kernel void @local_load_i32(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
entry:
  %ld = load i32, ptr addrspace(3) %in
  store i32 %ld, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_load_v2i32:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_load_v2i32(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
entry:
  %ld = load <2 x i32>, ptr addrspace(3) %in
  store <2 x i32> %ld, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_load_v3i32:
; SICIVI: s_mov_b32 m0, -1
; CIVI-DAG: ds_read_b96
define amdgpu_kernel void @local_load_v3i32(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
entry:
  %ld = load <3 x i32>, ptr addrspace(3) %in
  store <3 x i32> %ld, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_load_v4i32:
; SICIVI: s_mov_b32 m0, -1
; GCN: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset1:1{{$}}
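; Offsets on ds_read2_b64/ds_write2_b64 are in 8-byte units, so offset1:1
; covers byte offsets 0 and 8 of the 16-byte vector.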
define amdgpu_kernel void @local_load_v4i32(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
entry:
  %ld = load <4 x i32>, ptr addrspace(3) %in
  store <4 x i32> %ld, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_load_v8i32:
; SICIVI: s_mov_b32 m0, -1
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:2 offset1:3{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset1:1{{$}}
define amdgpu_kernel void @local_load_v8i32(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
entry:
  %ld = load <8 x i32>, ptr addrspace(3) %in
  store <8 x i32> %ld, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_load_v16i32:
; SICIVI: s_mov_b32 m0, -1
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:6 offset1:7{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:4 offset1:5{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:2 offset1:3{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset1:1{{$}}
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset0:6 offset1:7
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset0:4 offset1:5
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset0:2 offset1:3
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset1:1
define amdgpu_kernel void @local_load_v16i32(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
entry:
  %ld = load <16 x i32>, ptr addrspace(3) %in
  store <16 x i32> %ld, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_zextload_i32_to_i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_i32_to_i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load i32, ptr addrspace(3) %in
  %ext = zext i32 %ld to i64
  store i64 %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_sextload_i32_to_i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_i32_to_i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load i32, ptr addrspace(3) %in
  %ext = sext i32 %ld to i64
  store i64 %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_zextload_v1i32_to_v1i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v1i32_to_v1i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <1 x i32>, ptr addrspace(3) %in
  %ext = zext <1 x i32> %ld to <1 x i64>
  store <1 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_sextload_v1i32_to_v1i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v1i32_to_v1i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <1 x i32>, ptr addrspace(3) %in
  %ext = sext <1 x i32> %ld to <1 x i64>
  store <1 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_zextload_v2i32_to_v2i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v2i32_to_v2i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <2 x i32>, ptr addrspace(3) %in
  %ext = zext <2 x i32> %ld to <2 x i64>
  store <2 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_sextload_v2i32_to_v2i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v2i32_to_v2i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <2 x i32>, ptr addrspace(3) %in
  %ext = sext <2 x i32> %ld to <2 x i64>
  store <2 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_zextload_v4i32_to_v4i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v4i32_to_v4i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <4 x i32>, ptr addrspace(3) %in
  %ext = zext <4 x i32> %ld to <4 x i64>
  store <4 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_sextload_v4i32_to_v4i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v4i32_to_v4i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <4 x i32>, ptr addrspace(3) %in
  %ext = sext <4 x i32> %ld to <4 x i64>
  store <4 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; Tests if ds_read/write_b128 gets generated for the 16 byte aligned load.
; FUNC-LABEL: {{^}}local_v4i32_to_128:

; SI-NOT: ds_read_b128
; SI-NOT: ds_write_b128

; CIVI: ds_write_b128
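; With 16-byte alignment and +enable-ds128, CI/VI/GFX9 can use a single b128
; access instead of a ds_read2/write2_b64 pair; SI has no 128-bit DS
; instructions, which is what the SI-NOT lines above verify.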
define amdgpu_kernel void @local_v4i32_to_128(ptr addrspace(3) %out, ptr addrspace(3) %in) {
  %ld = load <4 x i32>, ptr addrspace(3) %in, align 16
  store <4 x i32> %ld, ptr addrspace(3) %out, align 16
  ret void
}

; FUNC-LABEL: {{^}}local_zextload_v8i32_to_v8i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v8i32_to_v8i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <8 x i32>, ptr addrspace(3) %in
  %ext = zext <8 x i32> %ld to <8 x i64>
  store <8 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_sextload_v8i32_to_v8i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v8i32_to_v8i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <8 x i32>, ptr addrspace(3) %in
  %ext = sext <8 x i32> %ld to <8 x i64>
  store <8 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_sextload_v16i32_to_v16i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v16i32_to_v16i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <16 x i32>, ptr addrspace(3) %in
  %ext = sext <16 x i32> %ld to <16 x i64>
  store <16 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_zextload_v16i32_to_v16i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v16i32_to_v16i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <16 x i32>, ptr addrspace(3) %in
  %ext = zext <16 x i32> %ld to <16 x i64>
  store <16 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_sextload_v32i32_to_v32i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v32i32_to_v32i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <32 x i32>, ptr addrspace(3) %in
  %ext = sext <32 x i32> %ld to <32 x i64>
  store <32 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_zextload_v32i32_to_v32i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v32i32_to_v32i64(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <32 x i32>, ptr addrspace(3) %in
  %ext = zext <32 x i32> %ld to <32 x i64>
  store <32 x i64> %ext, ptr addrspace(3) %out
  ret void
}

; FUNC-LABEL: {{^}}local_load_v32i32:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_load_v32i32(ptr addrspace(3) %out, ptr addrspace(3) %in) #0 {
  %ld = load <32 x i32>, ptr addrspace(3) %in
  store <32 x i32> %ld, ptr addrspace(3) %out
  ret void
}

attributes #0 = { nounwind }