; RUN: llc -march=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,SICIVI,FUNC %s
; RUN: llc -march=amdgcn -mcpu=tonga -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,SICIVI,FUNC %s
; RUN: llc -march=amdgcn -mcpu=gfx900 -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,VI,FUNC %s
; RUN: llc -march=amdgcn -mcpu=gfx908 -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,VI,FUNC %s
; RUN: llc -march=r600 -mcpu=redwood < %s | FileCheck -check-prefix=EG -check-prefix=FUNC %s

; Testing for ds_read/write_128
; RUN: llc -march=amdgcn -mcpu=tahiti -mattr=+enable-ds128 < %s | FileCheck -check-prefixes=SI,FUNC %s
; RUN: llc -march=amdgcn -mcpu=tonga -mattr=+enable-ds128 < %s | FileCheck -check-prefixes=CIVI,FUNC %s
; RUN: llc -march=amdgcn -mcpu=gfx900 -mattr=+enable-ds128 < %s | FileCheck -check-prefixes=CIVI,FUNC %s
; Loads a single i32 from LDS and stores it back; SICIVI targets must
; initialize m0 before any DS access.
; FUNC-LABEL: {{^}}local_load_i32:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_load_i32(i32 addrspace(3)* %out, i32 addrspace(3)* %in) #0 {
entry:
  %ld = load i32, i32 addrspace(3)* %in
  store i32 %ld, i32 addrspace(3)* %out
  ret void
}
; <2 x i32> LDS round-trip (8 bytes).
; FUNC-LABEL: {{^}}local_load_v2i32:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_load_v2i32(<2 x i32> addrspace(3)* %out, <2 x i32> addrspace(3)* %in) #0 {
entry:
  %ld = load <2 x i32>, <2 x i32> addrspace(3)* %in
  store <2 x i32> %ld, <2 x i32> addrspace(3)* %out
  ret void
}
; <3 x i32> LDS round-trip: expected to split into a b64 + b32 DS read.
; FUNC-LABEL: {{^}}local_load_v3i32:
; SICIVI: s_mov_b32 m0, -1
; GCN-DAG: ds_read_b64
; GCN-DAG: ds_read_b32
define amdgpu_kernel void @local_load_v3i32(<3 x i32> addrspace(3)* %out, <3 x i32> addrspace(3)* %in) #0 {
entry:
  %ld = load <3 x i32>, <3 x i32> addrspace(3)* %in
  store <3 x i32> %ld, <3 x i32> addrspace(3)* %out
  ret void
}
; <4 x i32> LDS round-trip: expected as a paired 64-bit DS read (ds_read2_b64).
; FUNC-LABEL: {{^}}local_load_v4i32:
; SICIVI: s_mov_b32 m0, -1
; GCN: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset1:1{{$}}
define amdgpu_kernel void @local_load_v4i32(<4 x i32> addrspace(3)* %out, <4 x i32> addrspace(3)* %in) #0 {
entry:
  %ld = load <4 x i32>, <4 x i32> addrspace(3)* %in
  store <4 x i32> %ld, <4 x i32> addrspace(3)* %out
  ret void
}
; <8 x i32> LDS round-trip: two ds_read2_b64 pairs cover the 32 bytes.
; FUNC-LABEL: {{^}}local_load_v8i32:
; SICIVI: s_mov_b32 m0, -1
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:2 offset1:3{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset1:1{{$}}
define amdgpu_kernel void @local_load_v8i32(<8 x i32> addrspace(3)* %out, <8 x i32> addrspace(3)* %in) #0 {
entry:
  %ld = load <8 x i32>, <8 x i32> addrspace(3)* %in
  store <8 x i32> %ld, <8 x i32> addrspace(3)* %out
  ret void
}
; <16 x i32> LDS round-trip: four paired 64-bit reads and four paired
; 64-bit writes cover the 64 bytes.
; FUNC-LABEL: {{^}}local_load_v16i32:
; SICIVI: s_mov_b32 m0, -1
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:6 offset1:7{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:4 offset1:5{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:2 offset1:3{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset1:1{{$}}
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset0:6 offset1:7
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset0:4 offset1:5
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset0:2 offset1:3
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset1:1
define amdgpu_kernel void @local_load_v16i32(<16 x i32> addrspace(3)* %out, <16 x i32> addrspace(3)* %in) #0 {
entry:
  %ld = load <16 x i32>, <16 x i32> addrspace(3)* %in
  store <16 x i32> %ld, <16 x i32> addrspace(3)* %out
  ret void
}
; i32 -> i64 zero-extending LDS load.
; FUNC-LABEL: {{^}}local_zextload_i32_to_i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_i32_to_i64(i64 addrspace(3)* %out, i32 addrspace(3)* %in) #0 {
  %ld = load i32, i32 addrspace(3)* %in
  %ext = zext i32 %ld to i64
  store i64 %ext, i64 addrspace(3)* %out
  ret void
}
; i32 -> i64 sign-extending LDS load.
; FUNC-LABEL: {{^}}local_sextload_i32_to_i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_i32_to_i64(i64 addrspace(3)* %out, i32 addrspace(3)* %in) #0 {
  %ld = load i32, i32 addrspace(3)* %in
  %ext = sext i32 %ld to i64
  store i64 %ext, i64 addrspace(3)* %out
  ret void
}
; <1 x i32> -> <1 x i64> zero-extending LDS load.
; FUNC-LABEL: {{^}}local_zextload_v1i32_to_v1i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v1i32_to_v1i64(<1 x i64> addrspace(3)* %out, <1 x i32> addrspace(3)* %in) #0 {
  %ld = load <1 x i32>, <1 x i32> addrspace(3)* %in
  %ext = zext <1 x i32> %ld to <1 x i64>
  store <1 x i64> %ext, <1 x i64> addrspace(3)* %out
  ret void
}
; <1 x i32> -> <1 x i64> sign-extending LDS load.
; FUNC-LABEL: {{^}}local_sextload_v1i32_to_v1i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v1i32_to_v1i64(<1 x i64> addrspace(3)* %out, <1 x i32> addrspace(3)* %in) #0 {
  %ld = load <1 x i32>, <1 x i32> addrspace(3)* %in
  %ext = sext <1 x i32> %ld to <1 x i64>
  store <1 x i64> %ext, <1 x i64> addrspace(3)* %out
  ret void
}
; <2 x i32> -> <2 x i64> zero-extending LDS load.
; FUNC-LABEL: {{^}}local_zextload_v2i32_to_v2i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v2i32_to_v2i64(<2 x i64> addrspace(3)* %out, <2 x i32> addrspace(3)* %in) #0 {
  %ld = load <2 x i32>, <2 x i32> addrspace(3)* %in
  %ext = zext <2 x i32> %ld to <2 x i64>
  store <2 x i64> %ext, <2 x i64> addrspace(3)* %out
  ret void
}
; <2 x i32> -> <2 x i64> sign-extending LDS load.
; FUNC-LABEL: {{^}}local_sextload_v2i32_to_v2i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v2i32_to_v2i64(<2 x i64> addrspace(3)* %out, <2 x i32> addrspace(3)* %in) #0 {
  %ld = load <2 x i32>, <2 x i32> addrspace(3)* %in
  %ext = sext <2 x i32> %ld to <2 x i64>
  store <2 x i64> %ext, <2 x i64> addrspace(3)* %out
  ret void
}
; <4 x i32> -> <4 x i64> zero-extending LDS load.
; FUNC-LABEL: {{^}}local_zextload_v4i32_to_v4i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v4i32_to_v4i64(<4 x i64> addrspace(3)* %out, <4 x i32> addrspace(3)* %in) #0 {
  %ld = load <4 x i32>, <4 x i32> addrspace(3)* %in
  %ext = zext <4 x i32> %ld to <4 x i64>
  store <4 x i64> %ext, <4 x i64> addrspace(3)* %out
  ret void
}
; <4 x i32> -> <4 x i64> sign-extending LDS load.
; FUNC-LABEL: {{^}}local_sextload_v4i32_to_v4i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v4i32_to_v4i64(<4 x i64> addrspace(3)* %out, <4 x i32> addrspace(3)* %in) #0 {
  %ld = load <4 x i32>, <4 x i32> addrspace(3)* %in
  %ext = sext <4 x i32> %ld to <4 x i64>
  store <4 x i64> %ext, <4 x i64> addrspace(3)* %out
  ret void
}
; Tests if ds_read/write_b128 gets generated for the 16 byte aligned load.
; The +enable-ds128 runs should use b128 on CI/VI but not SI.
; FUNC-LABEL: {{^}}local_v4i32_to_128:
; SI-NOT: ds_read_b128
; SI-NOT: ds_write_b128
; CIVI: ds_write_b128
define amdgpu_kernel void @local_v4i32_to_128(<4 x i32> addrspace(3)* %out, <4 x i32> addrspace(3)* %in) {
  %ld = load <4 x i32>, <4 x i32> addrspace(3)* %in, align 16
  store <4 x i32> %ld, <4 x i32> addrspace(3)* %out, align 16
  ret void
}
; <8 x i32> -> <8 x i64> zero-extending LDS load.
; FUNC-LABEL: {{^}}local_zextload_v8i32_to_v8i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v8i32_to_v8i64(<8 x i64> addrspace(3)* %out, <8 x i32> addrspace(3)* %in) #0 {
  %ld = load <8 x i32>, <8 x i32> addrspace(3)* %in
  %ext = zext <8 x i32> %ld to <8 x i64>
  store <8 x i64> %ext, <8 x i64> addrspace(3)* %out
  ret void
}
; <8 x i32> -> <8 x i64> sign-extending LDS load.
; FUNC-LABEL: {{^}}local_sextload_v8i32_to_v8i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v8i32_to_v8i64(<8 x i64> addrspace(3)* %out, <8 x i32> addrspace(3)* %in) #0 {
  %ld = load <8 x i32>, <8 x i32> addrspace(3)* %in
  %ext = sext <8 x i32> %ld to <8 x i64>
  store <8 x i64> %ext, <8 x i64> addrspace(3)* %out
  ret void
}
; <16 x i32> -> <16 x i64> sign-extending LDS load.
; FUNC-LABEL: {{^}}local_sextload_v16i32_to_v16i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v16i32_to_v16i64(<16 x i64> addrspace(3)* %out, <16 x i32> addrspace(3)* %in) #0 {
  %ld = load <16 x i32>, <16 x i32> addrspace(3)* %in
  %ext = sext <16 x i32> %ld to <16 x i64>
  store <16 x i64> %ext, <16 x i64> addrspace(3)* %out
  ret void
}
; <16 x i32> -> <16 x i64> zero-extending LDS load.
; FUNC-LABEL: {{^}}local_zextload_v16i32_to_v16i64
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v16i32_to_v16i64(<16 x i64> addrspace(3)* %out, <16 x i32> addrspace(3)* %in) #0 {
  %ld = load <16 x i32>, <16 x i32> addrspace(3)* %in
  %ext = zext <16 x i32> %ld to <16 x i64>
  store <16 x i64> %ext, <16 x i64> addrspace(3)* %out
  ret void
}
; <32 x i32> -> <32 x i64> sign-extending LDS load.
; FUNC-LABEL: {{^}}local_sextload_v32i32_to_v32i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v32i32_to_v32i64(<32 x i64> addrspace(3)* %out, <32 x i32> addrspace(3)* %in) #0 {
  %ld = load <32 x i32>, <32 x i32> addrspace(3)* %in
  %ext = sext <32 x i32> %ld to <32 x i64>
  store <32 x i64> %ext, <32 x i64> addrspace(3)* %out
  ret void
}
; <32 x i32> -> <32 x i64> zero-extending LDS load.
; FUNC-LABEL: {{^}}local_zextload_v32i32_to_v32i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v32i32_to_v32i64(<32 x i64> addrspace(3)* %out, <32 x i32> addrspace(3)* %in) #0 {
  %ld = load <32 x i32>, <32 x i32> addrspace(3)* %in
  %ext = zext <32 x i32> %ld to <32 x i64>
  store <32 x i64> %ext, <32 x i64> addrspace(3)* %out
  ret void
}
; <32 x i32> LDS round-trip (128 bytes).
; FUNC-LABEL: {{^}}local_load_v32i32:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_load_v32i32(<32 x i32> addrspace(3)* %out, <32 x i32> addrspace(3)* %in) #0 {
  %ld = load <32 x i32>, <32 x i32> addrspace(3)* %in
  store <32 x i32> %ld, <32 x i32> addrspace(3)* %out
  ret void
}
attributes #0 = { nounwind }