; RUN: llc -march=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,SICIVI,FUNC %s
; RUN: llc -march=amdgcn -mcpu=tonga -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,SICIVI,FUNC %s
; RUN: llc -march=amdgcn -mcpu=gfx900 -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefixes=GCN,GFX9,FUNC %s
; RUN: llc -march=r600 -mcpu=redwood < %s | FileCheck -check-prefix=EG -check-prefix=FUNC %s
; Testing for ds_read/write_128
; RUN: llc -march=amdgcn -mcpu=tahiti -mattr=+enable-ds128 < %s | FileCheck -check-prefixes=SI,FUNC %s
; RUN: llc -march=amdgcn -mcpu=tonga -mattr=+enable-ds128 < %s | FileCheck -check-prefixes=CIVI,FUNC %s
; RUN: llc -march=amdgcn -mcpu=gfx900 -mattr=+enable-ds128 < %s | FileCheck -check-prefixes=CIVI,FUNC %s
; Scalar i32 load/store through LDS. SICIVI targets must initialize m0 before DS access.
; FUNC-LABEL: {{^}}local_load_i32:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_load_i32(i32 addrspace(3)* %out, i32 addrspace(3)* %in) #0 {
entry:
  %ld = load i32, i32 addrspace(3)* %in
  store i32 %ld, i32 addrspace(3)* %out
  ret void
}
; <2 x i32> LDS round trip (8 bytes).
; FUNC-LABEL: {{^}}local_load_v2i32:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_load_v2i32(<2 x i32> addrspace(3)* %out, <2 x i32> addrspace(3)* %in) #0 {
entry:
  %ld = load <2 x i32>, <2 x i32> addrspace(3)* %in
  store <2 x i32> %ld, <2 x i32> addrspace(3)* %out
  ret void
}
; <3 x i32> LDS round trip: expected to split into a b64 plus a b32 DS read.
; FUNC-LABEL: {{^}}local_load_v3i32:
; SICIVI: s_mov_b32 m0, -1
; GCN-DAG: ds_read_b64
; GCN-DAG: ds_read_b32
define amdgpu_kernel void @local_load_v3i32(<3 x i32> addrspace(3)* %out, <3 x i32> addrspace(3)* %in) #0 {
entry:
  %ld = load <3 x i32>, <3 x i32> addrspace(3)* %in
  store <3 x i32> %ld, <3 x i32> addrspace(3)* %out
  ret void
}
; <4 x i32> LDS round trip: read as a paired b64 (offset1 in 8-byte units).
; FUNC-LABEL: {{^}}local_load_v4i32:
; SICIVI: s_mov_b32 m0, -1
; GCN: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset1:1{{$}}
define amdgpu_kernel void @local_load_v4i32(<4 x i32> addrspace(3)* %out, <4 x i32> addrspace(3)* %in) #0 {
entry:
  %ld = load <4 x i32>, <4 x i32> addrspace(3)* %in
  store <4 x i32> %ld, <4 x i32> addrspace(3)* %out
  ret void
}
; <8 x i32> LDS round trip: two paired b64 reads covering 32 bytes.
; FUNC-LABEL: {{^}}local_load_v8i32:
; SICIVI: s_mov_b32 m0, -1
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:2 offset1:3{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset1:1{{$}}
define amdgpu_kernel void @local_load_v8i32(<8 x i32> addrspace(3)* %out, <8 x i32> addrspace(3)* %in) #0 {
entry:
  %ld = load <8 x i32>, <8 x i32> addrspace(3)* %in
  store <8 x i32> %ld, <8 x i32> addrspace(3)* %out
  ret void
}
; <16 x i32> LDS round trip: four paired b64 reads and four paired b64 writes (64 bytes).
; FUNC-LABEL: {{^}}local_load_v16i32:
; SICIVI: s_mov_b32 m0, -1
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:6 offset1:7{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:4 offset1:5{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset0:2 offset1:3{{$}}
; GCN-DAG: ds_read2_b64 v{{\[[0-9]+:[0-9]+\]}}, v{{[0-9]+}} offset1:1{{$}}
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset0:6 offset1:7
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset0:4 offset1:5
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset0:2 offset1:3
; GCN-DAG: ds_write2_b64 v{{[0-9]+}}, v{{\[[0-9]+:[0-9]+\]}}, v{{\[[0-9]+:[0-9]+\]}} offset1:1
define amdgpu_kernel void @local_load_v16i32(<16 x i32> addrspace(3)* %out, <16 x i32> addrspace(3)* %in) #0 {
entry:
  %ld = load <16 x i32>, <16 x i32> addrspace(3)* %in
  store <16 x i32> %ld, <16 x i32> addrspace(3)* %out
  ret void
}
; Zero-extending i32 LDS load stored back as i64.
; FUNC-LABEL: {{^}}local_zextload_i32_to_i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_i32_to_i64(i64 addrspace(3)* %out, i32 addrspace(3)* %in) #0 {
  %ld = load i32, i32 addrspace(3)* %in
  %ext = zext i32 %ld to i64
  store i64 %ext, i64 addrspace(3)* %out
  ret void
}
; Sign-extending i32 LDS load stored back as i64.
; FUNC-LABEL: {{^}}local_sextload_i32_to_i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_i32_to_i64(i64 addrspace(3)* %out, i32 addrspace(3)* %in) #0 {
  %ld = load i32, i32 addrspace(3)* %in
  %ext = sext i32 %ld to i64
  store i64 %ext, i64 addrspace(3)* %out
  ret void
}
; Zero-extending <1 x i32> -> <1 x i64> through LDS.
; FUNC-LABEL: {{^}}local_zextload_v1i32_to_v1i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v1i32_to_v1i64(<1 x i64> addrspace(3)* %out, <1 x i32> addrspace(3)* %in) #0 {
  %ld = load <1 x i32>, <1 x i32> addrspace(3)* %in
  %ext = zext <1 x i32> %ld to <1 x i64>
  store <1 x i64> %ext, <1 x i64> addrspace(3)* %out
  ret void
}
; Sign-extending <1 x i32> -> <1 x i64> through LDS.
; FUNC-LABEL: {{^}}local_sextload_v1i32_to_v1i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v1i32_to_v1i64(<1 x i64> addrspace(3)* %out, <1 x i32> addrspace(3)* %in) #0 {
  %ld = load <1 x i32>, <1 x i32> addrspace(3)* %in
  %ext = sext <1 x i32> %ld to <1 x i64>
  store <1 x i64> %ext, <1 x i64> addrspace(3)* %out
  ret void
}
; Zero-extending <2 x i32> -> <2 x i64> through LDS.
; FUNC-LABEL: {{^}}local_zextload_v2i32_to_v2i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v2i32_to_v2i64(<2 x i64> addrspace(3)* %out, <2 x i32> addrspace(3)* %in) #0 {
  %ld = load <2 x i32>, <2 x i32> addrspace(3)* %in
  %ext = zext <2 x i32> %ld to <2 x i64>
  store <2 x i64> %ext, <2 x i64> addrspace(3)* %out
  ret void
}
; Sign-extending <2 x i32> -> <2 x i64> through LDS.
; FUNC-LABEL: {{^}}local_sextload_v2i32_to_v2i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v2i32_to_v2i64(<2 x i64> addrspace(3)* %out, <2 x i32> addrspace(3)* %in) #0 {
  %ld = load <2 x i32>, <2 x i32> addrspace(3)* %in
  %ext = sext <2 x i32> %ld to <2 x i64>
  store <2 x i64> %ext, <2 x i64> addrspace(3)* %out
  ret void
}
; Zero-extending <4 x i32> -> <4 x i64> through LDS.
; FUNC-LABEL: {{^}}local_zextload_v4i32_to_v4i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v4i32_to_v4i64(<4 x i64> addrspace(3)* %out, <4 x i32> addrspace(3)* %in) #0 {
  %ld = load <4 x i32>, <4 x i32> addrspace(3)* %in
  %ext = zext <4 x i32> %ld to <4 x i64>
  store <4 x i64> %ext, <4 x i64> addrspace(3)* %out
  ret void
}
; Sign-extending <4 x i32> -> <4 x i64> through LDS.
; FUNC-LABEL: {{^}}local_sextload_v4i32_to_v4i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v4i32_to_v4i64(<4 x i64> addrspace(3)* %out, <4 x i32> addrspace(3)* %in) #0 {
  %ld = load <4 x i32>, <4 x i32> addrspace(3)* %in
  %ext = sext <4 x i32> %ld to <4 x i64>
  store <4 x i64> %ext, <4 x i64> addrspace(3)* %out
  ret void
}
; Tests if ds_read/write_b128 gets generated for the 16 byte aligned load.
; SI (tahiti) must not use b128 even with +enable-ds128; CI/VI may.
; FUNC-LABEL: {{^}}local_v4i32_to_128:

; SI-NOT: ds_read_b128
; SI-NOT: ds_write_b128

; CIVI: ds_write_b128
define amdgpu_kernel void @local_v4i32_to_128(<4 x i32> addrspace(3)* %out, <4 x i32> addrspace(3)* %in) {
  %ld = load <4 x i32>, <4 x i32> addrspace(3)* %in, align 16
  store <4 x i32> %ld, <4 x i32> addrspace(3)* %out, align 16
  ret void
}
; Zero-extending <8 x i32> -> <8 x i64> through LDS.
; FUNC-LABEL: {{^}}local_zextload_v8i32_to_v8i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v8i32_to_v8i64(<8 x i64> addrspace(3)* %out, <8 x i32> addrspace(3)* %in) #0 {
  %ld = load <8 x i32>, <8 x i32> addrspace(3)* %in
  %ext = zext <8 x i32> %ld to <8 x i64>
  store <8 x i64> %ext, <8 x i64> addrspace(3)* %out
  ret void
}
; Sign-extending <8 x i32> -> <8 x i64> through LDS.
; FUNC-LABEL: {{^}}local_sextload_v8i32_to_v8i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v8i32_to_v8i64(<8 x i64> addrspace(3)* %out, <8 x i32> addrspace(3)* %in) #0 {
  %ld = load <8 x i32>, <8 x i32> addrspace(3)* %in
  %ext = sext <8 x i32> %ld to <8 x i64>
  store <8 x i64> %ext, <8 x i64> addrspace(3)* %out
  ret void
}
; Sign-extending <16 x i32> -> <16 x i64> through LDS.
; FUNC-LABEL: {{^}}local_sextload_v16i32_to_v16i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v16i32_to_v16i64(<16 x i64> addrspace(3)* %out, <16 x i32> addrspace(3)* %in) #0 {
  %ld = load <16 x i32>, <16 x i32> addrspace(3)* %in
  %ext = sext <16 x i32> %ld to <16 x i64>
  store <16 x i64> %ext, <16 x i64> addrspace(3)* %out
  ret void
}
; Zero-extending <16 x i32> -> <16 x i64> through LDS.
; Note: added the trailing ':' to the label anchor for consistency with the other checks.
; FUNC-LABEL: {{^}}local_zextload_v16i32_to_v16i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v16i32_to_v16i64(<16 x i64> addrspace(3)* %out, <16 x i32> addrspace(3)* %in) #0 {
  %ld = load <16 x i32>, <16 x i32> addrspace(3)* %in
  %ext = zext <16 x i32> %ld to <16 x i64>
  store <16 x i64> %ext, <16 x i64> addrspace(3)* %out
  ret void
}
; Sign-extending <32 x i32> -> <32 x i64> through LDS.
; FUNC-LABEL: {{^}}local_sextload_v32i32_to_v32i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_sextload_v32i32_to_v32i64(<32 x i64> addrspace(3)* %out, <32 x i32> addrspace(3)* %in) #0 {
  %ld = load <32 x i32>, <32 x i32> addrspace(3)* %in
  %ext = sext <32 x i32> %ld to <32 x i64>
  store <32 x i64> %ext, <32 x i64> addrspace(3)* %out
  ret void
}
; Zero-extending <32 x i32> -> <32 x i64> through LDS.
; FUNC-LABEL: {{^}}local_zextload_v32i32_to_v32i64:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_zextload_v32i32_to_v32i64(<32 x i64> addrspace(3)* %out, <32 x i32> addrspace(3)* %in) #0 {
  %ld = load <32 x i32>, <32 x i32> addrspace(3)* %in
  %ext = zext <32 x i32> %ld to <32 x i64>
  store <32 x i64> %ext, <32 x i64> addrspace(3)* %out
  ret void
}
; <32 x i32> (128-byte) LDS round trip.
; FUNC-LABEL: {{^}}local_load_v32i32:
; SICIVI: s_mov_b32 m0, -1
define amdgpu_kernel void @local_load_v32i32(<32 x i32> addrspace(3)* %out, <32 x i32> addrspace(3)* %in) #0 {
  %ld = load <32 x i32>, <32 x i32> addrspace(3)* %in
  store <32 x i32> %ld, <32 x i32> addrspace(3)* %out
  ret void
}

attributes #0 = { nounwind }