; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -passes=gvn -S < %s | FileCheck %s

; Check that in both cases the second load is recognized as redundant
; and is replaced by the result of the first load.

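; f0: both masked loads use the same pointer, alignment, mask, and passthru,
; with no store in between, so the second load is a straightforward full redundancy.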
define <128 x i8> @f0(ptr %a0, <128 x i8> %a1, <128 x i8> %a2) {
; CHECK-LABEL: @f0(
; CHECK-NEXT:    [[V0:%.*]] = icmp eq <128 x i8> [[A1:%.*]], [[A2:%.*]]
; CHECK-NEXT:    [[V1:%.*]] = call <128 x i8> @llvm.masked.load.v128i8.p0(ptr [[A0:%.*]], i32 4, <128 x i1> [[V0]], <128 x i8> undef)
; CHECK-NEXT:    [[V3:%.*]] = add <128 x i8> [[V1]], [[V1]]
; CHECK-NEXT:    ret <128 x i8> [[V3]]
;
  %v0 = icmp eq <128 x i8> %a1, %a2
  %v1 = call <128 x i8> @llvm.masked.load.v128i8.p0(ptr %a0, i32 4, <128 x i1> %v0, <128 x i8> undef)
  %v2 = call <128 x i8> @llvm.masked.load.v128i8.p0(ptr %a0, i32 4, <128 x i1> %v0, <128 x i8> undef)
  %v3 = add <128 x i8> %v1, %v2
  ret <128 x i8> %v3
}

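; f1: the intervening masked store writes through %v1 = getelementptr <128 x i8>, ptr %a0, i32 1,
; i.e. one whole <128 x i8> past the loaded location, so it cannot clobber the load
; and the second load is still redundant.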
define <128 x i8> @f1(ptr %a0, <128 x i8> %a1, <128 x i8> %a2) {
; CHECK-LABEL: @f1(
; CHECK-NEXT:    [[V0:%.*]] = icmp eq <128 x i8> [[A1:%.*]], [[A2:%.*]]
; CHECK-NEXT:    [[V1:%.*]] = getelementptr <128 x i8>, ptr [[A0:%.*]], i32 1
; CHECK-NEXT:    [[V2:%.*]] = call <128 x i8> @llvm.masked.load.v128i8.p0(ptr [[A0]], i32 4, <128 x i1> [[V0]], <128 x i8> undef)
; CHECK-NEXT:    call void @llvm.masked.store.v128i8.p0(<128 x i8> [[A2]], ptr [[V1]], i32 4, <128 x i1> [[V0]])
; CHECK-NEXT:    [[V4:%.*]] = add <128 x i8> [[V2]], [[V2]]
; CHECK-NEXT:    ret <128 x i8> [[V4]]
;
  %v0 = icmp eq <128 x i8> %a1, %a2
  %v1 = getelementptr <128 x i8>, ptr %a0, i32 1
  %v2 = call <128 x i8> @llvm.masked.load.v128i8.p0(ptr %a0, i32 4, <128 x i1> %v0, <128 x i8> undef)
  call void @llvm.masked.store.v128i8.p0(<128 x i8> %a2, ptr %v1, i32 4, <128 x i1> %v0)
  %v3 = call <128 x i8> @llvm.masked.load.v128i8.p0(ptr %a0, i32 4, <128 x i1> %v0, <128 x i8> undef)
  %v4 = add <128 x i8> %v2, %v3
  ret <128 x i8> %v4
}

declare <128 x i8> @llvm.masked.load.v128i8.p0(ptr, i32, <128 x i1>, <128 x i8>)
declare void @llvm.masked.store.v128i8.p0(<128 x i8>, ptr, i32, <128 x i1>)