; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -passes=slp-vectorizer -S -mtriple=x86_64-apple-macosx10.8.0 -mcpu=corei7 | FileCheck %s

; Check that the SLPVectorizer does not crash when handling
; unreachable blocks with unschedulable instructions.

target datalayout = "e-m:o-i64:64-f80:128-n8:16:32:64-S128"
target triple = "x86_64-apple-macosx10.9.0"

define void @foo(ptr nocapture %x) #0 {
; CHECK-LABEL: @foo(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    br label [[BB2:%.*]]
; CHECK:       bb1:
; CHECK-NEXT:    [[BAD:%.*]] = fadd float [[BAD]], 0.000000e+00
; CHECK-NEXT:    br label [[BB2]]
; CHECK:       bb2:
; CHECK-NEXT:    [[TMP0:%.*]] = phi <4 x i32> [ poison, [[BB1:%.*]] ], [ splat (i32 2), [[ENTRY:%.*]] ]
; CHECK-NEXT:    store <4 x i32> [[TMP0]], ptr [[X:%.*]], align 4
; CHECK-NEXT:    ret void
;
entry:
  br label %bb2

bb1:                                              ; an unreachable block
  %t3 = getelementptr inbounds i32, ptr %x, i64 4
  %t4 = load i32, ptr %t3, align 4
  %t5 = getelementptr inbounds i32, ptr %x, i64 5
  %t6 = load i32, ptr %t5, align 4
  %bad = fadd float %bad, 0.000000e+00            ; <- an instruction with a self-dependency,
                                                  ;    but legal in unreachable code
  %t7 = getelementptr inbounds i32, ptr %x, i64 6
  %t8 = load i32, ptr %t7, align 4
  %t9 = getelementptr inbounds i32, ptr %x, i64 7
  %t10 = load i32, ptr %t9, align 4
  br label %bb2

bb2:
  %t1.0 = phi i32 [ %t4, %bb1 ], [ 2, %entry ]
  %t2.0 = phi i32 [ %t6, %bb1 ], [ 2, %entry ]
  %t3.0 = phi i32 [ %t8, %bb1 ], [ 2, %entry ]
  %t4.0 = phi i32 [ %t10, %bb1 ], [ 2, %entry ]
  store i32 %t1.0, ptr %x, align 4
  %t12 = getelementptr inbounds i32, ptr %x, i64 1
  store i32 %t2.0, ptr %t12, align 4
  %t13 = getelementptr inbounds i32, ptr %x, i64 2
  store i32 %t3.0, ptr %t13, align 4
  %t14 = getelementptr inbounds i32, ptr %x, i64 3
  store i32 %t4.0, ptr %t14, align 4
  ret void
}

; The original name of this second test function was not preserved; @bar below is a placeholder.
define void @bar() {
; CHECK-LABEL: @bar(
; CHECK-NEXT:  bb:
; CHECK-NEXT:    [[TMP:%.*]] = load atomic ptr, ptr undef unordered, align 8
; CHECK-NEXT:    br label [[BB6:%.*]]
; CHECK:       bb5:
; CHECK-NEXT:    [[TMP4:%.*]] = load atomic ptr, ptr undef unordered, align 8
; CHECK-NEXT:    br label [[BB6]]
; CHECK:       bb6:
; CHECK-NEXT:    [[TMP7:%.*]] = phi ptr [ [[TMP]], [[BB5:%.*]] ], [ undef, [[BB:%.*]] ]
; CHECK-NEXT:    [[TMP8:%.*]] = phi ptr [ [[TMP4]], [[BB5]] ], [ undef, [[BB]] ]
; CHECK-NEXT:    ret void
;
bb:
  %tmp = load atomic ptr, ptr undef unordered, align 8
  br label %bb6

bb5:                                              ; No predecessors!
  %tmp4 = load atomic ptr, ptr undef unordered, align 8
  br label %bb6

bb6:                                              ; preds = %bb5, %bb
  %tmp7 = phi ptr [ %tmp, %bb5 ], [ undef, %bb ]
  %tmp8 = phi ptr [ %tmp4, %bb5 ], [ undef, %bb ]
  ret void
}