; RUN: opt < %s -mtriple=x86_64-unknown-unknown -mattr=+amx-int8 -mattr=+avx512f -lower-amx-type -S | FileCheck %s

@buf = dso_local global [1024 x i8] zeroinitializer, align 16
@buf2 = dso_local global [1024 x i8] zeroinitializer, align 16
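
; The CHECK lines below encode the expected lowering: x86_amx values produced
; in %if.then and %if.else reach %if.end through phi nodes, and the
; -lower-amx-type pass is expected to eliminate those phis by spilling each
; incoming tile to a 1024-byte-aligned <256 x i32> alloca with
; @llvm.x86.tilestored64.internal in its predecessor block and reloading it in
; %if.end with @llvm.x86.tileloadd64.internal before the tdpbssd computation.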

; Function Attrs: nounwind uwtable
define dso_local void @test_api(i32 %cond, i16 signext %row, i16 signext %col) local_unnamed_addr {
; CHECK-LABEL: @test_api(
; CHECK: %{{[0-9]+}} = alloca <256 x i32>, align 1024
; CHECK-NEXT: %{{[0-9]+}} = alloca <256 x i32>, align 1024
; CHECK-NEXT: %{{[0-9]+}} = alloca <256 x i32>, align 1024
; CHECK-NEXT: %{{[0-9]+}} = alloca <256 x i32>, align 1024
; CHECK-NEXT: %tobool.not = icmp eq i32 %cond, 0
; CHECK-NEXT: br i1 %tobool.not, label %if.else, label %if.then
; CHECK: if.then:
; CHECK-NEXT: %{{[0-9]+}} = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 8, ptr @buf, i64 32)
; CHECK-NEXT: call void @llvm.x86.tilestored64.internal(i16 %row, i16 8, ptr %{{[0-9]+}}, i64 64, x86_amx %{{[0-9]+}})
; CHECK-NEXT: %{{[0-9]+}} = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 8, i16 %col, ptr @buf, i64 32)
; CHECK-NEXT: call void @llvm.x86.tilestored64.internal(i16 8, i16 %col, ptr %{{[0-9]+}}, i64 64, x86_amx %{{[0-9]+}})
; CHECK-NEXT: %{{[0-9]+}} = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 %col, ptr @buf, i64 32)
; CHECK-NEXT: call void @llvm.x86.tilestored64.internal(i16 %row, i16 %col, ptr %{{[0-9]+}}, i64 64, x86_amx %{{[0-9]+}})
; CHECK-NEXT: br label %if.end
; CHECK: if.else:
; CHECK-NEXT: %{{[0-9]+}} = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 8, ptr @buf2, i64 32)
; CHECK-NEXT: call void @llvm.x86.tilestored64.internal(i16 %row, i16 8, ptr %{{[0-9]+}}, i64 64, x86_amx %{{[0-9]+}})
; CHECK-NEXT: %{{[0-9]+}} = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 8, i16 %col, ptr @buf2, i64 32)
; CHECK-NEXT: call void @llvm.x86.tilestored64.internal(i16 8, i16 %col, ptr %{{[0-9]+}}, i64 64, x86_amx %{{[0-9]+}})
; CHECK-NEXT: %{{[0-9]+}} = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 %col, ptr @buf2, i64 32)
; CHECK-NEXT: call void @llvm.x86.tilestored64.internal(i16 %row, i16 %col, ptr %{{[0-9]+}}, i64 64, x86_amx %{{[0-9]+}})
; CHECK-NEXT: br label %if.end
; CHECK: if.end:
; CHECK-NEXT: %{{[0-9]+}} = call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 8, ptr %{{[0-9]+}}, i64 64)
; CHECK-NEXT: %{{[0-9]+}} = call x86_amx @llvm.x86.tileloadd64.internal(i16 8, i16 %col, ptr %{{[0-9]+}}, i64 64)
; CHECK-NEXT: %{{[0-9]+}} = call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 %col, ptr %{{[0-9]+}}, i64 64)
; CHECK-NEXT: %{{[0-9]+}} = tail call x86_amx @llvm.x86.tdpbssd.internal(i16 %row, i16 %col, i16 8, x86_amx %{{[0-9]+}}, x86_amx %{{[0-9]+}}, x86_amx %{{[0-9]+}})
; CHECK-NEXT: call void @llvm.x86.tilestored64.internal(i16 %row, i16 %col, ptr %{{[0-9]+}}, i64 64, x86_amx %{{[0-9]+}})
; CHECK-NEXT: %{{[0-9]+}} = call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 %col, ptr %{{[0-9]+}}, i64 64)
; CHECK-NEXT: tail call void @llvm.x86.tilestored64.internal(i16 %row, i16 %col, ptr @buf, i64 32, x86_amx %{{[0-9]+}})
; CHECK-NEXT: ret void
entry:
  %tobool.not = icmp eq i32 %cond, 0
  br i1 %tobool.not, label %if.else, label %if.then
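
; Both arms load the a, b and c tiles with @llvm.x86.tileloadd64.internal, but
; from different global buffers (@buf vs. @buf2), so three x86_amx values must
; be merged by phi nodes in %if.end.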

if.then:                                          ; preds = %entry
  %0 = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 8, ptr @buf, i64 32)
  %1 = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 8, i16 %col, ptr @buf, i64 32)
  %2 = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 %col, ptr @buf, i64 32)
  br label %if.end

if.else:                                          ; preds = %entry
  %3 = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 8, ptr @buf2, i64 32)
  %4 = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 8, i16 %col, ptr @buf2, i64 32)
  %5 = tail call x86_amx @llvm.x86.tileloadd64.internal(i16 %row, i16 %col, ptr @buf2, i64 32)
  br label %if.end
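
; The x86_amx phi nodes below are what -lower-amx-type has to eliminate: each
; incoming tile is expected to be stored to its own alloca in the predecessor
; block and reloaded here before the dot-product and the final store to @buf.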

if.end:                                           ; preds = %if.else, %if.then
  %a.sroa.1094.0.in = phi x86_amx [ %3, %if.else ], [ %0, %if.then ]
  %b.sroa.1069.0.in = phi x86_amx [ %4, %if.else ], [ %1, %if.then ]
  %c.sroa.1044.0.in = phi x86_amx [ %5, %if.else ], [ %2, %if.then ]
  %6 = tail call x86_amx @llvm.x86.tdpbssd.internal(i16 %row, i16 %col, i16 8, x86_amx %c.sroa.1044.0.in, x86_amx %a.sroa.1094.0.in, x86_amx %b.sroa.1069.0.in)
  tail call void @llvm.x86.tilestored64.internal(i16 %row, i16 %col, ptr @buf, i64 32, x86_amx %6)
  ret void
}

; Function Attrs: nounwind
declare x86_amx @llvm.x86.tileloadd64.internal(i16, i16, ptr, i64)

; Function Attrs: nounwind
declare x86_amx @llvm.x86.tdpbssd.internal(i16, i16, i16, x86_amx, x86_amx, x86_amx)

; Function Attrs: nounwind
declare void @llvm.x86.tilestored64.internal(i16, i16, ptr, i64, x86_amx)