; NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
; RUN: llc -verify-machineinstrs -stop-before=ppc-vsx-copy -vec-extabi \
; RUN:   -mcpu=pwr7 -mtriple powerpc-ibm-aix-xcoff < %s | \
; RUN:   FileCheck %s

; Testing passing a vector <4 x i32> through ellipses of a variadic function.
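; For context, the IR below corresponds roughly to C source along the following
; lines (an illustrative sketch only; the C-level types and the local name "v"
; are assumptions, not taken from the original test):
;
;   vector int callee(int count, ...) {
;     va_list arg_list;
;     va_start(arg_list, count);
;     vector int v = va_arg(arg_list, vector int);
;     va_end(arg_list);
;     return v;
;   }
;
; On 32-bit AIX, va_arg of a 16-byte vector rounds the argument pointer up to a
; 16-byte boundary before loading it, which is what the add/and sequence in the
; IR and the ADDI/RLWINM pair in the MIR assertions below express.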
define <4 x i32> @callee(i32 %count, ...) {
  ; CHECK-LABEL: name: callee
  ; CHECK: liveins: $r4, $r5, $r6, $r7, $r8, $r9, $r10
  ; CHECK: [[COPY:%[0-9]+]]:gprc = COPY $r10
  ; CHECK: [[COPY1:%[0-9]+]]:gprc = COPY $r9
  ; CHECK: [[COPY2:%[0-9]+]]:gprc = COPY $r8
  ; CHECK: [[COPY3:%[0-9]+]]:gprc = COPY $r7
  ; CHECK: [[COPY4:%[0-9]+]]:gprc = COPY $r6
  ; CHECK: [[COPY5:%[0-9]+]]:gprc = COPY $r5
  ; CHECK: [[COPY6:%[0-9]+]]:gprc = COPY $r4
  ; CHECK: STW [[COPY6]], 0, %fixed-stack.0 :: (store (s32) into %fixed-stack.0)
  ; CHECK: STW [[COPY5]], 4, %fixed-stack.0 :: (store (s32) into %fixed-stack.0 + 4)
  ; CHECK: STW [[COPY4]], 8, %fixed-stack.0 :: (store (s32))
  ; CHECK: STW [[COPY3]], 12, %fixed-stack.0 :: (store (s32))
  ; CHECK: STW [[COPY2]], 16, %fixed-stack.0 :: (store (s32))
  ; CHECK: STW [[COPY1]], 20, %fixed-stack.0 :: (store (s32))
  ; CHECK: STW [[COPY]], 24, %fixed-stack.0 :: (store (s32))
  ; CHECK: LIFETIME_START %stack.0.arg_list
  ; CHECK: [[ADDI:%[0-9]+]]:gprc = ADDI %fixed-stack.0, 0
  ; CHECK: STW killed [[ADDI]], 0, %stack.0.arg_list :: (store (s32) into %ir.arg_list)
  ; CHECK: [[ADDI1:%[0-9]+]]:gprc = ADDI %fixed-stack.0, 15
  ; CHECK: [[RLWINM:%[0-9]+]]:gprc = RLWINM killed [[ADDI1]], 0, 0, 27
  ; CHECK: [[LXVW4X:%[0-9]+]]:vsrc = LXVW4X $zero, killed [[RLWINM]] :: (load (s128) from %ir.argp.cur.aligned)
  ; CHECK: LIFETIME_END %stack.0.arg_list
  ; CHECK: $v2 = COPY [[LXVW4X]]
  ; CHECK: BLR implicit $lr, implicit $rm, implicit $v2
entry:
  %arg_list = alloca ptr, align 4
  call void @llvm.lifetime.start.p0(i64 4, ptr nonnull %arg_list)
  call void @llvm.va_start(ptr nonnull %arg_list)
  %argp.cur = load ptr, ptr %arg_list, align 4
  %0 = ptrtoint ptr %argp.cur to i32
  %1 = add i32 %0, 15
  %2 = and i32 %1, -16
  %argp.cur.aligned = inttoptr i32 %2 to ptr
  %argp.next = getelementptr inbounds i8, ptr %argp.cur.aligned, i32 16
  store ptr %argp.next, ptr %arg_list, align 4
  %3 = inttoptr i32 %2 to ptr
  %4 = load <4 x i32>, ptr %3, align 16
  call void @llvm.va_end(ptr nonnull %arg_list)
  call void @llvm.lifetime.end.p0(i64 4, ptr nonnull %arg_list)
  ret <4 x i32> %4
}

declare void @llvm.lifetime.start.p0(i64 immarg, ptr nocapture)

declare void @llvm.va_start(ptr)

declare void @llvm.va_end(ptr)

declare void @llvm.lifetime.end.p0(i64 immarg, ptr nocapture)