# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv64 -mattr=+v -stop-after=prologepilog %s -o - 2>&1 | FileCheck %s
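
# This test loads a seg7 register tuple into $v0-$v6, spills it to a
# scalable-vector stack slot with PseudoVSPILL7_M1, and reloads it into
# $v7-$v13 with PseudoVRELOAD7_M1. The CHECK lines verify that, by the end of
# prologue/epilogue insertion, the spill and reload have been expanded into
# per-register VS1R_V/VL1RE8_V accesses strided by VLENB.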

--- |
  target datalayout = "e-m:e-p:64:64-i64:64-i128:128-n64-S128"
  target triple = "riscv64"

  define void @zvlsseg_spill(ptr %base, i64 %vl) {
    ret void
  }
...
---
name: zvlsseg_spill
tracksRegLiveness: true
stack:
  - { id: 0, offset: 0, size: 64, alignment: 8, stack-id: scalable-vector }
body: |
  bb.0:
    liveins: $x10, $x11

    ; CHECK-LABEL: name: zvlsseg_spill
    ; CHECK: liveins: $x10, $x11
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: $x2 = frame-setup ADDI $x2, -16
    ; CHECK-NEXT: frame-setup CFI_INSTRUCTION def_cfa_offset 16
    ; CHECK-NEXT: $x12 = frame-setup PseudoReadVLENB
    ; CHECK-NEXT: $x12 = frame-setup SLLI killed $x12, 3
    ; CHECK-NEXT: $x2 = frame-setup SUB $x2, killed $x12
    ; CHECK-NEXT: frame-setup CFI_INSTRUCTION escape 0x0f, 0x0d, 0x72, 0x00, 0x11, 0x10, 0x22, 0x11, 0x08, 0x92, 0xa2, 0x38, 0x00, 0x1e, 0x22
    ; CHECK-NEXT: dead $x0 = PseudoVSETVLI killed renamable $x11, 216 /* e64, m1, ta, ma */, implicit-def $vl, implicit-def $vtype
    ; CHECK-NEXT: $v0_v1_v2_v3_v4_v5_v6 = PseudoVLSEG7E64_V_M1 undef $v0_v1_v2_v3_v4_v5_v6, renamable $x10, $noreg, 6 /* e64 */, 0 /* tu, mu */, implicit $vl, implicit $vtype
    ; CHECK-NEXT: $x11 = ADDI $x2, 16
    ; CHECK-NEXT: $x12 = PseudoReadVLENB
    ; CHECK-NEXT: VS1R_V $v0, $x11, implicit $v0_v1_v2_v3_v4_v5_v6 :: (store unknown-size into %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, $x12
    ; CHECK-NEXT: VS1R_V $v1, $x11, implicit $v0_v1_v2_v3_v4_v5_v6 :: (store unknown-size into %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, $x12
    ; CHECK-NEXT: VS1R_V $v2, $x11, implicit $v0_v1_v2_v3_v4_v5_v6 :: (store unknown-size into %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, $x12
    ; CHECK-NEXT: VS1R_V $v3, $x11, implicit $v0_v1_v2_v3_v4_v5_v6 :: (store unknown-size into %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, $x12
    ; CHECK-NEXT: VS1R_V $v4, $x11, implicit $v0_v1_v2_v3_v4_v5_v6 :: (store unknown-size into %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, $x12
    ; CHECK-NEXT: VS1R_V $v5, $x11, implicit $v0_v1_v2_v3_v4_v5_v6 :: (store unknown-size into %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, killed $x12
    ; CHECK-NEXT: VS1R_V $v6, killed $x11, implicit $v0_v1_v2_v3_v4_v5_v6 :: (store unknown-size into %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADDI $x2, 16
    ; CHECK-NEXT: $x12 = PseudoReadVLENB
    ; CHECK-NEXT: $v7 = VL1RE8_V $x11 :: (load unknown-size from %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, $x12
    ; CHECK-NEXT: $v8 = VL1RE8_V $x11 :: (load unknown-size from %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, $x12
    ; CHECK-NEXT: $v9 = VL1RE8_V $x11 :: (load unknown-size from %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, $x12
    ; CHECK-NEXT: $v10 = VL1RE8_V $x11 :: (load unknown-size from %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, $x12
    ; CHECK-NEXT: $v11 = VL1RE8_V $x11 :: (load unknown-size from %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, $x12
    ; CHECK-NEXT: $v12 = VL1RE8_V $x11 :: (load unknown-size from %stack.0, align 8)
    ; CHECK-NEXT: $x11 = ADD killed $x11, killed $x12
    ; CHECK-NEXT: $v13 = VL1RE8_V killed $x11 :: (load unknown-size from %stack.0, align 8)
    ; CHECK-NEXT: VS1R_V killed $v8, killed renamable $x10
    ; CHECK-NEXT: $x10 = frame-destroy PseudoReadVLENB
    ; CHECK-NEXT: $x10 = frame-destroy SLLI killed $x10, 3
    ; CHECK-NEXT: $x2 = frame-destroy ADD $x2, killed $x10
    ; CHECK-NEXT: $x2 = frame-destroy ADDI $x2, 16
    ; CHECK-NEXT: PseudoRET
    %0:gpr = COPY $x10
    %1:gprnox0 = COPY $x11
    $v0_v1_v2_v3_v4_v5_v6 = PseudoVLSEG7E64_V_M1 undef $v0_v1_v2_v3_v4_v5_v6, %0, %1, 6, 0
    PseudoVSPILL7_M1 killed renamable $v0_v1_v2_v3_v4_v5_v6, %stack.0 :: (store unknown-size into %stack.0, align 8)
    renamable $v7_v8_v9_v10_v11_v12_v13 = PseudoVRELOAD7_M1 %stack.0 :: (load unknown-size from %stack.0, align 8)
    VS1R_V killed $v8, %0:gpr
    PseudoRET
...