# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv32 -mattr=+v -run-pass=legalizer %s -o - | FileCheck --check-prefix=HasF64 %s
# RUN: llc -mtriple=riscv32 -mattr=+Zve64x -run-pass=legalizer %s -o - | FileCheck --check-prefix=NoF64 %s
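
# These tests exercise legalization of G_SPLAT_VECTOR with s64 elements on
# RV32, at LMUL 1 through 8. With +v (which implies D and hence FP64 support),
# the s64 splat value is assembled from two s32 halves via G_MERGE_VALUES and
# the G_SPLAT_VECTOR survives legalization; with Zve64x (no FP64), the
# legalizer instead emits the RISC-V-specific G_SPLAT_VECTOR_SPLIT_I64_VL,
# which consumes the two s32 halves directly.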

---
name: splatvector_nxv1i64
legalized: false
tracksRegLiveness: true
body: |
  bb.1:
    ; HasF64-LABEL: name: splatvector_nxv1i64
    ; HasF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; HasF64-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[DEF]](s32), [[DEF]](s32)
    ; HasF64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
    ; HasF64-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 1 x s64>)
    ; HasF64-NEXT: PseudoRET implicit $v8
    ;
    ; NoF64-LABEL: name: splatvector_nxv1i64
    ; NoF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; NoF64-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; NoF64-NEXT: [[VMSET_VL:%[0-9]+]]:_(<vscale x 1 x s1>) = G_VMSET_VL [[C]](s32)
    ; NoF64-NEXT: [[DEF1:%[0-9]+]]:_(<vscale x 1 x s64>) = G_IMPLICIT_DEF
    ; NoF64-NEXT: [[SPLAT_VECTOR_SPLIT_I64_VL:%[0-9]+]]:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR_SPLIT_I64_VL [[DEF1]], [[DEF]](s32), [[DEF]], [[C]](s32)
    ; NoF64-NEXT: $v8 = COPY [[SPLAT_VECTOR_SPLIT_I64_VL]](<vscale x 1 x s64>)
    ; NoF64-NEXT: PseudoRET implicit $v8
    %0:_(s64) = G_IMPLICIT_DEF
    %1:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR %0(s64)
    $v8 = COPY %1(<vscale x 1 x s64>)
    PseudoRET implicit $v8
...

---
name: splatvector_nxv2i64
legalized: false
tracksRegLiveness: true
body: |
  bb.1:
    ; HasF64-LABEL: name: splatvector_nxv2i64
    ; HasF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; HasF64-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[DEF]](s32), [[DEF]](s32)
    ; HasF64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
    ; HasF64-NEXT: $v8m2 = COPY [[SPLAT_VECTOR]](<vscale x 2 x s64>)
    ; HasF64-NEXT: PseudoRET implicit $v8m2
    ;
    ; NoF64-LABEL: name: splatvector_nxv2i64
    ; NoF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; NoF64-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; NoF64-NEXT: [[VMSET_VL:%[0-9]+]]:_(<vscale x 2 x s1>) = G_VMSET_VL [[C]](s32)
    ; NoF64-NEXT: [[DEF1:%[0-9]+]]:_(<vscale x 2 x s64>) = G_IMPLICIT_DEF
    ; NoF64-NEXT: [[SPLAT_VECTOR_SPLIT_I64_VL:%[0-9]+]]:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR_SPLIT_I64_VL [[DEF1]], [[DEF]](s32), [[DEF]], [[C]](s32)
    ; NoF64-NEXT: $v8m2 = COPY [[SPLAT_VECTOR_SPLIT_I64_VL]](<vscale x 2 x s64>)
    ; NoF64-NEXT: PseudoRET implicit $v8m2
    %0:_(s64) = G_IMPLICIT_DEF
    %1:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR %0(s64)
    $v8m2 = COPY %1(<vscale x 2 x s64>)
    PseudoRET implicit $v8m2
...

---
name: splatvector_nxv4i64
legalized: false
tracksRegLiveness: true
body: |
  bb.1:
    ; HasF64-LABEL: name: splatvector_nxv4i64
    ; HasF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; HasF64-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[DEF]](s32), [[DEF]](s32)
    ; HasF64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
    ; HasF64-NEXT: $v8m4 = COPY [[SPLAT_VECTOR]](<vscale x 4 x s64>)
    ; HasF64-NEXT: PseudoRET implicit $v8m4
    ;
    ; NoF64-LABEL: name: splatvector_nxv4i64
    ; NoF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; NoF64-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; NoF64-NEXT: [[VMSET_VL:%[0-9]+]]:_(<vscale x 4 x s1>) = G_VMSET_VL [[C]](s32)
    ; NoF64-NEXT: [[DEF1:%[0-9]+]]:_(<vscale x 4 x s64>) = G_IMPLICIT_DEF
    ; NoF64-NEXT: [[SPLAT_VECTOR_SPLIT_I64_VL:%[0-9]+]]:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR_SPLIT_I64_VL [[DEF1]], [[DEF]](s32), [[DEF]], [[C]](s32)
    ; NoF64-NEXT: $v8m4 = COPY [[SPLAT_VECTOR_SPLIT_I64_VL]](<vscale x 4 x s64>)
    ; NoF64-NEXT: PseudoRET implicit $v8m4
    %0:_(s64) = G_IMPLICIT_DEF
    %1:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR %0(s64)
    $v8m4 = COPY %1(<vscale x 4 x s64>)
    PseudoRET implicit $v8m4
...

---
name: splatvector_nxv8i64
legalized: false
tracksRegLiveness: true
body: |
  bb.1:
    ; HasF64-LABEL: name: splatvector_nxv8i64
    ; HasF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; HasF64-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[DEF]](s32), [[DEF]](s32)
    ; HasF64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
    ; HasF64-NEXT: $v8m8 = COPY [[SPLAT_VECTOR]](<vscale x 8 x s64>)
    ; HasF64-NEXT: PseudoRET implicit $v8m8
    ;
    ; NoF64-LABEL: name: splatvector_nxv8i64
    ; NoF64: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; NoF64-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; NoF64-NEXT: [[VMSET_VL:%[0-9]+]]:_(<vscale x 8 x s1>) = G_VMSET_VL [[C]](s32)
    ; NoF64-NEXT: [[DEF1:%[0-9]+]]:_(<vscale x 8 x s64>) = G_IMPLICIT_DEF
    ; NoF64-NEXT: [[SPLAT_VECTOR_SPLIT_I64_VL:%[0-9]+]]:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR_SPLIT_I64_VL [[DEF1]], [[DEF]](s32), [[DEF]], [[C]](s32)
    ; NoF64-NEXT: $v8m8 = COPY [[SPLAT_VECTOR_SPLIT_I64_VL]](<vscale x 8 x s64>)
    ; NoF64-NEXT: PseudoRET implicit $v8m8
    %0:_(s64) = G_IMPLICIT_DEF
    %1:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR %0(s64)
    $v8m8 = COPY %1(<vscale x 8 x s64>)
    PseudoRET implicit $v8m8
...