1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=i686-- | FileCheck %s --check-prefixes=X86
3 ; RUN: llc < %s -mtriple=x86_64-- | FileCheck %s --check-prefixes=X64
6 ; fixed avg(x,y) = add(and(x,y),lshr(xor(x,y),1))
8 ; ext avg(x,y) = trunc(lshr(add(zext(x),zext(y)),1))
11 define i8 @test_fixed_i8(i8 %a0, i8 %a1) nounwind {
12 ; X86-LABEL: test_fixed_i8:
14 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
15 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
16 ; X86-NEXT: addl %ecx, %eax
18 ; X86-NEXT: # kill: def $al killed $al killed $eax
21 ; X64-LABEL: test_fixed_i8:
23 ; X64-NEXT: movzbl %sil, %ecx
24 ; X64-NEXT: movzbl %dil, %eax
25 ; X64-NEXT: addl %ecx, %eax
27 ; X64-NEXT: # kill: def $al killed $al killed $eax
; Fixed-width unsigned average (header note line 6): (a0 & a1) + ((a0 ^ a1) >> 1),
; computed entirely in i8 so the sum cannot overflow.
29 %and = and i8 %a0, %a1
30 %xor = xor i8 %a0, %a1
31 %shift = lshr i8 %xor, 1
32 %res = add i8 %and, %shift
36 define i8 @test_ext_i8(i8 %a0, i8 %a1) nounwind {
37 ; X86-LABEL: test_ext_i8:
39 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
40 ; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
41 ; X86-NEXT: addl %ecx, %eax
43 ; X86-NEXT: # kill: def $al killed $al killed $eax
46 ; X64-LABEL: test_ext_i8:
48 ; X64-NEXT: movzbl %sil, %ecx
49 ; X64-NEXT: movzbl %dil, %eax
50 ; X64-NEXT: addl %ecx, %eax
52 ; X64-NEXT: # kill: def $al killed $al killed $eax
; Extending unsigned average (header note line 8): widen to i16, add, halve,
; truncate back to i8 — the i16 sum cannot overflow.
54 %x0 = zext i8 %a0 to i16
55 %x1 = zext i8 %a1 to i16
56 %sum = add i16 %x0, %x1
57 %shift = lshr i16 %sum, 1
58 %res = trunc i16 %shift to i8
62 define i16 @test_fixed_i16(i16 %a0, i16 %a1) nounwind {
63 ; X86-LABEL: test_fixed_i16:
65 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
66 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
67 ; X86-NEXT: addl %ecx, %eax
69 ; X86-NEXT: # kill: def $ax killed $ax killed $eax
72 ; X64-LABEL: test_fixed_i16:
74 ; X64-NEXT: movzwl %si, %ecx
75 ; X64-NEXT: movzwl %di, %eax
76 ; X64-NEXT: addl %ecx, %eax
78 ; X64-NEXT: # kill: def $ax killed $ax killed $eax
; Fixed-width unsigned average: (a0 & a1) + ((a0 ^ a1) >> 1), all in i16.
80 %and = and i16 %a0, %a1
81 %xor = xor i16 %a0, %a1
82 %shift = lshr i16 %xor, 1
83 %res = add i16 %and, %shift
87 define i16 @test_ext_i16(i16 %a0, i16 %a1) nounwind {
88 ; X86-LABEL: test_ext_i16:
90 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
91 ; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
92 ; X86-NEXT: addl %ecx, %eax
94 ; X86-NEXT: # kill: def $ax killed $ax killed $eax
97 ; X64-LABEL: test_ext_i16:
99 ; X64-NEXT: movzwl %si, %ecx
100 ; X64-NEXT: movzwl %di, %eax
101 ; X64-NEXT: addl %ecx, %eax
102 ; X64-NEXT: shrl %eax
103 ; X64-NEXT: # kill: def $ax killed $ax killed $eax
; Extending unsigned average: widen to i32, add, halve, truncate back to i16.
105 %x0 = zext i16 %a0 to i32
106 %x1 = zext i16 %a1 to i32
107 %sum = add i32 %x0, %x1
108 %shift = lshr i32 %sum, 1
109 %res = trunc i32 %shift to i16
113 define i32 @test_fixed_i32(i32 %a0, i32 %a1) nounwind {
114 ; X86-LABEL: test_fixed_i32:
116 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
117 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
118 ; X86-NEXT: movl %eax, %edx
119 ; X86-NEXT: andl %ecx, %edx
120 ; X86-NEXT: xorl %ecx, %eax
121 ; X86-NEXT: shrl %eax
122 ; X86-NEXT: addl %edx, %eax
125 ; X64-LABEL: test_fixed_i32:
127 ; X64-NEXT: movl %esi, %ecx
128 ; X64-NEXT: movl %edi, %eax
129 ; X64-NEXT: addq %rcx, %rax
130 ; X64-NEXT: shrq %rax
131 ; X64-NEXT: # kill: def $eax killed $eax killed $rax
; Fixed-width unsigned average: (a0 & a1) + ((a1 ^ a0) >> 1). On X64 this
; lowers to a 64-bit add+shift; on X86 it keeps the and/xor/shr/add form.
133 %and = and i32 %a0, %a1
134 %xor = xor i32 %a1, %a0
135 %shift = lshr i32 %xor, 1
136 %res = add i32 %and, %shift
140 define i32 @test_ext_i32(i32 %a0, i32 %a1) nounwind {
141 ; X86-LABEL: test_ext_i32:
143 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
144 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
145 ; X86-NEXT: movl %eax, %edx
146 ; X86-NEXT: andl %ecx, %edx
147 ; X86-NEXT: xorl %ecx, %eax
148 ; X86-NEXT: shrl %eax
149 ; X86-NEXT: addl %edx, %eax
152 ; X64-LABEL: test_ext_i32:
154 ; X64-NEXT: movl %esi, %ecx
155 ; X64-NEXT: movl %edi, %eax
156 ; X64-NEXT: addq %rcx, %rax
157 ; X64-NEXT: shrq %rax
158 ; X64-NEXT: # kill: def $eax killed $eax killed $rax
; Extending unsigned average: widen to i64, add, halve, truncate back to i32.
; Note the X86 checks match the fixed (and/xor/shr/add) form — both patterns
; lower to the same code.
160 %x0 = zext i32 %a0 to i64
161 %x1 = zext i32 %a1 to i64
162 %sum = add i64 %x0, %x1
163 %shift = lshr i64 %sum, 1
164 %res = trunc i64 %shift to i32
168 define i64 @test_fixed_i64(i64 %a0, i64 %a1) nounwind {
169 ; X86-LABEL: test_fixed_i64:
171 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
172 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
173 ; X86-NEXT: addl {{[0-9]+}}(%esp), %ecx
174 ; X86-NEXT: adcl {{[0-9]+}}(%esp), %eax
176 ; X86-NEXT: movzbl %dl, %edx
177 ; X86-NEXT: shldl $31, %eax, %edx
178 ; X86-NEXT: shldl $31, %ecx, %eax
181 ; X64-LABEL: test_fixed_i64:
183 ; X64-NEXT: movq %rdi, %rax
184 ; X64-NEXT: andq %rsi, %rax
185 ; X64-NEXT: xorq %rsi, %rdi
186 ; X64-NEXT: shrq %rdi
187 ; X64-NEXT: addq %rdi, %rax
; Fixed-width unsigned average: (a0 & a1) + ((a1 ^ a0) >> 1). On 32-bit X86
; the i64 sum is split into add/adcl with the carry folded back via shldl.
189 %and = and i64 %a0, %a1
190 %xor = xor i64 %a1, %a0
191 %shift = lshr i64 %xor, 1
192 %res = add i64 %and, %shift
196 define i64 @test_ext_i64(i64 %a0, i64 %a1) nounwind {
197 ; X86-LABEL: test_ext_i64:
199 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
200 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
201 ; X86-NEXT: addl {{[0-9]+}}(%esp), %ecx
202 ; X86-NEXT: adcl {{[0-9]+}}(%esp), %eax
204 ; X86-NEXT: movzbl %dl, %edx
205 ; X86-NEXT: shldl $31, %eax, %edx
206 ; X86-NEXT: shldl $31, %ecx, %eax
209 ; X64-LABEL: test_ext_i64:
211 ; X64-NEXT: movq %rdi, %rax
212 ; X64-NEXT: andq %rsi, %rax
213 ; X64-NEXT: xorq %rsi, %rdi
214 ; X64-NEXT: shrq %rdi
215 ; X64-NEXT: addq %rdi, %rax
; Extending unsigned average: widen to i128, add, halve, truncate back to i64.
; The checks match the fixed-form lowering — avoiding an actual i128 add.
217 %x0 = zext i64 %a0 to i128
218 %x1 = zext i64 %a1 to i128
219 %sum = add i128 %x0, %x1
220 %shift = lshr i128 %sum, 1
221 %res = trunc i128 %shift to i64