1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=i386-unknown-unknown | FileCheck %s --check-prefix=X32
3 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefix=X64
; t1: the shift amount is pre-masked with 31 == bitwidth-1 before a 32-bit
; shl.  x86 shift instructions already truncate the count mod 32, so the
; 'and' is redundant; the checks verify no masking instruction is emitted
; before the shll.
; NOTE(review): several autogenerated lines (CHECK-LABELs, '# %bb.0:',
; 'ret') appear to be missing from this excerpt -- regenerate with
; utils/update_llc_test_checks.py rather than hand-editing the checks.
5 define i32 @t1(i32 %t, i32 %val) nounwind {
8 ; X32-NEXT: movb {{[0-9]+}}(%esp), %cl
9 ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax
10 ; X32-NEXT: shll %cl, %eax
15 ; X64-NEXT: movl %esi, %eax
16 ; X64-NEXT: movl %edi, %ecx
17 ; X64-NEXT: # kill: def $cl killed $cl killed $ecx
18 ; X64-NEXT: shll %cl, %eax
20 %shamt = and i32 %t, 31
21 %res = shl i32 %val, %shamt
; t2: like t1, but the mask is 63.  Only the low 5 bits of a 32-bit shift
; count matter, and 63's low 5 bits are all ones, so the 'and' still
; cannot change the effective count and must fold away.
; NOTE(review): CHECK-LABEL / 'ret' lines appear elided in this excerpt;
; regenerate with utils/update_llc_test_checks.py.
25 define i32 @t2(i32 %t, i32 %val) nounwind {
28 ; X32-NEXT: movb {{[0-9]+}}(%esp), %cl
29 ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax
30 ; X32-NEXT: shll %cl, %eax
35 ; X64-NEXT: movl %esi, %eax
36 ; X64-NEXT: movl %edi, %ecx
37 ; X64-NEXT: # kill: def $cl killed $cl killed $ecx
38 ; X64-NEXT: shll %cl, %eax
40 %shamt = and i32 %t, 63
41 %res = shl i32 %val, %shamt
; 16-bit global updated in place by the read-modify-write shift test below.
45 @X = internal global i16 0
; t3: i16 arithmetic shift of a global, read-modify-write.  The mask 31
; covers every defined i16 shift amount (counts >= 16 are poison in IR),
; so the 'and' folds away and the sarw operates directly on memory.
; NOTE(review): CHECK-LABEL / 'retq' lines appear elided in this excerpt;
; regenerate with utils/update_llc_test_checks.py.
47 define void @t3(i16 %t) nounwind {
50 ; X32-NEXT: movb {{[0-9]+}}(%esp), %cl
51 ; X32-NEXT: sarw %cl, X
56 ; X64-NEXT: movl %edi, %ecx
57 ; X64-NEXT: # kill: def $cl killed $cl killed $ecx
58 ; X64-NEXT: sarw %cl, {{.*}}(%rip)
60 %shamt = and i16 %t, 31
61 %tmp = load i16, i16* @X
62 %tmp1 = ashr i16 %tmp, %shamt
63 store i16 %tmp1, i16* @X
; t4: 64-bit lshr with the count masked by 63 == bitwidth-1.  On x86-64 a
; single shrq masks the count mod 64 itself, so the 'and' folds away.  On
; i386 the i64 shift is lowered to a shrl/shrdl pair with a 'testb $32'
; branch that swaps/zeroes the halves for counts >= 32.
; NOTE(review): CHECK-LABEL, '.LBB3_2:' landing label, and 'ret' lines
; appear elided in this excerpt; regenerate with
; utils/update_llc_test_checks.py.
67 define i64 @t4(i64 %t, i64 %val) nounwind {
70 ; X32-NEXT: pushl %esi
71 ; X32-NEXT: movb {{[0-9]+}}(%esp), %cl
72 ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax
73 ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi
74 ; X32-NEXT: movl %esi, %edx
75 ; X32-NEXT: shrl %cl, %edx
76 ; X32-NEXT: shrdl %cl, %esi, %eax
77 ; X32-NEXT: testb $32, %cl
78 ; X32-NEXT: je .LBB3_2
80 ; X32-NEXT: movl %edx, %eax
81 ; X32-NEXT: xorl %edx, %edx
88 ; X64-NEXT: movq %rsi, %rax
89 ; X64-NEXT: movq %rdi, %rcx
90 ; X64-NEXT: # kill: def $cl killed $cl killed $rcx
91 ; X64-NEXT: shrq %cl, %rax
93 %shamt = and i64 %t, 63
94 %res = lshr i64 %val, %shamt
; t5: like t4, but the mask is 191 (0xBF).  Its low 6 bits are all ones,
; so the effective count mod 64 is unchanged and the 'and' still folds
; away -- same codegen as t4 expected.
; NOTE(review): CHECK-LABEL, '.LBB4_2:' landing label, and 'ret' lines
; appear elided in this excerpt; regenerate with
; utils/update_llc_test_checks.py.
98 define i64 @t5(i64 %t, i64 %val) nounwind {
101 ; X32-NEXT: pushl %esi
102 ; X32-NEXT: movb {{[0-9]+}}(%esp), %cl
103 ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax
104 ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi
105 ; X32-NEXT: movl %esi, %edx
106 ; X32-NEXT: shrl %cl, %edx
107 ; X32-NEXT: shrdl %cl, %esi, %eax
108 ; X32-NEXT: testb $32, %cl
109 ; X32-NEXT: je .LBB4_2
111 ; X32-NEXT: movl %edx, %eax
112 ; X32-NEXT: xorl %edx, %edx
114 ; X32-NEXT: popl %esi
119 ; X64-NEXT: movq %rsi, %rax
120 ; X64-NEXT: movq %rdi, %rcx
121 ; X64-NEXT: # kill: def $cl killed $cl killed $rcx
122 ; X64-NEXT: shrq %cl, %rax
124 %shamt = and i64 %t, 191
125 %res = lshr i64 %val, %shamt
; t5ptr: same redundant-mask fold as t5 (mask 191, low 6 bits all set),
; but read-modify-write through a pointer.  On x86-64 the shrq should
; operate directly on memory, '(%rsi)', with no 'and' emitted.
; NOTE(review): CHECK-LABEL, '.LBB5_2:' landing label, and 'ret' lines
; appear elided in this excerpt; regenerate with
; utils/update_llc_test_checks.py.
129 define void @t5ptr(i64 %t, i64* %ptr) nounwind {
132 ; X32-NEXT: pushl %edi
133 ; X32-NEXT: pushl %esi
134 ; X32-NEXT: movb {{[0-9]+}}(%esp), %cl
135 ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax
136 ; X32-NEXT: movl (%eax), %edx
137 ; X32-NEXT: movl 4(%eax), %edi
138 ; X32-NEXT: movl %edi, %esi
139 ; X32-NEXT: shrl %cl, %esi
140 ; X32-NEXT: shrdl %cl, %edi, %edx
141 ; X32-NEXT: testb $32, %cl
142 ; X32-NEXT: je .LBB5_2
144 ; X32-NEXT: movl %esi, %edx
145 ; X32-NEXT: xorl %esi, %esi
147 ; X32-NEXT: movl %edx, (%eax)
148 ; X32-NEXT: movl %esi, 4(%eax)
149 ; X32-NEXT: popl %esi
150 ; X32-NEXT: popl %edi
155 ; X64-NEXT: movq %rdi, %rcx
156 ; X64-NEXT: # kill: def $cl killed $cl killed $rcx
157 ; X64-NEXT: shrq %cl, (%rsi)
159 %shamt = and i64 %t, 191
160 %tmp = load i64, i64* %ptr
161 %tmp1 = lshr i64 %tmp, %shamt
162 store i64 %tmp1, i64* %ptr
; t6: (key >> 3) has its top 3 bits clear, so only the low 61 bits of the
; add constant matter.  2305843009213693951 == 2^61-1, and adding 2^61-1
; is subtracting 1 mod 2^61, so '(load + 2^61-1) & shr' should fold to a
; simple decrement-and-and: 'decq' + 'andq' on x86-64 (no 64-bit immediate
; materialized).
; NOTE(review): CHECK-LABEL and 'ret' lines appear elided in this
; excerpt; regenerate with utils/update_llc_test_checks.py.
168 define i64 @t6(i64 %key, i64* nocapture %val) nounwind {
171 ; X32-NEXT: pushl %edi
172 ; X32-NEXT: pushl %esi
173 ; X32-NEXT: movl {{[0-9]+}}(%esp), %ecx
174 ; X32-NEXT: movl {{[0-9]+}}(%esp), %esi
175 ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax
176 ; X32-NEXT: shrdl $3, %eax, %esi
177 ; X32-NEXT: movl %eax, %edi
178 ; X32-NEXT: shrl $3, %edi
179 ; X32-NEXT: movl (%ecx), %eax
180 ; X32-NEXT: movl 4(%ecx), %edx
181 ; X32-NEXT: addl $-1, %eax
182 ; X32-NEXT: adcl $-1, %edx
183 ; X32-NEXT: andl %esi, %eax
184 ; X32-NEXT: andl %edi, %edx
185 ; X32-NEXT: popl %esi
186 ; X32-NEXT: popl %edi
191 ; X64-NEXT: shrq $3, %rdi
192 ; X64-NEXT: movq (%rsi), %rax
193 ; X64-NEXT: decq %rax
194 ; X64-NEXT: andq %rdi, %rax
196 %shr = lshr i64 %key, 3
197 %1 = load i64, i64* %val, align 8
198 %sub = add i64 %1, 2305843009213693951
199 %and = and i64 %sub, %shr
203 define i64 @big_mask_constant(i64 %x) nounwind {
204 ; X32-LABEL: big_mask_constant:
206 ; X32-NEXT: movl {{[0-9]+}}(%esp), %eax
207 ; X32-NEXT: andl $4, %eax
208 ; X32-NEXT: shll $25, %eax
209 ; X32-NEXT: xorl %edx, %edx
212 ; X64-LABEL: big_mask_constant:
214 ; X64-NEXT: movq %rdi, %rax
215 ; X64-NEXT: shrq $7, %rax
216 ; X64-NEXT: andl $134217728, %eax # imm = 0x8000000
218 %and = and i64 %x, 17179869184 ; 0x400000000
219 %sh = lshr i64 %and, 7