; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown | FileCheck %s --check-prefix=X32
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefix=X64

;==============================================================================;
; the shift amount is negated (shiftbitwidth - shiftamt)
;==============================================================================;

; shift left
;------------------------------------------------------------------------------;
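; x86 shift instructions use only the low bits of %cl (5 for 32-bit, 6 for
; 64-bit shifts), i.e. the shift amount is implicitly reduced modulo the bit
; width. As a sketch of the fold being tested here, in the 32-bit case:
;   val << (32 - shamt)  ==  val << ((-shamt) mod 32)
; so the "sub i32 32, %shamt" can be lowered as a plain negation of the amount.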

define i32 @reg32_shl_by_negated(i32 %val, i32 %shamt) nounwind {
; X32-LABEL: reg32_shl_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shll %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shll %cl, %eax
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = shl i32 %val, %negshamt
  ret i32 %shifted
}
define i32 @load32_shl_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X32-LABEL: load32_shl_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl (%eax), %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shll %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: load32_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl (%rdi), %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shll %cl, %eax
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = shl i32 %val, %negshamt
  ret i32 %shifted
}
define void @store32_shl_by_negated(i32 %val, ptr %dstptr, i32 %shamt) nounwind {
; X32-LABEL: store32_shl_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shll %cl, %edx
; X32-NEXT:    movl %edx, (%eax)
; X32-NEXT:    retl
;
; X64-LABEL: store32_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shll %cl, %edi
; X64-NEXT:    movl %edi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = shl i32 %val, %negshamt
  store i32 %shifted, ptr %dstptr
  ret void
}
define void @modify32_shl_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X32-LABEL: modify32_shl_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movb $32, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    shll %cl, (%eax)
; X32-NEXT:    retl
;
; X64-LABEL: modify32_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $32, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    shll %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = shl i32 %val, %negshamt
  store i32 %shifted, ptr %valptr
  ret void
}

define i64 @reg64_shl_by_negated(i64 %val, i64 %shamt) nounwind {
; X32-LABEL: reg64_shl_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %esi, %eax
; X32-NEXT:    shll %cl, %eax
; X32-NEXT:    shldl %cl, %esi, %edx
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB4_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %eax, %edx
; X32-NEXT:    xorl %eax, %eax
; X32-NEXT:  .LBB4_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shlq %cl, %rax
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = shl i64 %val, %negshamt
  ret i64 %shifted
}
define i64 @load64_shl_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X32-LABEL: load64_shl_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl (%eax), %esi
; X32-NEXT:    movl 4(%eax), %edx
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %esi, %eax
; X32-NEXT:    shll %cl, %eax
; X32-NEXT:    shldl %cl, %esi, %edx
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB5_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %eax, %edx
; X32-NEXT:    xorl %eax, %eax
; X32-NEXT:  .LBB5_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: load64_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq (%rdi), %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shlq %cl, %rax
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = shl i64 %val, %negshamt
  ret i64 %shifted
}
define void @store64_shl_by_negated(i64 %val, ptr %dstptr, i64 %shamt) nounwind {
; X32-LABEL: store64_shl_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %edi
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %edi, %esi
; X32-NEXT:    shll %cl, %esi
; X32-NEXT:    shldl %cl, %edi, %edx
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB6_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    xorl %esi, %esi
; X32-NEXT:  .LBB6_2:
; X32-NEXT:    movl %edx, 4(%eax)
; X32-NEXT:    movl %esi, (%eax)
; X32-NEXT:    popl %esi
; X32-NEXT:    popl %edi
; X32-NEXT:    retl
;
; X64-LABEL: store64_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shlq %cl, %rdi
; X64-NEXT:    movq %rdi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = shl i64 %val, %negshamt
  store i64 %shifted, ptr %dstptr
  ret void
}
define void @modify64_shl_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X32-LABEL: modify64_shl_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %edi
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl (%eax), %edi
; X32-NEXT:    movl 4(%eax), %edx
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %edi, %esi
; X32-NEXT:    shll %cl, %esi
; X32-NEXT:    shldl %cl, %edi, %edx
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB7_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    xorl %esi, %esi
; X32-NEXT:  .LBB7_2:
; X32-NEXT:    movl %esi, (%eax)
; X32-NEXT:    movl %edx, 4(%eax)
; X32-NEXT:    popl %esi
; X32-NEXT:    popl %edi
; X32-NEXT:    retl
;
; X64-LABEL: modify64_shl_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $64, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    shlq %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = shl i64 %val, %negshamt
  store i64 %shifted, ptr %valptr
  ret void
}

; logical shift right
;------------------------------------------------------------------------------;

define i32 @reg32_lshr_by_negated(i32 %val, i32 %shamt) nounwind {
; X32-LABEL: reg32_lshr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = lshr i32 %val, %negshamt
  ret i32 %shifted
}
define i32 @load32_lshr_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X32-LABEL: load32_lshr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl (%eax), %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: load32_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl (%rdi), %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = lshr i32 %val, %negshamt
  ret i32 %shifted
}
define void @store32_lshr_by_negated(i32 %val, ptr %dstptr, i32 %shamt) nounwind {
; X32-LABEL: store32_lshr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    movl %edx, (%eax)
; X32-NEXT:    retl
;
; X64-LABEL: store32_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %edi
; X64-NEXT:    movl %edi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = lshr i32 %val, %negshamt
  store i32 %shifted, ptr %dstptr
  ret void
}
define void @modify32_lshr_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X32-LABEL: modify32_lshr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movb $32, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    shrl %cl, (%eax)
; X32-NEXT:    retl
;
; X64-LABEL: modify32_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $32, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    shrl %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = lshr i32 %val, %negshamt
  store i32 %shifted, ptr %valptr
  ret void
}

define i64 @reg64_lshr_by_negated(i64 %val, i64 %shamt) nounwind {
; X32-LABEL: reg64_lshr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB12_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB12_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = lshr i64 %val, %negshamt
  ret i64 %shifted
}
define i64 @load64_lshr_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X32-LABEL: load64_lshr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl (%ecx), %eax
; X32-NEXT:    movl 4(%ecx), %esi
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB13_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB13_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: load64_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq (%rdi), %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = lshr i64 %val, %negshamt
  ret i64 %shifted
}
define void @store64_lshr_by_negated(i64 %val, ptr %dstptr, i64 %shamt) nounwind {
; X32-LABEL: store64_lshr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %edi
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %edi, %esi
; X32-NEXT:    shrl %cl, %esi
; X32-NEXT:    shrdl %cl, %edi, %edx
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB14_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    xorl %esi, %esi
; X32-NEXT:  .LBB14_2:
; X32-NEXT:    movl %esi, 4(%eax)
; X32-NEXT:    movl %edx, (%eax)
; X32-NEXT:    popl %esi
; X32-NEXT:    popl %edi
; X32-NEXT:    retl
;
; X64-LABEL: store64_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rdi
; X64-NEXT:    movq %rdi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = lshr i64 %val, %negshamt
  store i64 %shifted, ptr %dstptr
  ret void
}
define void @modify64_lshr_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X32-LABEL: modify64_lshr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %edi
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl (%eax), %edx
; X32-NEXT:    movl 4(%eax), %edi
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %edi, %esi
; X32-NEXT:    shrl %cl, %esi
; X32-NEXT:    shrdl %cl, %edi, %edx
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB15_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    xorl %esi, %esi
; X32-NEXT:  .LBB15_2:
; X32-NEXT:    movl %edx, (%eax)
; X32-NEXT:    movl %esi, 4(%eax)
; X32-NEXT:    popl %esi
; X32-NEXT:    popl %edi
; X32-NEXT:    retl
;
; X64-LABEL: modify64_lshr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $64, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    shrq %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = lshr i64 %val, %negshamt
  store i64 %shifted, ptr %valptr
  ret void
}

; arithmetic shift right
;------------------------------------------------------------------------------;

define i32 @reg32_ashr_by_negated(i32 %val, i32 %shamt) nounwind {
; X32-LABEL: reg32_ashr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    sarl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    sarl %cl, %eax
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = ashr i32 %val, %negshamt
  ret i32 %shifted
}
define i32 @load32_ashr_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X32-LABEL: load32_ashr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl (%eax), %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    sarl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: load32_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl (%rdi), %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    sarl %cl, %eax
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = ashr i32 %val, %negshamt
  ret i32 %shifted
}
define void @store32_ashr_by_negated(i32 %val, ptr %dstptr, i32 %shamt) nounwind {
; X32-LABEL: store32_ashr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    sarl %cl, %edx
; X32-NEXT:    movl %edx, (%eax)
; X32-NEXT:    retl
;
; X64-LABEL: store32_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    sarl %cl, %edi
; X64-NEXT:    movl %edi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i32 32, %shamt
  %shifted = ashr i32 %val, %negshamt
  store i32 %shifted, ptr %dstptr
  ret void
}
define void @modify32_ashr_by_negated(ptr %valptr, i32 %shamt) nounwind {
; X32-LABEL: modify32_ashr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movb $32, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    sarl %cl, (%eax)
; X32-NEXT:    retl
;
; X64-LABEL: modify32_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $32, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    sarl %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i32, ptr %valptr
  %negshamt = sub i32 32, %shamt
  %shifted = ashr i32 %val, %negshamt
  store i32 %shifted, ptr %valptr
  ret void
}

define i64 @reg64_ashr_by_negated(i64 %val, i64 %shamt) nounwind {
; X32-LABEL: reg64_ashr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    sarl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB20_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    sarl $31, %esi
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:  .LBB20_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    sarq %cl, %rax
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = ashr i64 %val, %negshamt
  ret i64 %shifted
}
define i64 @load64_ashr_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X32-LABEL: load64_ashr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl (%ecx), %eax
; X32-NEXT:    movl 4(%ecx), %esi
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    sarl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB21_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    sarl $31, %esi
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:  .LBB21_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: load64_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq (%rdi), %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    sarq %cl, %rax
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = ashr i64 %val, %negshamt
  ret i64 %shifted
}
define void @store64_ashr_by_negated(i64 %val, ptr %dstptr, i64 %shamt) nounwind {
; X32-LABEL: store64_ashr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %edi
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %edi, %esi
; X32-NEXT:    sarl %cl, %esi
; X32-NEXT:    shrdl %cl, %edi, %edx
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB22_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    sarl $31, %edi
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    movl %edi, %esi
; X32-NEXT:  .LBB22_2:
; X32-NEXT:    movl %esi, 4(%eax)
; X32-NEXT:    movl %edx, (%eax)
; X32-NEXT:    popl %esi
; X32-NEXT:    popl %edi
; X32-NEXT:    retl
;
; X64-LABEL: store64_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    sarq %cl, %rdi
; X64-NEXT:    movq %rdi, (%rsi)
; X64-NEXT:    retq
  %negshamt = sub i64 64, %shamt
  %shifted = ashr i64 %val, %negshamt
  store i64 %shifted, ptr %dstptr
  ret void
}
define void @modify64_ashr_by_negated(ptr %valptr, i64 %shamt) nounwind {
; X32-LABEL: modify64_ashr_by_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %edi
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl (%eax), %edx
; X32-NEXT:    movl 4(%eax), %edi
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %edi, %esi
; X32-NEXT:    sarl %cl, %esi
; X32-NEXT:    shrdl %cl, %edi, %edx
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB23_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    sarl $31, %edi
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    movl %edi, %esi
; X32-NEXT:  .LBB23_2:
; X32-NEXT:    movl %edx, (%eax)
; X32-NEXT:    movl %esi, 4(%eax)
; X32-NEXT:    popl %esi
; X32-NEXT:    popl %edi
; X32-NEXT:    retl
;
; X64-LABEL: modify64_ashr_by_negated:
; X64:       # %bb.0:
; X64-NEXT:    movb $64, %cl
; X64-NEXT:    subb %sil, %cl
; X64-NEXT:    sarq %cl, (%rdi)
; X64-NEXT:    retq
  %val = load i64, ptr %valptr
  %negshamt = sub i64 64, %shamt
  %shifted = ashr i64 %val, %negshamt
  store i64 %shifted, ptr %valptr
  ret void
}

;||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||;
; next, let's only test the simple reg pattern, and only lshr.
;||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||;

;==============================================================================;
; subtraction from negated shift amount
;==============================================================================;
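; Worked out modulo the bit width, the shift amount here is:
;   (bitwidth - a) - b  ==  bitwidth - (a + b)  ==  -(a + b)  (mod bitwidth)
; so the expected lowering is an add of the two amounts followed by a negate.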
define i32 @reg32_lshr_by_sub_from_negated(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_sub_from_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    negb %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_sub_from_negated:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 32, %a
  %negasubb = sub i32 %nega, %b
  %shifted = lshr i32 %val, %negasubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_sub_from_negated(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_sub_from_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb %dl, %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB25_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB25_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_sub_from_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 64, %a
  %negasubb = sub i64 %nega, %b
  %shifted = lshr i64 %val, %negasubb
  ret i64 %shifted
}

;==============================================================================;
; subtraction of negated shift amount
;==============================================================================;
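; Worked out modulo the bit width:
;   b - (bitwidth - a)  ==  (a + b) - bitwidth  ==  a + b  (mod bitwidth)
; so no negation is needed at all, just an add of the two amounts.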
define i32 @reg32_lshr_by_sub_of_negated(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_sub_of_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_sub_of_negated:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 32, %a
  %negasubb = sub i32 %b, %nega
  %shifted = lshr i32 %val, %negasubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_sub_of_negated(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_sub_of_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addb $-64, %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB27_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB27_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_sub_of_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 64, %a
  %negasubb = sub i64 %b, %nega
  %shifted = lshr i64 %val, %negasubb
  ret i64 %shifted
}

;==============================================================================;
; add to negated shift amount
;==============================================================================;
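; Worked out modulo the bit width:
;   (bitwidth - a) + b  ==  bitwidth + (b - a)  ==  b - a  (mod bitwidth)
; so the expected lowering is a single subtract.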
define i32 @reg32_lshr_by_add_to_negated(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_add_to_negated:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_add_to_negated:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 32, %a
  %negasubb = add i32 %nega, %b
  %shifted = lshr i32 %val, %negasubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_add_to_negated(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_add_to_negated:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addb $64, %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB29_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB29_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_add_to_negated:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 64, %a
  %negasubb = add i64 %nega, %b
  %shifted = lshr i64 %val, %negasubb
  ret i64 %shifted
}

;==============================================================================;
; subtraction of negated shift amounts
;==============================================================================;
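; Here the two negations cancel outright, with no modular reasoning needed:
;   (bitwidth - a) - (bitwidth - b)  ==  b - a
; so again a single subtract suffices.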
define i32 @reg32_lshr_by_sub_of_negated_amts(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_sub_of_negated_amts:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_sub_of_negated_amts:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 32, %a
  %negb = sub i32 32, %b
  %negasubnegb = sub i32 %nega, %negb
  %shifted = lshr i32 %val, %negasubnegb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_sub_of_negated_amts(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_sub_of_negated_amts:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB31_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB31_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_sub_of_negated_amts:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 64, %a
  %negb = sub i64 64, %b
  %negasubnegb = sub i64 %nega, %negb
  %shifted = lshr i64 %val, %negasubnegb
  ret i64 %shifted
}

;==============================================================================;
; addition of negated shift amounts
;==============================================================================;
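; Worked out modulo the bit width:
;   (bitwidth - a) + (bitwidth - b)  ==  2*bitwidth - (a + b)  ==  -(a + b)  (mod bitwidth)
; so the expected lowering is an add of the two amounts followed by a negate.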
define i32 @reg32_lshr_by_add_of_negated_amts(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_add_of_negated_amts:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    negb %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_add_of_negated_amts:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 32, %a
  %negb = sub i32 32, %b
  %negasubnegb = add i32 %nega, %negb
  %shifted = lshr i32 %val, %negasubnegb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_add_of_negated_amts(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_add_of_negated_amts:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    movb $-128, %cl
; X32-NEXT:    subb %dl, %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB33_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB33_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_add_of_negated_amts:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 64, %a
  %negb = sub i64 64, %b
  %negasubnegb = add i64 %nega, %negb
  %shifted = lshr i64 %val, %negasubnegb
  ret i64 %shifted
}

;==============================================================================;
; and patterns with an actual negation+addition
;==============================================================================;
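; These variants spell the negation out as "0 - a" plus the bit width; since
; adding the bit width is a no-op modulo the bit width:
;   (0 - a) + bitwidth  ==  -a  (mod bitwidth)
; the add should disappear and only the negation remain.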
define i32 @reg32_lshr_by_negated_unfolded(i32 %val, i32 %shamt) nounwind {
; X32-LABEL: reg32_lshr_by_negated_unfolded:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %negshamt = sub i32 0, %shamt
  %negaaddbitwidth = add i32 %negshamt, 32
  %shifted = lshr i32 %val, %negaaddbitwidth
  ret i32 %shifted
}
define i64 @reg64_lshr_by_negated_unfolded(i64 %val, i64 %shamt) nounwind {
; X32-LABEL: reg64_lshr_by_negated_unfolded:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB35_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB35_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %negshamt = sub i64 0, %shamt
  %negaaddbitwidth = add i64 %negshamt, 64
  %shifted = lshr i64 %val, %negaaddbitwidth
  ret i64 %shifted
}

define i32 @reg32_lshr_by_negated_unfolded_sub_b(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_negated_unfolded_sub_b:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    negb %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_negated_unfolded_sub_b:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = add i32 %nega, 32
  %negaaddbitwidthsubb = sub i32 %negaaddbitwidth, %b
  %shifted = lshr i32 %val, %negaaddbitwidthsubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_negated_unfolded_sub_b(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_negated_unfolded_sub_b:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    movb $64, %cl
; X32-NEXT:    subb %dl, %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB37_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB37_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_negated_unfolded_sub_b:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = add i64 %nega, 64
  %negaaddbitwidthsubb = sub i64 %negaaddbitwidth, %b
  %shifted = lshr i64 %val, %negaaddbitwidthsubb
  ret i64 %shifted
}

define i32 @reg32_lshr_by_b_sub_negated_unfolded(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_b_sub_negated_unfolded:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_b_sub_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = add i32 %nega, 32
  %negaaddbitwidthsubb = sub i32 %b, %negaaddbitwidth
  %shifted = lshr i32 %val, %negaaddbitwidthsubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_b_sub_negated_unfolded(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_b_sub_negated_unfolded:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addb $-64, %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB39_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB39_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_b_sub_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = add i64 %nega, 64
  %negaaddbitwidthsubb = sub i64 %b, %negaaddbitwidth
  %shifted = lshr i64 %val, %negaaddbitwidthsubb
  ret i64 %shifted
}

define i32 @reg32_lshr_by_negated_unfolded_add_b(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_negated_unfolded_add_b:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_negated_unfolded_add_b:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = add i32 %nega, 32
  %negaaddbitwidthaddb = add i32 %negaaddbitwidth, %b
  %shifted = lshr i32 %val, %negaaddbitwidthaddb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_negated_unfolded_add_b(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_negated_unfolded_add_b:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    addb $64, %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB41_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB41_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_negated_unfolded_add_b:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = add i64 %nega, 64
  %negaaddbitwidthaddb = add i64 %negaaddbitwidth, %b
  %shifted = lshr i64 %val, %negaaddbitwidthaddb
  ret i64 %shifted
}

;==============================================================================;
; and patterns with an actual negation+mask
;==============================================================================;
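; Masking with bitwidth-1 is the same as reducing modulo the bit width:
;   (0 - a) & (bitwidth - 1)  ==  -a  (mod bitwidth)
; so the mask should fold away whenever its result feeds the shift amount
; directly, as in the first two tests below.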
define i32 @reg32_lshr_by_masked_negated_unfolded(i32 %val, i32 %shamt) nounwind {
; X32-LABEL: reg32_lshr_by_masked_negated_unfolded:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_masked_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %negshamt = sub i32 0, %shamt
  %negaaddbitwidth = and i32 %negshamt, 31
  %shifted = lshr i32 %val, %negaaddbitwidth
  ret i32 %shifted
}
define i64 @reg64_lshr_by_masked_negated_unfolded(i64 %val, i64 %shamt) nounwind {
; X32-LABEL: reg64_lshr_by_masked_negated_unfolded:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    movzbl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    subb %dl, %cl
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB43_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB43_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_masked_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %negshamt = sub i64 0, %shamt
  %negaaddbitwidth = and i64 %negshamt, 63
  %shifted = lshr i64 %val, %negaaddbitwidth
  ret i64 %shifted
}

define i32 @reg32_lshr_by_masked_negated_unfolded_sub_b(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_masked_negated_unfolded_sub_b:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    andl $31, %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_masked_negated_unfolded_sub_b:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negl %ecx
; X64-NEXT:    andl $31, %ecx
; X64-NEXT:    subl %edx, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = and i32 %nega, 31
  %negaaddbitwidthsubb = sub i32 %negaaddbitwidth, %b
  %shifted = lshr i32 %val, %negaaddbitwidthsubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_masked_negated_unfolded_sub_b(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_masked_negated_unfolded_sub_b:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    andl $63, %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB45_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB45_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_masked_negated_unfolded_sub_b:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negl %ecx
; X64-NEXT:    andl $63, %ecx
; X64-NEXT:    subl %edx, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = and i64 %nega, 63
  %negaaddbitwidthsubb = sub i64 %negaaddbitwidth, %b
  %shifted = lshr i64 %val, %negaaddbitwidthsubb
  ret i64 %shifted
}

define i32 @reg32_lshr_by_masked_b_sub_negated_unfolded(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_masked_b_sub_negated_unfolded:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    andl $31, %edx
; X32-NEXT:    subl %edx, %ecx
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_masked_b_sub_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negl %esi
; X64-NEXT:    andl $31, %esi
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = and i32 %nega, 31
  %negaaddbitwidthsubb = sub i32 %b, %negaaddbitwidth
  %shifted = lshr i32 %val, %negaaddbitwidthsubb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_masked_b_sub_negated_unfolded(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_masked_b_sub_negated_unfolded:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    andl $63, %edx
; X32-NEXT:    subl %edx, %ecx
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB47_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB47_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_masked_b_sub_negated_unfolded:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negl %esi
; X64-NEXT:    andl $63, %esi
; X64-NEXT:    subl %esi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = and i64 %nega, 63
  %negaaddbitwidthsubb = sub i64 %b, %negaaddbitwidth
  %shifted = lshr i64 %val, %negaaddbitwidthsubb
  ret i64 %shifted
}

define i32 @reg32_lshr_by_masked_negated_unfolded_add_b(i32 %val, i32 %a, i32 %b) nounwind {
; X32-LABEL: reg32_lshr_by_masked_negated_unfolded_add_b:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    andl $31, %ecx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    # kill: def $cl killed $cl killed $ecx
; X32-NEXT:    shrl %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: reg32_lshr_by_masked_negated_unfolded_add_b:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    negl %esi
; X64-NEXT:    andl $31, %esi
; X64-NEXT:    leal (%rsi,%rdx), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    retq
  %nega = sub i32 0, %a
  %negaaddbitwidth = and i32 %nega, 31
  %negaaddbitwidthaddb = add i32 %negaaddbitwidth, %b
  %shifted = lshr i32 %val, %negaaddbitwidthaddb
  ret i32 %shifted
}
define i64 @reg64_lshr_by_masked_negated_unfolded_add_b(i64 %val, i64 %a, i64 %b) nounwind {
; X32-LABEL: reg64_lshr_by_masked_negated_unfolded_add_b:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    andl $63, %ecx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB49_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB49_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: reg64_lshr_by_masked_negated_unfolded_add_b:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    negl %esi
; X64-NEXT:    andl $63, %esi
; X64-NEXT:    leal (%rdx,%rsi), %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %nega = sub i64 0, %a
  %negaaddbitwidth = and i64 %nega, 63
  %negaaddbitwidthaddb = add i64 %negaaddbitwidth, %b
  %shifted = lshr i64 %val, %negaaddbitwidthaddb
  ret i64 %shifted
}
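
; Both shifts combine through the truncation:
;   (trunc (%x >> 24) to i16) >> 12  ==  bits [39:36] of %x  ==  (%x >> 36) & 15
; hence the single shrq $36 plus andl $15 in the 64-bit lowering below.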
define i16 @sh_trunc_sh(i64 %x) {
; X32-LABEL: sh_trunc_sh:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    shrl $4, %eax
; X32-NEXT:    andl $15, %eax
; X32-NEXT:    # kill: def $ax killed $ax killed $eax
; X32-NEXT:    retl
;
; X64-LABEL: sh_trunc_sh:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    shrq $36, %rax
; X64-NEXT:    andl $15, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $rax
; X64-NEXT:    retq
  %s = lshr i64 %x, 24
  %t = trunc i64 %s to i16
  %r = lshr i16 %t, 12
  ret i16 %r
}