1 ; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
2 ; Tests to make sure elimination of casts is working correctly
3 ; RUN: opt < %s -instcombine -S -data-layout="E-p:64:64:64-p1:32:32:32-p2:64:64:64-p3:64:64:64-a0:0:8-f32:32:32-f64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:32:64-v64:64:64-v128:128:128-n8:16:32:64" | FileCheck %s --check-prefixes=ALL,BE
4 ; RUN: opt < %s -instcombine -S -data-layout="e-p:64:64:64-p1:32:32:32-p2:64:64:64-p3:64:64:64-a0:0:8-f32:32:32-f64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:32:64-v64:64:64-v128:128:128-n8:16:32:64" | FileCheck %s --check-prefixes=ALL,LE
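; The two RUN lines differ only in the leading datalayout letter ('E' for
; big-endian, 'e' for little-endian), so folds shared by both layouts are
; checked under the ALL prefix and endian-sensitive bitcast results under BE/LE.
; As a rough sketch of the kind of simplification exercised below, a cast
; round-trip such as:
;   %t = trunc i64 %A to i8
;   %r = zext i8 %t to i64
; is expected to collapse to a plain mask, "and i64 %A, 255" (see @test3).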
6 declare void @use_i32(i32)
7 declare void @use_v2i32(<2 x i32>)
9 @inbuf = external global [32832 x i8]
11 define i32 @test1(i32 %A) {
13 ; ALL-NEXT: ret i32 [[A:%.*]]
15 %c1 = bitcast i32 %A to i32
16 %c2 = bitcast i32 %c1 to i32
20 define i64 @test2(i8 %A) {
22 ; ALL-NEXT: [[RET:%.*]] = zext i8 [[A:%.*]] to i64
23 ; ALL-NEXT: ret i64 [[RET]]
25 %c1 = zext i8 %A to i16
26 %c2 = zext i16 %c1 to i32
27 %Ret = zext i32 %c2 to i64
31 define i64 @test3(i64 %A) {
33 ; ALL-NEXT: [[C2:%.*]] = and i64 [[A:%.*]], 255
34 ; ALL-NEXT: ret i64 [[C2]]
36 %c1 = trunc i64 %A to i8
37 %c2 = zext i8 %c1 to i64
41 define i32 @test4(i32 %A, i32 %B) {
43 ; ALL-NEXT: [[COND:%.*]] = icmp slt i32 [[A:%.*]], [[B:%.*]]
44 ; ALL-NEXT: [[RESULT:%.*]] = zext i1 [[COND]] to i32
45 ; ALL-NEXT: ret i32 [[RESULT]]
47 %COND = icmp slt i32 %A, %B
48 %c = zext i1 %COND to i8
49 %result = zext i8 %c to i32
53 define i32 @test5(i1 %B) {
55 ; ALL-NEXT: [[RESULT:%.*]] = zext i1 [[B:%.*]] to i32
56 ; ALL-NEXT: ret i32 [[RESULT]]
59 %result = zext i8 %c to i32
63 define i32 @test6(i64 %A) {
65 ; ALL-NEXT: [[C1:%.*]] = trunc i64 [[A:%.*]] to i32
66 ; ALL-NEXT: ret i32 [[C1]]
68 %c1 = trunc i64 %A to i32
69 %res = bitcast i32 %c1 to i32
73 define i64 @test7(i1 %A) {
75 ; ALL-NEXT: [[RES:%.*]] = zext i1 [[A:%.*]] to i64
76 ; ALL-NEXT: ret i64 [[RES]]
78 %c1 = zext i1 %A to i32
79 %res = sext i32 %c1 to i64
83 define i64 @test8(i8 %A) {
85 ; ALL-NEXT: [[C1:%.*]] = sext i8 [[A:%.*]] to i64
86 ; ALL-NEXT: ret i64 [[C1]]
88 %c1 = sext i8 %A to i64
89 %res = bitcast i64 %c1 to i64
93 define i16 @test9(i16 %A) {
95 ; ALL-NEXT: ret i16 [[A:%.*]]
97 %c1 = sext i16 %A to i32
98 %c2 = trunc i32 %c1 to i16
102 define i16 @test10(i16 %A) {
103 ; ALL-LABEL: @test10(
104 ; ALL-NEXT: ret i16 [[A:%.*]]
106 %c1 = sext i16 %A to i32
107 %c2 = trunc i32 %c1 to i16
111 declare void @varargs(i32, ...)
113 define void @test11(i32* %P) {
114 ; ALL-LABEL: @test11(
115 ; ALL-NEXT: call void (i32, ...) @varargs(i32 5, i32* [[P:%.*]])
118 %c = bitcast i32* %P to i16*
119 call void (i32, ...) @varargs( i32 5, i16* %c )
123 declare i32 @__gxx_personality_v0(...)
124 define void @test_invoke_vararg_cast(i32* %a, i32* %b) personality i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*) {
125 ; ALL-LABEL: @test_invoke_vararg_cast(
127 ; ALL-NEXT: invoke void (i32, ...) @varargs(i32 1, i32* [[B:%.*]], i32* [[A:%.*]])
128 ; ALL-NEXT: to label [[INVOKE_CONT:%.*]] unwind label [[LPAD:%.*]]
132 ; ALL-NEXT: [[TMP0:%.*]] = landingpad { i8*, i32 }
137 %0 = bitcast i32* %b to i8*
138 %1 = bitcast i32* %a to i64*
139 invoke void (i32, ...) @varargs(i32 1, i8* %0, i64* %1)
140 to label %invoke.cont unwind label %lpad
146 %2 = landingpad { i8*, i32 }
151 define i8* @test13(i64 %A) {
152 ; ALL-LABEL: @test13(
153 ; ALL-NEXT: [[C:%.*]] = getelementptr [32832 x i8], [32832 x i8]* @inbuf, i64 0, i64 [[A:%.*]]
154 ; ALL-NEXT: ret i8* [[C]]
156 %c = getelementptr [0 x i8], [0 x i8]* bitcast ([32832 x i8]* @inbuf to [0 x i8]*), i64 0, i64 %A
160 define i1 @test14(i8 %A) {
161 ; ALL-LABEL: @test14(
162 ; ALL-NEXT: [[X:%.*]] = icmp sgt i8 [[A:%.*]], -1
163 ; ALL-NEXT: ret i1 [[X]]
165 %c = bitcast i8 %A to i8
166 %X = icmp ult i8 %c, -128
171 ; This just won't occur when there's no difference between ubyte and sbyte
172 ;bool %test15(ubyte %A) {
173 ; %c = cast ubyte %A to sbyte
174 ; %X = setlt sbyte %c, 0 ; setgt %A, 127
178 define i1 @test16(i32* %P) {
179 ; ALL-LABEL: @test16(
180 ; ALL-NEXT: [[C:%.*]] = icmp ne i32* [[P:%.*]], null
181 ; ALL-NEXT: ret i1 [[C]]
183 %c = icmp ne i32* %P, null
187 define i16 @test17(i1 %x) {
188 ; ALL-LABEL: @test17(
189 ; ALL-NEXT: [[T86:%.*]] = zext i1 [[X:%.*]] to i16
190 ; ALL-NEXT: ret i16 [[T86]]
192 %c = zext i1 %x to i32
193 %t86 = trunc i32 %c to i16
197 define i16 @test18(i8 %x) {
198 ; ALL-LABEL: @test18(
199 ; ALL-NEXT: [[T86:%.*]] = sext i8 [[X:%.*]] to i16
200 ; ALL-NEXT: ret i16 [[T86]]
202 %c = sext i8 %x to i32
203 %t86 = trunc i32 %c to i16
207 define i1 @test19(i32 %X) {
208 ; ALL-LABEL: @test19(
209 ; ALL-NEXT: [[Z:%.*]] = icmp slt i32 [[X:%.*]], 12345
210 ; ALL-NEXT: ret i1 [[Z]]
212 %c = sext i32 %X to i64
213 %Z = icmp slt i64 %c, 12345
217 define <2 x i1> @test19vec(<2 x i32> %X) {
218 ; ALL-LABEL: @test19vec(
219 ; ALL-NEXT: [[Z:%.*]] = icmp slt <2 x i32> [[X:%.*]], <i32 12345, i32 2147483647>
220 ; ALL-NEXT: ret <2 x i1> [[Z]]
222 %c = sext <2 x i32> %X to <2 x i64>
223 %Z = icmp slt <2 x i64> %c, <i64 12345, i64 2147483647>
227 define <3 x i1> @test19vec2(<3 x i1> %X) {
228 ; ALL-LABEL: @test19vec2(
229 ; ALL-NEXT: [[CMPEQ:%.*]] = xor <3 x i1> [[X:%.*]], <i1 true, i1 true, i1 true>
230 ; ALL-NEXT: ret <3 x i1> [[CMPEQ]]
232 %sext = sext <3 x i1> %X to <3 x i32>
233 %cmpeq = icmp eq <3 x i32> %sext, zeroinitializer
237 define i1 @test20(i1 %B) {
238 ; ALL-LABEL: @test20(
239 ; ALL-NEXT: ret i1 false
241 %c = zext i1 %B to i32
242 %D = icmp slt i32 %c, -1
246 define i32 @test21(i32 %X) {
247 ; ALL-LABEL: @test21(
248 ; ALL-NEXT: [[SEXT:%.*]] = and i32 [[X:%.*]], 255
249 ; ALL-NEXT: ret i32 [[SEXT]]
251 %c1 = trunc i32 %X to i8
252 %c2 = sext i8 %c1 to i32
253 %RV = and i32 %c2, 255
257 define i32 @test22(i32 %X) {
258 ; ALL-LABEL: @test22(
259 ; ALL-NEXT: [[SEXT:%.*]] = shl i32 [[X:%.*]], 24
260 ; ALL-NEXT: ret i32 [[SEXT]]
262 %c1 = trunc i32 %X to i8
263 %c2 = sext i8 %c1 to i32
264 %RV = shl i32 %c2, 24
268 define i32 @test23(i32 %X) {
269 ; ALL-LABEL: @test23(
270 ; ALL-NEXT: [[C2:%.*]] = and i32 [[X:%.*]], 65535
271 ; ALL-NEXT: ret i32 [[C2]]
273 %c1 = trunc i32 %X to i16
274 %c2 = zext i16 %c1 to i32
278 define i1 @test24(i1 %C) {
279 ; ALL-LABEL: @test24(
280 ; ALL-NEXT: ret i1 true
282 %X = select i1 %C, i32 14, i32 1234
283 %c = icmp ne i32 %X, 0
287 define i32 @test26(float %F) {
288 ; ALL-LABEL: @test26(
289 ; ALL-NEXT: [[D:%.*]] = fptosi float [[F:%.*]] to i32
290 ; ALL-NEXT: ret i32 [[D]]
292 %c = fpext float %F to double
293 %D = fptosi double %c to i32
297 define [4 x float]* @test27([9 x [4 x float]]* %A) {
298 ; ALL-LABEL: @test27(
299 ; ALL-NEXT: [[C:%.*]] = getelementptr [9 x [4 x float]], [9 x [4 x float]]* [[A:%.*]], i64 0, i64 0
300 ; ALL-NEXT: ret [4 x float]* [[C]]
302 %c = bitcast [9 x [4 x float]]* %A to [4 x float]*
306 define float* @test28([4 x float]* %A) {
307 ; ALL-LABEL: @test28(
308 ; ALL-NEXT: [[C:%.*]] = getelementptr [4 x float], [4 x float]* [[A:%.*]], i64 0, i64 0
309 ; ALL-NEXT: ret float* [[C]]
311 %c = bitcast [4 x float]* %A to float*
315 define i32 @test29(i32 %c1, i32 %c2) {
316 ; ALL-LABEL: @test29(
317 ; ALL-NEXT: [[T21:%.*]] = or i32 [[C2:%.*]], [[C1:%.*]]
318 ; ALL-NEXT: [[T10:%.*]] = and i32 [[T21]], 255
319 ; ALL-NEXT: ret i32 [[T10]]
321 %t1 = trunc i32 %c1 to i8
322 %tmask = trunc i32 %c2 to i8
323 %t2 = or i8 %tmask, %t1
324 %t10 = zext i8 %t2 to i32
328 define i32 @test30(i32 %c1) {
329 ; ALL-LABEL: @test30(
330 ; ALL-NEXT: [[C3:%.*]] = and i32 [[C1:%.*]], 255
331 ; ALL-NEXT: [[C4:%.*]] = xor i32 [[C3]], 1
332 ; ALL-NEXT: ret i32 [[C4]]
334 %c2 = trunc i32 %c1 to i8
336 %c4 = zext i8 %c3 to i32
340 define i1 @test31(i64 %A) {
341 ; ALL-LABEL: @test31(
342 ; ALL-NEXT: [[C1:%.*]] = and i64 [[A:%.*]], 42
343 ; ALL-NEXT: [[D:%.*]] = icmp eq i64 [[C1]], 10
344 ; ALL-NEXT: ret i1 [[D]]
346 %B = trunc i64 %A to i32
348 %D = icmp eq i32 %C, 10
352 ; FIXME: Vectors should fold too...or not?
353 ; Does this depend on whether the source/dest types of the trunc are legal in the data layout?
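; As of these checks, the scalar form (@test31) is rewritten entirely in i64,
; while the vector form below still keeps the trunc to <2 x i32>.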
354 define <2 x i1> @test31vec(<2 x i64> %A) {
355 ; ALL-LABEL: @test31vec(
356 ; ALL-NEXT: [[B:%.*]] = trunc <2 x i64> [[A:%.*]] to <2 x i32>
357 ; ALL-NEXT: [[C:%.*]] = and <2 x i32> [[B]], <i32 42, i32 42>
358 ; ALL-NEXT: [[D:%.*]] = icmp eq <2 x i32> [[C]], <i32 10, i32 10>
359 ; ALL-NEXT: ret <2 x i1> [[D]]
361 %B = trunc <2 x i64> %A to <2 x i32>
362 %C = and <2 x i32> %B, <i32 42, i32 42>
363 %D = icmp eq <2 x i32> %C, <i32 10, i32 10>
367 ; Verify that the 'and' was narrowed, the zext was eliminated, and the compare was narrowed
368 ; even for vectors. Earlier folds should ensure that the icmp(and(zext)) pattern never occurs.
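; The scalar analogue folds the same way; a minimal sketch, assuming the
; constants fit in i8:
;   %b = zext i8 %x to i16
;   %c = and i16 %b, 42
;   %d = icmp eq i16 %c, 10
; should become an i8 'and' with 42 followed by an i8 compare against 10,
; with no zext left.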
370 define <2 x i1> @test32vec(<2 x i8> %A) {
371 ; ALL-LABEL: @test32vec(
372 ; ALL-NEXT: [[TMP1:%.*]] = and <2 x i8> [[A:%.*]], <i8 42, i8 42>
373 ; ALL-NEXT: [[D:%.*]] = icmp eq <2 x i8> [[TMP1]], <i8 10, i8 10>
374 ; ALL-NEXT: ret <2 x i1> [[D]]
376 %B = zext <2 x i8> %A to <2 x i16>
377 %C = and <2 x i16> %B, <i16 42, i16 42>
378 %D = icmp eq <2 x i16> %C, <i16 10, i16 10>
382 define i32 @test33(i32 %c1) {
383 ; ALL-LABEL: @test33(
384 ; ALL-NEXT: ret i32 [[C1:%.*]]
386 %x = bitcast i32 %c1 to float
387 %y = bitcast float %x to i32
391 define i16 @test34(i16 %a) {
392 ; ALL-LABEL: @test34(
393 ; ALL-NEXT: [[TMP1:%.*]] = lshr i16 [[A:%.*]], 8
394 ; ALL-NEXT: ret i16 [[TMP1]]
396 %c1 = zext i16 %a to i32
397 %t21 = lshr i32 %c1, 8
398 %c2 = trunc i32 %t21 to i16
402 define i16 @test35(i16 %a) {
403 ; ALL-LABEL: @test35(
404 ; ALL-NEXT: [[T2:%.*]] = lshr i16 [[A:%.*]], 8
405 ; ALL-NEXT: ret i16 [[T2]]
407 %c1 = bitcast i16 %a to i16
408 %t2 = lshr i16 %c1, 8
409 %c2 = bitcast i16 %t2 to i16
414 define i1 @test36(i32 %a) {
415 ; ALL-LABEL: @test36(
416 ; ALL-NEXT: [[D:%.*]] = icmp sgt i32 [[A:%.*]], -1
417 ; ALL-NEXT: ret i1 [[D]]
420 %c = trunc i32 %b to i8
421 %d = icmp eq i8 %c, 0
425 define <2 x i1> @test36vec(<2 x i32> %a) {
426 ; ALL-LABEL: @test36vec(
427 ; ALL-NEXT: [[D:%.*]] = icmp sgt <2 x i32> [[A:%.*]], <i32 -1, i32 -1>
428 ; ALL-NEXT: ret <2 x i1> [[D]]
430 %b = lshr <2 x i32> %a, <i32 31, i32 31>
431 %c = trunc <2 x i32> %b to <2 x i8>
432 %d = icmp eq <2 x i8> %c, zeroinitializer
436 define i1 @test37(i32 %a) {
437 ; ALL-LABEL: @test37(
438 ; ALL-NEXT: ret i1 false
442 %d = trunc i32 %c to i8
443 %e = icmp eq i8 %d, 11
447 define i64 @test38(i32 %a) {
448 ; ALL-LABEL: @test38(
449 ; ALL-NEXT: [[TMP1:%.*]] = icmp ne i32 [[A:%.*]], -2
450 ; ALL-NEXT: [[TMP2:%.*]] = zext i1 [[TMP1]] to i64
451 ; ALL-NEXT: ret i64 [[TMP2]]
453 %1 = icmp eq i32 %a, -2
454 %2 = zext i1 %1 to i8
456 %4 = zext i8 %3 to i64
460 define i16 @test39(i16 %a) {
461 ; ALL-LABEL: @test39(
462 ; ALL-NEXT: [[T32:%.*]] = call i16 @llvm.bswap.i16(i16 [[A:%.*]])
463 ; ALL-NEXT: ret i16 [[T32]]
465 %t = zext i16 %a to i32
466 %t21 = lshr i32 %t, 8
468 %t32 = or i32 %t21, %t5
469 %r = trunc i32 %t32 to i16
473 define i16 @test40(i16 %a) {
474 ; ALL-LABEL: @test40(
475 ; ALL-NEXT: [[T21:%.*]] = lshr i16 [[A:%.*]], 9
476 ; ALL-NEXT: [[T5:%.*]] = shl i16 [[A]], 8
477 ; ALL-NEXT: [[T32:%.*]] = or i16 [[T21]], [[T5]]
478 ; ALL-NEXT: ret i16 [[T32]]
480 %t = zext i16 %a to i32
481 %t21 = lshr i32 %t, 9
483 %t32 = or i32 %t21, %t5
484 %r = trunc i32 %t32 to i16
488 define <2 x i16> @test40vec(<2 x i16> %a) {
489 ; ALL-LABEL: @test40vec(
490 ; ALL-NEXT: [[T21:%.*]] = lshr <2 x i16> [[A:%.*]], <i16 9, i16 9>
491 ; ALL-NEXT: [[T5:%.*]] = shl <2 x i16> [[A]], <i16 8, i16 8>
492 ; ALL-NEXT: [[T32:%.*]] = or <2 x i16> [[T21]], [[T5]]
493 ; ALL-NEXT: ret <2 x i16> [[T32]]
495 %t = zext <2 x i16> %a to <2 x i32>
496 %t21 = lshr <2 x i32> %t, <i32 9, i32 9>
497 %t5 = shl <2 x i32> %t, <i32 8, i32 8>
498 %t32 = or <2 x i32> %t21, %t5
499 %r = trunc <2 x i32> %t32 to <2 x i16>
503 define <2 x i16> @test40vec_nonuniform(<2 x i16> %a) {
504 ; ALL-LABEL: @test40vec_nonuniform(
505 ; ALL-NEXT: [[T21:%.*]] = lshr <2 x i16> [[A:%.*]], <i16 9, i16 10>
506 ; ALL-NEXT: [[T5:%.*]] = shl <2 x i16> [[A]], <i16 8, i16 9>
507 ; ALL-NEXT: [[T32:%.*]] = or <2 x i16> [[T21]], [[T5]]
508 ; ALL-NEXT: ret <2 x i16> [[T32]]
510 %t = zext <2 x i16> %a to <2 x i32>
511 %t21 = lshr <2 x i32> %t, <i32 9, i32 10>
512 %t5 = shl <2 x i32> %t, <i32 8, i32 9>
513 %t32 = or <2 x i32> %t21, %t5
514 %r = trunc <2 x i32> %t32 to <2 x i16>
518 define <2 x i16> @test40vec_undef(<2 x i16> %a) {
519 ; ALL-LABEL: @test40vec_undef(
520 ; ALL-NEXT: [[T:%.*]] = zext <2 x i16> [[A:%.*]] to <2 x i32>
521 ; ALL-NEXT: [[T21:%.*]] = lshr <2 x i32> [[T]], <i32 9, i32 undef>
522 ; ALL-NEXT: [[T5:%.*]] = shl <2 x i32> [[T]], <i32 8, i32 undef>
523 ; ALL-NEXT: [[T32:%.*]] = or <2 x i32> [[T21]], [[T5]]
524 ; ALL-NEXT: [[R:%.*]] = trunc <2 x i32> [[T32]] to <2 x i16>
525 ; ALL-NEXT: ret <2 x i16> [[R]]
527 %t = zext <2 x i16> %a to <2 x i32>
528 %t21 = lshr <2 x i32> %t, <i32 9, i32 undef>
529 %t5 = shl <2 x i32> %t, <i32 8, i32 undef>
530 %t32 = or <2 x i32> %t21, %t5
531 %r = trunc <2 x i32> %t32 to <2 x i16>
536 define i32* @test41(i32* %t1) {
537 ; ALL-LABEL: @test41(
538 ; ALL-NEXT: ret i32* [[T1:%.*]]
540 %t64 = bitcast i32* %t1 to { i32 }*
541 %t65 = getelementptr { i32 }, { i32 }* %t64, i32 0, i32 0
545 define i32 addrspace(1)* @test41_addrspacecast_smaller(i32* %t1) {
546 ; ALL-LABEL: @test41_addrspacecast_smaller(
547 ; ALL-NEXT: [[T65:%.*]] = addrspacecast i32* [[T1:%.*]] to i32 addrspace(1)*
548 ; ALL-NEXT: ret i32 addrspace(1)* [[T65]]
550 %t64 = addrspacecast i32* %t1 to { i32 } addrspace(1)*
551 %t65 = getelementptr { i32 }, { i32 } addrspace(1)* %t64, i32 0, i32 0
552 ret i32 addrspace(1)* %t65
555 define i32* @test41_addrspacecast_larger(i32 addrspace(1)* %t1) {
556 ; ALL-LABEL: @test41_addrspacecast_larger(
557 ; ALL-NEXT: [[T65:%.*]] = addrspacecast i32 addrspace(1)* [[T1:%.*]] to i32*
558 ; ALL-NEXT: ret i32* [[T65]]
560 %t64 = addrspacecast i32 addrspace(1)* %t1 to { i32 }*
561 %t65 = getelementptr { i32 }, { i32 }* %t64, i32 0, i32 0
565 define i32 @test42(i32 %X) {
566 ; ALL-LABEL: @test42(
567 ; ALL-NEXT: [[Z:%.*]] = and i32 [[X:%.*]], 255
568 ; ALL-NEXT: ret i32 [[Z]]
570 %Y = trunc i32 %X to i8
571 %Z = zext i8 %Y to i32
576 define zeroext i64 @test43(i8 zeroext %on_off) {
577 ; ALL-LABEL: @test43(
578 ; ALL-NEXT: [[A:%.*]] = zext i8 [[ON_OFF:%.*]] to i64
579 ; ALL-NEXT: [[B:%.*]] = add nsw i64 [[A]], -1
580 ; ALL-NEXT: ret i64 [[B]]
582 %A = zext i8 %on_off to i32
584 %C = sext i32 %B to i64
585 ret i64 %C ;; Should be (add (zext i8 -> i64), -1)
588 define i64 @test44(i8 %T) {
589 ; ALL-LABEL: @test44(
590 ; ALL-NEXT: [[A:%.*]] = zext i8 [[T:%.*]] to i64
591 ; ALL-NEXT: [[B:%.*]] = or i64 [[A]], 1234
592 ; ALL-NEXT: ret i64 [[B]]
594 %A = zext i8 %T to i16
596 %C = zext i16 %B to i64
600 define i64 @test45(i8 %A, i64 %Q) {
601 ; ALL-LABEL: @test45(
602 ; ALL-NEXT: [[B:%.*]] = sext i8 [[A:%.*]] to i64
603 ; ALL-NEXT: [[C:%.*]] = or i64 [[B]], [[Q:%.*]]
604 ; ALL-NEXT: [[E:%.*]] = and i64 [[C]], 4294967295
605 ; ALL-NEXT: ret i64 [[E]]
607 %D = trunc i64 %Q to i32 ;; should be removed
608 %B = sext i8 %A to i32
610 %E = zext i32 %C to i64
615 define i64 @test46(i64 %A) {
616 ; ALL-LABEL: @test46(
617 ; ALL-NEXT: [[C:%.*]] = shl i64 [[A:%.*]], 8
618 ; ALL-NEXT: [[D:%.*]] = and i64 [[C]], 10752
619 ; ALL-NEXT: ret i64 [[D]]
621 %B = trunc i64 %A to i32
624 %E = zext i32 %D to i64
628 define <2 x i64> @test46vec(<2 x i64> %A) {
629 ; ALL-LABEL: @test46vec(
630 ; ALL-NEXT: [[B:%.*]] = trunc <2 x i64> [[A:%.*]] to <2 x i32>
631 ; ALL-NEXT: [[C:%.*]] = shl <2 x i32> [[B]], <i32 8, i32 8>
632 ; ALL-NEXT: [[D:%.*]] = and <2 x i32> [[C]], <i32 10752, i32 10752>
633 ; ALL-NEXT: [[E:%.*]] = zext <2 x i32> [[D]] to <2 x i64>
634 ; ALL-NEXT: ret <2 x i64> [[E]]
636 %B = trunc <2 x i64> %A to <2 x i32>
637 %C = and <2 x i32> %B, <i32 42, i32 42>
638 %D = shl <2 x i32> %C, <i32 8, i32 8>
639 %E = zext <2 x i32> %D to <2 x i64>
643 define i64 @test47(i8 %A) {
644 ; ALL-LABEL: @test47(
645 ; ALL-NEXT: [[TMP1:%.*]] = or i8 [[A:%.*]], 42
646 ; ALL-NEXT: [[C:%.*]] = sext i8 [[TMP1]] to i64
647 ; ALL-NEXT: [[E:%.*]] = and i64 [[C]], 4294967295
648 ; ALL-NEXT: ret i64 [[E]]
650 %B = sext i8 %A to i32
652 %E = zext i32 %C to i64
656 define i64 @test48(i8 %A1, i8 %a2) {
657 ; ALL-LABEL: @test48(
658 ; ALL-NEXT: [[Z2:%.*]] = zext i8 [[A1:%.*]] to i32
659 ; ALL-NEXT: [[C:%.*]] = shl nuw nsw i32 [[Z2]], 8
660 ; ALL-NEXT: [[D:%.*]] = or i32 [[C]], [[Z2]]
661 ; ALL-NEXT: [[E:%.*]] = zext i32 [[D]] to i64
662 ; ALL-NEXT: ret i64 [[E]]
664 %Z1 = zext i8 %a2 to i32
665 %Z2 = zext i8 %A1 to i32
668 %E = zext i32 %D to i64
672 define i64 @test49(i64 %A) {
673 ; ALL-LABEL: @test49(
674 ; ALL-NEXT: [[C:%.*]] = shl i64 [[A:%.*]], 32
675 ; ALL-NEXT: [[SEXT:%.*]] = ashr exact i64 [[C]], 32
676 ; ALL-NEXT: [[D:%.*]] = or i64 [[SEXT]], 1
677 ; ALL-NEXT: ret i64 [[D]]
679 %B = trunc i64 %A to i32
681 %D = sext i32 %C to i64
685 define i64 @test50(i64 %x) {
686 ; ALL-LABEL: @test50(
687 ; ALL-NEXT: [[TMP1:%.*]] = shl i64 [[X:%.*]], 30
688 ; ALL-NEXT: [[TMP2:%.*]] = add i64 [[TMP1]], -4294967296
689 ; ALL-NEXT: [[E:%.*]] = ashr i64 [[TMP2]], 32
690 ; ALL-NEXT: ret i64 [[E]]
693 %B = trunc i64 %a to i32
695 %E = sext i32 %D to i64
699 define i64 @test51(i64 %A, i1 %cond) {
700 ; ALL-LABEL: @test51(
701 ; ALL-NEXT: [[C:%.*]] = and i64 [[A:%.*]], 4294967294
702 ; ALL-NEXT: [[NOT_COND:%.*]] = xor i1 [[COND:%.*]], true
703 ; ALL-NEXT: [[MASKSEL:%.*]] = zext i1 [[NOT_COND]] to i64
704 ; ALL-NEXT: [[E:%.*]] = or i64 [[C]], [[MASKSEL]]
705 ; ALL-NEXT: [[SEXT:%.*]] = shl nuw i64 [[E]], 32
706 ; ALL-NEXT: [[F:%.*]] = ashr exact i64 [[SEXT]], 32
707 ; ALL-NEXT: ret i64 [[F]]
709 %B = trunc i64 %A to i32
712 %E = select i1 %cond, i32 %C, i32 %D
713 %F = sext i32 %E to i64
717 define i32 @test52(i64 %A) {
718 ; ALL-LABEL: @test52(
719 ; ALL-NEXT: [[B:%.*]] = trunc i64 [[A:%.*]] to i32
720 ; ALL-NEXT: [[C:%.*]] = and i32 [[B]], 7224
721 ; ALL-NEXT: [[D:%.*]] = or i32 [[C]], 32962
722 ; ALL-NEXT: ret i32 [[D]]
724 %B = trunc i64 %A to i16
725 %C = or i16 %B, -32574
726 %D = and i16 %C, -25350
727 %E = zext i16 %D to i32
731 define i64 @test53(i32 %A) {
732 ; ALL-LABEL: @test53(
733 ; ALL-NEXT: [[TMP1:%.*]] = and i32 [[A:%.*]], 7224
734 ; ALL-NEXT: [[TMP2:%.*]] = or i32 [[TMP1]], 32962
735 ; ALL-NEXT: [[D:%.*]] = zext i32 [[TMP2]] to i64
736 ; ALL-NEXT: ret i64 [[D]]
738 %B = trunc i32 %A to i16
739 %C = or i16 %B, -32574
740 %D = and i16 %C, -25350
741 %E = zext i16 %D to i64
745 define i32 @test54(i64 %A) {
746 ; ALL-LABEL: @test54(
747 ; ALL-NEXT: [[B:%.*]] = trunc i64 [[A:%.*]] to i32
748 ; ALL-NEXT: [[C:%.*]] = and i32 [[B]], 7224
749 ; ALL-NEXT: [[D:%.*]] = or i32 [[C]], -32574
750 ; ALL-NEXT: ret i32 [[D]]
752 %B = trunc i64 %A to i16
753 %C = or i16 %B, -32574
754 %D = and i16 %C, -25350
755 %E = sext i16 %D to i32
759 define i64 @test55(i32 %A) {
760 ; ALL-LABEL: @test55(
761 ; ALL-NEXT: [[TMP1:%.*]] = and i32 [[A:%.*]], 7224
762 ; ALL-NEXT: [[C:%.*]] = zext i32 [[TMP1]] to i64
763 ; ALL-NEXT: [[D:%.*]] = or i64 [[C]], -32574
764 ; ALL-NEXT: ret i64 [[D]]
766 %B = trunc i32 %A to i16
767 %C = or i16 %B, -32574
768 %D = and i16 %C, -25350
769 %E = sext i16 %D to i64
773 define i64 @test56(i16 %A) {
774 ; ALL-LABEL: @test56(
775 ; ALL-NEXT: [[P353:%.*]] = sext i16 [[A:%.*]] to i64
776 ; ALL-NEXT: [[P354:%.*]] = lshr i64 [[P353]], 5
777 ; ALL-NEXT: [[P355:%.*]] = and i64 [[P354]], 134217727
778 ; ALL-NEXT: ret i64 [[P355]]
780 %p353 = sext i16 %A to i32
781 %p354 = lshr i32 %p353, 5
782 %p355 = zext i32 %p354 to i64
786 define <2 x i64> @test56vec(<2 x i16> %A) {
787 ; ALL-LABEL: @test56vec(
788 ; ALL-NEXT: [[P353:%.*]] = sext <2 x i16> [[A:%.*]] to <2 x i32>
789 ; ALL-NEXT: [[P354:%.*]] = lshr <2 x i32> [[P353]], <i32 5, i32 5>
790 ; ALL-NEXT: [[P355:%.*]] = zext <2 x i32> [[P354]] to <2 x i64>
791 ; ALL-NEXT: ret <2 x i64> [[P355]]
793 %p353 = sext <2 x i16> %A to <2 x i32>
794 %p354 = lshr <2 x i32> %p353, <i32 5, i32 5>
795 %p355 = zext <2 x i32> %p354 to <2 x i64>
799 define i64 @test57(i64 %A) {
800 ; ALL-LABEL: @test57(
801 ; ALL-NEXT: [[C:%.*]] = lshr i64 [[A:%.*]], 8
802 ; ALL-NEXT: [[E:%.*]] = and i64 [[C]], 16777215
803 ; ALL-NEXT: ret i64 [[E]]
805 %B = trunc i64 %A to i32
807 %E = zext i32 %C to i64
811 define <2 x i64> @test57vec(<2 x i64> %A) {
812 ; ALL-LABEL: @test57vec(
813 ; ALL-NEXT: [[B:%.*]] = trunc <2 x i64> [[A:%.*]] to <2 x i32>
814 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i32> [[B]], <i32 8, i32 8>
815 ; ALL-NEXT: [[E:%.*]] = zext <2 x i32> [[C]] to <2 x i64>
816 ; ALL-NEXT: ret <2 x i64> [[E]]
818 %B = trunc <2 x i64> %A to <2 x i32>
819 %C = lshr <2 x i32> %B, <i32 8, i32 8>
820 %E = zext <2 x i32> %C to <2 x i64>
824 define i64 @test58(i64 %A) {
825 ; ALL-LABEL: @test58(
826 ; ALL-NEXT: [[C:%.*]] = lshr i64 [[A:%.*]], 8
827 ; ALL-NEXT: [[D:%.*]] = and i64 [[C]], 16777087
828 ; ALL-NEXT: [[E:%.*]] = or i64 [[D]], 128
829 ; ALL-NEXT: ret i64 [[E]]
831 %B = trunc i64 %A to i32
834 %E = zext i32 %D to i64
839 define i64 @test59(i8 %A, i8 %B) {
840 ; ALL-LABEL: @test59(
841 ; ALL-NEXT: [[C:%.*]] = zext i8 [[A:%.*]] to i64
842 ; ALL-NEXT: [[D:%.*]] = shl nuw nsw i64 [[C]], 4
843 ; ALL-NEXT: [[E:%.*]] = and i64 [[D]], 48
844 ; ALL-NEXT: [[TMP1:%.*]] = lshr i8 [[B:%.*]], 4
845 ; ALL-NEXT: [[G:%.*]] = zext i8 [[TMP1]] to i64
846 ; ALL-NEXT: [[H:%.*]] = or i64 [[E]], [[G]]
847 ; ALL-NEXT: ret i64 [[H]]
849 %C = zext i8 %A to i32
852 %F = zext i8 %B to i32
855 %I = zext i32 %H to i64
859 define <3 x i32> @test60(<4 x i32> %call4) {
861 ; BE-NEXT: [[P10:%.*]] = shufflevector <4 x i32> [[CALL4:%.*]], <4 x i32> undef, <3 x i32> <i32 1, i32 2, i32 3>
862 ; BE-NEXT: ret <3 x i32> [[P10]]
865 ; LE-NEXT: [[P10:%.*]] = shufflevector <4 x i32> [[CALL4:%.*]], <4 x i32> undef, <3 x i32> <i32 0, i32 1, i32 2>
866 ; LE-NEXT: ret <3 x i32> [[P10]]
868 %p11 = bitcast <4 x i32> %call4 to i128
869 %p9 = trunc i128 %p11 to i96
870 %p10 = bitcast i96 %p9 to <3 x i32>
875 define <4 x i32> @test61(<3 x i32> %call4) {
877 ; BE-NEXT: [[P10:%.*]] = shufflevector <3 x i32> [[CALL4:%.*]], <3 x i32> <i32 0, i32 poison, i32 poison>, <4 x i32> <i32 3, i32 0, i32 1, i32 2>
878 ; BE-NEXT: ret <4 x i32> [[P10]]
881 ; LE-NEXT: [[P10:%.*]] = shufflevector <3 x i32> [[CALL4:%.*]], <3 x i32> <i32 0, i32 poison, i32 poison>, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
882 ; LE-NEXT: ret <4 x i32> [[P10]]
884 %p11 = bitcast <3 x i32> %call4 to i96
885 %p9 = zext i96 %p11 to i128
886 %p10 = bitcast i128 %p9 to <4 x i32>
890 define <4 x i32> @test62(<3 x float> %call4) {
892 ; BE-NEXT: [[TMP1:%.*]] = bitcast <3 x float> [[CALL4:%.*]] to <3 x i32>
893 ; BE-NEXT: [[P10:%.*]] = shufflevector <3 x i32> [[TMP1]], <3 x i32> <i32 0, i32 poison, i32 poison>, <4 x i32> <i32 3, i32 0, i32 1, i32 2>
894 ; BE-NEXT: ret <4 x i32> [[P10]]
897 ; LE-NEXT: [[TMP1:%.*]] = bitcast <3 x float> [[CALL4:%.*]] to <3 x i32>
898 ; LE-NEXT: [[P10:%.*]] = shufflevector <3 x i32> [[TMP1]], <3 x i32> <i32 0, i32 poison, i32 poison>, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
899 ; LE-NEXT: ret <4 x i32> [[P10]]
901 %p11 = bitcast <3 x float> %call4 to i96
902 %p9 = zext i96 %p11 to i128
903 %p10 = bitcast i128 %p9 to <4 x i32>
907 ; PR7311 - Don't create invalid IR on scalar->vector cast.
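; Presumably the point is that the i64 -> <2 x i32> bitcast below has to stay:
; uitofp has no scalar-to-vector form, so trying to fold the bitcast into it
; would produce malformed IR.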
908 define <2 x float> @test63(i64 %t8) {
909 ; ALL-LABEL: @test63(
910 ; ALL-NEXT: [[A:%.*]] = bitcast i64 [[T8:%.*]] to <2 x i32>
911 ; ALL-NEXT: [[VCVT_I:%.*]] = uitofp <2 x i32> [[A]] to <2 x float>
912 ; ALL-NEXT: ret <2 x float> [[VCVT_I]]
914 %a = bitcast i64 %t8 to <2 x i32>
915 %vcvt.i = uitofp <2 x i32> %a to <2 x float>
916 ret <2 x float> %vcvt.i
919 define <4 x float> @test64(<4 x float> %c) {
920 ; ALL-LABEL: @test64(
921 ; ALL-NEXT: ret <4 x float> [[C:%.*]]
923 %t0 = bitcast <4 x float> %c to <4 x i32>
924 %t1 = bitcast <4 x i32> %t0 to <4 x float>
928 define <4 x float> @test65(<4 x float> %c) {
929 ; ALL-LABEL: @test65(
930 ; ALL-NEXT: ret <4 x float> [[C:%.*]]
932 %t0 = bitcast <4 x float> %c to <2 x double>
933 %t1 = bitcast <2 x double> %t0 to <4 x float>
937 define <2 x float> @test66(<2 x float> %c) {
938 ; ALL-LABEL: @test66(
939 ; ALL-NEXT: ret <2 x float> [[C:%.*]]
941 %t0 = bitcast <2 x float> %c to double
942 %t1 = bitcast double %t0 to <2 x float>
946 define float @test2c() {
947 ; ALL-LABEL: @test2c(
948 ; ALL-NEXT: ret float -1.000000e+00
950 ret float extractelement (<2 x float> bitcast (double bitcast (<2 x float> <float -1.000000e+00, float -1.000000e+00> to double) to <2 x float>), i32 0)
953 define i64 @test_mmx(<2 x i32> %x) {
954 ; ALL-LABEL: @test_mmx(
955 ; ALL-NEXT: [[C:%.*]] = bitcast <2 x i32> [[X:%.*]] to i64
956 ; ALL-NEXT: ret i64 [[C]]
958 %A = bitcast <2 x i32> %x to x86_mmx
959 %B = bitcast x86_mmx %A to <2 x i32>
960 %C = bitcast <2 x i32> %B to i64
964 define i64 @test_mmx_const(<2 x i32> %c) {
965 ; ALL-LABEL: @test_mmx_const(
966 ; ALL-NEXT: ret i64 0
968 %A = bitcast <2 x i32> zeroinitializer to x86_mmx
969 %B = bitcast x86_mmx %A to <2 x i32>
970 %C = bitcast <2 x i32> %B to i64
975 define i1 @test67(i1 %a, i32 %b) {
976 ; ALL-LABEL: @test67(
977 ; ALL-NEXT: ret i1 false
979 %t2 = zext i1 %a to i32
980 %conv6 = xor i32 %t2, 1
981 %and = and i32 %b, %conv6
982 %sext = shl nuw nsw i32 %and, 24
983 %neg.i = xor i32 %sext, -16777216
984 %conv.i.i = ashr exact i32 %neg.i, 24
985 %trunc = trunc i32 %conv.i.i to i8
986 %tobool.i = icmp eq i8 %trunc, 0
990 %s = type { i32, i32, i16 }
992 define %s @test68(%s *%p, i64 %i) {
993 ; ALL-LABEL: @test68(
994 ; ALL-NEXT: [[PP1:%.*]] = getelementptr [[S:%.*]], %s* [[P:%.*]], i64 [[I:%.*]]
995 ; ALL-NEXT: [[L:%.*]] = load [[S]], %s* [[PP1]], align 4
996 ; ALL-NEXT: ret [[S]] [[L]]
999 %q = bitcast %s* %p to i8*
1000 %pp = getelementptr inbounds i8, i8* %q, i64 %o
1001 %r = bitcast i8* %pp to %s*
1002 %l = load %s, %s* %r
1006 ; addrspacecasts should be eliminated.
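; The round trip through i8 addrspace(2)* below is expected to fold to a single
; GEP on the original %s*, plus a trailing addrspacecast only when the result
; really lives in a different address space (as in @test68_addrspacecast_2).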
1007 define %s @test68_addrspacecast(%s* %p, i64 %i) {
1008 ; ALL-LABEL: @test68_addrspacecast(
1009 ; ALL-NEXT: [[PP1:%.*]] = getelementptr [[S:%.*]], %s* [[P:%.*]], i64 [[I:%.*]]
1010 ; ALL-NEXT: [[L:%.*]] = load [[S]], %s* [[PP1]], align 4
1011 ; ALL-NEXT: ret [[S]] [[L]]
1014 %q = addrspacecast %s* %p to i8 addrspace(2)*
1015 %pp = getelementptr inbounds i8, i8 addrspace(2)* %q, i64 %o
1016 %r = addrspacecast i8 addrspace(2)* %pp to %s*
1017 %l = load %s, %s* %r
1021 define %s @test68_addrspacecast_2(%s* %p, i64 %i) {
1022 ; ALL-LABEL: @test68_addrspacecast_2(
1023 ; ALL-NEXT: [[PP1:%.*]] = getelementptr [[S:%.*]], %s* [[P:%.*]], i64 [[I:%.*]]
1024 ; ALL-NEXT: [[R:%.*]] = addrspacecast %s* [[PP1]] to [[S]] addrspace(1)*
1025 ; ALL-NEXT: [[L:%.*]] = load [[S]], [[S]] addrspace(1)* [[R]], align 4
1026 ; ALL-NEXT: ret [[S]] [[L]]
1029 %q = addrspacecast %s* %p to i8 addrspace(2)*
1030 %pp = getelementptr inbounds i8, i8 addrspace(2)* %q, i64 %o
1031 %r = addrspacecast i8 addrspace(2)* %pp to %s addrspace(1)*
1032 %l = load %s, %s addrspace(1)* %r
1036 define %s @test68_as1(%s addrspace(1)* %p, i32 %i) {
1037 ; ALL-LABEL: @test68_as1(
1038 ; ALL-NEXT: [[PP1:%.*]] = getelementptr [[S:%.*]], [[S]] addrspace(1)* [[P:%.*]], i32 [[I:%.*]]
1039 ; ALL-NEXT: [[L:%.*]] = load [[S]], [[S]] addrspace(1)* [[PP1]], align 4
1040 ; ALL-NEXT: ret [[S]] [[L]]
1043 %q = bitcast %s addrspace(1)* %p to i8 addrspace(1)*
1044 %pp = getelementptr inbounds i8, i8 addrspace(1)* %q, i32 %o
1045 %r = bitcast i8 addrspace(1)* %pp to %s addrspace(1)*
1046 %l = load %s, %s addrspace(1)* %r
1050 define double @test69(double *%p, i64 %i) {
1051 ; ALL-LABEL: @test69(
1052 ; ALL-NEXT: [[PP1:%.*]] = getelementptr inbounds double, double* [[P:%.*]], i64 [[I:%.*]]
1053 ; ALL-NEXT: [[L:%.*]] = load double, double* [[PP1]], align 8
1054 ; ALL-NEXT: ret double [[L]]
1056 %o = shl nsw i64 %i, 3
1057 %q = bitcast double* %p to i8*
1058 %pp = getelementptr inbounds i8, i8* %q, i64 %o
1059 %r = bitcast i8* %pp to double*
1060 %l = load double, double* %r
1064 define %s @test70(%s *%p, i64 %i) {
1065 ; ALL-LABEL: @test70(
1066 ; ALL-NEXT: [[O:%.*]] = mul nsw i64 [[I:%.*]], 3
1067 ; ALL-NEXT: [[PP1:%.*]] = getelementptr inbounds [[S:%.*]], %s* [[P:%.*]], i64 [[O]]
1068 ; ALL-NEXT: [[L:%.*]] = load [[S]], %s* [[PP1]], align 4
1069 ; ALL-NEXT: ret [[S]] [[L]]
1071 %o = mul nsw i64 %i, 36
1072 %q = bitcast %s* %p to i8*
1073 %pp = getelementptr inbounds i8, i8* %q, i64 %o
1074 %r = bitcast i8* %pp to %s*
1075 %l = load %s, %s* %r
1079 define double @test71(double *%p, i64 %i) {
1080 ; ALL-LABEL: @test71(
1081 ; ALL-NEXT: [[O:%.*]] = shl i64 [[I:%.*]], 2
1082 ; ALL-NEXT: [[PP1:%.*]] = getelementptr double, double* [[P:%.*]], i64 [[O]]
1083 ; ALL-NEXT: [[L:%.*]] = load double, double* [[PP1]], align 8
1084 ; ALL-NEXT: ret double [[L]]
1087 %q = bitcast double* %p to i8*
1088 %pp = getelementptr i8, i8* %q, i64 %o
1089 %r = bitcast i8* %pp to double*
1090 %l = load double, double* %r
1094 define double @test72(double *%p, i32 %i) {
1095 ; ALL-LABEL: @test72(
1096 ; ALL-NEXT: [[O:%.*]] = sext i32 [[I:%.*]] to i64
1097 ; ALL-NEXT: [[PP1:%.*]] = getelementptr inbounds double, double* [[P:%.*]], i64 [[O]]
1098 ; ALL-NEXT: [[L:%.*]] = load double, double* [[PP1]], align 8
1099 ; ALL-NEXT: ret double [[L]]
1101 %so = shl nsw i32 %i, 3
1102 %o = sext i32 %so to i64
1103 %q = bitcast double* %p to i8*
1104 %pp = getelementptr inbounds i8, i8* %q, i64 %o
1105 %r = bitcast i8* %pp to double*
1106 %l = load double, double* %r
1110 define double @test73(double *%p, i128 %i) {
1111 ; ALL-LABEL: @test73(
1112 ; ALL-NEXT: [[I_TR:%.*]] = trunc i128 [[I:%.*]] to i64
1113 ; ALL-NEXT: [[PP1:%.*]] = getelementptr double, double* [[P:%.*]], i64 [[I_TR]]
1114 ; ALL-NEXT: [[L:%.*]] = load double, double* [[PP1]], align 8
1115 ; ALL-NEXT: ret double [[L]]
1117 %lo = shl nsw i128 %i, 3
1118 %o = trunc i128 %lo to i64
1119 %q = bitcast double* %p to i8*
1120 %pp = getelementptr inbounds i8, i8* %q, i64 %o
1121 %r = bitcast i8* %pp to double*
1122 %l = load double, double* %r
1126 define double @test74(double *%p, i64 %i) {
1127 ; ALL-LABEL: @test74(
1128 ; ALL-NEXT: [[PP1:%.*]] = getelementptr inbounds double, double* [[P:%.*]], i64 [[I:%.*]]
1129 ; ALL-NEXT: [[L:%.*]] = load double, double* [[PP1]], align 8
1130 ; ALL-NEXT: ret double [[L]]
1132 %q = bitcast double* %p to i64*
1133 %pp = getelementptr inbounds i64, i64* %q, i64 %i
1134 %r = bitcast i64* %pp to double*
1135 %l = load double, double* %r
1139 define i32* @test75(i32* %p, i32 %x) {
1140 ; ALL-LABEL: @test75(
1141 ; ALL-NEXT: [[Y:%.*]] = shl i32 [[X:%.*]], 3
1142 ; ALL-NEXT: [[Z:%.*]] = sext i32 [[Y]] to i64
1143 ; ALL-NEXT: [[Q:%.*]] = bitcast i32* [[P:%.*]] to i8*
1144 ; ALL-NEXT: [[R:%.*]] = getelementptr i8, i8* [[Q]], i64 [[Z]]
1145 ; ALL-NEXT: [[S:%.*]] = bitcast i8* [[R]] to i32*
1146 ; ALL-NEXT: ret i32* [[S]]
1149 %z = sext i32 %y to i64
1150 %q = bitcast i32* %p to i8*
1151 %r = getelementptr i8, i8* %q, i64 %z
1152 %s = bitcast i8* %r to i32*
1156 define %s @test76(%s *%p, i64 %i, i64 %j) {
1157 ; ALL-LABEL: @test76(
1158 ; ALL-NEXT: [[O2:%.*]] = mul i64 [[I:%.*]], [[J:%.*]]
1159 ; ALL-NEXT: [[PP1:%.*]] = getelementptr [[S:%.*]], %s* [[P:%.*]], i64 [[O2]]
1160 ; ALL-NEXT: [[L:%.*]] = load [[S]], %s* [[PP1]], align 4
1161 ; ALL-NEXT: ret [[S]] [[L]]
1164 %o2 = mul nsw i64 %o, %j
1165 %q = bitcast %s* %p to i8*
1166 %pp = getelementptr inbounds i8, i8* %q, i64 %o2
1167 %r = bitcast i8* %pp to %s*
1168 %l = load %s, %s* %r
1172 define %s @test77(%s *%p, i64 %i, i64 %j) {
1173 ; ALL-LABEL: @test77(
1174 ; ALL-NEXT: [[O:%.*]] = mul nsw i64 [[I:%.*]], 3
1175 ; ALL-NEXT: [[O2:%.*]] = mul nsw i64 [[O]], [[J:%.*]]
1176 ; ALL-NEXT: [[PP1:%.*]] = getelementptr inbounds [[S:%.*]], %s* [[P:%.*]], i64 [[O2]]
1177 ; ALL-NEXT: [[L:%.*]] = load [[S]], %s* [[PP1]], align 4
1178 ; ALL-NEXT: ret [[S]] [[L]]
1180 %o = mul nsw i64 %i, 36
1181 %o2 = mul nsw i64 %o, %j
1182 %q = bitcast %s* %p to i8*
1183 %pp = getelementptr inbounds i8, i8* %q, i64 %o2
1184 %r = bitcast i8* %pp to %s*
1185 %l = load %s, %s* %r
1189 define %s @test78(%s *%p, i64 %i, i64 %j, i32 %k, i32 %l, i128 %m, i128 %n) {
1190 ; ALL-LABEL: @test78(
1191 ; ALL-NEXT: [[A:%.*]] = mul nsw i32 [[K:%.*]], 3
1192 ; ALL-NEXT: [[B:%.*]] = mul nsw i32 [[A]], [[L:%.*]]
1193 ; ALL-NEXT: [[C:%.*]] = sext i32 [[B]] to i128
1194 ; ALL-NEXT: [[D:%.*]] = mul nsw i128 [[C]], [[M:%.*]]
1195 ; ALL-NEXT: [[E:%.*]] = mul i128 [[D]], [[N:%.*]]
1196 ; ALL-NEXT: [[F:%.*]] = trunc i128 [[E]] to i64
1197 ; ALL-NEXT: [[G:%.*]] = mul i64 [[F]], [[I:%.*]]
1198 ; ALL-NEXT: [[H:%.*]] = mul i64 [[G]], [[J:%.*]]
1199 ; ALL-NEXT: [[PP1:%.*]] = getelementptr [[S:%.*]], %s* [[P:%.*]], i64 [[H]]
1200 ; ALL-NEXT: [[LOAD:%.*]] = load [[S]], %s* [[PP1]], align 4
1201 ; ALL-NEXT: ret [[S]] [[LOAD]]
1203 %a = mul nsw i32 %k, 36
1204 %b = mul nsw i32 %a, %l
1205 %c = sext i32 %b to i128
1206 %d = mul nsw i128 %c, %m
1207 %e = mul i128 %d, %n
1208 %f = trunc i128 %e to i64
1209 %g = mul nsw i64 %f, %i
1210 %h = mul nsw i64 %g, %j
1211 %q = bitcast %s* %p to i8*
1212 %pp = getelementptr inbounds i8, i8* %q, i64 %h
1213 %r = bitcast i8* %pp to %s*
1214 %load = load %s, %s* %r
1218 define %s @test79(%s *%p, i64 %i, i32 %j) {
1219 ; ALL-LABEL: @test79(
1220 ; ALL-NEXT: [[TMP1:%.*]] = trunc i64 [[I:%.*]] to i32
1221 ; ALL-NEXT: [[B:%.*]] = mul i32 [[TMP1]], 36
1222 ; ALL-NEXT: [[C:%.*]] = mul i32 [[B]], [[J:%.*]]
1223 ; ALL-NEXT: [[Q:%.*]] = bitcast %s* [[P:%.*]] to i8*
1224 ; ALL-NEXT: [[TMP2:%.*]] = sext i32 [[C]] to i64
1225 ; ALL-NEXT: [[PP:%.*]] = getelementptr inbounds i8, i8* [[Q]], i64 [[TMP2]]
1226 ; ALL-NEXT: [[R:%.*]] = bitcast i8* [[PP]] to %s*
1227 ; ALL-NEXT: [[L:%.*]] = load [[S:%.*]], %s* [[R]], align 4
1228 ; ALL-NEXT: ret [[S]] [[L]]
1230 %a = mul nsw i64 %i, 36
1231 %b = trunc i64 %a to i32
1233 %q = bitcast %s* %p to i8*
1234 %pp = getelementptr inbounds i8, i8* %q, i32 %c
1235 %r = bitcast i8* %pp to %s*
1236 %l = load %s, %s* %r
1240 define double @test80([100 x double]* %p, i32 %i) {
1241 ; ALL-LABEL: @test80(
1242 ; ALL-NEXT: [[TMP1:%.*]] = sext i32 [[I:%.*]] to i64
1243 ; ALL-NEXT: [[PP1:%.*]] = getelementptr [100 x double], [100 x double]* [[P:%.*]], i64 0, i64 [[TMP1]]
1244 ; ALL-NEXT: [[L:%.*]] = load double, double* [[PP1]], align 8
1245 ; ALL-NEXT: ret double [[L]]
1247 %t = shl nsw i32 %i, 3
1248 %q = bitcast [100 x double]* %p to i8*
1249 %pp = getelementptr i8, i8* %q, i32 %t
1250 %r = bitcast i8* %pp to double*
1251 %l = load double, double* %r
1255 define double @test80_addrspacecast([100 x double] addrspace(1)* %p, i32 %i) {
1256 ; ALL-LABEL: @test80_addrspacecast(
1257 ; ALL-NEXT: [[PP1:%.*]] = getelementptr [100 x double], [100 x double] addrspace(1)* [[P:%.*]], i32 0, i32 [[I:%.*]]
1258 ; ALL-NEXT: [[L:%.*]] = load double, double addrspace(1)* [[PP1]], align 8
1259 ; ALL-NEXT: ret double [[L]]
1261 %t = shl nsw i32 %i, 3
1262 %q = addrspacecast [100 x double] addrspace(1)* %p to i8 addrspace(2)*
1263 %pp = getelementptr i8, i8 addrspace(2)* %q, i32 %t
1264 %r = addrspacecast i8 addrspace(2)* %pp to double addrspace(1)*
1265 %l = load double, double addrspace(1)* %r
1269 define double @test80_addrspacecast_2([100 x double] addrspace(1)* %p, i32 %i) {
1270 ; ALL-LABEL: @test80_addrspacecast_2(
1271 ; ALL-NEXT: [[PP1:%.*]] = getelementptr [100 x double], [100 x double] addrspace(1)* [[P:%.*]], i32 0, i32 [[I:%.*]]
1272 ; ALL-NEXT: [[R:%.*]] = addrspacecast double addrspace(1)* [[PP1]] to double addrspace(3)*
1273 ; ALL-NEXT: [[L:%.*]] = load double, double addrspace(3)* [[R]], align 8
1274 ; ALL-NEXT: ret double [[L]]
1276 %t = shl nsw i32 %i, 3
1277 %q = addrspacecast [100 x double] addrspace(1)* %p to i8 addrspace(2)*
1278 %pp = getelementptr i8, i8 addrspace(2)* %q, i32 %t
1279 %r = addrspacecast i8 addrspace(2)* %pp to double addrspace(3)*
1280 %l = load double, double addrspace(3)* %r
1284 define double @test80_as1([100 x double] addrspace(1)* %p, i16 %i) {
1285 ; ALL-LABEL: @test80_as1(
1286 ; ALL-NEXT: [[TMP1:%.*]] = sext i16 [[I:%.*]] to i32
1287 ; ALL-NEXT: [[PP1:%.*]] = getelementptr [100 x double], [100 x double] addrspace(1)* [[P:%.*]], i32 0, i32 [[TMP1]]
1288 ; ALL-NEXT: [[L:%.*]] = load double, double addrspace(1)* [[PP1]], align 8
1289 ; ALL-NEXT: ret double [[L]]
1291 %t = shl nsw i16 %i, 3
1292 %q = bitcast [100 x double] addrspace(1)* %p to i8 addrspace(1)*
1293 %pp = getelementptr i8, i8 addrspace(1)* %q, i16 %t
1294 %r = bitcast i8 addrspace(1)* %pp to double addrspace(1)*
1295 %l = load double, double addrspace(1)* %r
1299 define double @test81(double *%p, float %f) {
1300 ; ALL-LABEL: @test81(
1301 ; ALL-NEXT: [[I:%.*]] = fptosi float [[F:%.*]] to i64
1302 ; ALL-NEXT: [[Q:%.*]] = bitcast double* [[P:%.*]] to i8*
1303 ; ALL-NEXT: [[PP:%.*]] = getelementptr i8, i8* [[Q]], i64 [[I]]
1304 ; ALL-NEXT: [[R:%.*]] = bitcast i8* [[PP]] to double*
1305 ; ALL-NEXT: [[L:%.*]] = load double, double* [[R]], align 8
1306 ; ALL-NEXT: ret double [[L]]
1308 %i = fptosi float %f to i64
1309 %q = bitcast double* %p to i8*
1310 %pp = getelementptr i8, i8* %q, i64 %i
1311 %r = bitcast i8* %pp to double*
1312 %l = load double, double* %r
1316 define i64 @test82(i64 %A) {
1317 ; ALL-LABEL: @test82(
1318 ; ALL-NEXT: [[TMP1:%.*]] = shl i64 [[A:%.*]], 1
1319 ; ALL-NEXT: [[D:%.*]] = and i64 [[TMP1]], 4294966784
1320 ; ALL-NEXT: ret i64 [[D]]
1322 %B = trunc i64 %A to i32
1325 %E = zext i32 %D to i64
1330 define i64 @test83(i16 %a, i64 %k) {
1331 ; ALL-LABEL: @test83(
1332 ; ALL-NEXT: [[CONV:%.*]] = sext i16 [[A:%.*]] to i32
1333 ; ALL-NEXT: [[TMP1:%.*]] = trunc i64 [[K:%.*]] to i32
1334 ; ALL-NEXT: [[SH_PROM:%.*]] = add i32 [[TMP1]], -1
1335 ; ALL-NEXT: [[SHL:%.*]] = shl i32 [[CONV]], [[SH_PROM]]
1336 ; ALL-NEXT: [[SH_PROM1:%.*]] = zext i32 [[SHL]] to i64
1337 ; ALL-NEXT: ret i64 [[SH_PROM1]]
1339 %conv = sext i16 %a to i32
1340 %sub = add nsw i64 %k, -1
1341 %sh_prom = trunc i64 %sub to i32
1342 %shl = shl i32 %conv, %sh_prom
1343 %sh_prom1 = zext i32 %shl to i64
1347 define i8 @test84(i32 %a) {
1348 ; ALL-LABEL: @test84(
1349 ; ALL-NEXT: [[ADD:%.*]] = add i32 [[A:%.*]], 2130706432
1350 ; ALL-NEXT: [[SHR:%.*]] = lshr exact i32 [[ADD]], 23
1351 ; ALL-NEXT: [[TRUNC:%.*]] = trunc i32 [[SHR]] to i8
1352 ; ALL-NEXT: ret i8 [[TRUNC]]
1354 %add = add nsw i32 %a, -16777216
1355 %shr = lshr exact i32 %add, 23
1356 %trunc = trunc i32 %shr to i8
1360 define i8 @test85(i32 %a) {
1361 ; ALL-LABEL: @test85(
1362 ; ALL-NEXT: [[ADD:%.*]] = add i32 [[A:%.*]], 2130706432
1363 ; ALL-NEXT: [[SHR:%.*]] = lshr exact i32 [[ADD]], 23
1364 ; ALL-NEXT: [[TRUNC:%.*]] = trunc i32 [[SHR]] to i8
1365 ; ALL-NEXT: ret i8 [[TRUNC]]
1367 %add = add nuw i32 %a, -16777216
1368 %shr = lshr exact i32 %add, 23
1369 %trunc = trunc i32 %shr to i8
1373 define i16 @test86(i16 %v) {
1374 ; ALL-LABEL: @test86(
1375 ; ALL-NEXT: [[TMP1:%.*]] = ashr i16 [[V:%.*]], 4
1376 ; ALL-NEXT: ret i16 [[TMP1]]
1378 %a = sext i16 %v to i32
1380 %t = trunc i32 %s to i16
1384 define i16 @test87(i16 %v) {
1385 ; ALL-LABEL: @test87(
1386 ; ALL-NEXT: [[TMP1:%.*]] = ashr i16 [[V:%.*]], 12
1387 ; ALL-NEXT: ret i16 [[TMP1]]
1389 %c = sext i16 %v to i32
1390 %m = mul nsw i32 %c, 16
1391 %a = ashr i32 %m, 16
1392 %t = trunc i32 %a to i16
1396 define i16 @test88(i16 %v) {
1397 ; ALL-LABEL: @test88(
1398 ; ALL-NEXT: [[TMP1:%.*]] = ashr i16 [[V:%.*]], 15
1399 ; ALL-NEXT: ret i16 [[TMP1]]
1401 %a = sext i16 %v to i32
1402 %s = ashr i32 %a, 18
1403 %t = trunc i32 %s to i16
1407 define i32 @PR21388(i32* %v) {
1408 ; ALL-LABEL: @PR21388(
1409 ; ALL-NEXT: [[ICMP:%.*]] = icmp slt i32* [[V:%.*]], null
1410 ; ALL-NEXT: [[SEXT:%.*]] = sext i1 [[ICMP]] to i32
1411 ; ALL-NEXT: ret i32 [[SEXT]]
1413 %icmp = icmp slt i32* %v, null
1414 %sext = sext i1 %icmp to i32
1418 define float @sitofp_zext(i16 %a) {
1419 ; ALL-LABEL: @sitofp_zext(
1420 ; ALL-NEXT: [[SITOFP:%.*]] = uitofp i16 [[A:%.*]] to float
1421 ; ALL-NEXT: ret float [[SITOFP]]
1423 %zext = zext i16 %a to i32
1424 %sitofp = sitofp i32 %zext to float
1428 define i1 @PR23309(i32 %A, i32 %B) {
1429 ; ALL-LABEL: @PR23309(
1430 ; ALL-NEXT: [[SUB:%.*]] = sub i32 [[A:%.*]], [[B:%.*]]
1431 ; ALL-NEXT: [[TMP1:%.*]] = and i32 [[SUB]], 1
1432 ; ALL-NEXT: [[TRUNC:%.*]] = icmp ne i32 [[TMP1]], 0
1433 ; ALL-NEXT: ret i1 [[TRUNC]]
1435 %add = add i32 %A, -4
1436 %sub = sub nsw i32 %add, %B
1437 %trunc = trunc i32 %sub to i1
1441 define i1 @PR23309v2(i32 %A, i32 %B) {
1442 ; ALL-LABEL: @PR23309v2(
1443 ; ALL-NEXT: [[SUB:%.*]] = add i32 [[A:%.*]], [[B:%.*]]
1444 ; ALL-NEXT: [[TMP1:%.*]] = and i32 [[SUB]], 1
1445 ; ALL-NEXT: [[TRUNC:%.*]] = icmp ne i32 [[TMP1]], 0
1446 ; ALL-NEXT: ret i1 [[TRUNC]]
1448 %add = add i32 %A, -4
1449 %sub = add nuw i32 %add, %B
1450 %trunc = trunc i32 %sub to i1
1454 define i16 @PR24763(i8 %V) {
1455 ; ALL-LABEL: @PR24763(
1456 ; ALL-NEXT: [[TMP1:%.*]] = ashr i8 [[V:%.*]], 1
1457 ; ALL-NEXT: [[T:%.*]] = sext i8 [[TMP1]] to i16
1458 ; ALL-NEXT: ret i16 [[T]]
1460 %conv = sext i8 %V to i32
1461 %l = lshr i32 %conv, 1
1462 %t = trunc i32 %l to i16
1466 define i64 @PR28745() {
1467 ; BE-LABEL: @PR28745(
1468 ; BE-NEXT: ret i64 1
1470 ; LE-LABEL: @PR28745(
1471 ; LE-NEXT: ret i64 0
1473 %b = zext i32 extractvalue ({ i32 } select (i1 icmp eq (i16 extractelement (<2 x i16> bitcast (<1 x i32> <i32 1> to <2 x i16>), i32 0), i16 0), { i32 } { i32 1 }, { i32 } zeroinitializer), 0) to i64
1477 define i32 @test89() {
1478 ; BE-LABEL: @test89(
1479 ; BE-NEXT: ret i32 393216
1481 ; LE-LABEL: @test89(
1482 ; LE-NEXT: ret i32 6
1484 ret i32 bitcast (<2 x i16> <i16 6, i16 undef> to i32)
1487 define <2 x i32> @test90() {
1488 ; BE-LABEL: @test90(
1489 ; BE-NEXT: ret <2 x i32> <i32 0, i32 15360>
1491 ; LE-LABEL: @test90(
1492 ; LE-NEXT: ret <2 x i32> <i32 0, i32 1006632960>
1494 %t6 = bitcast <4 x half> <half undef, half undef, half undef, half 0xH3C00> to <2 x i32>
1498 ; Do not optimize to ashr i64 (shift by 48 > 96 - 64)
1499 define i64 @test91(i64 %A) {
1500 ; ALL-LABEL: @test91(
1501 ; ALL-NEXT: [[B:%.*]] = sext i64 [[A:%.*]] to i96
1502 ; ALL-NEXT: [[C:%.*]] = lshr i96 [[B]], 48
1503 ; ALL-NEXT: [[D:%.*]] = trunc i96 [[C]] to i64
1504 ; ALL-NEXT: ret i64 [[D]]
1506 %B = sext i64 %A to i96
1507 %C = lshr i96 %B, 48
1508 %D = trunc i96 %C to i64
1512 ; Do optimize to ashr i64 (shift by 32 <= 96 - 64)
1513 define i64 @test92(i64 %A) {
1514 ; ALL-LABEL: @test92(
1515 ; ALL-NEXT: [[TMP1:%.*]] = ashr i64 [[A:%.*]], 32
1516 ; ALL-NEXT: ret i64 [[TMP1]]
1518 %B = sext i64 %A to i96
1519 %C = lshr i96 %B, 32
1520 %D = trunc i96 %C to i64
1524 ; When optimizing to ashr i32, don't shift by more than 31.
1525 define i32 @test93(i32 %A) {
1526 ; ALL-LABEL: @test93(
1527 ; ALL-NEXT: [[TMP1:%.*]] = ashr i32 [[A:%.*]], 31
1528 ; ALL-NEXT: ret i32 [[TMP1]]
1530 %B = sext i32 %A to i96
1531 %C = lshr i96 %B, 64
1532 %D = trunc i96 %C to i32
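; Roughly: for trunc(lshr(sext iM %A to iN), C) back to iM, the fold to an
; ashr on iM is only done when C <= N - M, i.e. when every bit shifted in is
; still a copy of the sign bit, and the shift amount is clamped to M - 1:
;   test91: C = 48 >  96 - 64, so the cast sequence is kept.
;   test92: C = 32 <= 96 - 64, so it becomes ashr i64 %A, 32.
;   test93: C = 64 <= 96 - 32, but 64 > 31, so it is clamped to ashr i32 %A, 31.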
1536 define i8 @trunc_lshr_sext(i8 %A) {
1537 ; ALL-LABEL: @trunc_lshr_sext(
1538 ; ALL-NEXT: [[D:%.*]] = ashr i8 [[A:%.*]], 6
1539 ; ALL-NEXT: ret i8 [[D]]
1541 %B = sext i8 %A to i32
1543 %D = trunc i32 %C to i8
1547 define i8 @trunc_lshr_sext_exact(i8 %A) {
1548 ; ALL-LABEL: @trunc_lshr_sext_exact(
1549 ; ALL-NEXT: [[D:%.*]] = ashr exact i8 [[A:%.*]], 6
1550 ; ALL-NEXT: ret i8 [[D]]
1552 %B = sext i8 %A to i32
1553 %C = lshr exact i32 %B, 6
1554 %D = trunc i32 %C to i8
1558 define <2 x i8> @trunc_lshr_sext_uniform(<2 x i8> %A) {
1559 ; ALL-LABEL: @trunc_lshr_sext_uniform(
1560 ; ALL-NEXT: [[D:%.*]] = ashr <2 x i8> [[A:%.*]], <i8 6, i8 6>
1561 ; ALL-NEXT: ret <2 x i8> [[D]]
1563 %B = sext <2 x i8> %A to <2 x i32>
1564 %C = lshr <2 x i32> %B, <i32 6, i32 6>
1565 %D = trunc <2 x i32> %C to <2 x i8>
1569 define <2 x i8> @trunc_lshr_sext_uniform_undef(<2 x i8> %A) {
1570 ; ALL-LABEL: @trunc_lshr_sext_uniform_undef(
1571 ; ALL-NEXT: [[D:%.*]] = ashr <2 x i8> [[A:%.*]], <i8 6, i8 undef>
1572 ; ALL-NEXT: ret <2 x i8> [[D]]
1574 %B = sext <2 x i8> %A to <2 x i32>
1575 %C = lshr <2 x i32> %B, <i32 6, i32 undef>
1576 %D = trunc <2 x i32> %C to <2 x i8>
1580 define <2 x i8> @trunc_lshr_sext_nonuniform(<2 x i8> %A) {
1581 ; ALL-LABEL: @trunc_lshr_sext_nonuniform(
1582 ; ALL-NEXT: [[D:%.*]] = ashr <2 x i8> [[A:%.*]], <i8 6, i8 2>
1583 ; ALL-NEXT: ret <2 x i8> [[D]]
1585 %B = sext <2 x i8> %A to <2 x i32>
1586 %C = lshr <2 x i32> %B, <i32 6, i32 2>
1587 %D = trunc <2 x i32> %C to <2 x i8>
1591 define <3 x i8> @trunc_lshr_sext_nonuniform_undef(<3 x i8> %A) {
1592 ; ALL-LABEL: @trunc_lshr_sext_nonuniform_undef(
1593 ; ALL-NEXT: [[D:%.*]] = ashr <3 x i8> [[A:%.*]], <i8 6, i8 2, i8 undef>
1594 ; ALL-NEXT: ret <3 x i8> [[D]]
1596 %B = sext <3 x i8> %A to <3 x i32>
1597 %C = lshr <3 x i32> %B, <i32 6, i32 2, i32 undef>
1598 %D = trunc <3 x i32> %C to <3 x i8>
1602 define <2 x i8> @trunc_lshr_sext_uses1(<2 x i8> %A) {
1603 ; ALL-LABEL: @trunc_lshr_sext_uses1(
1604 ; ALL-NEXT: [[B:%.*]] = sext <2 x i8> [[A:%.*]] to <2 x i32>
1605 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[B]])
1606 ; ALL-NEXT: [[D:%.*]] = ashr <2 x i8> [[A]], <i8 6, i8 6>
1607 ; ALL-NEXT: ret <2 x i8> [[D]]
1609 %B = sext <2 x i8> %A to <2 x i32>
1610 call void @use_v2i32(<2 x i32> %B)
1611 %C = lshr <2 x i32> %B, <i32 6, i32 6>
1612 %D = trunc <2 x i32> %C to <2 x i8>
1616 define i8 @trunc_lshr_sext_uses2(i8 %A) {
1617 ; ALL-LABEL: @trunc_lshr_sext_uses2(
1618 ; ALL-NEXT: [[B:%.*]] = sext i8 [[A:%.*]] to i32
1619 ; ALL-NEXT: [[C:%.*]] = lshr i32 [[B]], 6
1620 ; ALL-NEXT: call void @use_i32(i32 [[C]])
1621 ; ALL-NEXT: [[D:%.*]] = ashr i8 [[A]], 6
1622 ; ALL-NEXT: ret i8 [[D]]
1624 %B = sext i8 %A to i32
1626 call void @use_i32(i32 %C)
1627 %D = trunc i32 %C to i8
1631 define <2 x i8> @trunc_lshr_sext_uses3(<2 x i8> %A) {
1632 ; ALL-LABEL: @trunc_lshr_sext_uses3(
1633 ; ALL-NEXT: [[B:%.*]] = sext <2 x i8> [[A:%.*]] to <2 x i32>
1634 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[B]])
1635 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i32> [[B]], <i32 6, i32 6>
1636 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[C]])
1637 ; ALL-NEXT: [[D:%.*]] = ashr <2 x i8> [[A]], <i8 6, i8 6>
1638 ; ALL-NEXT: ret <2 x i8> [[D]]
1640 %B = sext <2 x i8> %A to <2 x i32>
1641 call void @use_v2i32(<2 x i32> %B)
1642 %C = lshr <2 x i32> %B, <i32 6, i32 6>
1643 call void @use_v2i32(<2 x i32> %C)
1644 %D = trunc <2 x i32> %C to <2 x i8>
1648 define <2 x i8> @trunc_lshr_overshift_sext(<2 x i8> %A) {
1649 ; ALL-LABEL: @trunc_lshr_overshift_sext(
1650 ; ALL-NEXT: [[D:%.*]] = ashr <2 x i8> [[A:%.*]], <i8 7, i8 7>
1651 ; ALL-NEXT: ret <2 x i8> [[D]]
1653 %B = sext <2 x i8> %A to <2 x i32>
1654 %C = lshr <2 x i32> %B, <i32 8, i32 8>
1655 %D = trunc <2 x i32> %C to <2 x i8>
1659 define i8 @trunc_lshr_overshift_sext_uses1(i8 %A) {
1660 ; ALL-LABEL: @trunc_lshr_overshift_sext_uses1(
1661 ; ALL-NEXT: [[B:%.*]] = sext i8 [[A:%.*]] to i32
1662 ; ALL-NEXT: call void @use_i32(i32 [[B]])
1663 ; ALL-NEXT: [[D:%.*]] = ashr i8 [[A]], 7
1664 ; ALL-NEXT: ret i8 [[D]]
1666 %B = sext i8 %A to i32
1667 call void @use_i32(i32 %B)
1669 %D = trunc i32 %C to i8
1673 define <2 x i8> @trunc_lshr_overshift_sext_uses2(<2 x i8> %A) {
1674 ; ALL-LABEL: @trunc_lshr_overshift_sext_uses2(
1675 ; ALL-NEXT: [[B:%.*]] = sext <2 x i8> [[A:%.*]] to <2 x i32>
1676 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i32> [[B]], <i32 8, i32 8>
1677 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[C]])
1678 ; ALL-NEXT: [[D:%.*]] = ashr <2 x i8> [[A]], <i8 7, i8 7>
1679 ; ALL-NEXT: ret <2 x i8> [[D]]
1681 %B = sext <2 x i8> %A to <2 x i32>
1682 %C = lshr <2 x i32> %B, <i32 8, i32 8>
1683 call void @use_v2i32(<2 x i32> %C)
1684 %D = trunc <2 x i32> %C to <2 x i8>
1688 define i8 @trunc_lshr_overshift_sext_uses3(i8 %A) {
1689 ; ALL-LABEL: @trunc_lshr_overshift_sext_uses3(
1690 ; ALL-NEXT: [[B:%.*]] = sext i8 [[A:%.*]] to i32
1691 ; ALL-NEXT: call void @use_i32(i32 [[B]])
1692 ; ALL-NEXT: [[C:%.*]] = lshr i32 [[B]], 8
1693 ; ALL-NEXT: call void @use_i32(i32 [[C]])
1694 ; ALL-NEXT: [[D:%.*]] = ashr i8 [[A]], 7
1695 ; ALL-NEXT: ret i8 [[D]]
1697 %B = sext i8 %A to i32
1698 call void @use_i32(i32 %B)
1700 call void @use_i32(i32 %C)
1701 %D = trunc i32 %C to i8
1705 define i8 @trunc_lshr_sext_wide_input(i16 %A) {
1706 ; ALL-LABEL: @trunc_lshr_sext_wide_input(
1707 ; ALL-NEXT: [[TMP1:%.*]] = ashr i16 [[A:%.*]], 9
1708 ; ALL-NEXT: [[D:%.*]] = trunc i16 [[TMP1]] to i8
1709 ; ALL-NEXT: ret i8 [[D]]
1711 %B = sext i16 %A to i32
1713 %D = trunc i32 %C to i8
1717 define i8 @trunc_lshr_sext_wide_input_exact(i16 %A) {
1718 ; ALL-LABEL: @trunc_lshr_sext_wide_input_exact(
1719 ; ALL-NEXT: [[TMP1:%.*]] = ashr exact i16 [[A:%.*]], 9
1720 ; ALL-NEXT: [[D:%.*]] = trunc i16 [[TMP1]] to i8
1721 ; ALL-NEXT: ret i8 [[D]]
1723 %B = sext i16 %A to i32
1724 %C = lshr exact i32 %B, 9
1725 %D = trunc i32 %C to i8
1729 define <2 x i8> @trunc_lshr_sext_wide_input_uses1(<2 x i16> %A) {
1730 ; ALL-LABEL: @trunc_lshr_sext_wide_input_uses1(
1731 ; ALL-NEXT: [[B:%.*]] = sext <2 x i16> [[A:%.*]] to <2 x i32>
1732 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[B]])
1733 ; ALL-NEXT: [[TMP1:%.*]] = ashr <2 x i16> [[A]], <i16 9, i16 9>
1734 ; ALL-NEXT: [[D:%.*]] = trunc <2 x i16> [[TMP1]] to <2 x i8>
1735 ; ALL-NEXT: ret <2 x i8> [[D]]
1737 %B = sext <2 x i16> %A to <2 x i32>
1738 call void @use_v2i32(<2 x i32> %B)
1739 %C = lshr <2 x i32> %B, <i32 9, i32 9>
1740 %D = trunc <2 x i32> %C to <2 x i8>
1744 define i8 @trunc_lshr_sext_wide_input_uses2(i16 %A) {
1745 ; ALL-LABEL: @trunc_lshr_sext_wide_input_uses2(
1746 ; ALL-NEXT: [[B:%.*]] = sext i16 [[A:%.*]] to i32
1747 ; ALL-NEXT: [[C:%.*]] = lshr i32 [[B]], 9
1748 ; ALL-NEXT: call void @use_i32(i32 [[C]])
1749 ; ALL-NEXT: [[D:%.*]] = trunc i32 [[C]] to i8
1750 ; ALL-NEXT: ret i8 [[D]]
1752 %B = sext i16 %A to i32
1754 call void @use_i32(i32 %C)
1755 %D = trunc i32 %C to i8
1759 define <2 x i8> @trunc_lshr_sext_wide_input_uses3(<2 x i16> %A) {
1760 ; ALL-LABEL: @trunc_lshr_sext_wide_input_uses3(
1761 ; ALL-NEXT: [[B:%.*]] = sext <2 x i16> [[A:%.*]] to <2 x i32>
1762 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[B]])
1763 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i32> [[B]], <i32 9, i32 9>
1764 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[C]])
1765 ; ALL-NEXT: [[D:%.*]] = trunc <2 x i32> [[C]] to <2 x i8>
1766 ; ALL-NEXT: ret <2 x i8> [[D]]
1768 %B = sext <2 x i16> %A to <2 x i32>
1769 call void @use_v2i32(<2 x i32> %B)
1770 %C = lshr <2 x i32> %B, <i32 9, i32 9>
1771 call void @use_v2i32(<2 x i32> %C)
1772 %D = trunc <2 x i32> %C to <2 x i8>
1776 define <2 x i8> @trunc_lshr_overshift_wide_input_sext(<2 x i16> %A) {
1777 ; ALL-LABEL: @trunc_lshr_overshift_wide_input_sext(
1778 ; ALL-NEXT: [[TMP1:%.*]] = ashr <2 x i16> [[A:%.*]], <i16 15, i16 15>
1779 ; ALL-NEXT: [[D:%.*]] = trunc <2 x i16> [[TMP1]] to <2 x i8>
1780 ; ALL-NEXT: ret <2 x i8> [[D]]
1782 %B = sext <2 x i16> %A to <2 x i32>
1783 %C = lshr <2 x i32> %B, <i32 16, i32 16>
1784 %D = trunc <2 x i32> %C to <2 x i8>
1788 define i8 @trunc_lshr_overshift_sext_wide_input_uses1(i16 %A) {
1789 ; ALL-LABEL: @trunc_lshr_overshift_sext_wide_input_uses1(
1790 ; ALL-NEXT: [[B:%.*]] = sext i16 [[A:%.*]] to i32
1791 ; ALL-NEXT: call void @use_i32(i32 [[B]])
1792 ; ALL-NEXT: [[TMP1:%.*]] = ashr i16 [[A]], 15
1793 ; ALL-NEXT: [[D:%.*]] = trunc i16 [[TMP1]] to i8
1794 ; ALL-NEXT: ret i8 [[D]]
1796 %B = sext i16 %A to i32
1797 call void @use_i32(i32 %B)
1798 %C = lshr i32 %B, 16
1799 %D = trunc i32 %C to i8
1803 define <2 x i8> @trunc_lshr_overshift_sext_wide_input_uses2(<2 x i16> %A) {
1804 ; ALL-LABEL: @trunc_lshr_overshift_sext_wide_input_uses2(
1805 ; ALL-NEXT: [[TMP1:%.*]] = ashr <2 x i16> [[A:%.*]], <i16 15, i16 15>
1806 ; ALL-NEXT: [[C:%.*]] = zext <2 x i16> [[TMP1]] to <2 x i32>
1807 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[C]])
1808 ; ALL-NEXT: [[D:%.*]] = trunc <2 x i16> [[TMP1]] to <2 x i8>
1809 ; ALL-NEXT: ret <2 x i8> [[D]]
1811 %B = sext <2 x i16> %A to <2 x i32>
1812 %C = lshr <2 x i32> %B, <i32 16, i32 16>
1813 call void @use_v2i32(<2 x i32> %C)
1814 %D = trunc <2 x i32> %C to <2 x i8>
1818 define i8 @trunc_lshr_overshift_sext_wide_input_uses3(i16 %A) {
1819 ; ALL-LABEL: @trunc_lshr_overshift_sext_wide_input_uses3(
1820 ; ALL-NEXT: [[B:%.*]] = sext i16 [[A:%.*]] to i32
1821 ; ALL-NEXT: call void @use_i32(i32 [[B]])
1822 ; ALL-NEXT: [[C:%.*]] = lshr i32 [[B]], 16
1823 ; ALL-NEXT: call void @use_i32(i32 [[C]])
1824 ; ALL-NEXT: [[D:%.*]] = trunc i32 [[C]] to i8
1825 ; ALL-NEXT: ret i8 [[D]]
1827 %B = sext i16 %A to i32
1828 call void @use_i32(i32 %B)
1829 %C = lshr i32 %B, 16
1830 call void @use_i32(i32 %C)
1831 %D = trunc i32 %C to i8
1835 define i16 @trunc_lshr_sext_narrow_input(i8 %A) {
1836 ; ALL-LABEL: @trunc_lshr_sext_narrow_input(
1837 ; ALL-NEXT: [[TMP1:%.*]] = ashr i8 [[A:%.*]], 6
1838 ; ALL-NEXT: [[D:%.*]] = sext i8 [[TMP1]] to i16
1839 ; ALL-NEXT: ret i16 [[D]]
1841 %B = sext i8 %A to i32
1843 %D = trunc i32 %C to i16
1847 define <2 x i16> @trunc_lshr_sext_narrow_input_uses1(<2 x i8> %A) {
1848 ; ALL-LABEL: @trunc_lshr_sext_narrow_input_uses1(
1849 ; ALL-NEXT: [[B:%.*]] = sext <2 x i8> [[A:%.*]] to <2 x i32>
1850 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[B]])
1851 ; ALL-NEXT: [[TMP1:%.*]] = ashr <2 x i8> [[A]], <i8 6, i8 6>
1852 ; ALL-NEXT: [[D:%.*]] = sext <2 x i8> [[TMP1]] to <2 x i16>
1853 ; ALL-NEXT: ret <2 x i16> [[D]]
1855 %B = sext <2 x i8> %A to <2 x i32>
1856 call void @use_v2i32(<2 x i32> %B)
1857 %C = lshr <2 x i32> %B, <i32 6, i32 6>
1858 %D = trunc <2 x i32> %C to <2 x i16>
1862 define i16 @trunc_lshr_sext_narrow_input_uses2(i8 %A) {
1863 ; ALL-LABEL: @trunc_lshr_sext_narrow_input_uses2(
1864 ; ALL-NEXT: [[B:%.*]] = sext i8 [[A:%.*]] to i32
1865 ; ALL-NEXT: [[C:%.*]] = lshr i32 [[B]], 6
1866 ; ALL-NEXT: call void @use_i32(i32 [[C]])
1867 ; ALL-NEXT: [[D:%.*]] = trunc i32 [[C]] to i16
1868 ; ALL-NEXT: ret i16 [[D]]
1870 %B = sext i8 %A to i32
1872 call void @use_i32(i32 %C)
1873 %D = trunc i32 %C to i16
1877 define <2 x i16> @trunc_lshr_sext_narrow_input_uses3(<2 x i8> %A) {
1878 ; ALL-LABEL: @trunc_lshr_sext_narrow_input_uses3(
1879 ; ALL-NEXT: [[B:%.*]] = sext <2 x i8> [[A:%.*]] to <2 x i32>
1880 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[B]])
1881 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i32> [[B]], <i32 6, i32 6>
1882 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[C]])
1883 ; ALL-NEXT: [[D:%.*]] = trunc <2 x i32> [[C]] to <2 x i16>
1884 ; ALL-NEXT: ret <2 x i16> [[D]]
1886 %B = sext <2 x i8> %A to <2 x i32>
1887 call void @use_v2i32(<2 x i32> %B)
1888 %C = lshr <2 x i32> %B, <i32 6, i32 6>
1889 call void @use_v2i32(<2 x i32> %C)
1890 %D = trunc <2 x i32> %C to <2 x i16>
1894 define <2 x i16> @trunc_lshr_overshift_narrow_input_sext(<2 x i8> %A) {
1895 ; ALL-LABEL: @trunc_lshr_overshift_narrow_input_sext(
1896 ; ALL-NEXT: [[TMP1:%.*]] = ashr <2 x i8> [[A:%.*]], <i8 7, i8 7>
1897 ; ALL-NEXT: [[D:%.*]] = sext <2 x i8> [[TMP1]] to <2 x i16>
1898 ; ALL-NEXT: ret <2 x i16> [[D]]
1900 %B = sext <2 x i8> %A to <2 x i32>
1901 %C = lshr <2 x i32> %B, <i32 8, i32 8>
1902 %D = trunc <2 x i32> %C to <2 x i16>
1906 define i16 @trunc_lshr_overshift_sext_narrow_input_uses1(i8 %A) {
1907 ; ALL-LABEL: @trunc_lshr_overshift_sext_narrow_input_uses1(
1908 ; ALL-NEXT: [[B:%.*]] = sext i8 [[A:%.*]] to i32
1909 ; ALL-NEXT: call void @use_i32(i32 [[B]])
1910 ; ALL-NEXT: [[TMP1:%.*]] = ashr i8 [[A]], 7
1911 ; ALL-NEXT: [[D:%.*]] = sext i8 [[TMP1]] to i16
1912 ; ALL-NEXT: ret i16 [[D]]
1914 %B = sext i8 %A to i32
1915 call void @use_i32(i32 %B)
1917 %D = trunc i32 %C to i16
1921 define <2 x i16> @trunc_lshr_overshift_sext_narrow_input_uses2(<2 x i8> %A) {
1922 ; ALL-LABEL: @trunc_lshr_overshift_sext_narrow_input_uses2(
1923 ; ALL-NEXT: [[B:%.*]] = sext <2 x i8> [[A:%.*]] to <2 x i32>
1924 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i32> [[B]], <i32 8, i32 8>
1925 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[C]])
1926 ; ALL-NEXT: [[D:%.*]] = trunc <2 x i32> [[C]] to <2 x i16>
1927 ; ALL-NEXT: ret <2 x i16> [[D]]
1929 %B = sext <2 x i8> %A to <2 x i32>
1930 %C = lshr <2 x i32> %B, <i32 8, i32 8>
1931 call void @use_v2i32(<2 x i32> %C)
1932 %D = trunc <2 x i32> %C to <2 x i16>
1936 define i16 @trunc_lshr_overshift_sext_narrow_input_uses3(i8 %A) {
1937 ; ALL-LABEL: @trunc_lshr_overshift_sext_narrow_input_uses3(
1938 ; ALL-NEXT: [[B:%.*]] = sext i8 [[A:%.*]] to i32
1939 ; ALL-NEXT: call void @use_i32(i32 [[B]])
1940 ; ALL-NEXT: [[C:%.*]] = lshr i32 [[B]], 8
1941 ; ALL-NEXT: call void @use_i32(i32 [[C]])
1942 ; ALL-NEXT: [[D:%.*]] = trunc i32 [[C]] to i16
1943 ; ALL-NEXT: ret i16 [[D]]
1945 %B = sext i8 %A to i32
1946 call void @use_i32(i32 %B)
1948 call void @use_i32(i32 %C)
1949 %D = trunc i32 %C to i16
1953 define <2 x i8> @trunc_lshr_overshift2_sext(<2 x i8> %A) {
1954 ; ALL-LABEL: @trunc_lshr_overshift2_sext(
1955 ; ALL-NEXT: [[B:%.*]] = sext <2 x i8> [[A:%.*]] to <2 x i32>
1956 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i32> [[B]], <i32 25, i32 25>
1957 ; ALL-NEXT: [[D:%.*]] = trunc <2 x i32> [[C]] to <2 x i8>
1958 ; ALL-NEXT: ret <2 x i8> [[D]]
1960 %B = sext <2 x i8> %A to <2 x i32>
1961 %C = lshr <2 x i32> %B, <i32 25, i32 25>
1962 %D = trunc <2 x i32> %C to <2 x i8>
1966 define i8 @trunc_lshr_overshift2_sext_uses1(i8 %A) {
1967 ; ALL-LABEL: @trunc_lshr_overshift2_sext_uses1(
1968 ; ALL-NEXT: [[B:%.*]] = sext i8 [[A:%.*]] to i32
1969 ; ALL-NEXT: call void @use_i32(i32 [[B]])
1970 ; ALL-NEXT: [[C:%.*]] = lshr i32 [[B]], 25
1971 ; ALL-NEXT: [[D:%.*]] = trunc i32 [[C]] to i8
1972 ; ALL-NEXT: ret i8 [[D]]
1974 %B = sext i8 %A to i32
1975 call void @use_i32(i32 %B)
1976 %C = lshr i32 %B, 25
1977 %D = trunc i32 %C to i8
1981 define <2 x i8> @trunc_lshr_overshift2_sext_uses2(<2 x i8> %A) {
1982 ; ALL-LABEL: @trunc_lshr_overshift2_sext_uses2(
1983 ; ALL-NEXT: [[B:%.*]] = sext <2 x i8> [[A:%.*]] to <2 x i32>
1984 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i32> [[B]], <i32 25, i32 25>
1985 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[C]])
1986 ; ALL-NEXT: [[D:%.*]] = trunc <2 x i32> [[C]] to <2 x i8>
1987 ; ALL-NEXT: ret <2 x i8> [[D]]
1989 %B = sext <2 x i8> %A to <2 x i32>
1990 %C = lshr <2 x i32> %B, <i32 25, i32 25>
1991 call void @use_v2i32(<2 x i32> %C)
1992 %D = trunc <2 x i32> %C to <2 x i8>
1996 define i8 @trunc_lshr_overshift2_sext_uses3(i8 %A) {
1997 ; ALL-LABEL: @trunc_lshr_overshift2_sext_uses3(
1998 ; ALL-NEXT: [[B:%.*]] = sext i8 [[A:%.*]] to i32
1999 ; ALL-NEXT: call void @use_i32(i32 [[B]])
2000 ; ALL-NEXT: [[C:%.*]] = lshr i32 [[B]], 25
2001 ; ALL-NEXT: call void @use_i32(i32 [[C]])
2002 ; ALL-NEXT: [[D:%.*]] = trunc i32 [[C]] to i8
2003 ; ALL-NEXT: ret i8 [[D]]
2005 %B = sext i8 %A to i32
2006 call void @use_i32(i32 %B)
2007 %C = lshr i32 %B, 25
2008 call void @use_i32(i32 %C)
2009 %D = trunc i32 %C to i8
2013 define i8 @trunc_lshr_zext(i8 %A) {
2014 ; ALL-LABEL: @trunc_lshr_zext(
2015 ; ALL-NEXT: [[TMP1:%.*]] = lshr i8 [[A:%.*]], 6
2016 ; ALL-NEXT: ret i8 [[TMP1]]
2018 %B = zext i8 %A to i32
2020 %D = trunc i32 %C to i8
2024 define i8 @trunc_lshr_zext_exact(i8 %A) {
2025 ; ALL-LABEL: @trunc_lshr_zext_exact(
2026 ; ALL-NEXT: [[TMP1:%.*]] = lshr i8 [[A:%.*]], 6
2027 ; ALL-NEXT: ret i8 [[TMP1]]
2029 %B = zext i8 %A to i32
2030 %C = lshr exact i32 %B, 6
2031 %D = trunc i32 %C to i8
2035 define <2 x i8> @trunc_lshr_zext_uniform(<2 x i8> %A) {
2036 ; ALL-LABEL: @trunc_lshr_zext_uniform(
2037 ; ALL-NEXT: [[TMP1:%.*]] = lshr <2 x i8> [[A:%.*]], <i8 6, i8 6>
2038 ; ALL-NEXT: ret <2 x i8> [[TMP1]]
2040 %B = zext <2 x i8> %A to <2 x i32>
2041 %C = lshr <2 x i32> %B, <i32 6, i32 6>
2042 %D = trunc <2 x i32> %C to <2 x i8>
2046 define <2 x i8> @trunc_lshr_zext_uniform_undef(<2 x i8> %A) {
2047 ; ALL-LABEL: @trunc_lshr_zext_uniform_undef(
2048 ; ALL-NEXT: [[B:%.*]] = zext <2 x i8> [[A:%.*]] to <2 x i32>
2049 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i32> [[B]], <i32 6, i32 undef>
2050 ; ALL-NEXT: [[D:%.*]] = trunc <2 x i32> [[C]] to <2 x i8>
2051 ; ALL-NEXT: ret <2 x i8> [[D]]
2053 %B = zext <2 x i8> %A to <2 x i32>
2054 %C = lshr <2 x i32> %B, <i32 6, i32 undef>
2055 %D = trunc <2 x i32> %C to <2 x i8>
2059 define <2 x i8> @trunc_lshr_zext_nonuniform(<2 x i8> %A) {
2060 ; ALL-LABEL: @trunc_lshr_zext_nonuniform(
2061 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i8> [[A:%.*]], <i8 6, i8 2>
2062 ; ALL-NEXT: ret <2 x i8> [[C]]
2064 %B = zext <2 x i8> %A to <2 x i32>
2065 %C = lshr <2 x i32> %B, <i32 6, i32 2>
2066 %D = trunc <2 x i32> %C to <2 x i8>
2070 define <3 x i8> @trunc_lshr_zext_nonuniform_undef(<3 x i8> %A) {
2071 ; ALL-LABEL: @trunc_lshr_zext_nonuniform_undef(
2072 ; ALL-NEXT: [[B:%.*]] = zext <3 x i8> [[A:%.*]] to <3 x i32>
2073 ; ALL-NEXT: [[C:%.*]] = lshr <3 x i32> [[B]], <i32 6, i32 2, i32 undef>
2074 ; ALL-NEXT: [[D:%.*]] = trunc <3 x i32> [[C]] to <3 x i8>
2075 ; ALL-NEXT: ret <3 x i8> [[D]]
2077 %B = zext <3 x i8> %A to <3 x i32>
2078 %C = lshr <3 x i32> %B, <i32 6, i32 2, i32 undef>
2079 %D = trunc <3 x i32> %C to <3 x i8>
2083 define <2 x i8> @trunc_lshr_zext_uses1(<2 x i8> %A) {
2084 ; ALL-LABEL: @trunc_lshr_zext_uses1(
2085 ; ALL-NEXT: [[B:%.*]] = zext <2 x i8> [[A:%.*]] to <2 x i32>
2086 ; ALL-NEXT: call void @use_v2i32(<2 x i32> [[B]])
2087 ; ALL-NEXT: [[C:%.*]] = lshr <2 x i8> [[A]], <i8 6, i8 6>
2088 ; ALL-NEXT: ret <2 x i8> [[C]]
2090 %B = zext <2 x i8> %A to <2 x i32>
2091 call void @use_v2i32(<2 x i32> %B)
2092 %C = lshr <2 x i32> %B, <i32 6, i32 6>
2093 %D = trunc <2 x i32> %C to <2 x i8>
2097 ; The following four tests exercise sext + lshr + trunc patterns.
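; They check when trunc(lshr(sext X)) can be rewritten as an ashr on the
; original source type (possibly clamped, possibly re-extended) and when the
; original cast sequence must stay.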
2100 define i8 @pr33078_1(i8 %A) {
2101 ; ALL-LABEL: @pr33078_1(
2102 ; ALL-NEXT: [[TMP1:%.*]] = ashr i8 [[A:%.*]], 7
2103 ; ALL-NEXT: ret i8 [[TMP1]]
2105 %B = sext i8 %A to i16
2107 %D = trunc i16 %C to i8
2111 define i12 @pr33078_2(i8 %A) {
2112 ; ALL-LABEL: @pr33078_2(
2113 ; ALL-NEXT: [[TMP1:%.*]] = ashr i8 [[A:%.*]], 4
2114 ; ALL-NEXT: [[D:%.*]] = sext i8 [[TMP1]] to i12
2115 ; ALL-NEXT: ret i12 [[D]]
2117 %B = sext i8 %A to i16
2119 %D = trunc i16 %C to i12
2123 define i4 @pr33078_3(i8 %A) {
2124 ; ALL-LABEL: @pr33078_3(
2125 ; ALL-NEXT: [[B:%.*]] = sext i8 [[A:%.*]] to i16
2126 ; ALL-NEXT: [[C:%.*]] = lshr i16 [[B]], 12
2127 ; ALL-NEXT: [[D:%.*]] = trunc i16 [[C]] to i4
2128 ; ALL-NEXT: ret i4 [[D]]
2130 %B = sext i8 %A to i16
2131 %C = lshr i16 %B, 12
2132 %D = trunc i16 %C to i4
2136 define i8 @pr33078_4(i3 %x) {
2137 ; Don't turn this in an `ashr`. This was getting miscompiled
2138 ; ALL-LABEL: @pr33078_4(
2139 ; ALL-NEXT: [[B:%.*]] = sext i3 [[X:%.*]] to i16
2140 ; ALL-NEXT: [[C:%.*]] = lshr i16 [[B]], 13
2141 ; ALL-NEXT: [[D:%.*]] = trunc i16 [[C]] to i8
2142 ; ALL-NEXT: ret i8 [[D]]
2144 %B = sext i3 %x to i16
2145 %C = lshr i16 %B, 13
2146 %D = trunc i16 %C to i8
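; In @pr33078_4 the lshr by 13 pulls in zero bits above the three sign-bit
; copies, so the i8 result is zero-extended (0 or 7) rather than sign-extended;
; presumably that is why rewriting it with an ashr was a miscompile.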
2150 ; (sext (xor (cmp), -1)) -> (sext (!cmp))
2151 define i64 @test94(i32 %a) {
2152 ; ALL-LABEL: @test94(
2153 ; ALL-NEXT: [[TMP1:%.*]] = icmp ne i32 [[A:%.*]], -2
2154 ; ALL-NEXT: [[TMP2:%.*]] = sext i1 [[TMP1]] to i64
2155 ; ALL-NEXT: ret i64 [[TMP2]]
2157 %1 = icmp eq i32 %a, -2
2158 %2 = sext i1 %1 to i8
2160 %4 = sext i8 %3 to i64
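; Flipping all bits of a bool sign-extended to i8 (0 or -1) is the same as
; negating the compare, so the whole chain collapses to
; "sext (icmp ne i32 %a, -2) to i64", matching the rule noted above.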
2164 ; We should be able to remove the zext and trunc here.
2165 define i32 @test95(i32 %x) {
2166 ; ALL-LABEL: @test95(
2167 ; ALL-NEXT: [[TMP1:%.*]] = lshr i32 [[X:%.*]], 6
2168 ; ALL-NEXT: [[TMP2:%.*]] = and i32 [[TMP1]], 2
2169 ; ALL-NEXT: [[TMP3:%.*]] = or i32 [[TMP2]], 40
2170 ; ALL-NEXT: ret i32 [[TMP3]]
2172 %1 = trunc i32 %x to i8
2176 %5 = zext i8 %4 to i32