; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -instcombine -S | FileCheck %s

; Bitwise select idiom: ((sext cond) & c) | ((~sext cond) & d) --> select cond, c, d

define i32 @foo(i32 %a, i32 %b, i32 %c, i32 %d) {
; CHECK-LABEL: @foo(
; CHECK-NEXT:    [[E:%.*]] = icmp slt i32 %a, %b
; CHECK-NEXT:    [[J:%.*]] = select i1 [[E]], i32 %c, i32 %d
; CHECK-NEXT:    ret i32 [[J]]
;
  %e = icmp slt i32 %a, %b
  %f = sext i1 %e to i32
  %g = and i32 %c, %f
  %h = xor i32 %f, -1
  %i = and i32 %d, %h
  %j = or i32 %g, %i
  ret i32 %j
}

; Same as @foo with the 'or' operands commuted.

define i32 @bar(i32 %a, i32 %b, i32 %c, i32 %d) {
; CHECK-LABEL: @bar(
; CHECK-NEXT:    [[E:%.*]] = icmp slt i32 %a, %b
; CHECK-NEXT:    [[J:%.*]] = select i1 [[E]], i32 %c, i32 %d
; CHECK-NEXT:    ret i32 [[J]]
;
  %e = icmp slt i32 %a, %b
  %f = sext i1 %e to i32
  %g = and i32 %c, %f
  %h = xor i32 %f, -1
  %i = and i32 %d, %h
  %j = or i32 %i, %g
  ret i32 %j
}

; The mask may come from a select of -1/0 rather than a sext.

define i32 @goo(i32 %a, i32 %b, i32 %c, i32 %d) {
; CHECK-LABEL: @goo(
; CHECK-NEXT:    [[T0:%.*]] = icmp slt i32 %a, %b
; CHECK-NEXT:    [[T3:%.*]] = select i1 [[T0]], i32 %c, i32 %d
; CHECK-NEXT:    ret i32 [[T3]]
;
  %t0 = icmp slt i32 %a, %b
  %iftmp.0.0 = select i1 %t0, i32 -1, i32 0
  %t1 = and i32 %iftmp.0.0, %c
  %not = xor i32 %iftmp.0.0, -1
  %t2 = and i32 %not, %d
  %t3 = or i32 %t1, %t2
  ret i32 %t3
}

; The inverted mask may be spelled as a second select of 0/-1 instead of an xor.

define i32 @poo(i32 %a, i32 %b, i32 %c, i32 %d) {
; CHECK-LABEL: @poo(
; CHECK-NEXT:    [[T0:%.*]] = icmp slt i32 %a, %b
; CHECK-NEXT:    [[T3:%.*]] = select i1 [[T0]], i32 %c, i32 %d
; CHECK-NEXT:    ret i32 [[T3]]
;
  %t0 = icmp slt i32 %a, %b
  %iftmp.0.0 = select i1 %t0, i32 -1, i32 0
  %t1 = and i32 %iftmp.0.0, %c
  %iftmp = select i1 %t0, i32 0, i32 -1
  %t2 = and i32 %iftmp, %d
  %t3 = or i32 %t1, %t2
  ret i32 %t3
}

define i32 @par(i32 %a, i32 %b, i32 %c, i32 %d) {
; CHECK-LABEL: @par(
; CHECK-NEXT:    [[T0:%.*]] = icmp slt i32 %a, %b
; CHECK-NEXT:    [[T3:%.*]] = select i1 [[T0]], i32 %c, i32 %d
; CHECK-NEXT:    ret i32 [[T3]]
;
  %t0 = icmp slt i32 %a, %b
  %iftmp.1.0 = select i1 %t0, i32 -1, i32 0
  %t1 = and i32 %iftmp.1.0, %c
  %not = xor i32 %iftmp.1.0, -1
  %t2 = and i32 %not, %d
  %t3 = or i32 %t1, %t2
  ret i32 %t3
}

; FIXME: In the following tests, verify that a bitcast doesn't get in the way
; of a perfectly good transform. These bitcasts are common in SSE/AVX code
; because of canonicalization to i64 elements for vectors.

define <2 x i64> @vecBitcastOp0(<4 x i1> %cmp, <2 x i64> %a) {
; CHECK-LABEL: @vecBitcastOp0(
; CHECK-NEXT:    [[SEXT:%.*]] = sext <4 x i1> %cmp to <4 x i32>
; CHECK-NEXT:    [[BC:%.*]] = bitcast <4 x i32> [[SEXT]] to <2 x i64>
; CHECK-NEXT:    [[AND:%.*]] = and <2 x i64> [[BC]], %a
; CHECK-NEXT:    ret <2 x i64> [[AND]]
;
  %sext = sext <4 x i1> %cmp to <4 x i32>
  %bc = bitcast <4 x i32> %sext to <2 x i64>
  %and = and <2 x i64> %bc, %a
  ret <2 x i64> %and
}

; Verify that the transform can handle the case where the bitcast is Op1.
; The 'add' is here to prevent a canonicalization of the bitcast to Op0.

define <2 x i64> @vecBitcastOp1(<4 x i1> %cmp, <2 x i64> %a) {
; CHECK-LABEL: @vecBitcastOp1(
; CHECK-NEXT:    [[A2:%.*]] = shl <2 x i64> %a, <i64 1, i64 1>
; CHECK-NEXT:    [[SEXT:%.*]] = sext <4 x i1> %cmp to <4 x i32>
; CHECK-NEXT:    [[BC:%.*]] = bitcast <4 x i32> [[SEXT]] to <2 x i64>
; CHECK-NEXT:    [[AND:%.*]] = and <2 x i64> [[A2]], [[BC]]
; CHECK-NEXT:    ret <2 x i64> [[AND]]
;
  %a2 = add <2 x i64> %a, %a
  %sext = sext <4 x i1> %cmp to <4 x i32>
  %bc = bitcast <4 x i32> %sext to <2 x i64>
  %and = and <2 x i64> %a2, %bc
  ret <2 x i64> %and
}

; Verify that a 'not' is matched too.

define <2 x i64> @vecBitcastNotOp0(<4 x i1> %cmp, <2 x i64> %a) {
; CHECK-LABEL: @vecBitcastNotOp0(
; CHECK-NEXT:    [[SEXT:%.*]] = sext <4 x i1> %cmp to <4 x i32>
; CHECK-NEXT:    [[NEG:%.*]] = xor <4 x i32> [[SEXT]], <i32 -1, i32 -1, i32 -1, i32 -1>
; CHECK-NEXT:    [[BC:%.*]] = bitcast <4 x i32> [[NEG]] to <2 x i64>
; CHECK-NEXT:    [[AND:%.*]] = and <2 x i64> [[BC]], %a
; CHECK-NEXT:    ret <2 x i64> [[AND]]
;
  %sext = sext <4 x i1> %cmp to <4 x i32>
  %neg = xor <4 x i32> %sext, <i32 -1, i32 -1, i32 -1, i32 -1>
  %bc = bitcast <4 x i32> %neg to <2 x i64>
  %and = and <2 x i64> %bc, %a
  ret <2 x i64> %and
}

; Verify that the transform can handle the case where the bitcast is Op1.
; The 'add' is here to prevent a canonicalization of the bitcast to Op0.

define <2 x i64> @vecBitcastNotOp1(<4 x i1> %cmp, <2 x i64> %a) {
; CHECK-LABEL: @vecBitcastNotOp1(
; CHECK-NEXT:    [[A2:%.*]] = shl <2 x i64> %a, <i64 1, i64 1>
; CHECK-NEXT:    [[SEXT:%.*]] = sext <4 x i1> %cmp to <4 x i32>
; CHECK-NEXT:    [[NEG:%.*]] = xor <4 x i32> [[SEXT]], <i32 -1, i32 -1, i32 -1, i32 -1>
; CHECK-NEXT:    [[BC:%.*]] = bitcast <4 x i32> [[NEG]] to <2 x i64>
; CHECK-NEXT:    [[AND:%.*]] = and <2 x i64> [[A2]], [[BC]]
; CHECK-NEXT:    ret <2 x i64> [[AND]]
;
  %a2 = add <2 x i64> %a, %a
  %sext = sext <4 x i1> %cmp to <4 x i32>
  %neg = xor <4 x i32> %sext, <i32 -1, i32 -1, i32 -1, i32 -1>
  %bc = bitcast <4 x i32> %neg to <2 x i64>
  %and = and <2 x i64> %a2, %bc
  ret <2 x i64> %and
}

; Verify that the transform fires even if the bitcast is ahead of the 'not'.

define <2 x i64> @vecBitcastSext(<4 x i1> %cmp, <2 x i64> %a) {
; CHECK-LABEL: @vecBitcastSext(
; CHECK-NEXT:    [[SEXT:%.*]] = sext <4 x i1> %cmp to <4 x i32>
; CHECK-NEXT:    [[NEG1:%.*]] = xor <4 x i32> [[SEXT]], <i32 -1, i32 -1, i32 -1, i32 -1>
; CHECK-NEXT:    [[NEG:%.*]] = bitcast <4 x i32> [[NEG1]] to <2 x i64>
; CHECK-NEXT:    [[AND:%.*]] = and <2 x i64> [[NEG]], %a
; CHECK-NEXT:    ret <2 x i64> [[AND]]
;
  %sext = sext <4 x i1> %cmp to <4 x i32>
  %bc = bitcast <4 x i32> %sext to <2 x i64>
  %neg = xor <2 x i64> %bc, <i64 -1, i64 -1>
  %and = and <2 x i64> %a, %neg
  ret <2 x i64> %and
}