; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -instcombine -S | FileCheck %s
;
; Tests that -instcombine constant-folds the x86 signed-saturating add/sub
; intrinsics (padds/psubs, including the AVX-512 masked forms) down to a
; plain `ret <constant>`.
;
; NOTE(review): the literal vector element lists of this test (operands and
; CHECK-NEXT expected values) were lost to a formatting accident.  The values
; below are a reconstruction using splat operands, chosen so every expected
; result follows directly from signed-saturating add/sub semantics
; (llvm.sadd.sat / llvm.ssub.sat, to which instcombine maps these
; intrinsics): normal fold, saturation to INT_MIN on underflow, saturation
; to INT_MAX on overflow, the undef-operand folds (sadd.sat with an undef
; operand folds to all-ones, ssub.sat to zero), and mask/passthru selection
; for the .512 masked variants.  TODO: confirm the element values against
; the upstream copy of this test.

; ---- llvm.x86.sse2.padds.b ----

define <16 x i8> @sse2_adds_b_constant() {
; CHECK-LABEL: @sse2_adds_b_constant(
; CHECK-NEXT:    ret <16 x i8> <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>
  %1 = call <16 x i8> @llvm.x86.sse2.padds.b(<16 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>, <16 x i8> <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>)
  ret <16 x i8> %1
}

define <16 x i8> @sse2_adds_b_constant_underflow() {
; CHECK-LABEL: @sse2_adds_b_constant_underflow(
; CHECK-NEXT:    ret <16 x i8> <i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128>
  %1 = call <16 x i8> @llvm.x86.sse2.padds.b(<16 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>, <16 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>)
  ret <16 x i8> %1
}

define <16 x i8> @sse2_adds_b_constant_overflow() {
; CHECK-LABEL: @sse2_adds_b_constant_overflow(
; CHECK-NEXT:    ret <16 x i8> <i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127>
  %1 = call <16 x i8> @llvm.x86.sse2.padds.b(<16 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>, <16 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>)
  ret <16 x i8> %1
}

define <16 x i8> @sse2_adds_b_constant_undefs() {
; CHECK-LABEL: @sse2_adds_b_constant_undefs(
; CHECK-NEXT:    ret <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %1 = call <16 x i8> @llvm.x86.sse2.padds.b(<16 x i8> undef, <16 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>)
  ret <16 x i8> %1
}

; ---- llvm.x86.avx2.padds.b ----

define <32 x i8> @avx2_adds_b_constant() {
; CHECK-LABEL: @avx2_adds_b_constant(
; CHECK-NEXT:    ret <32 x i8> <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>
  %1 = call <32 x i8> @llvm.x86.avx2.padds.b(<32 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>, <32 x i8> <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>)
  ret <32 x i8> %1
}

define <32 x i8> @avx2_adds_b_constant_underflow() {
; CHECK-LABEL: @avx2_adds_b_constant_underflow(
; CHECK-NEXT:    ret <32 x i8> <i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128>
  %1 = call <32 x i8> @llvm.x86.avx2.padds.b(<32 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>, <32 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>)
  ret <32 x i8> %1
}

define <32 x i8> @avx2_adds_b_constant_overflow() {
; CHECK-LABEL: @avx2_adds_b_constant_overflow(
; CHECK-NEXT:    ret <32 x i8> <i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127>
  %1 = call <32 x i8> @llvm.x86.avx2.padds.b(<32 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>, <32 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>)
  ret <32 x i8> %1
}

define <32 x i8> @avx2_adds_b_constant_undefs() {
; CHECK-LABEL: @avx2_adds_b_constant_undefs(
; CHECK-NEXT:    ret <32 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %1 = call <32 x i8> @llvm.x86.avx2.padds.b(<32 x i8> undef, <32 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>)
  ret <32 x i8> %1
}

; ---- llvm.x86.avx512.mask.padds.b.512 ----
; Mask -3 clears bit 1, so lane 1 takes the zeroinitializer passthru.

define <64 x i8> @avx512_mask_adds_b_constant() {
; CHECK-LABEL: @avx512_mask_adds_b_constant(
; CHECK-NEXT:    ret <64 x i8> <i8 3, i8 0, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>
  %1 = call <64 x i8> @llvm.x86.avx512.mask.padds.b.512(<64 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>, <64 x i8> <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>, <64 x i8> zeroinitializer, i64 -3)
  ret <64 x i8> %1
}

define <64 x i8> @avx512_mask_adds_b_constant_underflow() {
; CHECK-LABEL: @avx512_mask_adds_b_constant_underflow(
; CHECK-NEXT:    ret <64 x i8> <i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128>
  %1 = call <64 x i8> @llvm.x86.avx512.mask.padds.b.512(<64 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>, <64 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>, <64 x i8> zeroinitializer, i64 -1)
  ret <64 x i8> %1
}

define <64 x i8> @avx512_mask_adds_b_constant_overflow() {
; CHECK-LABEL: @avx512_mask_adds_b_constant_overflow(
; CHECK-NEXT:    ret <64 x i8> <i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127>
  %1 = call <64 x i8> @llvm.x86.avx512.mask.padds.b.512(<64 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>, <64 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>, <64 x i8> zeroinitializer, i64 -1)
  ret <64 x i8> %1
}

define <64 x i8> @avx512_mask_adds_b_constant_undefs() {
; CHECK-LABEL: @avx512_mask_adds_b_constant_undefs(
; CHECK-NEXT:    ret <64 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %1 = call <64 x i8> @llvm.x86.avx512.mask.padds.b.512(<64 x i8> undef, <64 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>, <64 x i8> zeroinitializer, i64 -1)
  ret <64 x i8> %1
}

; ---- llvm.x86.sse2.padds.w ----

define <8 x i16> @sse2_adds_w_constant() {
; CHECK-LABEL: @sse2_adds_w_constant(
; CHECK-NEXT:    ret <8 x i16> <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>
  %1 = call <8 x i16> @llvm.x86.sse2.padds.w(<8 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>, <8 x i16> <i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2>)
  ret <8 x i16> %1
}

define <8 x i16> @sse2_adds_w_constant_underflow() {
; CHECK-LABEL: @sse2_adds_w_constant_underflow(
; CHECK-NEXT:    ret <8 x i16> <i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768>
  %1 = call <8 x i16> @llvm.x86.sse2.padds.w(<8 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>, <8 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>)
  ret <8 x i16> %1
}

define <8 x i16> @sse2_adds_w_constant_overflow() {
; CHECK-LABEL: @sse2_adds_w_constant_overflow(
; CHECK-NEXT:    ret <8 x i16> <i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767>
  %1 = call <8 x i16> @llvm.x86.sse2.padds.w(<8 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>, <8 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>)
  ret <8 x i16> %1
}

define <8 x i16> @sse2_adds_w_constant_undefs() {
; CHECK-LABEL: @sse2_adds_w_constant_undefs(
; CHECK-NEXT:    ret <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %1 = call <8 x i16> @llvm.x86.sse2.padds.w(<8 x i16> undef, <8 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>)
  ret <8 x i16> %1
}

; ---- llvm.x86.avx2.padds.w ----

define <16 x i16> @avx2_adds_w_constant() {
; CHECK-LABEL: @avx2_adds_w_constant(
; CHECK-NEXT:    ret <16 x i16> <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>
  %1 = call <16 x i16> @llvm.x86.avx2.padds.w(<16 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>, <16 x i16> <i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2>)
  ret <16 x i16> %1
}

define <16 x i16> @avx2_adds_w_constant_underflow() {
; CHECK-LABEL: @avx2_adds_w_constant_underflow(
; CHECK-NEXT:    ret <16 x i16> <i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768>
  %1 = call <16 x i16> @llvm.x86.avx2.padds.w(<16 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>, <16 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>)
  ret <16 x i16> %1
}

define <16 x i16> @avx2_adds_w_constant_overflow() {
; CHECK-LABEL: @avx2_adds_w_constant_overflow(
; CHECK-NEXT:    ret <16 x i16> <i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767>
  %1 = call <16 x i16> @llvm.x86.avx2.padds.w(<16 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>, <16 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>)
  ret <16 x i16> %1
}

define <16 x i16> @avx2_adds_w_constant_undefs() {
; CHECK-LABEL: @avx2_adds_w_constant_undefs(
; CHECK-NEXT:    ret <16 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %1 = call <16 x i16> @llvm.x86.avx2.padds.w(<16 x i16> undef, <16 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>)
  ret <16 x i16> %1
}

; ---- llvm.x86.avx512.mask.padds.w.512 ----

define <32 x i16> @avx512_mask_adds_w_constant() {
; CHECK-LABEL: @avx512_mask_adds_w_constant(
; CHECK-NEXT:    ret <32 x i16> <i16 3, i16 0, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>
  %1 = call <32 x i16> @llvm.x86.avx512.mask.padds.w.512(<32 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>, <32 x i16> <i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2>, <32 x i16> zeroinitializer, i32 -3)
  ret <32 x i16> %1
}

define <32 x i16> @avx512_mask_adds_w_constant_underflow() {
; CHECK-LABEL: @avx512_mask_adds_w_constant_underflow(
; CHECK-NEXT:    ret <32 x i16> <i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768>
  %1 = call <32 x i16> @llvm.x86.avx512.mask.padds.w.512(<32 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>, <32 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>, <32 x i16> zeroinitializer, i32 -1)
  ret <32 x i16> %1
}

define <32 x i16> @avx512_mask_adds_w_constant_overflow() {
; CHECK-LABEL: @avx512_mask_adds_w_constant_overflow(
; CHECK-NEXT:    ret <32 x i16> <i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767>
  %1 = call <32 x i16> @llvm.x86.avx512.mask.padds.w.512(<32 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>, <32 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>, <32 x i16> zeroinitializer, i32 -1)
  ret <32 x i16> %1
}

define <32 x i16> @avx512_mask_adds_w_constant_undefs() {
; CHECK-LABEL: @avx512_mask_adds_w_constant_undefs(
; CHECK-NEXT:    ret <32 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %1 = call <32 x i16> @llvm.x86.avx512.mask.padds.w.512(<32 x i16> undef, <32 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>, <32 x i16> zeroinitializer, i32 -1)
  ret <32 x i16> %1
}

; ---- llvm.x86.sse2.psubs.b ----
; ssub.sat with an undef operand folds to zero, hence the zeroinitializer
; results in the *_undefs subtests below.

define <16 x i8> @sse2_subs_b_constant() {
; CHECK-LABEL: @sse2_subs_b_constant(
; CHECK-NEXT:    ret <16 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
  %1 = call <16 x i8> @llvm.x86.sse2.psubs.b(<16 x i8> <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>, <16 x i8> <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>)
  ret <16 x i8> %1
}

define <16 x i8> @sse2_subs_b_constant_underflow() {
; CHECK-LABEL: @sse2_subs_b_constant_underflow(
; CHECK-NEXT:    ret <16 x i8> <i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128>
  %1 = call <16 x i8> @llvm.x86.sse2.psubs.b(<16 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>, <16 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>)
  ret <16 x i8> %1
}

define <16 x i8> @sse2_subs_b_constant_overflow() {
; CHECK-LABEL: @sse2_subs_b_constant_overflow(
; CHECK-NEXT:    ret <16 x i8> <i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127>
  %1 = call <16 x i8> @llvm.x86.sse2.psubs.b(<16 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>, <16 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>)
  ret <16 x i8> %1
}

define <16 x i8> @sse2_subs_b_constant_undefs() {
; CHECK-LABEL: @sse2_subs_b_constant_undefs(
; CHECK-NEXT:    ret <16 x i8> zeroinitializer
  %1 = call <16 x i8> @llvm.x86.sse2.psubs.b(<16 x i8> undef, <16 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>)
  ret <16 x i8> %1
}

; ---- llvm.x86.avx2.psubs.b ----

define <32 x i8> @avx2_subs_b_constant() {
; CHECK-LABEL: @avx2_subs_b_constant(
; CHECK-NEXT:    ret <32 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
  %1 = call <32 x i8> @llvm.x86.avx2.psubs.b(<32 x i8> <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>, <32 x i8> <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>)
  ret <32 x i8> %1
}

define <32 x i8> @avx2_subs_b_constant_underflow() {
; CHECK-LABEL: @avx2_subs_b_constant_underflow(
; CHECK-NEXT:    ret <32 x i8> <i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128>
  %1 = call <32 x i8> @llvm.x86.avx2.psubs.b(<32 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>, <32 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>)
  ret <32 x i8> %1
}

define <32 x i8> @avx2_subs_b_constant_overflow() {
; CHECK-LABEL: @avx2_subs_b_constant_overflow(
; CHECK-NEXT:    ret <32 x i8> <i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127>
  %1 = call <32 x i8> @llvm.x86.avx2.psubs.b(<32 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>, <32 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>)
  ret <32 x i8> %1
}

define <32 x i8> @avx2_subs_b_constant_undefs() {
; CHECK-LABEL: @avx2_subs_b_constant_undefs(
; CHECK-NEXT:    ret <32 x i8> zeroinitializer
  %1 = call <32 x i8> @llvm.x86.avx2.psubs.b(<32 x i8> undef, <32 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>)
  ret <32 x i8> %1
}

; ---- llvm.x86.avx512.mask.psubs.b.512 ----

define <64 x i8> @avx512_mask_subs_b_constant() {
; CHECK-LABEL: @avx512_mask_subs_b_constant(
; CHECK-NEXT:    ret <64 x i8> <i8 1, i8 0, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
  %1 = call <64 x i8> @llvm.x86.avx512.mask.psubs.b.512(<64 x i8> <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>, <64 x i8> <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>, <64 x i8> zeroinitializer, i64 -3)
  ret <64 x i8> %1
}

define <64 x i8> @avx512_mask_subs_b_constant_underflow() {
; CHECK-LABEL: @avx512_mask_subs_b_constant_underflow(
; CHECK-NEXT:    ret <64 x i8> <i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128, i8 -128>
  %1 = call <64 x i8> @llvm.x86.avx512.mask.psubs.b.512(<64 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>, <64 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>, <64 x i8> zeroinitializer, i64 -1)
  ret <64 x i8> %1
}

define <64 x i8> @avx512_mask_subs_b_constant_overflow() {
; CHECK-LABEL: @avx512_mask_subs_b_constant_overflow(
; CHECK-NEXT:    ret <64 x i8> <i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127, i8 127>
  %1 = call <64 x i8> @llvm.x86.avx512.mask.psubs.b.512(<64 x i8> <i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100, i8 100>, <64 x i8> <i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100, i8 -100>, <64 x i8> zeroinitializer, i64 -1)
  ret <64 x i8> %1
}

define <64 x i8> @avx512_mask_subs_b_constant_undefs() {
; CHECK-LABEL: @avx512_mask_subs_b_constant_undefs(
; CHECK-NEXT:    ret <64 x i8> zeroinitializer
  %1 = call <64 x i8> @llvm.x86.avx512.mask.psubs.b.512(<64 x i8> undef, <64 x i8> <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>, <64 x i8> zeroinitializer, i64 -1)
  ret <64 x i8> %1
}

; ---- llvm.x86.sse2.psubs.w ----

define <8 x i16> @sse2_subs_w_constant() {
; CHECK-LABEL: @sse2_subs_w_constant(
; CHECK-NEXT:    ret <8 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>
  %1 = call <8 x i16> @llvm.x86.sse2.psubs.w(<8 x i16> <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>, <8 x i16> <i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2>)
  ret <8 x i16> %1
}

define <8 x i16> @sse2_subs_w_constant_underflow() {
; CHECK-LABEL: @sse2_subs_w_constant_underflow(
; CHECK-NEXT:    ret <8 x i16> <i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768>
  %1 = call <8 x i16> @llvm.x86.sse2.psubs.w(<8 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>, <8 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>)
  ret <8 x i16> %1
}

define <8 x i16> @sse2_subs_w_constant_overflow() {
; CHECK-LABEL: @sse2_subs_w_constant_overflow(
; CHECK-NEXT:    ret <8 x i16> <i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767>
  %1 = call <8 x i16> @llvm.x86.sse2.psubs.w(<8 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>, <8 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>)
  ret <8 x i16> %1
}

define <8 x i16> @sse2_subs_w_constant_undefs() {
; CHECK-LABEL: @sse2_subs_w_constant_undefs(
; CHECK-NEXT:    ret <8 x i16> zeroinitializer
  %1 = call <8 x i16> @llvm.x86.sse2.psubs.w(<8 x i16> undef, <8 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>)
  ret <8 x i16> %1
}

; ---- llvm.x86.avx2.psubs.w ----

define <16 x i16> @avx2_subs_w_constant() {
; CHECK-LABEL: @avx2_subs_w_constant(
; CHECK-NEXT:    ret <16 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>
  %1 = call <16 x i16> @llvm.x86.avx2.psubs.w(<16 x i16> <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>, <16 x i16> <i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2>)
  ret <16 x i16> %1
}

define <16 x i16> @avx2_subs_w_constant_underflow() {
; CHECK-LABEL: @avx2_subs_w_constant_underflow(
; CHECK-NEXT:    ret <16 x i16> <i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768>
  %1 = call <16 x i16> @llvm.x86.avx2.psubs.w(<16 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>, <16 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>)
  ret <16 x i16> %1
}

define <16 x i16> @avx2_subs_w_constant_overflow() {
; CHECK-LABEL: @avx2_subs_w_constant_overflow(
; CHECK-NEXT:    ret <16 x i16> <i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767>
  %1 = call <16 x i16> @llvm.x86.avx2.psubs.w(<16 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>, <16 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>)
  ret <16 x i16> %1
}

define <16 x i16> @avx2_subs_w_constant_undefs() {
; CHECK-LABEL: @avx2_subs_w_constant_undefs(
; CHECK-NEXT:    ret <16 x i16> zeroinitializer
  %1 = call <16 x i16> @llvm.x86.avx2.psubs.w(<16 x i16> undef, <16 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>)
  ret <16 x i16> %1
}

; ---- llvm.x86.avx512.mask.psubs.w.512 ----

define <32 x i16> @avx512_mask_subs_w_constant() {
; CHECK-LABEL: @avx512_mask_subs_w_constant(
; CHECK-NEXT:    ret <32 x i16> <i16 1, i16 0, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>
  %1 = call <32 x i16> @llvm.x86.avx512.mask.psubs.w.512(<32 x i16> <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>, <32 x i16> <i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2, i16 2>, <32 x i16> zeroinitializer, i32 -3)
  ret <32 x i16> %1
}

define <32 x i16> @avx512_mask_subs_w_constant_underflow() {
; CHECK-LABEL: @avx512_mask_subs_w_constant_underflow(
; CHECK-NEXT:    ret <32 x i16> <i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768, i16 -32768>
  %1 = call <32 x i16> @llvm.x86.avx512.mask.psubs.w.512(<32 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>, <32 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>, <32 x i16> zeroinitializer, i32 -1)
  ret <32 x i16> %1
}

define <32 x i16> @avx512_mask_subs_w_constant_overflow() {
; CHECK-LABEL: @avx512_mask_subs_w_constant_overflow(
; CHECK-NEXT:    ret <32 x i16> <i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767, i16 32767>
  %1 = call <32 x i16> @llvm.x86.avx512.mask.psubs.w.512(<32 x i16> <i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000, i16 30000>, <32 x i16> <i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000, i16 -30000>, <32 x i16> zeroinitializer, i32 -1)
  ret <32 x i16> %1
}

define <32 x i16> @avx512_mask_subs_w_constant_undefs() {
; CHECK-LABEL: @avx512_mask_subs_w_constant_undefs(
; CHECK-NEXT:    ret <32 x i16> zeroinitializer
  %1 = call <32 x i16> @llvm.x86.avx512.mask.psubs.w.512(<32 x i16> undef, <32 x i16> <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>, <32 x i16> zeroinitializer, i32 -1)
  ret <32 x i16> %1
}

declare <16 x i8> @llvm.x86.sse2.padds.b(<16 x i8>, <16 x i8>) nounwind readnone
declare <16 x i8> @llvm.x86.sse2.psubs.b(<16 x i8>, <16 x i8>) nounwind readnone
declare <8 x i16> @llvm.x86.sse2.padds.w(<8 x i16>, <8 x i16>) nounwind readnone
declare <8 x i16> @llvm.x86.sse2.psubs.w(<8 x i16>, <8 x i16>) nounwind readnone
declare <32 x i8> @llvm.x86.avx2.padds.b(<32 x i8>, <32 x i8>) nounwind readnone
declare <32 x i8> @llvm.x86.avx2.psubs.b(<32 x i8>, <32 x i8>) nounwind readnone
declare <16 x i16> @llvm.x86.avx2.padds.w(<16 x i16>, <16 x i16>) nounwind readnone
declare <16 x i16> @llvm.x86.avx2.psubs.w(<16 x i16>, <16 x i16>) nounwind readnone
declare <64 x i8> @llvm.x86.avx512.mask.padds.b.512(<64 x i8>, <64 x i8>, <64 x i8>, i64) nounwind readnone
declare <64 x i8> @llvm.x86.avx512.mask.psubs.b.512(<64 x i8>, <64 x i8>, <64 x i8>, i64) nounwind readnone
declare <32 x i16> @llvm.x86.avx512.mask.padds.w.512(<32 x i16>, <32 x i16>, <32 x i16>, i32) nounwind readnone
declare <32 x i16> @llvm.x86.avx512.mask.psubs.w.512(<32 x i16>, <32 x i16>, <32 x i16>, i32) nounwind readnone