author     Alex Bradbury <asb@lowrisc.org>    2018-10-08 09:08:51 +0000
committer  Alex Bradbury <asb@lowrisc.org>    2018-10-08 09:08:51 +0000
commit     5af6c1496aaf95108af8a928156da503fccb10c9 (patch)
tree       b355787dc69aba988ba5ef01e96f65a4f7f44ac0
parent     bcc86a95c1de0612b829d066261460f53b7d99ec (diff)
download   bcm5719-llvm-5af6c1496aaf95108af8a928156da503fccb10c9.tar.gz
           bcm5719-llvm-5af6c1496aaf95108af8a928156da503fccb10c9.zip
[RISCV] Update alu8.ll and alu16.ll test cases
The srli test in alu8.ll was a no-op, as it shifted by 8 bits (the srai test
had the same problem, shifting by 9). Fix this, and also change the immediate
in alu16.ll, as a shift by something other than a power of 8 is more
interesting.
llvm-svn: 343958
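
Background for the fix, not part of the original message: in LLVM IR, a shift
whose amount is equal to or larger than the bit width of the operand returns
poison, which is why the backend was free to lower the old test to a bare ret.
A minimal sketch of the old pattern, assuming a riscv32 triple (the function
name is changed here for illustration; runnable with llc -mtriple=riscv32):

; Old alu8.ll pattern: an i8 shifted right by its full width of 8 bits.
; Per the LangRef, a shift amount >= the operand's bit width yields poison,
; so the auto-generated check lines contained nothing but a ret.
define i8 @srli_noop(i8 %a) nounwind {
  %1 = lshr i8 %a, 8    ; shift amount == bit width: result is poison
  ret i8 %1
}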
-rw-r--r--  llvm/test/CodeGen/RISCV/alu16.ll | 6 +++---
-rw-r--r--  llvm/test/CodeGen/RISCV/alu8.ll  | 8 ++++++--
2 files changed, 9 insertions(+), 5 deletions(-)
diff --git a/llvm/test/CodeGen/RISCV/alu16.ll b/llvm/test/CodeGen/RISCV/alu16.ll
index af0b09c8ad5..20b79a987f6 100644
--- a/llvm/test/CodeGen/RISCV/alu16.ll
+++ b/llvm/test/CodeGen/RISCV/alu16.ll
@@ -82,11 +82,11 @@ define i16 @srli(i16 %a) nounwind {
 ; RV32I-LABEL: srli:
 ; RV32I:       # %bb.0:
 ; RV32I-NEXT:    lui a1, 16
-; RV32I-NEXT:    addi a1, a1, -256
+; RV32I-NEXT:    addi a1, a1, -64
 ; RV32I-NEXT:    and a0, a0, a1
-; RV32I-NEXT:    srli a0, a0, 8
+; RV32I-NEXT:    srli a0, a0, 6
 ; RV32I-NEXT:    ret
-  %1 = lshr i16 %a, 8
+  %1 = lshr i16 %a, 6
   ret i16 %1
 }
 
diff --git a/llvm/test/CodeGen/RISCV/alu8.ll b/llvm/test/CodeGen/RISCV/alu8.ll
index 0d2177b350e..f7d0e8beef3 100644
--- a/llvm/test/CodeGen/RISCV/alu8.ll
+++ b/llvm/test/CodeGen/RISCV/alu8.ll
@@ -79,16 +79,20 @@ define i8 @slli(i8 %a) nounwind {
 define i8 @srli(i8 %a) nounwind {
 ; RV32I-LABEL: srli:
 ; RV32I:       # %bb.0:
+; RV32I-NEXT:    andi a0, a0, 192
+; RV32I-NEXT:    srli a0, a0, 6
 ; RV32I-NEXT:    ret
-  %1 = lshr i8 %a, 8
+  %1 = lshr i8 %a, 6
   ret i8 %1
 }
 
 define i8 @srai(i8 %a) nounwind {
 ; RV32I-LABEL: srai:
 ; RV32I:       # %bb.0:
+; RV32I-NEXT:    slli a0, a0, 24
+; RV32I-NEXT:    srai a0, a0, 29
 ; RV32I-NEXT:    ret
-  %1 = ashr i8 %a, 9
+  %1 = ashr i8 %a, 5
   ret i8 %1
 }
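
A note on the new check lines, with the constants worked out. This is a
reader's sketch of the standard RV32 lowering of narrow shifts, not part of
the commit; the function names are illustrative:

; alu16.ll: lshr i16 %a, 6. The i16 sits in a 32-bit register with
; undefined upper bits, so only bits 6..15 of the input are demanded:
;   lui  a1, 16       ; a1 = 16 << 12 = 0x10000
;   addi a1, a1, -64  ; a1 = 0x10000 - 64 = 0xFFC0 (bits 6..15 set)
;   and  a0, a0, a1   ; keep only the demanded bits 6..15
;   srli a0, a0, 6    ; the actual 6-bit logical shift
define i16 @srli16(i16 %a) nounwind {
  %1 = lshr i16 %a, 6
  ret i16 %1
}

; alu8.ll: ashr i8 %a, 5. The i8's sign bit is bit 7, so the byte is
; moved to the top of the register and shifted back arithmetically:
;   slli a0, a0, 24   ; bit 7 of the i8 becomes bit 31
;   srai a0, a0, 29   ; 29 = 24 + 5: sign-extend and shift by 5 at once
define i8 @srai8(i8 %a) nounwind {
  %1 = ashr i8 %a, 5
  ret i8 %1
}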