| ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py |
| ; RUN: llc -mtriple=riscv32 -mattr=+zbkb -verify-machineinstrs < %s \ |
| ; RUN: | FileCheck %s -check-prefixes=RV32ZBKB |
| ; RUN: llc -mtriple=riscv64 -mattr=+zbkb -verify-machineinstrs < %s \ |
| ; RUN: | FileCheck %s -check-prefixes=RV64ZBKB |
| |
; TODO: These tests can be optimised with the following folds:
| ; fold (bitreverse(srl (bitreverse c), x)) -> (shl c, x) |
| ; fold (bitreverse(shl (bitreverse c), x)) -> (srl c, x) |
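;
; A hypothetical worked example of the first fold (illustration only, not a
; generated check): with c = 0b10110001 (i8) and a shift amount of 3,
;   bitreverse(c)                      = 0b10001101
;   lshr(bitreverse(c), 3)             = 0b00010001
;   bitreverse(lshr(bitreverse(c), 3)) = 0b10001000 = shl(c, 3)
; and, for the second fold,
;   bitreverse(shl(bitreverse(c), 3))  = 0b00010110 = lshr(c, 3)
; So, for instance, test_bitreverse_srli_bitreverse_i32 below could in
; principle lower to a single shift-left by 15.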
| |
| declare i8 @llvm.bitreverse.i8(i8) |
| declare i16 @llvm.bitreverse.i16(i16) |
| declare i32 @llvm.bitreverse.i32(i32) |
| declare i64 @llvm.bitreverse.i64(i64) |
| |
| define i8 @test_bitreverse_srli_bitreverse_i8(i8 %a) nounwind { |
| ; RV32ZBKB-LABEL: test_bitreverse_srli_bitreverse_i8: |
| ; RV32ZBKB: # %bb.0: |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: srli a0, a0, 27 |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: srli a0, a0, 24 |
| ; RV32ZBKB-NEXT: ret |
| ; |
| ; RV64ZBKB-LABEL: test_bitreverse_srli_bitreverse_i8: |
| ; RV64ZBKB: # %bb.0: |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 59 |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 56 |
| ; RV64ZBKB-NEXT: ret |
| %1 = call i8 @llvm.bitreverse.i8(i8 %a) |
| %2 = lshr i8 %1, 3 |
| %3 = call i8 @llvm.bitreverse.i8(i8 %2) |
| ret i8 %3 |
| } |
| |
| define i16 @test_bitreverse_srli_bitreverse_i16(i16 %a) nounwind { |
| ; RV32ZBKB-LABEL: test_bitreverse_srli_bitreverse_i16: |
| ; RV32ZBKB: # %bb.0: |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: srli a0, a0, 23 |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: srli a0, a0, 16 |
| ; RV32ZBKB-NEXT: ret |
| ; |
| ; RV64ZBKB-LABEL: test_bitreverse_srli_bitreverse_i16: |
| ; RV64ZBKB: # %bb.0: |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 55 |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 48 |
| ; RV64ZBKB-NEXT: ret |
| %1 = call i16 @llvm.bitreverse.i16(i16 %a) |
| %2 = lshr i16 %1, 7 |
| %3 = call i16 @llvm.bitreverse.i16(i16 %2) |
| ret i16 %3 |
| } |
| |
| define i32 @test_bitreverse_srli_bitreverse_i32(i32 %a) nounwind { |
| ; RV32ZBKB-LABEL: test_bitreverse_srli_bitreverse_i32: |
| ; RV32ZBKB: # %bb.0: |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: srli a0, a0, 15 |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: ret |
| ; |
| ; RV64ZBKB-LABEL: test_bitreverse_srli_bitreverse_i32: |
| ; RV64ZBKB: # %bb.0: |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 47 |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 32 |
| ; RV64ZBKB-NEXT: ret |
| %1 = call i32 @llvm.bitreverse.i32(i32 %a) |
| %2 = lshr i32 %1, 15 |
| %3 = call i32 @llvm.bitreverse.i32(i32 %2) |
| ret i32 %3 |
| } |
| |
| define i64 @test_bitreverse_srli_bitreverse_i64(i64 %a) nounwind { |
| ; RV32ZBKB-LABEL: test_bitreverse_srli_bitreverse_i64: |
| ; RV32ZBKB: # %bb.0: |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: srli a0, a0, 1 |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a1, a0 |
| ; RV32ZBKB-NEXT: li a0, 0 |
| ; RV32ZBKB-NEXT: ret |
| ; |
| ; RV64ZBKB-LABEL: test_bitreverse_srli_bitreverse_i64: |
| ; RV64ZBKB: # %bb.0: |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 33 |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: ret |
| %1 = call i64 @llvm.bitreverse.i64(i64 %a) |
| %2 = lshr i64 %1, 33 |
| %3 = call i64 @llvm.bitreverse.i64(i64 %2) |
| ret i64 %3 |
| } |
| |
| define i8 @test_bitreverse_shli_bitreverse_i8(i8 %a) nounwind { |
| ; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i8: |
| ; RV32ZBKB: # %bb.0: |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: srli a0, a0, 24 |
| ; RV32ZBKB-NEXT: slli a0, a0, 3 |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: srli a0, a0, 24 |
| ; RV32ZBKB-NEXT: ret |
| ; |
| ; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i8: |
| ; RV64ZBKB: # %bb.0: |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 56 |
| ; RV64ZBKB-NEXT: slli a0, a0, 3 |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 56 |
| ; RV64ZBKB-NEXT: ret |
| %1 = call i8 @llvm.bitreverse.i8(i8 %a) |
| %2 = shl i8 %1, 3 |
| %3 = call i8 @llvm.bitreverse.i8(i8 %2) |
| ret i8 %3 |
| } |
| |
| define i16 @test_bitreverse_shli_bitreverse_i16(i16 %a) nounwind { |
| ; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i16: |
| ; RV32ZBKB: # %bb.0: |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: srli a0, a0, 16 |
| ; RV32ZBKB-NEXT: slli a0, a0, 7 |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: srli a0, a0, 16 |
| ; RV32ZBKB-NEXT: ret |
| ; |
| ; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i16: |
| ; RV64ZBKB: # %bb.0: |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 48 |
| ; RV64ZBKB-NEXT: slli a0, a0, 7 |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 48 |
| ; RV64ZBKB-NEXT: ret |
| %1 = call i16 @llvm.bitreverse.i16(i16 %a) |
| %2 = shl i16 %1, 7 |
| %3 = call i16 @llvm.bitreverse.i16(i16 %2) |
| ret i16 %3 |
| } |
| |
| define i32 @test_bitreverse_shli_bitreverse_i32(i32 %a) nounwind { |
| ; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i32: |
| ; RV32ZBKB: # %bb.0: |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: slli a0, a0, 15 |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: ret |
| ; |
| ; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i32: |
| ; RV64ZBKB: # %bb.0: |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 32 |
| ; RV64ZBKB-NEXT: slli a0, a0, 15 |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: srli a0, a0, 32 |
| ; RV64ZBKB-NEXT: ret |
| %1 = call i32 @llvm.bitreverse.i32(i32 %a) |
| %2 = shl i32 %1, 15 |
| %3 = call i32 @llvm.bitreverse.i32(i32 %2) |
| ret i32 %3 |
| } |
| |
| define i64 @test_bitreverse_shli_bitreverse_i64(i64 %a) nounwind { |
| ; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i64: |
| ; RV32ZBKB: # %bb.0: |
| ; RV32ZBKB-NEXT: rev8 a0, a1 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: slli a0, a0, 1 |
| ; RV32ZBKB-NEXT: rev8 a0, a0 |
| ; RV32ZBKB-NEXT: brev8 a0, a0 |
| ; RV32ZBKB-NEXT: li a1, 0 |
| ; RV32ZBKB-NEXT: ret |
| ; |
| ; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i64: |
| ; RV64ZBKB: # %bb.0: |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: slli a0, a0, 33 |
| ; RV64ZBKB-NEXT: rev8 a0, a0 |
| ; RV64ZBKB-NEXT: brev8 a0, a0 |
| ; RV64ZBKB-NEXT: ret |
| %1 = call i64 @llvm.bitreverse.i64(i64 %a) |
| %2 = shl i64 %1, 33 |
| %3 = call i64 @llvm.bitreverse.i64(i64 %2) |
| ret i64 %3 |
| } |