; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc -mtriple=riscv32 --verify-machineinstrs < %s \
; RUN: | FileCheck %s -check-prefixes=RV32I
; RUN: llc -mtriple=riscv32 -mattr=+experimental-xqcibm --verify-machineinstrs < %s \
; RUN: | FileCheck %s -check-prefixes=RV32XQCIBM
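
; Check that the xor-based bit-field insert pattern
;   in1 ^ ((in2 << 1) ^ (in1 & -2))
; which keeps bit 0 of in1 and inserts in2 into bits [31:1], is selected to
; the Xqcibm qc.insb instruction.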
define i32 @insb(i32 %in1, i32 %in2) {
; RV32I-LABEL: insb:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: andi a2, a0, -2
; RV32I-NEXT: xor a1, a1, a2
; RV32I-NEXT: xor a0, a0, a1
; RV32I-NEXT: ret
;
; RV32XQCIBM-LABEL: insb:
; RV32XQCIBM: # %bb.0:
; RV32XQCIBM-NEXT: qc.ext a1, a1, 31, 0
; RV32XQCIBM-NEXT: qc.insb a0, a1, 31, 1
; RV32XQCIBM-NEXT: ret
%shl1 = shl i32 %in2, 1
%in1.masked = and i32 %in1, -2
%xor1 = xor i32 %shl1, %in1.masked
%xor2 = xor i32 %in1, %xor1
ret i32 %xor2
}
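
; The masked value (in1 & -2) has a second use, so the combine to qc.insb
; does not fire.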
define i32 @insb_and_multiple(i32 %in1, i32 %in2) {
; RV32I-LABEL: insb_and_multiple:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: andi a2, a0, -2
; RV32I-NEXT: xor a1, a1, a2
; RV32I-NEXT: xor a0, a0, a1
; RV32I-NEXT: add a0, a0, a2
; RV32I-NEXT: ret
;
; RV32XQCIBM-LABEL: insb_and_multiple:
; RV32XQCIBM: # %bb.0:
; RV32XQCIBM-NEXT: slli a1, a1, 1
; RV32XQCIBM-NEXT: andi a2, a0, -2
; RV32XQCIBM-NEXT: xor a1, a1, a2
; RV32XQCIBM-NEXT: xor a0, a0, a1
; RV32XQCIBM-NEXT: add a0, a0, a2
; RV32XQCIBM-NEXT: ret
%shl1 = shl i32 %in2, 1
%in1.masked = and i32 %in1, -2
%xor1 = xor i32 %shl1, %in1.masked
%xor2 = xor i32 %in1, %xor1
%add1 = add i32 %xor2, %in1.masked
ret i32 %add1
}
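
; The inner xor has a second use, so the combine to qc.insb does not fire.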
define i32 @insb_xor_multiple(i32 %in1, i32 %in2) {
; RV32I-LABEL: insb_xor_multiple:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: andi a2, a0, -2
; RV32I-NEXT: xor a1, a1, a2
; RV32I-NEXT: xor a0, a0, a1
; RV32I-NEXT: add a0, a0, a1
; RV32I-NEXT: ret
;
; RV32XQCIBM-LABEL: insb_xor_multiple:
; RV32XQCIBM: # %bb.0:
; RV32XQCIBM-NEXT: slli a1, a1, 1
; RV32XQCIBM-NEXT: andi a2, a0, -2
; RV32XQCIBM-NEXT: xor a1, a1, a2
; RV32XQCIBM-NEXT: xor a0, a0, a1
; RV32XQCIBM-NEXT: add a0, a0, a1
; RV32XQCIBM-NEXT: ret
%shl1 = shl i32 %in2, 1
%in1.masked = and i32 %in1, -2
%xor1 = xor i32 %shl1, %in1.masked
%xor2 = xor i32 %in1, %xor1
%add1 = add i32 %xor2, %xor1
ret i32 %add1
}
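
; The shifted value has a second use; the combine still fires and the value
; to insert is recovered from the shift with an srai.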
define i32 @insb_shl_multiple(i32 %in1, i32 %in2) {
; RV32I-LABEL: insb_shl_multiple:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: andi a2, a0, -2
; RV32I-NEXT: xor a2, a1, a2
; RV32I-NEXT: xor a0, a0, a2
; RV32I-NEXT: add a0, a0, a1
; RV32I-NEXT: ret
;
; RV32XQCIBM-LABEL: insb_shl_multiple:
; RV32XQCIBM: # %bb.0:
; RV32XQCIBM-NEXT: slli a1, a1, 1
; RV32XQCIBM-NEXT: srai a2, a1, 1
; RV32XQCIBM-NEXT: qc.insb a0, a2, 31, 1
; RV32XQCIBM-NEXT: add a0, a0, a1
; RV32XQCIBM-NEXT: ret
%shl1 = shl i32 %in2, 1
%in1.masked = and i32 %in1, -2
%xor1 = xor i32 %shl1, %in1.masked
%xor2 = xor i32 %in1, %xor1
%add1 = add i32 %xor2, %shl1
ret i32 %add1
}
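
; Same pattern with the operands of the inner xor commuted.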
define i32 @insb_comm(i32 %in1, i32 %in2) {
; RV32I-LABEL: insb_comm:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: andi a2, a0, -2
; RV32I-NEXT: xor a1, a2, a1
; RV32I-NEXT: xor a0, a0, a1
; RV32I-NEXT: ret
;
; RV32XQCIBM-LABEL: insb_comm:
; RV32XQCIBM: # %bb.0:
; RV32XQCIBM-NEXT: qc.ext a1, a1, 31, 0
; RV32XQCIBM-NEXT: qc.insb a0, a1, 31, 1
; RV32XQCIBM-NEXT: ret
%shl1 = shl i32 %in2, 1
%in1.masked = and i32 %in1, -2
%xor1 = xor i32 %in1.masked, %shl1
%xor2 = xor i32 %in1, %xor1
ret i32 %xor2
}
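
; Same pattern with the operands of both xors commuted.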
define i32 @insb_comm1(i32 %in1, i32 %in2) {
; RV32I-LABEL: insb_comm1:
; RV32I: # %bb.0:
; RV32I-NEXT: slli a1, a1, 1
; RV32I-NEXT: andi a2, a0, -2
; RV32I-NEXT: xor a1, a2, a1
; RV32I-NEXT: xor a0, a1, a0
; RV32I-NEXT: ret
;
; RV32XQCIBM-LABEL: insb_comm1:
; RV32XQCIBM: # %bb.0:
; RV32XQCIBM-NEXT: qc.ext a1, a1, 31, 0
; RV32XQCIBM-NEXT: qc.insb a0, a1, 31, 1
; RV32XQCIBM-NEXT: ret
%shl1 = shl i32 %in2, 1
%in1.masked = and i32 %in1, -2
%xor1 = xor i32 %in1.masked, %shl1
%xor2 = xor i32 %xor1, %in1
ret i32 %xor2
}
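
; On RV32, i64 is split into a pair of i32 values; the insert pattern only
; survives legalization on the low word, so qc.insb is formed for that word
; only.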
define i64 @insb_i64(i64 %in1, i64 %in2) {
; RV32I-LABEL: insb_i64:
; RV32I: # %bb.0:
; RV32I-NEXT: srli a1, a2, 31
; RV32I-NEXT: slli a3, a3, 1
; RV32I-NEXT: slli a2, a2, 1
; RV32I-NEXT: or a1, a3, a1
; RV32I-NEXT: andi a3, a0, -2
; RV32I-NEXT: xor a2, a3, a2
; RV32I-NEXT: xor a0, a2, a0
; RV32I-NEXT: ret
;
; RV32XQCIBM-LABEL: insb_i64:
; RV32XQCIBM: # %bb.0:
; RV32XQCIBM-NEXT: srli a1, a2, 31
; RV32XQCIBM-NEXT: slli a3, a3, 1
; RV32XQCIBM-NEXT: qc.ext a2, a2, 31, 0
; RV32XQCIBM-NEXT: or a1, a1, a3
; RV32XQCIBM-NEXT: qc.insb a0, a2, 31, 1
; RV32XQCIBM-NEXT: ret
%shl1 = shl i64 %in2, 1
%in1.masked = and i64 %in1, -2
%xor1 = xor i64 %in1.masked, %shl1
%xor2 = xor i64 %xor1, %in1
ret i64 %xor2
}
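
; Negative test: the value being xor'ed in is not shifted, so this is not a
; bit-field insert and qc.insb should not be formed.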
define i8 @tgt2_insb_neg(i8 %x, i8 %y) {
; RV32I-LABEL: tgt2_insb_neg:
; RV32I: # %bb.0:
; RV32I-NEXT: andi a2, a0, -2
; RV32I-NEXT: xor a0, a0, a1
; RV32I-NEXT: xor a0, a0, a2
; RV32I-NEXT: ret
;
; RV32XQCIBM-LABEL: tgt2_insb_neg:
; RV32XQCIBM: # %bb.0:
; RV32XQCIBM-NEXT: andi a2, a0, -2
; RV32XQCIBM-NEXT: xor a0, a0, a1
; RV32XQCIBM-NEXT: xor a0, a0, a2
; RV32XQCIBM-NEXT: ret
%and = and i8 %x, -2
%xor1 = xor i8 %and, %y
%xor2 = xor i8 %x, %xor1
ret i8 %xor2
}