| //=- LoongArchLASXInstrInfo.td - LoongArch LASX instructions -*- tablegen -*-=// |
| // |
| // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
| // See https://llvm.org/LICENSE.txt for license information. |
| // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
| // |
| //===----------------------------------------------------------------------===// |
| // |
// This file describes the LoongArch Advanced SIMD eXtension (LASX)
// instructions, which operate on 256-bit vector registers.
| // |
| //===----------------------------------------------------------------------===// |
| |
| // Target nodes. |
| def loongarch_xvpermi: SDNode<"LoongArchISD::XVPERMI", SDT_LoongArchV1RUimm>; |
| |
| def lasxsplati8 |
| : PatFrag<(ops node:$e0), |
| (v32i8 (build_vector node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0))>; |
| def lasxsplati16 |
| : PatFrag<(ops node:$e0), |
| (v16i16 (build_vector node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0))>; |
| def lasxsplati32 |
| : PatFrag<(ops node:$e0), |
| (v8i32 (build_vector node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0))>; |
| def lasxsplati64 |
| : PatFrag<(ops node:$e0), |
| (v4i64 (build_vector node:$e0, node:$e0, node:$e0, node:$e0))>; |
| def lasxsplatf32 |
| : PatFrag<(ops node:$e0), |
| (v8f32 (build_vector node:$e0, node:$e0, node:$e0, node:$e0, |
| node:$e0, node:$e0, node:$e0, node:$e0))>; |
| def lasxsplatf64 |
| : PatFrag<(ops node:$e0), |
| (v4f64 (build_vector node:$e0, node:$e0, node:$e0, node:$e0))>; |
| |
| //===----------------------------------------------------------------------===// |
| // Instruction class templates |
| //===----------------------------------------------------------------------===// |
| |
| class LASX1RI13_XI<bits<32> op, Operand ImmOpnd = simm13> |
| : Fmt1RI13_XI<op, (outs LASX256:$xd), (ins ImmOpnd:$imm13), "$xd, $imm13">; |
| |
| class LASX2R_XX<bits<32> op> |
| : Fmt2R_XX<op, (outs LASX256:$xd), (ins LASX256:$xj), "$xd, $xj">; |
| |
| class LASX2R_XR<bits<32> op> |
| : Fmt2R_XR<op, (outs LASX256:$xd), (ins GPR:$rj), "$xd, $rj">; |
| |
| class LASX2R_CX<bits<32> op> |
| : Fmt2R_CX<op, (outs CFR:$cd), (ins LASX256:$xj), "$cd, $xj">; |
| |
| class LASX2RI1_XXI<bits<32> op, Operand ImmOpnd = uimm1> |
| : Fmt2RI1_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm1), |
| "$xd, $xj, $imm1">; |
| |
| class LASX2RI2_XXI<bits<32> op, Operand ImmOpnd = uimm2> |
| : Fmt2RI2_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm2), |
| "$xd, $xj, $imm2">; |
| |
| class LASX2RI2_RXI<bits<32> op, Operand ImmOpnd = uimm2> |
| : Fmt2RI2_RXI<op, (outs GPR:$rd), (ins LASX256:$xj, ImmOpnd:$imm2), |
| "$rd, $xj, $imm2">; |
| |
| class LASX2RI3_XXI<bits<32> op, Operand ImmOpnd = uimm3> |
| : Fmt2RI3_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm3), |
| "$xd, $xj, $imm3">; |
| |
| class LASX2RI3_RXI<bits<32> op, Operand ImmOpnd = uimm3> |
| : Fmt2RI3_RXI<op, (outs GPR:$rd), (ins LASX256:$xj, ImmOpnd:$imm3), |
| "$rd, $xj, $imm3">; |
| |
| class LASX2RI4_XXI<bits<32> op, Operand ImmOpnd = uimm4> |
| : Fmt2RI4_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm4), |
| "$xd, $xj, $imm4">; |
| |
| class LASX2RI4_XRI<bits<32> op, Operand ImmOpnd = uimm4> |
| : Fmt2RI4_XRI<op, (outs LASX256:$xd), (ins GPR:$rj, ImmOpnd:$imm4), |
| "$xd, $rj, $imm4">; |
| |
| class LASX2RI4_RXI<bits<32> op, Operand ImmOpnd = uimm4> |
| : Fmt2RI4_RXI<op, (outs GPR:$rd), (ins LASX256:$xj, ImmOpnd:$imm4), |
| "$rd, $xj, $imm4">; |
| |
| class LASX2RI5_XXI<bits<32> op, Operand ImmOpnd = uimm5> |
| : Fmt2RI5_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm5), |
| "$xd, $xj, $imm5">; |
| |
| class LASX2RI6_XXI<bits<32> op, Operand ImmOpnd = uimm6> |
| : Fmt2RI6_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm6), |
| "$xd, $xj, $imm6">; |
| |
| class LASX2RI8_XXI<bits<32> op, Operand ImmOpnd = uimm8> |
| : Fmt2RI8_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm8), |
| "$xd, $xj, $imm8">; |
| |
| class LASX2RI8I2_XRII<bits<32> op, Operand ImmOpnd = simm8, |
| Operand IdxOpnd = uimm2> |
| : Fmt2RI8I2_XRII<op, (outs), |
| (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm8, IdxOpnd:$imm2), |
| "$xd, $rj, $imm8, $imm2">; |
| class LASX2RI8I3_XRII<bits<32> op, Operand ImmOpnd = simm8, |
| Operand IdxOpnd = uimm3> |
| : Fmt2RI8I3_XRII<op, (outs), |
| (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm8, IdxOpnd:$imm3), |
| "$xd, $rj, $imm8, $imm3">; |
| class LASX2RI8I4_XRII<bits<32> op, Operand ImmOpnd = simm8, |
| Operand IdxOpnd = uimm4> |
| : Fmt2RI8I4_XRII<op, (outs), |
| (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm8, IdxOpnd:$imm4), |
| "$xd, $rj, $imm8, $imm4">; |
| class LASX2RI8I5_XRII<bits<32> op, Operand ImmOpnd = simm8, |
| Operand IdxOpnd = uimm5> |
| : Fmt2RI8I5_XRII<op, (outs), |
| (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm8, IdxOpnd:$imm5), |
| "$xd, $rj, $imm8, $imm5">; |
| |
| class LASX3R_XXX<bits<32> op> |
| : Fmt3R_XXX<op, (outs LASX256:$xd), (ins LASX256:$xj, LASX256:$xk), |
| "$xd, $xj, $xk">; |
| |
| class LASX3R_XXR<bits<32> op> |
| : Fmt3R_XXR<op, (outs LASX256:$xd), (ins LASX256:$xj, GPR:$rk), |
| "$xd, $xj, $rk">; |
| |
| class LASX4R_XXXX<bits<32> op> |
| : Fmt4R_XXXX<op, (outs LASX256:$xd), |
| (ins LASX256:$xj, LASX256:$xk, LASX256:$xa), |
| "$xd, $xj, $xk, $xa">; |
| |
| let Constraints = "$xd = $dst" in { |
| |
| class LASX2RI2_XXXI<bits<32> op, Operand ImmOpnd = uimm2> |
| : Fmt2RI2_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm2), |
| "$xd, $xj, $imm2">; |
| class LASX2RI3_XXXI<bits<32> op, Operand ImmOpnd = uimm3> |
| : Fmt2RI3_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm3), |
| "$xd, $xj, $imm3">; |
| |
| class LASX2RI2_XXRI<bits<32> op, Operand ImmOpnd = uimm2> |
| : Fmt2RI2_XRI<op, (outs LASX256:$dst), (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm2), |
| "$xd, $rj, $imm2">; |
| class LASX2RI3_XXRI<bits<32> op, Operand ImmOpnd = uimm3> |
| : Fmt2RI3_XRI<op, (outs LASX256:$dst), (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm3), |
| "$xd, $rj, $imm3">; |
| |
| class LASX2RI4_XXXI<bits<32> op, Operand ImmOpnd = uimm4> |
| : Fmt2RI4_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm4), |
| "$xd, $xj, $imm4">; |
| class LASX2RI5_XXXI<bits<32> op, Operand ImmOpnd = uimm5> |
| : Fmt2RI5_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm5), |
| "$xd, $xj, $imm5">; |
| class LASX2RI6_XXXI<bits<32> op, Operand ImmOpnd = uimm6> |
| : Fmt2RI6_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm6), |
| "$xd, $xj, $imm6">; |
| class LASX2RI7_XXXI<bits<32> op, Operand ImmOpnd = uimm7> |
| : Fmt2RI7_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm7), |
| "$xd, $xj, $imm7">; |
| |
| class LASX2RI8_XXXI<bits<32> op, Operand ImmOpnd = uimm8> |
| : Fmt2RI8_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm8), |
| "$xd, $xj, $imm8">; |
| |
| class LASX3R_XXXX<bits<32> op> |
| : Fmt3R_XXX<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, LASX256:$xk), |
| "$xd, $xj, $xk">; |
| |
| } // Constraints = "$xd = $dst" |
| |
| class LASX2RI9_Load<bits<32> op, Operand ImmOpnd = simm9_lsl3> |
| : Fmt2RI9_XRI<op, (outs LASX256:$xd), (ins GPR:$rj, ImmOpnd:$imm9), |
| "$xd, $rj, $imm9">; |
| class LASX2RI10_Load<bits<32> op, Operand ImmOpnd = simm10_lsl2> |
| : Fmt2RI10_XRI<op, (outs LASX256:$xd), (ins GPR:$rj, ImmOpnd:$imm10), |
| "$xd, $rj, $imm10">; |
| class LASX2RI11_Load<bits<32> op, Operand ImmOpnd = simm11_lsl1> |
| : Fmt2RI11_XRI<op, (outs LASX256:$xd), (ins GPR:$rj, ImmOpnd:$imm11), |
| "$xd, $rj, $imm11">; |
| class LASX2RI12_Load<bits<32> op, Operand ImmOpnd = simm12_addlike> |
| : Fmt2RI12_XRI<op, (outs LASX256:$xd), (ins GPR:$rj, ImmOpnd:$imm12), |
| "$xd, $rj, $imm12">; |
| class LASX2RI12_Store<bits<32> op, Operand ImmOpnd = simm12_addlike> |
| : Fmt2RI12_XRI<op, (outs), (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm12), |
| "$xd, $rj, $imm12">; |
| |
| class LASX3R_Load<bits<32> op> |
| : Fmt3R_XRR<op, (outs LASX256:$xd), (ins GPR:$rj, GPR:$rk), |
| "$xd, $rj, $rk">; |
| class LASX3R_Store<bits<32> op> |
| : Fmt3R_XRR<op, (outs), (ins LASX256:$xd, GPR:$rj, GPR:$rk), |
| "$xd, $rj, $rk">; |
| |
| //===----------------------------------------------------------------------===// |
| // Instructions |
| //===----------------------------------------------------------------------===// |
| |
| let hasSideEffects = 0, Predicates = [HasExtLASX] in { |
| |
| let mayLoad = 0, mayStore = 0 in { |
| def XVADD_B : LASX3R_XXX<0x740a0000>; |
| def XVADD_H : LASX3R_XXX<0x740a8000>; |
| def XVADD_W : LASX3R_XXX<0x740b0000>; |
| def XVADD_D : LASX3R_XXX<0x740b8000>; |
| def XVADD_Q : LASX3R_XXX<0x752d0000>; |
| |
| def XVSUB_B : LASX3R_XXX<0x740c0000>; |
| def XVSUB_H : LASX3R_XXX<0x740c8000>; |
| def XVSUB_W : LASX3R_XXX<0x740d0000>; |
| def XVSUB_D : LASX3R_XXX<0x740d8000>; |
| def XVSUB_Q : LASX3R_XXX<0x752d8000>; |
| |
| def XVADDI_BU : LASX2RI5_XXI<0x768a0000>; |
| def XVADDI_HU : LASX2RI5_XXI<0x768a8000>; |
| def XVADDI_WU : LASX2RI5_XXI<0x768b0000>; |
| def XVADDI_DU : LASX2RI5_XXI<0x768b8000>; |
| |
| def XVSUBI_BU : LASX2RI5_XXI<0x768c0000>; |
| def XVSUBI_HU : LASX2RI5_XXI<0x768c8000>; |
| def XVSUBI_WU : LASX2RI5_XXI<0x768d0000>; |
| def XVSUBI_DU : LASX2RI5_XXI<0x768d8000>; |
| |
| def XVNEG_B : LASX2R_XX<0x769c3000>; |
| def XVNEG_H : LASX2R_XX<0x769c3400>; |
| def XVNEG_W : LASX2R_XX<0x769c3800>; |
| def XVNEG_D : LASX2R_XX<0x769c3c00>; |
| |
| def XVSADD_B : LASX3R_XXX<0x74460000>; |
| def XVSADD_H : LASX3R_XXX<0x74468000>; |
| def XVSADD_W : LASX3R_XXX<0x74470000>; |
| def XVSADD_D : LASX3R_XXX<0x74478000>; |
| def XVSADD_BU : LASX3R_XXX<0x744a0000>; |
| def XVSADD_HU : LASX3R_XXX<0x744a8000>; |
| def XVSADD_WU : LASX3R_XXX<0x744b0000>; |
| def XVSADD_DU : LASX3R_XXX<0x744b8000>; |
| |
| def XVSSUB_B : LASX3R_XXX<0x74480000>; |
| def XVSSUB_H : LASX3R_XXX<0x74488000>; |
| def XVSSUB_W : LASX3R_XXX<0x74490000>; |
| def XVSSUB_D : LASX3R_XXX<0x74498000>; |
| def XVSSUB_BU : LASX3R_XXX<0x744c0000>; |
| def XVSSUB_HU : LASX3R_XXX<0x744c8000>; |
| def XVSSUB_WU : LASX3R_XXX<0x744d0000>; |
| def XVSSUB_DU : LASX3R_XXX<0x744d8000>; |
| |
| def XVHADDW_H_B : LASX3R_XXX<0x74540000>; |
| def XVHADDW_W_H : LASX3R_XXX<0x74548000>; |
| def XVHADDW_D_W : LASX3R_XXX<0x74550000>; |
| def XVHADDW_Q_D : LASX3R_XXX<0x74558000>; |
| def XVHADDW_HU_BU : LASX3R_XXX<0x74580000>; |
| def XVHADDW_WU_HU : LASX3R_XXX<0x74588000>; |
| def XVHADDW_DU_WU : LASX3R_XXX<0x74590000>; |
| def XVHADDW_QU_DU : LASX3R_XXX<0x74598000>; |
| |
| def XVHSUBW_H_B : LASX3R_XXX<0x74560000>; |
| def XVHSUBW_W_H : LASX3R_XXX<0x74568000>; |
| def XVHSUBW_D_W : LASX3R_XXX<0x74570000>; |
| def XVHSUBW_Q_D : LASX3R_XXX<0x74578000>; |
| def XVHSUBW_HU_BU : LASX3R_XXX<0x745a0000>; |
| def XVHSUBW_WU_HU : LASX3R_XXX<0x745a8000>; |
| def XVHSUBW_DU_WU : LASX3R_XXX<0x745b0000>; |
| def XVHSUBW_QU_DU : LASX3R_XXX<0x745b8000>; |
| |
| def XVADDWEV_H_B : LASX3R_XXX<0x741e0000>; |
| def XVADDWEV_W_H : LASX3R_XXX<0x741e8000>; |
| def XVADDWEV_D_W : LASX3R_XXX<0x741f0000>; |
| def XVADDWEV_Q_D : LASX3R_XXX<0x741f8000>; |
| def XVADDWOD_H_B : LASX3R_XXX<0x74220000>; |
| def XVADDWOD_W_H : LASX3R_XXX<0x74228000>; |
| def XVADDWOD_D_W : LASX3R_XXX<0x74230000>; |
| def XVADDWOD_Q_D : LASX3R_XXX<0x74238000>; |
| |
| def XVSUBWEV_H_B : LASX3R_XXX<0x74200000>; |
| def XVSUBWEV_W_H : LASX3R_XXX<0x74208000>; |
| def XVSUBWEV_D_W : LASX3R_XXX<0x74210000>; |
| def XVSUBWEV_Q_D : LASX3R_XXX<0x74218000>; |
| def XVSUBWOD_H_B : LASX3R_XXX<0x74240000>; |
| def XVSUBWOD_W_H : LASX3R_XXX<0x74248000>; |
| def XVSUBWOD_D_W : LASX3R_XXX<0x74250000>; |
| def XVSUBWOD_Q_D : LASX3R_XXX<0x74258000>; |
| |
| def XVADDWEV_H_BU : LASX3R_XXX<0x742e0000>; |
| def XVADDWEV_W_HU : LASX3R_XXX<0x742e8000>; |
| def XVADDWEV_D_WU : LASX3R_XXX<0x742f0000>; |
| def XVADDWEV_Q_DU : LASX3R_XXX<0x742f8000>; |
| def XVADDWOD_H_BU : LASX3R_XXX<0x74320000>; |
| def XVADDWOD_W_HU : LASX3R_XXX<0x74328000>; |
| def XVADDWOD_D_WU : LASX3R_XXX<0x74330000>; |
| def XVADDWOD_Q_DU : LASX3R_XXX<0x74338000>; |
| |
| def XVSUBWEV_H_BU : LASX3R_XXX<0x74300000>; |
| def XVSUBWEV_W_HU : LASX3R_XXX<0x74308000>; |
| def XVSUBWEV_D_WU : LASX3R_XXX<0x74310000>; |
| def XVSUBWEV_Q_DU : LASX3R_XXX<0x74318000>; |
| def XVSUBWOD_H_BU : LASX3R_XXX<0x74340000>; |
| def XVSUBWOD_W_HU : LASX3R_XXX<0x74348000>; |
| def XVSUBWOD_D_WU : LASX3R_XXX<0x74350000>; |
| def XVSUBWOD_Q_DU : LASX3R_XXX<0x74358000>; |
| |
| def XVADDWEV_H_BU_B : LASX3R_XXX<0x743e0000>; |
| def XVADDWEV_W_HU_H : LASX3R_XXX<0x743e8000>; |
| def XVADDWEV_D_WU_W : LASX3R_XXX<0x743f0000>; |
| def XVADDWEV_Q_DU_D : LASX3R_XXX<0x743f8000>; |
| def XVADDWOD_H_BU_B : LASX3R_XXX<0x74400000>; |
| def XVADDWOD_W_HU_H : LASX3R_XXX<0x74408000>; |
| def XVADDWOD_D_WU_W : LASX3R_XXX<0x74410000>; |
| def XVADDWOD_Q_DU_D : LASX3R_XXX<0x74418000>; |
| |
| def XVAVG_B : LASX3R_XXX<0x74640000>; |
| def XVAVG_H : LASX3R_XXX<0x74648000>; |
| def XVAVG_W : LASX3R_XXX<0x74650000>; |
| def XVAVG_D : LASX3R_XXX<0x74658000>; |
| def XVAVG_BU : LASX3R_XXX<0x74660000>; |
| def XVAVG_HU : LASX3R_XXX<0x74668000>; |
| def XVAVG_WU : LASX3R_XXX<0x74670000>; |
| def XVAVG_DU : LASX3R_XXX<0x74678000>; |
| def XVAVGR_B : LASX3R_XXX<0x74680000>; |
| def XVAVGR_H : LASX3R_XXX<0x74688000>; |
| def XVAVGR_W : LASX3R_XXX<0x74690000>; |
| def XVAVGR_D : LASX3R_XXX<0x74698000>; |
| def XVAVGR_BU : LASX3R_XXX<0x746a0000>; |
| def XVAVGR_HU : LASX3R_XXX<0x746a8000>; |
| def XVAVGR_WU : LASX3R_XXX<0x746b0000>; |
| def XVAVGR_DU : LASX3R_XXX<0x746b8000>; |
| |
| def XVABSD_B : LASX3R_XXX<0x74600000>; |
| def XVABSD_H : LASX3R_XXX<0x74608000>; |
| def XVABSD_W : LASX3R_XXX<0x74610000>; |
| def XVABSD_D : LASX3R_XXX<0x74618000>; |
| def XVABSD_BU : LASX3R_XXX<0x74620000>; |
| def XVABSD_HU : LASX3R_XXX<0x74628000>; |
| def XVABSD_WU : LASX3R_XXX<0x74630000>; |
| def XVABSD_DU : LASX3R_XXX<0x74638000>; |
| |
| def XVADDA_B : LASX3R_XXX<0x745c0000>; |
| def XVADDA_H : LASX3R_XXX<0x745c8000>; |
| def XVADDA_W : LASX3R_XXX<0x745d0000>; |
| def XVADDA_D : LASX3R_XXX<0x745d8000>; |
| |
| def XVMAX_B : LASX3R_XXX<0x74700000>; |
| def XVMAX_H : LASX3R_XXX<0x74708000>; |
| def XVMAX_W : LASX3R_XXX<0x74710000>; |
| def XVMAX_D : LASX3R_XXX<0x74718000>; |
| def XVMAXI_B : LASX2RI5_XXI<0x76900000, simm5>; |
| def XVMAXI_H : LASX2RI5_XXI<0x76908000, simm5>; |
| def XVMAXI_W : LASX2RI5_XXI<0x76910000, simm5>; |
| def XVMAXI_D : LASX2RI5_XXI<0x76918000, simm5>; |
| def XVMAX_BU : LASX3R_XXX<0x74740000>; |
| def XVMAX_HU : LASX3R_XXX<0x74748000>; |
| def XVMAX_WU : LASX3R_XXX<0x74750000>; |
| def XVMAX_DU : LASX3R_XXX<0x74758000>; |
| def XVMAXI_BU : LASX2RI5_XXI<0x76940000>; |
| def XVMAXI_HU : LASX2RI5_XXI<0x76948000>; |
| def XVMAXI_WU : LASX2RI5_XXI<0x76950000>; |
| def XVMAXI_DU : LASX2RI5_XXI<0x76958000>; |
| |
| def XVMIN_B : LASX3R_XXX<0x74720000>; |
| def XVMIN_H : LASX3R_XXX<0x74728000>; |
| def XVMIN_W : LASX3R_XXX<0x74730000>; |
| def XVMIN_D : LASX3R_XXX<0x74738000>; |
| def XVMINI_B : LASX2RI5_XXI<0x76920000, simm5>; |
| def XVMINI_H : LASX2RI5_XXI<0x76928000, simm5>; |
| def XVMINI_W : LASX2RI5_XXI<0x76930000, simm5>; |
| def XVMINI_D : LASX2RI5_XXI<0x76938000, simm5>; |
| def XVMIN_BU : LASX3R_XXX<0x74760000>; |
| def XVMIN_HU : LASX3R_XXX<0x74768000>; |
| def XVMIN_WU : LASX3R_XXX<0x74770000>; |
| def XVMIN_DU : LASX3R_XXX<0x74778000>; |
| def XVMINI_BU : LASX2RI5_XXI<0x76960000>; |
| def XVMINI_HU : LASX2RI5_XXI<0x76968000>; |
| def XVMINI_WU : LASX2RI5_XXI<0x76970000>; |
| def XVMINI_DU : LASX2RI5_XXI<0x76978000>; |
| |
| def XVMUL_B : LASX3R_XXX<0x74840000>; |
| def XVMUL_H : LASX3R_XXX<0x74848000>; |
| def XVMUL_W : LASX3R_XXX<0x74850000>; |
| def XVMUL_D : LASX3R_XXX<0x74858000>; |
| |
| def XVMUH_B : LASX3R_XXX<0x74860000>; |
| def XVMUH_H : LASX3R_XXX<0x74868000>; |
| def XVMUH_W : LASX3R_XXX<0x74870000>; |
| def XVMUH_D : LASX3R_XXX<0x74878000>; |
| def XVMUH_BU : LASX3R_XXX<0x74880000>; |
| def XVMUH_HU : LASX3R_XXX<0x74888000>; |
| def XVMUH_WU : LASX3R_XXX<0x74890000>; |
| def XVMUH_DU : LASX3R_XXX<0x74898000>; |
| |
| def XVMULWEV_H_B : LASX3R_XXX<0x74900000>; |
| def XVMULWEV_W_H : LASX3R_XXX<0x74908000>; |
| def XVMULWEV_D_W : LASX3R_XXX<0x74910000>; |
| def XVMULWEV_Q_D : LASX3R_XXX<0x74918000>; |
| def XVMULWOD_H_B : LASX3R_XXX<0x74920000>; |
| def XVMULWOD_W_H : LASX3R_XXX<0x74928000>; |
| def XVMULWOD_D_W : LASX3R_XXX<0x74930000>; |
| def XVMULWOD_Q_D : LASX3R_XXX<0x74938000>; |
| def XVMULWEV_H_BU : LASX3R_XXX<0x74980000>; |
| def XVMULWEV_W_HU : LASX3R_XXX<0x74988000>; |
| def XVMULWEV_D_WU : LASX3R_XXX<0x74990000>; |
| def XVMULWEV_Q_DU : LASX3R_XXX<0x74998000>; |
| def XVMULWOD_H_BU : LASX3R_XXX<0x749a0000>; |
| def XVMULWOD_W_HU : LASX3R_XXX<0x749a8000>; |
| def XVMULWOD_D_WU : LASX3R_XXX<0x749b0000>; |
| def XVMULWOD_Q_DU : LASX3R_XXX<0x749b8000>; |
| def XVMULWEV_H_BU_B : LASX3R_XXX<0x74a00000>; |
| def XVMULWEV_W_HU_H : LASX3R_XXX<0x74a08000>; |
| def XVMULWEV_D_WU_W : LASX3R_XXX<0x74a10000>; |
| def XVMULWEV_Q_DU_D : LASX3R_XXX<0x74a18000>; |
| def XVMULWOD_H_BU_B : LASX3R_XXX<0x74a20000>; |
| def XVMULWOD_W_HU_H : LASX3R_XXX<0x74a28000>; |
| def XVMULWOD_D_WU_W : LASX3R_XXX<0x74a30000>; |
| def XVMULWOD_Q_DU_D : LASX3R_XXX<0x74a38000>; |
| |
| def XVMADD_B : LASX3R_XXXX<0x74a80000>; |
| def XVMADD_H : LASX3R_XXXX<0x74a88000>; |
| def XVMADD_W : LASX3R_XXXX<0x74a90000>; |
| def XVMADD_D : LASX3R_XXXX<0x74a98000>; |
| |
| def XVMSUB_B : LASX3R_XXXX<0x74aa0000>; |
| def XVMSUB_H : LASX3R_XXXX<0x74aa8000>; |
| def XVMSUB_W : LASX3R_XXXX<0x74ab0000>; |
| def XVMSUB_D : LASX3R_XXXX<0x74ab8000>; |
| |
| def XVMADDWEV_H_B : LASX3R_XXXX<0x74ac0000>; |
| def XVMADDWEV_W_H : LASX3R_XXXX<0x74ac8000>; |
| def XVMADDWEV_D_W : LASX3R_XXXX<0x74ad0000>; |
| def XVMADDWEV_Q_D : LASX3R_XXXX<0x74ad8000>; |
| def XVMADDWOD_H_B : LASX3R_XXXX<0x74ae0000>; |
| def XVMADDWOD_W_H : LASX3R_XXXX<0x74ae8000>; |
| def XVMADDWOD_D_W : LASX3R_XXXX<0x74af0000>; |
| def XVMADDWOD_Q_D : LASX3R_XXXX<0x74af8000>; |
| def XVMADDWEV_H_BU : LASX3R_XXXX<0x74b40000>; |
| def XVMADDWEV_W_HU : LASX3R_XXXX<0x74b48000>; |
| def XVMADDWEV_D_WU : LASX3R_XXXX<0x74b50000>; |
| def XVMADDWEV_Q_DU : LASX3R_XXXX<0x74b58000>; |
| def XVMADDWOD_H_BU : LASX3R_XXXX<0x74b60000>; |
| def XVMADDWOD_W_HU : LASX3R_XXXX<0x74b68000>; |
| def XVMADDWOD_D_WU : LASX3R_XXXX<0x74b70000>; |
| def XVMADDWOD_Q_DU : LASX3R_XXXX<0x74b78000>; |
| def XVMADDWEV_H_BU_B : LASX3R_XXXX<0x74bc0000>; |
| def XVMADDWEV_W_HU_H : LASX3R_XXXX<0x74bc8000>; |
| def XVMADDWEV_D_WU_W : LASX3R_XXXX<0x74bd0000>; |
| def XVMADDWEV_Q_DU_D : LASX3R_XXXX<0x74bd8000>; |
| def XVMADDWOD_H_BU_B : LASX3R_XXXX<0x74be0000>; |
| def XVMADDWOD_W_HU_H : LASX3R_XXXX<0x74be8000>; |
| def XVMADDWOD_D_WU_W : LASX3R_XXXX<0x74bf0000>; |
| def XVMADDWOD_Q_DU_D : LASX3R_XXXX<0x74bf8000>; |
| |
| def XVDIV_B : LASX3R_XXX<0x74e00000>; |
| def XVDIV_H : LASX3R_XXX<0x74e08000>; |
| def XVDIV_W : LASX3R_XXX<0x74e10000>; |
| def XVDIV_D : LASX3R_XXX<0x74e18000>; |
| def XVDIV_BU : LASX3R_XXX<0x74e40000>; |
| def XVDIV_HU : LASX3R_XXX<0x74e48000>; |
| def XVDIV_WU : LASX3R_XXX<0x74e50000>; |
| def XVDIV_DU : LASX3R_XXX<0x74e58000>; |
| |
| def XVMOD_B : LASX3R_XXX<0x74e20000>; |
| def XVMOD_H : LASX3R_XXX<0x74e28000>; |
| def XVMOD_W : LASX3R_XXX<0x74e30000>; |
| def XVMOD_D : LASX3R_XXX<0x74e38000>; |
| def XVMOD_BU : LASX3R_XXX<0x74e60000>; |
| def XVMOD_HU : LASX3R_XXX<0x74e68000>; |
| def XVMOD_WU : LASX3R_XXX<0x74e70000>; |
| def XVMOD_DU : LASX3R_XXX<0x74e78000>; |
| |
| def XVSAT_B : LASX2RI3_XXI<0x77242000>; |
| def XVSAT_H : LASX2RI4_XXI<0x77244000>; |
| def XVSAT_W : LASX2RI5_XXI<0x77248000>; |
| def XVSAT_D : LASX2RI6_XXI<0x77250000>; |
| def XVSAT_BU : LASX2RI3_XXI<0x77282000>; |
| def XVSAT_HU : LASX2RI4_XXI<0x77284000>; |
| def XVSAT_WU : LASX2RI5_XXI<0x77288000>; |
| def XVSAT_DU : LASX2RI6_XXI<0x77290000>; |
| |
| def XVEXTH_H_B : LASX2R_XX<0x769ee000>; |
| def XVEXTH_W_H : LASX2R_XX<0x769ee400>; |
| def XVEXTH_D_W : LASX2R_XX<0x769ee800>; |
| def XVEXTH_Q_D : LASX2R_XX<0x769eec00>; |
| def XVEXTH_HU_BU : LASX2R_XX<0x769ef000>; |
| def XVEXTH_WU_HU : LASX2R_XX<0x769ef400>; |
| def XVEXTH_DU_WU : LASX2R_XX<0x769ef800>; |
| def XVEXTH_QU_DU : LASX2R_XX<0x769efc00>; |
| |
| def VEXT2XV_H_B : LASX2R_XX<0x769f1000>; |
| def VEXT2XV_W_B : LASX2R_XX<0x769f1400>; |
| def VEXT2XV_D_B : LASX2R_XX<0x769f1800>; |
| def VEXT2XV_W_H : LASX2R_XX<0x769f1c00>; |
| def VEXT2XV_D_H : LASX2R_XX<0x769f2000>; |
| def VEXT2XV_D_W : LASX2R_XX<0x769f2400>; |
| def VEXT2XV_HU_BU : LASX2R_XX<0x769f2800>; |
| def VEXT2XV_WU_BU : LASX2R_XX<0x769f2c00>; |
| def VEXT2XV_DU_BU : LASX2R_XX<0x769f3000>; |
| def VEXT2XV_WU_HU : LASX2R_XX<0x769f3400>; |
| def VEXT2XV_DU_HU : LASX2R_XX<0x769f3800>; |
| def VEXT2XV_DU_WU : LASX2R_XX<0x769f3c00>; |
| |
| def XVHSELI_D : LASX2RI5_XXI<0x769f8000>; |
| |
| def XVSIGNCOV_B : LASX3R_XXX<0x752e0000>; |
| def XVSIGNCOV_H : LASX3R_XXX<0x752e8000>; |
| def XVSIGNCOV_W : LASX3R_XXX<0x752f0000>; |
| def XVSIGNCOV_D : LASX3R_XXX<0x752f8000>; |
| |
| def XVMSKLTZ_B : LASX2R_XX<0x769c4000>; |
| def XVMSKLTZ_H : LASX2R_XX<0x769c4400>; |
| def XVMSKLTZ_W : LASX2R_XX<0x769c4800>; |
| def XVMSKLTZ_D : LASX2R_XX<0x769c4c00>; |
| |
| def XVMSKGEZ_B : LASX2R_XX<0x769c5000>; |
| |
| def XVMSKNZ_B : LASX2R_XX<0x769c6000>; |
| |
| def XVLDI : LASX1RI13_XI<0x77e00000>; |
| |
| def XVAND_V : LASX3R_XXX<0x75260000>; |
| def XVOR_V : LASX3R_XXX<0x75268000>; |
| def XVXOR_V : LASX3R_XXX<0x75270000>; |
| def XVNOR_V : LASX3R_XXX<0x75278000>; |
| def XVANDN_V : LASX3R_XXX<0x75280000>; |
| def XVORN_V : LASX3R_XXX<0x75288000>; |
| |
| def XVANDI_B : LASX2RI8_XXI<0x77d00000>; |
| def XVORI_B : LASX2RI8_XXI<0x77d40000>; |
| def XVXORI_B : LASX2RI8_XXI<0x77d80000>; |
| def XVNORI_B : LASX2RI8_XXI<0x77dc0000>; |
| |
| def XVSLL_B : LASX3R_XXX<0x74e80000>; |
| def XVSLL_H : LASX3R_XXX<0x74e88000>; |
| def XVSLL_W : LASX3R_XXX<0x74e90000>; |
| def XVSLL_D : LASX3R_XXX<0x74e98000>; |
| def XVSLLI_B : LASX2RI3_XXI<0x772c2000>; |
| def XVSLLI_H : LASX2RI4_XXI<0x772c4000>; |
| def XVSLLI_W : LASX2RI5_XXI<0x772c8000>; |
| def XVSLLI_D : LASX2RI6_XXI<0x772d0000>; |
| |
| def XVSRL_B : LASX3R_XXX<0x74ea0000>; |
| def XVSRL_H : LASX3R_XXX<0x74ea8000>; |
| def XVSRL_W : LASX3R_XXX<0x74eb0000>; |
| def XVSRL_D : LASX3R_XXX<0x74eb8000>; |
| def XVSRLI_B : LASX2RI3_XXI<0x77302000>; |
| def XVSRLI_H : LASX2RI4_XXI<0x77304000>; |
| def XVSRLI_W : LASX2RI5_XXI<0x77308000>; |
| def XVSRLI_D : LASX2RI6_XXI<0x77310000>; |
| |
| def XVSRA_B : LASX3R_XXX<0x74ec0000>; |
| def XVSRA_H : LASX3R_XXX<0x74ec8000>; |
| def XVSRA_W : LASX3R_XXX<0x74ed0000>; |
| def XVSRA_D : LASX3R_XXX<0x74ed8000>; |
| def XVSRAI_B : LASX2RI3_XXI<0x77342000>; |
| def XVSRAI_H : LASX2RI4_XXI<0x77344000>; |
| def XVSRAI_W : LASX2RI5_XXI<0x77348000>; |
| def XVSRAI_D : LASX2RI6_XXI<0x77350000>; |
| |
| def XVROTR_B : LASX3R_XXX<0x74ee0000>; |
| def XVROTR_H : LASX3R_XXX<0x74ee8000>; |
| def XVROTR_W : LASX3R_XXX<0x74ef0000>; |
| def XVROTR_D : LASX3R_XXX<0x74ef8000>; |
| def XVROTRI_B : LASX2RI3_XXI<0x76a02000>; |
| def XVROTRI_H : LASX2RI4_XXI<0x76a04000>; |
| def XVROTRI_W : LASX2RI5_XXI<0x76a08000>; |
| def XVROTRI_D : LASX2RI6_XXI<0x76a10000>; |
| |
| def XVSLLWIL_H_B : LASX2RI3_XXI<0x77082000>; |
| def XVSLLWIL_W_H : LASX2RI4_XXI<0x77084000>; |
| def XVSLLWIL_D_W : LASX2RI5_XXI<0x77088000>; |
| def XVEXTL_Q_D : LASX2R_XX<0x77090000>; |
| def XVSLLWIL_HU_BU : LASX2RI3_XXI<0x770c2000>; |
| def XVSLLWIL_WU_HU : LASX2RI4_XXI<0x770c4000>; |
| def XVSLLWIL_DU_WU : LASX2RI5_XXI<0x770c8000>; |
| def XVEXTL_QU_DU : LASX2R_XX<0x770d0000>; |
| |
| def XVSRLR_B : LASX3R_XXX<0x74f00000>; |
| def XVSRLR_H : LASX3R_XXX<0x74f08000>; |
| def XVSRLR_W : LASX3R_XXX<0x74f10000>; |
| def XVSRLR_D : LASX3R_XXX<0x74f18000>; |
| def XVSRLRI_B : LASX2RI3_XXI<0x76a42000>; |
| def XVSRLRI_H : LASX2RI4_XXI<0x76a44000>; |
| def XVSRLRI_W : LASX2RI5_XXI<0x76a48000>; |
| def XVSRLRI_D : LASX2RI6_XXI<0x76a50000>; |
| |
| def XVSRAR_B : LASX3R_XXX<0x74f20000>; |
| def XVSRAR_H : LASX3R_XXX<0x74f28000>; |
| def XVSRAR_W : LASX3R_XXX<0x74f30000>; |
| def XVSRAR_D : LASX3R_XXX<0x74f38000>; |
| def XVSRARI_B : LASX2RI3_XXI<0x76a82000>; |
| def XVSRARI_H : LASX2RI4_XXI<0x76a84000>; |
| def XVSRARI_W : LASX2RI5_XXI<0x76a88000>; |
| def XVSRARI_D : LASX2RI6_XXI<0x76a90000>; |
| |
| def XVSRLN_B_H : LASX3R_XXX<0x74f48000>; |
| def XVSRLN_H_W : LASX3R_XXX<0x74f50000>; |
| def XVSRLN_W_D : LASX3R_XXX<0x74f58000>; |
| def XVSRAN_B_H : LASX3R_XXX<0x74f68000>; |
| def XVSRAN_H_W : LASX3R_XXX<0x74f70000>; |
| def XVSRAN_W_D : LASX3R_XXX<0x74f78000>; |
| |
| def XVSRLNI_B_H : LASX2RI4_XXXI<0x77404000>; |
| def XVSRLNI_H_W : LASX2RI5_XXXI<0x77408000>; |
| def XVSRLNI_W_D : LASX2RI6_XXXI<0x77410000>; |
| def XVSRLNI_D_Q : LASX2RI7_XXXI<0x77420000>; |
| def XVSRANI_B_H : LASX2RI4_XXXI<0x77584000>; |
| def XVSRANI_H_W : LASX2RI5_XXXI<0x77588000>; |
| def XVSRANI_W_D : LASX2RI6_XXXI<0x77590000>; |
| def XVSRANI_D_Q : LASX2RI7_XXXI<0x775a0000>; |
| |
| def XVSRLRN_B_H : LASX3R_XXX<0x74f88000>; |
| def XVSRLRN_H_W : LASX3R_XXX<0x74f90000>; |
| def XVSRLRN_W_D : LASX3R_XXX<0x74f98000>; |
| def XVSRARN_B_H : LASX3R_XXX<0x74fa8000>; |
| def XVSRARN_H_W : LASX3R_XXX<0x74fb0000>; |
| def XVSRARN_W_D : LASX3R_XXX<0x74fb8000>; |
| |
| def XVSRLRNI_B_H : LASX2RI4_XXXI<0x77444000>; |
| def XVSRLRNI_H_W : LASX2RI5_XXXI<0x77448000>; |
| def XVSRLRNI_W_D : LASX2RI6_XXXI<0x77450000>; |
| def XVSRLRNI_D_Q : LASX2RI7_XXXI<0x77460000>; |
| def XVSRARNI_B_H : LASX2RI4_XXXI<0x775c4000>; |
| def XVSRARNI_H_W : LASX2RI5_XXXI<0x775c8000>; |
| def XVSRARNI_W_D : LASX2RI6_XXXI<0x775d0000>; |
| def XVSRARNI_D_Q : LASX2RI7_XXXI<0x775e0000>; |
| |
| def XVSSRLN_B_H : LASX3R_XXX<0x74fc8000>; |
| def XVSSRLN_H_W : LASX3R_XXX<0x74fd0000>; |
| def XVSSRLN_W_D : LASX3R_XXX<0x74fd8000>; |
| def XVSSRAN_B_H : LASX3R_XXX<0x74fe8000>; |
| def XVSSRAN_H_W : LASX3R_XXX<0x74ff0000>; |
| def XVSSRAN_W_D : LASX3R_XXX<0x74ff8000>; |
| def XVSSRLN_BU_H : LASX3R_XXX<0x75048000>; |
| def XVSSRLN_HU_W : LASX3R_XXX<0x75050000>; |
| def XVSSRLN_WU_D : LASX3R_XXX<0x75058000>; |
| def XVSSRAN_BU_H : LASX3R_XXX<0x75068000>; |
| def XVSSRAN_HU_W : LASX3R_XXX<0x75070000>; |
| def XVSSRAN_WU_D : LASX3R_XXX<0x75078000>; |
| |
| def XVSSRLNI_B_H : LASX2RI4_XXXI<0x77484000>; |
| def XVSSRLNI_H_W : LASX2RI5_XXXI<0x77488000>; |
| def XVSSRLNI_W_D : LASX2RI6_XXXI<0x77490000>; |
| def XVSSRLNI_D_Q : LASX2RI7_XXXI<0x774a0000>; |
| def XVSSRANI_B_H : LASX2RI4_XXXI<0x77604000>; |
| def XVSSRANI_H_W : LASX2RI5_XXXI<0x77608000>; |
| def XVSSRANI_W_D : LASX2RI6_XXXI<0x77610000>; |
| def XVSSRANI_D_Q : LASX2RI7_XXXI<0x77620000>; |
| def XVSSRLNI_BU_H : LASX2RI4_XXXI<0x774c4000>; |
| def XVSSRLNI_HU_W : LASX2RI5_XXXI<0x774c8000>; |
| def XVSSRLNI_WU_D : LASX2RI6_XXXI<0x774d0000>; |
| def XVSSRLNI_DU_Q : LASX2RI7_XXXI<0x774e0000>; |
| def XVSSRANI_BU_H : LASX2RI4_XXXI<0x77644000>; |
| def XVSSRANI_HU_W : LASX2RI5_XXXI<0x77648000>; |
| def XVSSRANI_WU_D : LASX2RI6_XXXI<0x77650000>; |
| def XVSSRANI_DU_Q : LASX2RI7_XXXI<0x77660000>; |
| |
| def XVSSRLRN_B_H : LASX3R_XXX<0x75008000>; |
| def XVSSRLRN_H_W : LASX3R_XXX<0x75010000>; |
| def XVSSRLRN_W_D : LASX3R_XXX<0x75018000>; |
| def XVSSRARN_B_H : LASX3R_XXX<0x75028000>; |
| def XVSSRARN_H_W : LASX3R_XXX<0x75030000>; |
| def XVSSRARN_W_D : LASX3R_XXX<0x75038000>; |
| def XVSSRLRN_BU_H : LASX3R_XXX<0x75088000>; |
| def XVSSRLRN_HU_W : LASX3R_XXX<0x75090000>; |
| def XVSSRLRN_WU_D : LASX3R_XXX<0x75098000>; |
| def XVSSRARN_BU_H : LASX3R_XXX<0x750a8000>; |
| def XVSSRARN_HU_W : LASX3R_XXX<0x750b0000>; |
| def XVSSRARN_WU_D : LASX3R_XXX<0x750b8000>; |
| |
| def XVSSRLRNI_B_H : LASX2RI4_XXXI<0x77504000>; |
| def XVSSRLRNI_H_W : LASX2RI5_XXXI<0x77508000>; |
| def XVSSRLRNI_W_D : LASX2RI6_XXXI<0x77510000>; |
| def XVSSRLRNI_D_Q : LASX2RI7_XXXI<0x77520000>; |
| def XVSSRARNI_B_H : LASX2RI4_XXXI<0x77684000>; |
| def XVSSRARNI_H_W : LASX2RI5_XXXI<0x77688000>; |
| def XVSSRARNI_W_D : LASX2RI6_XXXI<0x77690000>; |
| def XVSSRARNI_D_Q : LASX2RI7_XXXI<0x776a0000>; |
| def XVSSRLRNI_BU_H : LASX2RI4_XXXI<0x77544000>; |
| def XVSSRLRNI_HU_W : LASX2RI5_XXXI<0x77548000>; |
| def XVSSRLRNI_WU_D : LASX2RI6_XXXI<0x77550000>; |
| def XVSSRLRNI_DU_Q : LASX2RI7_XXXI<0x77560000>; |
| def XVSSRARNI_BU_H : LASX2RI4_XXXI<0x776c4000>; |
| def XVSSRARNI_HU_W : LASX2RI5_XXXI<0x776c8000>; |
| def XVSSRARNI_WU_D : LASX2RI6_XXXI<0x776d0000>; |
| def XVSSRARNI_DU_Q : LASX2RI7_XXXI<0x776e0000>; |
| |
| def XVCLO_B : LASX2R_XX<0x769c0000>; |
| def XVCLO_H : LASX2R_XX<0x769c0400>; |
| def XVCLO_W : LASX2R_XX<0x769c0800>; |
| def XVCLO_D : LASX2R_XX<0x769c0c00>; |
| def XVCLZ_B : LASX2R_XX<0x769c1000>; |
| def XVCLZ_H : LASX2R_XX<0x769c1400>; |
| def XVCLZ_W : LASX2R_XX<0x769c1800>; |
| def XVCLZ_D : LASX2R_XX<0x769c1c00>; |
| |
| def XVPCNT_B : LASX2R_XX<0x769c2000>; |
| def XVPCNT_H : LASX2R_XX<0x769c2400>; |
| def XVPCNT_W : LASX2R_XX<0x769c2800>; |
| def XVPCNT_D : LASX2R_XX<0x769c2c00>; |
| |
| def XVBITCLR_B : LASX3R_XXX<0x750c0000>; |
| def XVBITCLR_H : LASX3R_XXX<0x750c8000>; |
| def XVBITCLR_W : LASX3R_XXX<0x750d0000>; |
| def XVBITCLR_D : LASX3R_XXX<0x750d8000>; |
| def XVBITCLRI_B : LASX2RI3_XXI<0x77102000>; |
| def XVBITCLRI_H : LASX2RI4_XXI<0x77104000>; |
| def XVBITCLRI_W : LASX2RI5_XXI<0x77108000>; |
| def XVBITCLRI_D : LASX2RI6_XXI<0x77110000>; |
| |
| def XVBITSET_B : LASX3R_XXX<0x750e0000>; |
| def XVBITSET_H : LASX3R_XXX<0x750e8000>; |
| def XVBITSET_W : LASX3R_XXX<0x750f0000>; |
| def XVBITSET_D : LASX3R_XXX<0x750f8000>; |
| def XVBITSETI_B : LASX2RI3_XXI<0x77142000>; |
| def XVBITSETI_H : LASX2RI4_XXI<0x77144000>; |
| def XVBITSETI_W : LASX2RI5_XXI<0x77148000>; |
| def XVBITSETI_D : LASX2RI6_XXI<0x77150000>; |
| |
| def XVBITREV_B : LASX3R_XXX<0x75100000>; |
| def XVBITREV_H : LASX3R_XXX<0x75108000>; |
| def XVBITREV_W : LASX3R_XXX<0x75110000>; |
| def XVBITREV_D : LASX3R_XXX<0x75118000>; |
| def XVBITREVI_B : LASX2RI3_XXI<0x77182000>; |
| def XVBITREVI_H : LASX2RI4_XXI<0x77184000>; |
| def XVBITREVI_W : LASX2RI5_XXI<0x77188000>; |
| def XVBITREVI_D : LASX2RI6_XXI<0x77190000>; |
| |
| def XVFRSTP_B : LASX3R_XXXX<0x752b0000>; |
| def XVFRSTP_H : LASX3R_XXXX<0x752b8000>; |
| def XVFRSTPI_B : LASX2RI5_XXXI<0x769a0000>; |
| def XVFRSTPI_H : LASX2RI5_XXXI<0x769a8000>; |
| |
// FP add/sub/mul/div on f32/f64 lanes; selected via PatXrXrF below.
def XVFADD_S : LASX3R_XXX<0x75308000>;
def XVFADD_D : LASX3R_XXX<0x75310000>;
def XVFSUB_S : LASX3R_XXX<0x75328000>;
def XVFSUB_D : LASX3R_XXX<0x75330000>;
def XVFMUL_S : LASX3R_XXX<0x75388000>;
def XVFMUL_D : LASX3R_XXX<0x75390000>;
def XVFDIV_S : LASX3R_XXX<0x753a8000>;
def XVFDIV_D : LASX3R_XXX<0x753b0000>;

// Fused multiply-add family (4-register format); selected from (fma ...)
// and its fneg combinations by the patterns further down.
def XVFMADD_S : LASX4R_XXXX<0x0a100000>;
def XVFMADD_D : LASX4R_XXXX<0x0a200000>;
def XVFMSUB_S : LASX4R_XXXX<0x0a500000>;
def XVFMSUB_D : LASX4R_XXXX<0x0a600000>;
def XVFNMADD_S : LASX4R_XXXX<0x0a900000>;
def XVFNMADD_D : LASX4R_XXXX<0x0aa00000>;
def XVFNMSUB_S : LASX4R_XXXX<0x0ad00000>;
def XVFNMSUB_D : LASX4R_XXXX<0x0ae00000>;

def XVFMAX_S : LASX3R_XXX<0x753c8000>;
def XVFMAX_D : LASX3R_XXX<0x753d0000>;
def XVFMIN_S : LASX3R_XXX<0x753e8000>;
def XVFMIN_D : LASX3R_XXX<0x753f0000>;

// NOTE(review): the *A forms presumably compare by magnitude (per the
// mnemonic) -- confirm against the ISA manual; no ISel patterns here.
def XVFMAXA_S : LASX3R_XXX<0x75408000>;
def XVFMAXA_D : LASX3R_XXX<0x75410000>;
def XVFMINA_S : LASX3R_XXX<0x75428000>;
def XVFMINA_D : LASX3R_XXX<0x75430000>;

def XVFLOGB_S : LASX2R_XX<0x769cc400>;
def XVFLOGB_D : LASX2R_XX<0x769cc800>;

def XVFCLASS_S : LASX2R_XX<0x769cd400>;
def XVFCLASS_D : LASX2R_XX<0x769cd800>;

// xvfsqrt is selected via PatXrF<fsqrt>; xvfrecip/xvfrsqrt are matched from
// (fdiv 1.0, x) and (fdiv 1.0, (fsqrt x)). The *E forms are presumably the
// estimate variants (no patterns in this file).
def XVFSQRT_S : LASX2R_XX<0x769ce400>;
def XVFSQRT_D : LASX2R_XX<0x769ce800>;
def XVFRECIP_S : LASX2R_XX<0x769cf400>;
def XVFRECIP_D : LASX2R_XX<0x769cf800>;
def XVFRSQRT_S : LASX2R_XX<0x769d0400>;
def XVFRSQRT_D : LASX2R_XX<0x769d0800>;
def XVFRECIPE_S : LASX2R_XX<0x769d1400>;
def XVFRECIPE_D : LASX2R_XX<0x769d1800>;
def XVFRSQRTE_S : LASX2R_XX<0x769d2400>;
def XVFRSQRTE_D : LASX2R_XX<0x769d2800>;

// FP precision conversions; L/H operate on the low/high half of the source
// (no ISel patterns in this file).
def XVFCVTL_S_H : LASX2R_XX<0x769de800>;
def XVFCVTH_S_H : LASX2R_XX<0x769dec00>;
def XVFCVTL_D_S : LASX2R_XX<0x769df000>;
def XVFCVTH_D_S : LASX2R_XX<0x769df400>;
def XVFCVT_H_S : LASX3R_XXX<0x75460000>;
def XVFCVT_S_D : LASX3R_XXX<0x75468000>;
| |
// xvfrint*: round to integral-valued FP. Suffixes name the rounding mode:
// RNE = nearest-even, RZ = toward zero, RP = up, RM = down, none = current.
def XVFRINTRNE_S : LASX2R_XX<0x769d7400>;
def XVFRINTRNE_D : LASX2R_XX<0x769d7800>;
def XVFRINTRZ_S : LASX2R_XX<0x769d6400>;
def XVFRINTRZ_D : LASX2R_XX<0x769d6800>;
def XVFRINTRP_S : LASX2R_XX<0x769d5400>;
def XVFRINTRP_D : LASX2R_XX<0x769d5800>;
def XVFRINTRM_S : LASX2R_XX<0x769d4400>;
def XVFRINTRM_D : LASX2R_XX<0x769d4800>;
def XVFRINT_S : LASX2R_XX<0x769d3400>;
def XVFRINT_D : LASX2R_XX<0x769d3800>;

// xvftint*: FP -> integer conversion with the same rounding-mode suffixes;
// trailing letters give dst/src lane types (e.g. W_S: i32 from f32) and a U
// marks an unsigned destination.
def XVFTINTRNE_W_S : LASX2R_XX<0x769e5000>;
def XVFTINTRNE_L_D : LASX2R_XX<0x769e5400>;
def XVFTINTRZ_W_S : LASX2R_XX<0x769e4800>;
def XVFTINTRZ_L_D : LASX2R_XX<0x769e4c00>;
def XVFTINTRP_W_S : LASX2R_XX<0x769e4000>;
def XVFTINTRP_L_D : LASX2R_XX<0x769e4400>;
def XVFTINTRM_W_S : LASX2R_XX<0x769e3800>;
def XVFTINTRM_L_D : LASX2R_XX<0x769e3c00>;
def XVFTINT_W_S : LASX2R_XX<0x769e3000>;
def XVFTINT_L_D : LASX2R_XX<0x769e3400>;
def XVFTINTRZ_WU_S : LASX2R_XX<0x769e7000>;
def XVFTINTRZ_LU_D : LASX2R_XX<0x769e7400>;
def XVFTINT_WU_S : LASX2R_XX<0x769e5800>;
def XVFTINT_LU_D : LASX2R_XX<0x769e5c00>;

// 3-register narrowing conversions: two f64 source vectors -> one i32 vector.
def XVFTINTRNE_W_D : LASX3R_XXX<0x754b8000>;
def XVFTINTRZ_W_D : LASX3R_XXX<0x754b0000>;
def XVFTINTRP_W_D : LASX3R_XXX<0x754a8000>;
def XVFTINTRM_W_D : LASX3R_XXX<0x754a0000>;
def XVFTINT_W_D : LASX3R_XXX<0x75498000>;

// L/H variants widen the low/high half of an f32 vector into i64 lanes.
def XVFTINTRNEL_L_S : LASX2R_XX<0x769ea000>;
def XVFTINTRNEH_L_S : LASX2R_XX<0x769ea400>;
def XVFTINTRZL_L_S : LASX2R_XX<0x769e9800>;
def XVFTINTRZH_L_S : LASX2R_XX<0x769e9c00>;
def XVFTINTRPL_L_S : LASX2R_XX<0x769e9000>;
def XVFTINTRPH_L_S : LASX2R_XX<0x769e9400>;
def XVFTINTRML_L_S : LASX2R_XX<0x769e8800>;
def XVFTINTRMH_L_S : LASX2R_XX<0x769e8c00>;
def XVFTINTL_L_S : LASX2R_XX<0x769e8000>;
def XVFTINTH_L_S : LASX2R_XX<0x769e8400>;

// xvffint*: integer -> FP conversion (U suffix: unsigned source).
def XVFFINT_S_W : LASX2R_XX<0x769e0000>;
def XVFFINT_D_L : LASX2R_XX<0x769e0800>;
def XVFFINT_S_WU : LASX2R_XX<0x769e0400>;
def XVFFINT_D_LU : LASX2R_XX<0x769e0c00>;
def XVFFINTL_D_W : LASX2R_XX<0x769e1000>;
def XVFFINTH_D_W : LASX2R_XX<0x769e1400>;
def XVFFINT_S_L : LASX3R_XXX<0x75480000>;
| |
// Integer compares producing a per-lane mask; selected from vector setcc by
// the PatCCXr* multiclasses below (SEQ: ==, SLE: <=, SLT: <; U: unsigned).
// Signed immediate forms take simm5 explicitly; the unsigned immediate forms
// use the class's default (unsigned) immediate operand.
def XVSEQ_B : LASX3R_XXX<0x74000000>;
def XVSEQ_H : LASX3R_XXX<0x74008000>;
def XVSEQ_W : LASX3R_XXX<0x74010000>;
def XVSEQ_D : LASX3R_XXX<0x74018000>;
def XVSEQI_B : LASX2RI5_XXI<0x76800000, simm5>;
def XVSEQI_H : LASX2RI5_XXI<0x76808000, simm5>;
def XVSEQI_W : LASX2RI5_XXI<0x76810000, simm5>;
def XVSEQI_D : LASX2RI5_XXI<0x76818000, simm5>;

def XVSLE_B : LASX3R_XXX<0x74020000>;
def XVSLE_H : LASX3R_XXX<0x74028000>;
def XVSLE_W : LASX3R_XXX<0x74030000>;
def XVSLE_D : LASX3R_XXX<0x74038000>;
def XVSLEI_B : LASX2RI5_XXI<0x76820000, simm5>;
def XVSLEI_H : LASX2RI5_XXI<0x76828000, simm5>;
def XVSLEI_W : LASX2RI5_XXI<0x76830000, simm5>;
def XVSLEI_D : LASX2RI5_XXI<0x76838000, simm5>;

def XVSLE_BU : LASX3R_XXX<0x74040000>;
def XVSLE_HU : LASX3R_XXX<0x74048000>;
def XVSLE_WU : LASX3R_XXX<0x74050000>;
def XVSLE_DU : LASX3R_XXX<0x74058000>;
def XVSLEI_BU : LASX2RI5_XXI<0x76840000>;
def XVSLEI_HU : LASX2RI5_XXI<0x76848000>;
def XVSLEI_WU : LASX2RI5_XXI<0x76850000>;
def XVSLEI_DU : LASX2RI5_XXI<0x76858000>;

def XVSLT_B : LASX3R_XXX<0x74060000>;
def XVSLT_H : LASX3R_XXX<0x74068000>;
def XVSLT_W : LASX3R_XXX<0x74070000>;
def XVSLT_D : LASX3R_XXX<0x74078000>;
def XVSLTI_B : LASX2RI5_XXI<0x76860000, simm5>;
def XVSLTI_H : LASX2RI5_XXI<0x76868000, simm5>;
def XVSLTI_W : LASX2RI5_XXI<0x76870000, simm5>;
def XVSLTI_D : LASX2RI5_XXI<0x76878000, simm5>;

def XVSLT_BU : LASX3R_XXX<0x74080000>;
def XVSLT_HU : LASX3R_XXX<0x74088000>;
def XVSLT_WU : LASX3R_XXX<0x74090000>;
def XVSLT_DU : LASX3R_XXX<0x74098000>;
def XVSLTI_BU : LASX2RI5_XXI<0x76880000>;
def XVSLTI_HU : LASX2RI5_XXI<0x76888000>;
def XVSLTI_WU : LASX2RI5_XXI<0x76890000>;
def XVSLTI_DU : LASX2RI5_XXI<0x76898000>;
| |
// xvfcmp.cond.{s/d}: FP compare with the condition baked into the opcode.
// C* conditions are quiet, S* are signaling; only the C* forms are selected
// from setcc (see the PatCCXrXrF uses at the bottom of this file).
def XVFCMP_CAF_S : LASX3R_XXX<0x0c900000>;
def XVFCMP_SAF_S : LASX3R_XXX<0x0c908000>;
def XVFCMP_CLT_S : LASX3R_XXX<0x0c910000>;
def XVFCMP_SLT_S : LASX3R_XXX<0x0c918000>;
def XVFCMP_CEQ_S : LASX3R_XXX<0x0c920000>;
def XVFCMP_SEQ_S : LASX3R_XXX<0x0c928000>;
def XVFCMP_CLE_S : LASX3R_XXX<0x0c930000>;
def XVFCMP_SLE_S : LASX3R_XXX<0x0c938000>;
def XVFCMP_CUN_S : LASX3R_XXX<0x0c940000>;
def XVFCMP_SUN_S : LASX3R_XXX<0x0c948000>;
def XVFCMP_CULT_S : LASX3R_XXX<0x0c950000>;
def XVFCMP_SULT_S : LASX3R_XXX<0x0c958000>;
def XVFCMP_CUEQ_S : LASX3R_XXX<0x0c960000>;
def XVFCMP_SUEQ_S : LASX3R_XXX<0x0c968000>;
def XVFCMP_CULE_S : LASX3R_XXX<0x0c970000>;
def XVFCMP_SULE_S : LASX3R_XXX<0x0c978000>;
def XVFCMP_CNE_S : LASX3R_XXX<0x0c980000>;
def XVFCMP_SNE_S : LASX3R_XXX<0x0c988000>;
def XVFCMP_COR_S : LASX3R_XXX<0x0c9a0000>;
def XVFCMP_SOR_S : LASX3R_XXX<0x0c9a8000>;
def XVFCMP_CUNE_S : LASX3R_XXX<0x0c9c0000>;
def XVFCMP_SUNE_S : LASX3R_XXX<0x0c9c8000>;

// Double-precision variants of the above.
def XVFCMP_CAF_D : LASX3R_XXX<0x0ca00000>;
def XVFCMP_SAF_D : LASX3R_XXX<0x0ca08000>;
def XVFCMP_CLT_D : LASX3R_XXX<0x0ca10000>;
def XVFCMP_SLT_D : LASX3R_XXX<0x0ca18000>;
def XVFCMP_CEQ_D : LASX3R_XXX<0x0ca20000>;
def XVFCMP_SEQ_D : LASX3R_XXX<0x0ca28000>;
def XVFCMP_CLE_D : LASX3R_XXX<0x0ca30000>;
def XVFCMP_SLE_D : LASX3R_XXX<0x0ca38000>;
def XVFCMP_CUN_D : LASX3R_XXX<0x0ca40000>;
def XVFCMP_SUN_D : LASX3R_XXX<0x0ca48000>;
def XVFCMP_CULT_D : LASX3R_XXX<0x0ca50000>;
def XVFCMP_SULT_D : LASX3R_XXX<0x0ca58000>;
def XVFCMP_CUEQ_D : LASX3R_XXX<0x0ca60000>;
def XVFCMP_SUEQ_D : LASX3R_XXX<0x0ca68000>;
def XVFCMP_CULE_D : LASX3R_XXX<0x0ca70000>;
def XVFCMP_SULE_D : LASX3R_XXX<0x0ca78000>;
def XVFCMP_CNE_D : LASX3R_XXX<0x0ca80000>;
def XVFCMP_SNE_D : LASX3R_XXX<0x0ca88000>;
def XVFCMP_COR_D : LASX3R_XXX<0x0caa0000>;
def XVFCMP_SOR_D : LASX3R_XXX<0x0caa8000>;
def XVFCMP_CUNE_D : LASX3R_XXX<0x0cac0000>;
def XVFCMP_SUNE_D : LASX3R_XXX<0x0cac8000>;
| |
// Bitwise select (per-bit choice between xj and xk); no ISel patterns in
// this file -- see the LoongArch ISA manual.
def XVBITSEL_V : LASX4R_XXXX<0x0d200000>;

def XVBITSELI_B : LASX2RI8_XXXI<0x77c40000>;

// Whole-vector tests writing a condition flag register (LASX2R_CX format);
// cf. the PseudoXVBZ/PseudoXVBNZ branch pseudos below.
def XVSETEQZ_V : LASX2R_CX<0x769c9800>;
def XVSETNEZ_V : LASX2R_CX<0x769c9c00>;
def XVSETANYEQZ_B : LASX2R_CX<0x769ca000>;
def XVSETANYEQZ_H : LASX2R_CX<0x769ca400>;
def XVSETANYEQZ_W : LASX2R_CX<0x769ca800>;
def XVSETANYEQZ_D : LASX2R_CX<0x769cac00>;
def XVSETALLNEZ_B : LASX2R_CX<0x769cb000>;
def XVSETALLNEZ_H : LASX2R_CX<0x769cb400>;
def XVSETALLNEZ_W : LASX2R_CX<0x769cb800>;
def XVSETALLNEZ_D : LASX2R_CX<0x769cbc00>;

// GPR <-> vector-lane moves: insert a GPR into lane `imm`, or extract lane
// `imm` into a GPR. NOTE(review): the U forms presumably zero-extend --
// confirm against the ISA manual.
def XVINSGR2VR_W : LASX2RI3_XXRI<0x76ebc000>;
def XVINSGR2VR_D : LASX2RI2_XXRI<0x76ebe000>;
def XVPICKVE2GR_W : LASX2RI3_RXI<0x76efc000>;
def XVPICKVE2GR_D : LASX2RI2_RXI<0x76efe000>;
def XVPICKVE2GR_WU : LASX2RI3_RXI<0x76f3c000>;
def XVPICKVE2GR_DU : LASX2RI2_RXI<0x76f3e000>;
| |
// Broadcast a GPR value to every lane.
def XVREPLGR2VR_B : LASX2R_XR<0x769f0000>;
def XVREPLGR2VR_H : LASX2R_XR<0x769f0400>;
def XVREPLGR2VR_W : LASX2R_XR<0x769f0800>;
def XVREPLGR2VR_D : LASX2R_XR<0x769f0c00>;

// Broadcast a vector element: index from a GPR (xvreplve), or an immediate
// index applied within each 128-bit half (xvrepl128vei).
def XVREPLVE_B : LASX3R_XXR<0x75220000>;
def XVREPLVE_H : LASX3R_XXR<0x75228000>;
def XVREPLVE_W : LASX3R_XXR<0x75230000>;
def XVREPLVE_D : LASX3R_XXR<0x75238000>;
def XVREPL128VEI_B : LASX2RI4_XXI<0x76f78000>;
def XVREPL128VEI_H : LASX2RI3_XXI<0x76f7c000>;
def XVREPL128VEI_W : LASX2RI2_XXI<0x76f7e000>;
def XVREPL128VEI_D : LASX2RI1_XXI<0x76f7f000>;

// Broadcast element 0 across the whole 256-bit register.
def XVREPLVE0_B : LASX2R_XX<0x77070000>;
def XVREPLVE0_H : LASX2R_XX<0x77078000>;
def XVREPLVE0_W : LASX2R_XX<0x7707c000>;
def XVREPLVE0_D : LASX2R_XX<0x7707e000>;
def XVREPLVE0_Q : LASX2R_XX<0x7707f000>;

// Element 0 <-> lane `imm` moves between vector registers; xvinsve0 is
// destructive ($xd read-modify-write per the _XXXI format).
def XVINSVE0_W : LASX2RI3_XXXI<0x76ffc000>;
def XVINSVE0_D : LASX2RI2_XXXI<0x76ffe000>;

def XVPICKVE_W : LASX2RI3_XXI<0x7703c000>;
def XVPICKVE_D : LASX2RI2_XXI<0x7703e000>;

// Whole-vector byte shifts by immediate; matched from the
// loongarch_vbsll/vbsrl target nodes further down.
def XVBSLL_V : LASX2RI5_XXI<0x768e0000>;
def XVBSRL_V : LASX2RI5_XXI<0x768e8000>;
| |
// Pack even/odd-indexed elements of the two sources.
def XVPACKEV_B : LASX3R_XXX<0x75160000>;
def XVPACKEV_H : LASX3R_XXX<0x75168000>;
def XVPACKEV_W : LASX3R_XXX<0x75170000>;
def XVPACKEV_D : LASX3R_XXX<0x75178000>;
def XVPACKOD_B : LASX3R_XXX<0x75180000>;
def XVPACKOD_H : LASX3R_XXX<0x75188000>;
def XVPACKOD_W : LASX3R_XXX<0x75190000>;
def XVPACKOD_D : LASX3R_XXX<0x75198000>;

// Pick even/odd-indexed elements.
def XVPICKEV_B : LASX3R_XXX<0x751e0000>;
def XVPICKEV_H : LASX3R_XXX<0x751e8000>;
def XVPICKEV_W : LASX3R_XXX<0x751f0000>;
def XVPICKEV_D : LASX3R_XXX<0x751f8000>;
def XVPICKOD_B : LASX3R_XXX<0x75200000>;
def XVPICKOD_H : LASX3R_XXX<0x75208000>;
def XVPICKOD_W : LASX3R_XXX<0x75210000>;
def XVPICKOD_D : LASX3R_XXX<0x75218000>;

// Interleave low/high elements of the two sources.
def XVILVL_B : LASX3R_XXX<0x751a0000>;
def XVILVL_H : LASX3R_XXX<0x751a8000>;
def XVILVL_W : LASX3R_XXX<0x751b0000>;
def XVILVL_D : LASX3R_XXX<0x751b8000>;
def XVILVH_B : LASX3R_XXX<0x751c0000>;
def XVILVH_H : LASX3R_XXX<0x751c8000>;
def XVILVH_W : LASX3R_XXX<0x751d0000>;
def XVILVH_D : LASX3R_XXX<0x751d8000>;

// Variable shuffles; the _XXXX/_XXXX-style forms are destructive ($xd is
// also the index/source operand per their formats).
def XVSHUF_B : LASX4R_XXXX<0x0d600000>;

def XVSHUF_H : LASX3R_XXXX<0x757a8000>;
def XVSHUF_W : LASX3R_XXXX<0x757b0000>;
def XVSHUF_D : LASX3R_XXXX<0x757b8000>;

// Full-width 32-bit-lane permute (indices from xk).
def XVPERM_W : LASX3R_XXX<0x757d0000>;

// Immediate 4-element shuffles (2-bit index per element in the ui8); used by
// the bswap patterns below.
def XVSHUF4I_B : LASX2RI8_XXI<0x77900000>;
def XVSHUF4I_H : LASX2RI8_XXI<0x77940000>;
def XVSHUF4I_W : LASX2RI8_XXI<0x77980000>;
def XVSHUF4I_D : LASX2RI8_XXXI<0x779c0000>;

// Immediate permutes; selected from the loongarch_xvpermi target node
// elsewhere in the backend.
def XVPERMI_W : LASX2RI8_XXXI<0x77e40000>;
def XVPERMI_D : LASX2RI8_XXI<0x77e80000>;
def XVPERMI_Q : LASX2RI8_XXXI<0x77ec0000>;

// Extract an element of xj and insert it into xd (destructive).
def XVEXTRINS_D : LASX2RI8_XXXI<0x77800000>;
def XVEXTRINS_W : LASX2RI8_XXXI<0x77840000>;
def XVEXTRINS_H : LASX2RI8_XXXI<0x77880000>;
def XVEXTRINS_B : LASX2RI8_XXXI<0x778c0000>;
| } // mayLoad = 0, mayStore = 0 |
| |
let mayLoad = 1, mayStore = 0 in {
// Whole-register loads: xvld (base + simm12) and xvldx (base + index GPR).
def XVLD : LASX2RI12_Load<0x2c800000>;
def XVLDX : LASX3R_Load<0x38480000>;

// xvldrepl.{b/h/w/d}: load one element and broadcast it. The offset
// immediate narrows with element size (RI12 -> RI9); presumably the offset
// is scaled by the element size -- confirm against the ISA manual.
def XVLDREPL_B : LASX2RI12_Load<0x32800000>;
def XVLDREPL_H : LASX2RI11_Load<0x32400000>;
def XVLDREPL_W : LASX2RI10_Load<0x32200000>;
def XVLDREPL_D : LASX2RI9_Load<0x32100000>;
} // mayLoad = 1, mayStore = 0
| |
let mayLoad = 0, mayStore = 1 in {
// Whole-register stores: xvst (base + simm12) and xvstx (base + index GPR).
def XVST : LASX2RI12_Store<0x2cc00000>;
def XVSTX : LASX3R_Store<0x384c0000>;

// xvstelm.{b/h/w/d}: store a single element selected by the lane-index
// immediate; the simm8 offset is scaled by the element size
// (simm8_lsl1/lsl2/lsl3).
def XVSTELM_B : LASX2RI8I5_XRII<0x33800000>;
def XVSTELM_H : LASX2RI8I4_XRII<0x33400000, simm8_lsl1>;
def XVSTELM_W : LASX2RI8I3_XRII<0x33200000, simm8_lsl2>;
def XVSTELM_D : LASX2RI8I2_XRII<0x33100000, simm8_lsl3>;
} // mayLoad = 0, mayStore = 1
| |
| } // hasSideEffects = 0, Predicates = [HasExtLASX] |
| |
| /// Pseudo-instructions |
| |
let Predicates = [HasExtLASX] in {

// xvrepli.{b/h/w/d}: assembler-only aliases (isAsmParserOnly = 1, never
// produced by codegen) that splat a simm10 immediate.
let hasSideEffects = 0, mayLoad = 0, mayStore = 0, isCodeGenOnly = 0,
    isAsmParserOnly = 1 in {
def PseudoXVREPLI_B : Pseudo<(outs LASX256:$xd), (ins simm10:$imm), [],
                             "xvrepli.b", "$xd, $imm">;
def PseudoXVREPLI_H : Pseudo<(outs LASX256:$xd), (ins simm10:$imm), [],
                             "xvrepli.h", "$xd, $imm">;
def PseudoXVREPLI_W : Pseudo<(outs LASX256:$xd), (ins simm10:$imm), [],
                             "xvrepli.w", "$xd, $imm">;
def PseudoXVREPLI_D : Pseudo<(outs LASX256:$xd), (ins simm10:$imm), [],
                             "xvrepli.d", "$xd, $imm">;
}

// Branch-on-vector-condition pseudos: VecCond wraps the
// loongarch_vall/vany_(non)zero target nodes for each LASX element type.
def PseudoXVBNZ_B : VecCond<loongarch_vall_nonzero, v32i8, LASX256>;
def PseudoXVBNZ_H : VecCond<loongarch_vall_nonzero, v16i16, LASX256>;
def PseudoXVBNZ_W : VecCond<loongarch_vall_nonzero, v8i32, LASX256>;
def PseudoXVBNZ_D : VecCond<loongarch_vall_nonzero, v4i64, LASX256>;
def PseudoXVBNZ : VecCond<loongarch_vany_nonzero, v32i8, LASX256>;

def PseudoXVBZ_B : VecCond<loongarch_vall_zero, v32i8, LASX256>;
def PseudoXVBZ_H : VecCond<loongarch_vall_zero, v16i16, LASX256>;
def PseudoXVBZ_W : VecCond<loongarch_vall_zero, v8i32, LASX256>;
def PseudoXVBZ_D : VecCond<loongarch_vall_zero, v4i64, LASX256>;
def PseudoXVBZ : VecCond<loongarch_vany_zero, v32i8, LASX256>;

// Byte/halfword GPR-to-lane inserts have no single LASX instruction; they
// are expanded by a custom inserter (usesCustomInserter = 1), with $xd tied
// to the result.
let usesCustomInserter = 1, Constraints = "$xd = $dst" in {
def PseudoXVINSGR2VR_B
    : Pseudo<(outs LASX256:$dst), (ins LASX256:$xd, GPR:$rj, uimm5:$imm)>;
def PseudoXVINSGR2VR_H
    : Pseudo<(outs LASX256:$dst), (ins LASX256:$xd, GPR:$rj, uimm4:$imm)>;
} // usesCustomInserter = 1, Constraints = "$xd = $dst"

} // Predicates = [HasExtLASX]
| |
// Select a unary OpNode on each 256-bit integer vector type to the
// lane-width-suffixed instruction Inst#"_{B/H/W/D}".
multiclass PatXr<SDPatternOperator OpNode, string Inst> {
  def : Pat<(v32i8 (OpNode (v32i8 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj)>;
  def : Pat<(v16i16 (OpNode (v16i16 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj)>;
  def : Pat<(v8i32 (OpNode (v8i32 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj)>;
  def : Pat<(v4i64 (OpNode (v4i64 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj)>;
}
| |
// Select a unary OpNode on each 256-bit FP vector type to Inst#"_{S/D}".
multiclass PatXrF<SDPatternOperator OpNode, string Inst> {
  def : Pat<(v8f32 (OpNode (v8f32 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_S") LASX256:$xj)>;
  def : Pat<(v4f64 (OpNode (v4f64 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj)>;
}
| |
// Select a binary OpNode on each 256-bit integer vector type to
// Inst#"_{B/H/W/D}".
multiclass PatXrXr<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (v8i32 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (v4i64 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, LASX256:$xk)>;
}
| |
// Select a binary OpNode on each 256-bit FP vector type to Inst#"_{S/D}".
multiclass PatXrXrF<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v8f32 LASX256:$xj), (v8f32 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_S") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v4f64 LASX256:$xj), (v4f64 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, LASX256:$xk)>;
}
| |
// As PatXrXr, but select the unsigned instruction variants
// Inst#"_{B/H/W/D}U" (for unsigned OpNodes such as umax/udiv).
multiclass PatXrXrU<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_BU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_HU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (v8i32 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_WU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (v4i64 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_DU") LASX256:$xj, LASX256:$xk)>;
}
| |
// Binary OpNode whose second operand is a splatted simm5; the splat is
// folded into the instruction's immediate field.
multiclass PatXrSimm5<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (v32i8 (SplatPat_simm5 simm5:$imm))),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, simm5:$imm)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (v16i16 (SplatPat_simm5 simm5:$imm))),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, simm5:$imm)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (v8i32 (SplatPat_simm5 simm5:$imm))),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, simm5:$imm)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (v4i64 (SplatPat_simm5 simm5:$imm))),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, simm5:$imm)>;
}
| |
// As PatXrSimm5, but with a splatted uimm5 and the unsigned instruction
// variants Inst#"_{B/H/W/D}U".
multiclass PatXrUimm5<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (v32i8 (SplatPat_uimm5 uimm5:$imm))),
            (!cast<LAInst>(Inst#"_BU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (v16i16 (SplatPat_uimm5 uimm5:$imm))),
            (!cast<LAInst>(Inst#"_HU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (v8i32 (SplatPat_uimm5 uimm5:$imm))),
            (!cast<LAInst>(Inst#"_WU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (v4i64 (SplatPat_uimm5 uimm5:$imm))),
            (!cast<LAInst>(Inst#"_DU") LASX256:$xj, uimm5:$imm)>;
}
| |
// Ternary OpNode (e.g. multiply-accumulate PatFrags) where $xd is both an
// input and the accumulated result.
multiclass PatXrXrXr<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xd), (v32i8 LASX256:$xj),
                    (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_B") LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v16i16 LASX256:$xd), (v16i16 LASX256:$xj),
                    (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_H") LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v8i32 LASX256:$xd), (v8i32 LASX256:$xj),
                    (v8i32 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_W") LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v4i64 LASX256:$xd), (v4i64 LASX256:$xj),
                    (v4i64 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
}
| |
// Variable-amount shift where the DAG has masked the per-lane amount with
// (lane bits - 1). The `and` is folded away, relying on the instruction
// using the shift amount modulo the lane width.
multiclass PatShiftXrXr<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (and vsplati8_imm_eq_7,
                                              (v32i8 LASX256:$xk))),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (and vsplati16_imm_eq_15,
                                               (v16i16 LASX256:$xk))),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (and vsplati32_imm_eq_31,
                                              (v8i32 LASX256:$xk))),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (and vsplati64_imm_eq_63,
                                              (v4i64 LASX256:$xk))),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, LASX256:$xk)>;
}
| |
// Shift by a splatted unsigned immediate; the immediate width tracks the
// lane width (ui3 for bytes ... ui6 for doublewords).
multiclass PatShiftXrSplatUimm<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (v32i8 (SplatPat_uimm3 uimm3:$imm))),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, uimm3:$imm)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (v16i16 (SplatPat_uimm4 uimm4:$imm))),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, uimm4:$imm)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (v8i32 (SplatPat_uimm5 uimm5:$imm))),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (v4i64 (SplatPat_uimm6 uimm6:$imm))),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, uimm6:$imm)>;
}
| |
// Shift by a scalar unsigned immediate carried directly on the node (the
// loongarch_vslli/vsrli target nodes), as opposed to a splatted vector
// immediate (PatShiftXrSplatUimm above).
//
// Note: renamed the LSX-style `$vj` operands to `$xj` and normalized the
// `(OpNode (...)` spacing for consistency with every other LASX multiclass
// in this file; behavior is unchanged (operand names are local to each Pat).
multiclass PatShiftXrUimm<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), uimm3:$imm),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, uimm3:$imm)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), uimm4:$imm),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, uimm4:$imm)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), uimm5:$imm),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), uimm6:$imm),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, uimm6:$imm)>;
}
| |
// Vector setcc with condition CC against a splatted simm5; the splat is
// folded into the compare-immediate instruction.
multiclass PatCCXrSimm5<CondCode CC, string Inst> {
  def : Pat<(v32i8 (setcc (v32i8 LASX256:$xj),
                          (v32i8 (SplatPat_simm5 simm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, simm5:$imm)>;
  def : Pat<(v16i16 (setcc (v16i16 LASX256:$xj),
                           (v16i16 (SplatPat_simm5 simm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, simm5:$imm)>;
  def : Pat<(v8i32 (setcc (v8i32 LASX256:$xj),
                          (v8i32 (SplatPat_simm5 simm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, simm5:$imm)>;
  def : Pat<(v4i64 (setcc (v4i64 LASX256:$xj),
                          (v4i64 (SplatPat_simm5 simm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, simm5:$imm)>;
}
| |
// As PatCCXrSimm5, but with a uimm5 splat and the unsigned compare
// instructions Inst#"_{B/H/W/D}U".
multiclass PatCCXrUimm5<CondCode CC, string Inst> {
  def : Pat<(v32i8 (setcc (v32i8 LASX256:$xj),
                          (v32i8 (SplatPat_uimm5 uimm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_BU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(v16i16 (setcc (v16i16 LASX256:$xj),
                           (v16i16 (SplatPat_uimm5 uimm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_HU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(v8i32 (setcc (v8i32 LASX256:$xj),
                          (v8i32 (SplatPat_uimm5 uimm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_WU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(v4i64 (setcc (v4i64 LASX256:$xj),
                          (v4i64 (SplatPat_uimm5 uimm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_DU") LASX256:$xj, uimm5:$imm)>;
}
| |
// Register-register vector setcc with condition CC; the result is a
// same-width per-lane mask.
multiclass PatCCXrXr<CondCode CC, string Inst> {
  def : Pat<(v32i8 (setcc (v32i8 LASX256:$xj), (v32i8 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v16i16 (setcc (v16i16 LASX256:$xj), (v16i16 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v8i32 (setcc (v8i32 LASX256:$xj), (v8i32 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v4i64 (setcc (v4i64 LASX256:$xj), (v4i64 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, LASX256:$xk)>;
}
| |
// As PatCCXrXr, but selecting the unsigned compare instructions
// Inst#"_{B/H/W/D}U" (for unsigned CondCodes).
multiclass PatCCXrXrU<CondCode CC, string Inst> {
  def : Pat<(v32i8 (setcc (v32i8 LASX256:$xj), (v32i8 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_BU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v16i16 (setcc (v16i16 LASX256:$xj), (v16i16 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_HU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v8i32 (setcc (v8i32 LASX256:$xj), (v8i32 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_WU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v4i64 (setcc (v4i64 LASX256:$xj), (v4i64 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_DU") LASX256:$xj, LASX256:$xk)>;
}
| |
// FP vector setcc: the compare produces an integer mask of the same lane
// width as the FP inputs (v8i32 from v8f32, v4i64 from v4f64).
multiclass PatCCXrXrF<CondCode CC, string Inst> {
  def : Pat<(v8i32 (setcc (v8f32 LASX256:$xj), (v8f32 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_S") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v4i64 (setcc (v4f64 LASX256:$xj), (v4f64 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, LASX256:$xk)>;
}
| |
| let Predicates = [HasExtLASX] in { |
| |
// XVADD_{B/H/W/D}
defm : PatXrXr<add, "XVADD">;
// XVSUB_{B/H/W/D}
defm : PatXrXr<sub, "XVSUB">;

// XVADDI_{B/H/W/D}U
defm : PatXrUimm5<add, "XVADDI">;
// XVSUBI_{B/H/W/D}U
defm : PatXrUimm5<sub, "XVSUBI">;

// XVNEG_{B/H/W/D}: negate is matched as (0 - x).
def : Pat<(sub immAllZerosV, (v32i8 LASX256:$xj)), (XVNEG_B LASX256:$xj)>;
def : Pat<(sub immAllZerosV, (v16i16 LASX256:$xj)), (XVNEG_H LASX256:$xj)>;
def : Pat<(sub immAllZerosV, (v8i32 LASX256:$xj)), (XVNEG_W LASX256:$xj)>;
def : Pat<(sub immAllZerosV, (v4i64 LASX256:$xj)), (XVNEG_D LASX256:$xj)>;

// XVMAX[I]_{B/H/W/D}[U]
defm : PatXrXr<smax, "XVMAX">;
defm : PatXrXrU<umax, "XVMAX">;
defm : PatXrSimm5<smax, "XVMAXI">;
defm : PatXrUimm5<umax, "XVMAXI">;

// XVMIN[I]_{B/H/W/D}[U]
defm : PatXrXr<smin, "XVMIN">;
defm : PatXrXrU<umin, "XVMIN">;
defm : PatXrSimm5<smin, "XVMINI">;
defm : PatXrUimm5<umin, "XVMINI">;

// XVMUL_{B/H/W/D}
defm : PatXrXr<mul, "XVMUL">;

// XVMUH_{B/H/W/D}[U]: high half of the widened product.
defm : PatXrXr<mulhs, "XVMUH">;
defm : PatXrXrU<mulhu, "XVMUH">;

// XVMADD_{B/H/W/D} / XVMSUB_{B/H/W/D}: muladd/mulsub are PatFrags for
// xd +/- (xj * xk), defined elsewhere in the backend.
defm : PatXrXrXr<muladd, "XVMADD">;
// XVMSUB_{B/H/W/D}
defm : PatXrXrXr<mulsub, "XVMSUB">;

// XVDIV_{B/H/W/D}[U]
defm : PatXrXr<sdiv, "XVDIV">;
defm : PatXrXrU<udiv, "XVDIV">;

// XVMOD_{B/H/W/D}[U]
defm : PatXrXr<srem, "XVMOD">;
defm : PatXrXrU<urem, "XVMOD">;
| |
// The 256-bit logical instructions are untyped (one _V opcode), so one
// pattern is generated per integer vector type.
// XVAND_V
foreach vt = [v32i8, v16i16, v8i32, v4i64] in
def : Pat<(and (vt LASX256:$xj), (vt LASX256:$xk)),
          (XVAND_V LASX256:$xj, LASX256:$xk)>;
// XVOR_V
foreach vt = [v32i8, v16i16, v8i32, v4i64] in
def : Pat<(or (vt LASX256:$xj), (vt LASX256:$xk)),
          (XVOR_V LASX256:$xj, LASX256:$xk)>;
// XVXOR_V
foreach vt = [v32i8, v16i16, v8i32, v4i64] in
def : Pat<(xor (vt LASX256:$xj), (vt LASX256:$xk)),
          (XVXOR_V LASX256:$xj, LASX256:$xk)>;
// XVNOR_V: matches the fused not-of-or form.
foreach vt = [v32i8, v16i16, v8i32, v4i64] in
def : Pat<(vnot (or (vt LASX256:$xj), (vt LASX256:$xk))),
          (XVNOR_V LASX256:$xj, LASX256:$xk)>;

// Immediate logical forms exist only at byte granularity: the uimm8 is
// splatted across the byte lanes.
// XVANDI_B
def : Pat<(and (v32i8 LASX256:$xj), (v32i8 (SplatPat_uimm8 uimm8:$imm))),
          (XVANDI_B LASX256:$xj, uimm8:$imm)>;
// XVORI_B
def : Pat<(or (v32i8 LASX256:$xj), (v32i8 (SplatPat_uimm8 uimm8:$imm))),
          (XVORI_B LASX256:$xj, uimm8:$imm)>;

// XVXORI_B
def : Pat<(xor (v32i8 LASX256:$xj), (v32i8 (SplatPat_uimm8 uimm8:$imm))),
          (XVXORI_B LASX256:$xj, uimm8:$imm)>;
| |
// XVBSLL_V / XVBSRL_V: whole-register byte shifts. loongarch_vbsll/vbsrl are
// target nodes, so a pattern is generated for every element type.
foreach vt = [v32i8, v16i16, v8i32, v4i64, v8f32,
              v4f64] in def : Pat<(loongarch_vbsll(vt LASX256:$xj), uimm5:$imm),
                                  (XVBSLL_V LASX256:$xj, uimm5:$imm)>;

// XVBSRL_V
foreach vt = [v32i8, v16i16, v8i32, v4i64, v8f32,
              v4f64] in def : Pat<(loongarch_vbsrl(vt LASX256:$xj), uimm5:$imm),
                                  (XVBSRL_V LASX256:$xj, uimm5:$imm)>;

// Element shifts come in four DAG shapes: plain vector amount, amount
// pre-masked with (lane bits - 1) (mask folded away), splatted-immediate
// amount, and the loongarch_v*i target nodes that carry a scalar immediate.
// XVSLL[I]_{B/H/W/D}
defm : PatXrXr<shl, "XVSLL">;
defm : PatShiftXrXr<shl, "XVSLL">;
defm : PatShiftXrSplatUimm<shl, "XVSLLI">;
defm : PatShiftXrUimm<loongarch_vslli, "XVSLLI">;

// XVSRL[I]_{B/H/W/D}
defm : PatXrXr<srl, "XVSRL">;
defm : PatShiftXrXr<srl, "XVSRL">;
defm : PatShiftXrSplatUimm<srl, "XVSRLI">;
defm : PatShiftXrUimm<loongarch_vsrli, "XVSRLI">;

// XVSRA[I]_{B/H/W/D}
defm : PatXrXr<sra, "XVSRA">;
defm : PatShiftXrXr<sra, "XVSRA">;
defm : PatShiftXrSplatUimm<sra, "XVSRAI">;
| |
// XVCLZ_{B/H/W/D}
defm : PatXr<ctlz, "XVCLZ">;

// XVPCNT_{B/H/W/D}
defm : PatXr<ctpop, "XVPCNT">;

// XVBITCLR_{B/H/W/D}: clear bit xk of each lane, matched as
// (and x, ~(1 << k)).
def : Pat<(and v32i8:$xj, (vnot (shl vsplat_imm_eq_1, v32i8:$xk))),
          (v32i8 (XVBITCLR_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(and v16i16:$xj, (vnot (shl vsplat_imm_eq_1, v16i16:$xk))),
          (v16i16 (XVBITCLR_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(and v8i32:$xj, (vnot (shl vsplat_imm_eq_1, v8i32:$xk))),
          (v8i32 (XVBITCLR_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(and v4i64:$xj, (vnot (shl vsplat_imm_eq_1, v4i64:$xk))),
          (v4i64 (XVBITCLR_D v4i64:$xj, v4i64:$xk))>;
// Variants where the DAG already masked the bit index with (lane bits - 1);
// vsplatiNimmM matches such a masked splat and the mask is folded away.
def : Pat<(and v32i8:$xj, (vnot (shl vsplat_imm_eq_1,
                                     (vsplati8imm7 v32i8:$xk)))),
          (v32i8 (XVBITCLR_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(and v16i16:$xj, (vnot (shl vsplat_imm_eq_1,
                                      (vsplati16imm15 v16i16:$xk)))),
          (v16i16 (XVBITCLR_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(and v8i32:$xj, (vnot (shl vsplat_imm_eq_1,
                                     (vsplati32imm31 v8i32:$xk)))),
          (v8i32 (XVBITCLR_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(and v4i64:$xj, (vnot (shl vsplat_imm_eq_1,
                                     (vsplati64imm63 v4i64:$xk)))),
          (v4i64 (XVBITCLR_D v4i64:$xj, v4i64:$xk))>;
| |
// XVBITCLRI_{B/H/W/D}: (and x, splat(~(1 << imm))) -- vsplat_uimm_inv_pow2
// matches the inverted-power-of-two splat and recovers the bit index.
def : Pat<(and (v32i8 LASX256:$xj), (v32i8 (vsplat_uimm_inv_pow2 uimm3:$imm))),
          (XVBITCLRI_B LASX256:$xj, uimm3:$imm)>;
def : Pat<(and (v16i16 LASX256:$xj), (v16i16 (vsplat_uimm_inv_pow2 uimm4:$imm))),
          (XVBITCLRI_H LASX256:$xj, uimm4:$imm)>;
def : Pat<(and (v8i32 LASX256:$xj), (v8i32 (vsplat_uimm_inv_pow2 uimm5:$imm))),
          (XVBITCLRI_W LASX256:$xj, uimm5:$imm)>;
def : Pat<(and (v4i64 LASX256:$xj), (v4i64 (vsplat_uimm_inv_pow2 uimm6:$imm))),
          (XVBITCLRI_D LASX256:$xj, uimm6:$imm)>;

// XVBITSET_{B/H/W/D}: set bit xk of each lane, matched as (or x, (1 << k)).
def : Pat<(or v32i8:$xj, (shl vsplat_imm_eq_1, v32i8:$xk)),
          (v32i8 (XVBITSET_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(or v16i16:$xj, (shl vsplat_imm_eq_1, v16i16:$xk)),
          (v16i16 (XVBITSET_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(or v8i32:$xj, (shl vsplat_imm_eq_1, v8i32:$xk)),
          (v8i32 (XVBITSET_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(or v4i64:$xj, (shl vsplat_imm_eq_1, v4i64:$xk)),
          (v4i64 (XVBITSET_D v4i64:$xj, v4i64:$xk))>;
// Variants with an explicitly masked bit index (mask folded away).
def : Pat<(or v32i8:$xj, (shl vsplat_imm_eq_1, (vsplati8imm7 v32i8:$xk))),
          (v32i8 (XVBITSET_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(or v16i16:$xj, (shl vsplat_imm_eq_1, (vsplati16imm15 v16i16:$xk))),
          (v16i16 (XVBITSET_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(or v8i32:$xj, (shl vsplat_imm_eq_1, (vsplati32imm31 v8i32:$xk))),
          (v8i32 (XVBITSET_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(or v4i64:$xj, (shl vsplat_imm_eq_1, (vsplati64imm63 v4i64:$xk))),
          (v4i64 (XVBITSET_D v4i64:$xj, v4i64:$xk))>;

// XVBITSETI_{B/H/W/D}: (or x, splat(1 << imm)).
def : Pat<(or (v32i8 LASX256:$xj), (v32i8 (vsplat_uimm_pow2 uimm3:$imm))),
          (XVBITSETI_B LASX256:$xj, uimm3:$imm)>;
def : Pat<(or (v16i16 LASX256:$xj), (v16i16 (vsplat_uimm_pow2 uimm4:$imm))),
          (XVBITSETI_H LASX256:$xj, uimm4:$imm)>;
def : Pat<(or (v8i32 LASX256:$xj), (v8i32 (vsplat_uimm_pow2 uimm5:$imm))),
          (XVBITSETI_W LASX256:$xj, uimm5:$imm)>;
def : Pat<(or (v4i64 LASX256:$xj), (v4i64 (vsplat_uimm_pow2 uimm6:$imm))),
          (XVBITSETI_D LASX256:$xj, uimm6:$imm)>;
| |
// XVBITREV_{B/H/W/D}: flip bit xk of each lane, matched as (xor x, (1 << k)).
def : Pat<(xor v32i8:$xj, (shl vsplat_imm_eq_1, v32i8:$xk)),
          (v32i8 (XVBITREV_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(xor v16i16:$xj, (shl vsplat_imm_eq_1, v16i16:$xk)),
          (v16i16 (XVBITREV_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(xor v8i32:$xj, (shl vsplat_imm_eq_1, v8i32:$xk)),
          (v8i32 (XVBITREV_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(xor v4i64:$xj, (shl vsplat_imm_eq_1, v4i64:$xk)),
          (v4i64 (XVBITREV_D v4i64:$xj, v4i64:$xk))>;
// Variants with an explicitly masked bit index (mask folded away).
def : Pat<(xor v32i8:$xj, (shl vsplat_imm_eq_1, (vsplati8imm7 v32i8:$xk))),
          (v32i8 (XVBITREV_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(xor v16i16:$xj, (shl vsplat_imm_eq_1, (vsplati16imm15 v16i16:$xk))),
          (v16i16 (XVBITREV_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(xor v8i32:$xj, (shl vsplat_imm_eq_1, (vsplati32imm31 v8i32:$xk))),
          (v8i32 (XVBITREV_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(xor v4i64:$xj, (shl vsplat_imm_eq_1, (vsplati64imm63 v4i64:$xk))),
          (v4i64 (XVBITREV_D v4i64:$xj, v4i64:$xk))>;

// XVBITREVI_{B/H/W/D}: (xor x, splat(1 << imm)).
def : Pat<(xor (v32i8 LASX256:$xj), (v32i8 (vsplat_uimm_pow2 uimm3:$imm))),
          (XVBITREVI_B LASX256:$xj, uimm3:$imm)>;
def : Pat<(xor (v16i16 LASX256:$xj), (v16i16 (vsplat_uimm_pow2 uimm4:$imm))),
          (XVBITREVI_H LASX256:$xj, uimm4:$imm)>;
def : Pat<(xor (v8i32 LASX256:$xj), (v8i32 (vsplat_uimm_pow2 uimm5:$imm))),
          (XVBITREVI_W LASX256:$xj, uimm5:$imm)>;
def : Pat<(xor (v4i64 LASX256:$xj), (v4i64 (vsplat_uimm_pow2 uimm6:$imm))),
          (XVBITREVI_D LASX256:$xj, uimm6:$imm)>;

// Vector bswaps via byte shuffles:
//  - v16i16: swap byte pairs within each 32-bit group (0b10110001 = 1,0,3,2).
//  - v8i32: reverse the four bytes of each word (0b00011011 = 3,2,1,0).
//  - v4i64: reverse bytes within each word, then swap the two words of each
//    doubleword.
def : Pat<(bswap (v16i16 LASX256:$xj)), (XVSHUF4I_B LASX256:$xj, 0b10110001)>;
def : Pat<(bswap (v8i32 LASX256:$xj)), (XVSHUF4I_B LASX256:$xj, 0b00011011)>;
def : Pat<(bswap (v4i64 LASX256:$xj)),
          (XVSHUF4I_W (XVSHUF4I_B LASX256:$xj, 0b00011011), 0b10110001)>;
| |
// XVFADD_{S/D}
defm : PatXrXrF<fadd, "XVFADD">;

// XVFSUB_{S/D}
defm : PatXrXrF<fsub, "XVFSUB">;

// XVFMUL_{S/D}
defm : PatXrXrF<fmul, "XVFMUL">;

// XVFDIV_{S/D}
defm : PatXrXrF<fdiv, "XVFDIV">;

// XVFMADD_{S/D}: xj * xk + xa
def : Pat<(fma v8f32:$xj, v8f32:$xk, v8f32:$xa),
          (XVFMADD_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fma v4f64:$xj, v4f64:$xk, v4f64:$xa),
          (XVFMADD_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;

// XVFMSUB_{S/D}: xj * xk - xa
def : Pat<(fma v8f32:$xj, v8f32:$xk, (fneg v8f32:$xa)),
          (XVFMSUB_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fma v4f64:$xj, v4f64:$xk, (fneg v4f64:$xa)),
          (XVFMSUB_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;

// XVFNMADD_{S/D}: -(xj * xk + xa). The second form is only equivalent when
// signed zeros don't matter, hence fma_nsz.
def : Pat<(fneg (fma v8f32:$xj, v8f32:$xk, v8f32:$xa)),
          (XVFNMADD_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fneg (fma v4f64:$xj, v4f64:$xk, v4f64:$xa)),
          (XVFNMADD_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;
def : Pat<(fma_nsz (fneg v8f32:$xj), v8f32:$xk, (fneg v8f32:$xa)),
          (XVFNMADD_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fma_nsz (fneg v4f64:$xj), v4f64:$xk, (fneg v4f64:$xa)),
          (XVFNMADD_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;

// XVFNMSUB_{S/D}: -(xj * xk - xa), same no-signed-zeros caveat.
def : Pat<(fneg (fma v8f32:$xj, v8f32:$xk, (fneg v8f32:$xa))),
          (XVFNMSUB_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fneg (fma v4f64:$xj, v4f64:$xk, (fneg v4f64:$xa))),
          (XVFNMSUB_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;
def : Pat<(fma_nsz (fneg v8f32:$xj), v8f32:$xk, v8f32:$xa),
          (XVFNMSUB_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fma_nsz (fneg v4f64:$xj), v4f64:$xk, v4f64:$xa),
          (XVFNMSUB_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;
| |
| // XVFSQRT_{S/D} |
| defm : PatXrF<fsqrt, "XVFSQRT">; |
| |
| // XVRECIP_{S/D} |
| def : Pat<(fdiv vsplatf32_fpimm_eq_1, v8f32:$xj), |
| (XVFRECIP_S v8f32:$xj)>; |
| def : Pat<(fdiv vsplatf64_fpimm_eq_1, v4f64:$xj), |
| (XVFRECIP_D v4f64:$xj)>; |
| |
| // XVFRSQRT_{S/D} |
| def : Pat<(fdiv vsplatf32_fpimm_eq_1, (fsqrt v8f32:$xj)), |
| (XVFRSQRT_S v8f32:$xj)>; |
| def : Pat<(fdiv vsplatf64_fpimm_eq_1, (fsqrt v4f64:$xj)), |
| (XVFRSQRT_D v4f64:$xj)>; |
| |
// Integer compares: register-immediate (signed simm5 / unsigned uimm5) and
// register-register forms.
// XVSEQ[I]_{B/H/W/D}
defm : PatCCXrSimm5<SETEQ, "XVSEQI">;
defm : PatCCXrXr<SETEQ, "XVSEQ">;

// XVSLE[I]_{B/H/W/D}[U]
defm : PatCCXrSimm5<SETLE, "XVSLEI">;
defm : PatCCXrUimm5<SETULE, "XVSLEI">;
defm : PatCCXrXr<SETLE, "XVSLE">;
defm : PatCCXrXrU<SETULE, "XVSLE">;

// XVSLT[I]_{B/H/W/D}[U]
defm : PatCCXrSimm5<SETLT, "XVSLTI">;
defm : PatCCXrUimm5<SETULT, "XVSLTI">;
defm : PatCCXrXr<SETLT, "XVSLT">;
defm : PatCCXrXrU<SETULT, "XVSLT">;

// XVFCMP.cond.{S/D}
// Each FP condition maps both its "don't care about ordering" form (e.g.
// SETEQ) and its explicitly ordered form (SETOEQ) to the same compare; the
// unordered form (SETUEQ) gets the CU* variant.
defm : PatCCXrXrF<SETEQ, "XVFCMP_CEQ">;
defm : PatCCXrXrF<SETOEQ, "XVFCMP_CEQ">;
defm : PatCCXrXrF<SETUEQ, "XVFCMP_CUEQ">;

defm : PatCCXrXrF<SETLE, "XVFCMP_CLE">;
defm : PatCCXrXrF<SETOLE, "XVFCMP_CLE">;
defm : PatCCXrXrF<SETULE, "XVFCMP_CULE">;

defm : PatCCXrXrF<SETLT, "XVFCMP_CLT">;
defm : PatCCXrXrF<SETOLT, "XVFCMP_CLT">;
defm : PatCCXrXrF<SETULT, "XVFCMP_CULT">;

defm : PatCCXrXrF<SETNE, "XVFCMP_CNE">;
defm : PatCCXrXrF<SETONE, "XVFCMP_CNE">;
defm : PatCCXrXrF<SETUNE, "XVFCMP_CUNE">;

// Ordered (neither operand NaN) / unordered (either operand NaN).
defm : PatCCXrXrF<SETO, "XVFCMP_COR">;
defm : PatCCXrXrF<SETUO, "XVFCMP_CUN">;
| |
// PseudoXVINSGR2VR_{B/H}
// i8/i16 element inserts are selected to pseudo instructions (there is no
// XVINSGR2VR_B/H encoding).
def : Pat<(vector_insert v32i8:$xd, GRLenVT:$rj, uimm5:$imm),
          (PseudoXVINSGR2VR_B v32i8:$xd, GRLenVT:$rj, uimm5:$imm)>;
def : Pat<(vector_insert v16i16:$xd, GRLenVT:$rj, uimm4:$imm),
          (PseudoXVINSGR2VR_H v16i16:$xd, GRLenVT:$rj, uimm4:$imm)>;

// XVINSGR2VR_{W/D}
def : Pat<(vector_insert v8i32:$xd, GRLenVT:$rj, uimm3:$imm),
          (XVINSGR2VR_W v8i32:$xd, GRLenVT:$rj, uimm3:$imm)>;
def : Pat<(vector_insert v4i64:$xd, GRLenVT:$rj, uimm2:$imm),
          (XVINSGR2VR_D v4i64:$xd, GRLenVT:$rj, uimm2:$imm)>;

// FP element inserts route the scalar through a GPR first.
def : Pat<(vector_insert v8f32:$vd, FPR32:$fj, uimm3:$imm),
          (XVINSGR2VR_W $vd, (COPY_TO_REGCLASS FPR32:$fj, GPR), uimm3:$imm)>;
def : Pat<(vector_insert v4f64:$vd, FPR64:$fj, uimm2:$imm),
          (XVINSGR2VR_D $vd, (COPY_TO_REGCLASS FPR64:$fj, GPR), uimm2:$imm)>;

// scalar_to_vector: place the FP scalar in element 0 with a subregister
// insert; the upper elements are undefined, which scalar_to_vector permits.
def : Pat<(v8f32 (scalar_to_vector FPR32:$fj)),
          (SUBREG_TO_REG (i64 0), FPR32:$fj, sub_32)>;
def : Pat<(v4f64 (scalar_to_vector FPR64:$fj)),
          (SUBREG_TO_REG (i64 0), FPR64:$fj, sub_64)>;

// XVPICKVE2GR_W[U]: sign-/zero-extending i32 element extract to a GPR.
def : Pat<(loongarch_vpick_sext_elt v8i32:$xd, uimm3:$imm, i32),
          (XVPICKVE2GR_W v8i32:$xd, uimm3:$imm)>;
def : Pat<(loongarch_vpick_zext_elt v8i32:$xd, uimm3:$imm, i32),
          (XVPICKVE2GR_WU v8i32:$xd, uimm3:$imm)>;
| |
// XVREPLGR2VR_{B/H/W/D}: broadcast a GPR into every lane (matched both as an
// all-same-element build_vector and as the target replicate node).
def : Pat<(lasxsplati8 GPR:$rj), (XVREPLGR2VR_B GPR:$rj)>;
def : Pat<(lasxsplati16 GPR:$rj), (XVREPLGR2VR_H GPR:$rj)>;
def : Pat<(lasxsplati32 GPR:$rj), (XVREPLGR2VR_W GPR:$rj)>;
def : Pat<(lasxsplati64 GPR:$rj), (XVREPLGR2VR_D GPR:$rj)>;

def : Pat<(v32i8 (loongarch_vreplgr2vr GRLenVT:$rj)),
          (v32i8 (XVREPLGR2VR_B GRLenVT:$rj))>;
def : Pat<(v16i16 (loongarch_vreplgr2vr GRLenVT:$rj)),
          (v16i16 (XVREPLGR2VR_H GRLenVT:$rj))>;
def : Pat<(v8i32 (loongarch_vreplgr2vr GRLenVT:$rj)),
          (v8i32 (XVREPLGR2VR_W GRLenVT:$rj))>;
def : Pat<(v4i64 (loongarch_vreplgr2vr GRLenVT:$rj)),
          (v4i64 (XVREPLGR2VR_D GRLenVT:$rj))>;

// XVREPLVE_{B/H/W/D}: broadcast the vector element selected by a GPR index.
def : Pat<(loongarch_vreplve v32i8:$xj, GRLenVT:$rk),
          (XVREPLVE_B v32i8:$xj, GRLenVT:$rk)>;
def : Pat<(loongarch_vreplve v16i16:$xj, GRLenVT:$rk),
          (XVREPLVE_H v16i16:$xj, GRLenVT:$rk)>;
def : Pat<(loongarch_vreplve v8i32:$xj, GRLenVT:$rk),
          (XVREPLVE_W v8i32:$xj, GRLenVT:$rk)>;
def : Pat<(loongarch_vreplve v4i64:$xj, GRLenVT:$rk),
          (XVREPLVE_D v4i64:$xj, GRLenVT:$rk)>;
| |
// XVSHUF_{B/H/W/D}
// Note the operand rotation for the byte form: the node carries the index
// vector first ($xa), but XVSHUF_B takes it as its last operand.
def : Pat<(loongarch_vshuf v32i8:$xa, v32i8:$xj, v32i8:$xk),
          (XVSHUF_B v32i8:$xj, v32i8:$xk, v32i8:$xa)>;
def : Pat<(loongarch_vshuf v16i16:$xd, v16i16:$xj, v16i16:$xk),
          (XVSHUF_H v16i16:$xd, v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vshuf v8i32:$xd, v8i32:$xj, v8i32:$xk),
          (XVSHUF_W v8i32:$xd, v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vshuf v4i64:$xd, v4i64:$xj, v4i64:$xk),
          (XVSHUF_D v4i64:$xd, v4i64:$xj, v4i64:$xk)>;
// FP payloads reuse the same-width integer shuffles.
def : Pat<(loongarch_vshuf v8i32:$xd, v8f32:$xj, v8f32:$xk),
          (XVSHUF_W v8i32:$xd, v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vshuf v4i64:$xd, v4f64:$xj, v4f64:$xk),
          (XVSHUF_D v4i64:$xd, v4f64:$xj, v4f64:$xk)>;

// XVPICKEV_{B/H/W/D}
def : Pat<(loongarch_vpickev v32i8:$xj, v32i8:$xk),
          (XVPICKEV_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vpickev v16i16:$xj, v16i16:$xk),
          (XVPICKEV_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vpickev v8i32:$xj, v8i32:$xk),
          (XVPICKEV_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vpickev v4i64:$xj, v4i64:$xk),
          (XVPICKEV_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vpickev v8f32:$xj, v8f32:$xk),
          (XVPICKEV_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vpickev v4f64:$xj, v4f64:$xk),
          (XVPICKEV_D v4f64:$xj, v4f64:$xk)>;

// XVPICKOD_{B/H/W/D}
def : Pat<(loongarch_vpickod v32i8:$xj, v32i8:$xk),
          (XVPICKOD_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vpickod v16i16:$xj, v16i16:$xk),
          (XVPICKOD_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vpickod v8i32:$xj, v8i32:$xk),
          (XVPICKOD_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vpickod v4i64:$xj, v4i64:$xk),
          (XVPICKOD_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vpickod v8f32:$xj, v8f32:$xk),
          (XVPICKOD_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vpickod v4f64:$xj, v4f64:$xk),
          (XVPICKOD_D v4f64:$xj, v4f64:$xk)>;

// XVPACKEV_{B/H/W/D}
def : Pat<(loongarch_vpackev v32i8:$xj, v32i8:$xk),
          (XVPACKEV_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vpackev v16i16:$xj, v16i16:$xk),
          (XVPACKEV_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vpackev v8i32:$xj, v8i32:$xk),
          (XVPACKEV_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vpackev v4i64:$xj, v4i64:$xk),
          (XVPACKEV_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vpackev v8f32:$xj, v8f32:$xk),
          (XVPACKEV_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vpackev v4f64:$xj, v4f64:$xk),
          (XVPACKEV_D v4f64:$xj, v4f64:$xk)>;

// XVPACKOD_{B/H/W/D}
def : Pat<(loongarch_vpackod v32i8:$xj, v32i8:$xk),
          (XVPACKOD_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vpackod v16i16:$xj, v16i16:$xk),
          (XVPACKOD_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vpackod v8i32:$xj, v8i32:$xk),
          (XVPACKOD_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vpackod v4i64:$xj, v4i64:$xk),
          (XVPACKOD_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vpackod v8f32:$xj, v8f32:$xk),
          (XVPACKOD_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vpackod v4f64:$xj, v4f64:$xk),
          (XVPACKOD_D v4f64:$xj, v4f64:$xk)>;

// XVILVL_{B/H/W/D}
def : Pat<(loongarch_vilvl v32i8:$xj, v32i8:$xk),
          (XVILVL_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vilvl v16i16:$xj, v16i16:$xk),
          (XVILVL_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vilvl v8i32:$xj, v8i32:$xk),
          (XVILVL_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vilvl v4i64:$xj, v4i64:$xk),
          (XVILVL_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vilvl v8f32:$xj, v8f32:$xk),
          (XVILVL_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vilvl v4f64:$xj, v4f64:$xk),
          (XVILVL_D v4f64:$xj, v4f64:$xk)>;

// XVILVH_{B/H/W/D}
def : Pat<(loongarch_vilvh v32i8:$xj, v32i8:$xk),
          (XVILVH_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vilvh v16i16:$xj, v16i16:$xk),
          (XVILVH_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vilvh v8i32:$xj, v8i32:$xk),
          (XVILVH_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vilvh v4i64:$xj, v4i64:$xk),
          (XVILVH_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vilvh v8f32:$xj, v8f32:$xk),
          (XVILVH_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vilvh v4f64:$xj, v4f64:$xk),
          (XVILVH_D v4f64:$xj, v4f64:$xk)>;
| |
// XVSHUF4I_{B/H/W}: shuffle-by-immediate within groups of four elements.
def : Pat<(loongarch_vshuf4i v32i8:$xj, immZExt8:$ui8),
          (XVSHUF4I_B v32i8:$xj, immZExt8:$ui8)>;
def : Pat<(loongarch_vshuf4i v16i16:$xj, immZExt8:$ui8),
          (XVSHUF4I_H v16i16:$xj, immZExt8:$ui8)>;
def : Pat<(loongarch_vshuf4i v8i32:$xj, immZExt8:$ui8),
          (XVSHUF4I_W v8i32:$xj, immZExt8:$ui8)>;
def : Pat<(loongarch_vshuf4i v8f32:$xj, immZExt8:$ui8),
          (XVSHUF4I_W v8f32:$xj, immZExt8:$ui8)>;

// XVREPL128VEI_{B/H/W/D}: replicate an immediate-selected element; the index
// width shrinks with the element width (uimm4 for bytes down to uimm1 for
// doublewords, i.e. indices within a 128-bit lane).
def : Pat<(loongarch_vreplvei v32i8:$xj, immZExt4:$ui4),
          (XVREPL128VEI_B v32i8:$xj, immZExt4:$ui4)>;
def : Pat<(loongarch_vreplvei v16i16:$xj, immZExt3:$ui3),
          (XVREPL128VEI_H v16i16:$xj, immZExt3:$ui3)>;
def : Pat<(loongarch_vreplvei v8i32:$xj, immZExt2:$ui2),
          (XVREPL128VEI_W v8i32:$xj, immZExt2:$ui2)>;
def : Pat<(loongarch_vreplvei v4i64:$xj, immZExt1:$ui1),
          (XVREPL128VEI_D v4i64:$xj, immZExt1:$ui1)>;
def : Pat<(loongarch_vreplvei v8f32:$xj, immZExt2:$ui2),
          (XVREPL128VEI_W v8f32:$xj, immZExt2:$ui2)>;
def : Pat<(loongarch_vreplvei v4f64:$xj, immZExt1:$ui1),
          (XVREPL128VEI_D v4f64:$xj, immZExt1:$ui1)>;

// XVPERMI_D: permute the four 64-bit elements by immediate.
def : Pat<(loongarch_xvpermi v4i64:$xj, immZExt8: $ui8),
          (XVPERMI_D v4i64:$xj, immZExt8: $ui8)>;
def : Pat<(loongarch_xvpermi v4f64:$xj, immZExt8: $ui8),
          (XVPERMI_D v4f64:$xj, immZExt8: $ui8)>;

// XVREPLVE0_{W/D}: splat an FP scalar by inserting it into element 0 (via a
// subregister insert) and broadcasting element 0.
def : Pat<(lasxsplatf32 FPR32:$fj),
          (XVREPLVE0_W (SUBREG_TO_REG (i64 0), FPR32:$fj, sub_32))>;
def : Pat<(lasxsplatf64 FPR64:$fj),
          (XVREPLVE0_D (SUBREG_TO_REG (i64 0), FPR64:$fj, sub_64))>;
| |
// Loads/Stores
// Immediate-offset (XVLD/XVST) and register-register (XVLDX/XVSTX) forms for
// every 256-bit vector type.
foreach vt = [v32i8, v16i16, v8i32, v4i64, v8f32, v4f64] in {
  defm : LdPat<load, XVLD, vt>;
  def  : RegRegLdPat<load, XVLDX, vt>;
  defm : StPat<store, XVST, LASX256, vt>;
  def  : RegRegStPat<store, XVSTX, LASX256, vt>;
}
| |
// Vector extraction with constant index.
// i8/i16 extracts are restricted to the low 128-bit half (uimm4/uimm3 index)
// and go through the LSX VPICKVE2GR on the sub_128 subregister.
def : Pat<(i64 (vector_extract v32i8:$xj, uimm4:$imm)),
          (VPICKVE2GR_B (EXTRACT_SUBREG v32i8:$xj, sub_128), uimm4:$imm)>;
def : Pat<(i64 (vector_extract v16i16:$xj, uimm3:$imm)),
          (VPICKVE2GR_H (EXTRACT_SUBREG v16i16:$xj, sub_128), uimm3:$imm)>;
def : Pat<(i64 (vector_extract v8i32:$xj, uimm3:$imm)),
          (XVPICKVE2GR_W v8i32:$xj, uimm3:$imm)>;
def : Pat<(i64 (vector_extract v4i64:$xj, uimm2:$imm)),
          (XVPICKVE2GR_D v4i64:$xj, uimm2:$imm)>;
// FP extracts bounce through a GPR and move back into an FP register.
def : Pat<(f32 (vector_extract v8f32:$xj, uimm3:$imm)),
          (MOVGR2FR_W (XVPICKVE2GR_W v8f32:$xj, uimm3:$imm))>;
def : Pat<(f64 (vector_extract v4f64:$xj, uimm2:$imm)),
          (MOVGR2FR_D (XVPICKVE2GR_D v4f64:$xj, uimm2:$imm))>;

// vselect
// A select against a splatted uimm8 folds into the bit-select-immediate.
def : Pat<(v32i8 (vselect LASX256:$xd, (v32i8 (SplatPat_uimm8 uimm8:$imm)),
                          LASX256:$xj)),
          (XVBITSELI_B LASX256:$xd, LASX256:$xj, uimm8:$imm)>;
// General case: the mask ($xa) and the true operand ($xk) move to the back of
// the XVBITSEL_V operand list.
foreach vt = [v32i8, v16i16, v8i32, v4i64, v8f32, v4f64] in
  def : Pat<(vt (vselect LASX256:$xa, LASX256:$xk, LASX256:$xj)),
            (XVBITSEL_V LASX256:$xj, LASX256:$xk, LASX256:$xa)>;

// fneg: flip only the sign bit of each lane.
def : Pat<(fneg (v8f32 LASX256:$xj)), (XVBITREVI_W LASX256:$xj, 31)>;
def : Pat<(fneg (v4f64 LASX256:$xj)), (XVBITREVI_D LASX256:$xj, 63)>;
| |
// XVFFINT_{S_W/D_L}: signed int -> FP.
def : Pat<(v8f32 (sint_to_fp v8i32:$vj)), (XVFFINT_S_W v8i32:$vj)>;
def : Pat<(v4f64 (sint_to_fp v4i64:$vj)), (XVFFINT_D_L v4i64:$vj)>;
// v4i32 -> v4f64: widen the 128-bit source to v4i64 first.
def : Pat<(v4f64 (sint_to_fp v4i32:$vj)),
          (XVFFINT_D_L (VEXT2XV_D_W (SUBREG_TO_REG (i64 0), v4i32:$vj,
                                                   sub_128)))>;
// v4i64 -> v4f32: convert to f64, then narrow both 128-bit halves with
// XVFCVT_S_D (XVPERMI_D imm 238 = 0b11101110 brings the high half down).
def : Pat<(v4f32 (sint_to_fp v4i64:$vj)),
          (EXTRACT_SUBREG (XVFCVT_S_D (XVPERMI_D (XVFFINT_D_L v4i64:$vj), 238),
                                      (XVFFINT_D_L v4i64:$vj)),
                          sub_128)>;

// XVFFINT_{S_WU/D_LU}: unsigned int -> FP.
def : Pat<(v8f32 (uint_to_fp v8i32:$vj)), (XVFFINT_S_WU v8i32:$vj)>;
def : Pat<(v4f64 (uint_to_fp v4i64:$vj)), (XVFFINT_D_LU v4i64:$vj)>;
def : Pat<(v4f64 (uint_to_fp v4i32:$vj)),
          (XVFFINT_D_LU (VEXT2XV_DU_WU (SUBREG_TO_REG (i64 0), v4i32:$vj,
                                                      sub_128)))>;
def : Pat<(v4f32 (uint_to_fp v4i64:$vj)),
          (EXTRACT_SUBREG (XVFCVT_S_D (XVPERMI_D (XVFFINT_D_LU v4i64:$vj), 238),
                                      (XVFFINT_D_LU v4i64:$vj)),
                          sub_128)>;

// XVFTINTRZ_{W_S/L_D}: FP -> signed int, truncating toward zero.
def : Pat<(v8i32 (fp_to_sint v8f32:$vj)), (XVFTINTRZ_W_S v8f32:$vj)>;
def : Pat<(v4i64 (fp_to_sint v4f64:$vj)), (XVFTINTRZ_L_D v4f64:$vj)>;
def : Pat<(v4i64 (fp_to_sint v4f32:$vj)),
          (VEXT2XV_D_W (SUBREG_TO_REG (i64 0), (VFTINTRZ_W_S v4f32:$vj),
                                      sub_128))>;
def : Pat<(v4i32 (fp_to_sint (v4f64 LASX256:$vj))),
          (EXTRACT_SUBREG (XVFTINTRZ_W_S (XVFCVT_S_D (XVPERMI_D v4f64:$vj, 238),
                                                     v4f64:$vj)),
                          sub_128)>;

// XVFTINTRZ_{WU_S/LU_D}: FP -> unsigned int, truncating toward zero.
def : Pat<(v8i32 (fp_to_uint v8f32:$vj)), (XVFTINTRZ_WU_S v8f32:$vj)>;
def : Pat<(v4i64 (fp_to_uint v4f64:$vj)), (XVFTINTRZ_LU_D v4f64:$vj)>;
def : Pat<(v4i64 (fp_to_uint v4f32:$vj)),
          (VEXT2XV_DU_WU (SUBREG_TO_REG (i64 0), (VFTINTRZ_WU_S v4f32:$vj),
                                        sub_128))>;
// NOTE(review): this unsigned narrowing path truncates with the *signed*
// XVFTINTRZ_W_S (mirroring the fp_to_sint pattern above); inputs in
// (INT32_MAX, UINT32_MAX] would not round-trip — confirm this is intended
// (fp_to_uint is poison only for values unrepresentable in u32).
def : Pat<(v4i32 (fp_to_uint (v4f64 LASX256:$vj))),
          (EXTRACT_SUBREG (XVFTINTRZ_W_S (XVFCVT_S_D (XVPERMI_D v4f64:$vj, 238),
                                                     v4f64:$vj)),
                          sub_128)>;

// XVPERMI_Q: build a 256-bit vector from two 128-bit halves; immediate 2
// selects operand-1-low / operand-2-low into the result halves.
foreach vt = [v32i8, v16i16, v8i32, v4i64, v8f32, v4f64] in
def : Pat<(vt (concat_vectors LSX128:$vd, LSX128:$vj)),
          (XVPERMI_Q (SUBREG_TO_REG (i64 0), LSX128:$vd, sub_128),
                     (SUBREG_TO_REG (i64 0), LSX128:$vj, sub_128), 2)>;
| |
| } // Predicates = [HasExtLASX] |
| |
/// Intrinsic pattern

// Maps an instruction mnemonic (e.g. "XVSADD_B") to the LASX intrinsic record
// of the same lowercased name (int_loongarch_lasx_xvsadd_b).
class deriveLASXIntrinsic<string Inst> {
  Intrinsic ret = !cast<Intrinsic>(!tolower("int_loongarch_lasx_"#Inst));
}
| |
| let Predicates = [HasExtLASX] in { |
| |
// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xj, vty:$xk),
//     (LAInst vty:$xj, vty:$xk)>;
// Two-operand intrinsics whose LLVM-IR operand types match the instruction's
// source element width; each maps 1:1 onto the instruction of the same name.
foreach Inst = ["XVSADD_B", "XVSADD_BU", "XVSSUB_B", "XVSSUB_BU",
                "XVHADDW_H_B", "XVHADDW_HU_BU", "XVHSUBW_H_B", "XVHSUBW_HU_BU",
                "XVADDWEV_H_B", "XVADDWOD_H_B", "XVSUBWEV_H_B", "XVSUBWOD_H_B",
                "XVADDWEV_H_BU", "XVADDWOD_H_BU", "XVSUBWEV_H_BU", "XVSUBWOD_H_BU",
                "XVADDWEV_H_BU_B", "XVADDWOD_H_BU_B",
                "XVAVG_B", "XVAVG_BU", "XVAVGR_B", "XVAVGR_BU",
                "XVABSD_B", "XVABSD_BU", "XVADDA_B", "XVMUH_B", "XVMUH_BU",
                "XVMULWEV_H_B", "XVMULWOD_H_B", "XVMULWEV_H_BU", "XVMULWOD_H_BU",
                "XVMULWEV_H_BU_B", "XVMULWOD_H_BU_B", "XVSIGNCOV_B",
                "XVANDN_V", "XVORN_V", "XVROTR_B", "XVSRLR_B", "XVSRAR_B",
                "XVSEQ_B", "XVSLE_B", "XVSLE_BU", "XVSLT_B", "XVSLT_BU",
                "XVPACKEV_B", "XVPACKOD_B", "XVPICKEV_B", "XVPICKOD_B",
                "XVILVL_B", "XVILVH_B"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v32i8 LASX256:$xj), (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVSADD_H", "XVSADD_HU", "XVSSUB_H", "XVSSUB_HU",
                "XVHADDW_W_H", "XVHADDW_WU_HU", "XVHSUBW_W_H", "XVHSUBW_WU_HU",
                "XVADDWEV_W_H", "XVADDWOD_W_H", "XVSUBWEV_W_H", "XVSUBWOD_W_H",
                "XVADDWEV_W_HU", "XVADDWOD_W_HU", "XVSUBWEV_W_HU", "XVSUBWOD_W_HU",
                "XVADDWEV_W_HU_H", "XVADDWOD_W_HU_H",
                "XVAVG_H", "XVAVG_HU", "XVAVGR_H", "XVAVGR_HU",
                "XVABSD_H", "XVABSD_HU", "XVADDA_H", "XVMUH_H", "XVMUH_HU",
                "XVMULWEV_W_H", "XVMULWOD_W_H", "XVMULWEV_W_HU", "XVMULWOD_W_HU",
                "XVMULWEV_W_HU_H", "XVMULWOD_W_HU_H", "XVSIGNCOV_H", "XVROTR_H",
                "XVSRLR_H", "XVSRAR_H", "XVSRLN_B_H", "XVSRAN_B_H", "XVSRLRN_B_H",
                "XVSRARN_B_H", "XVSSRLN_B_H", "XVSSRAN_B_H", "XVSSRLN_BU_H",
                "XVSSRAN_BU_H", "XVSSRLRN_B_H", "XVSSRARN_B_H", "XVSSRLRN_BU_H",
                "XVSSRARN_BU_H",
                "XVSEQ_H", "XVSLE_H", "XVSLE_HU", "XVSLT_H", "XVSLT_HU",
                "XVPACKEV_H", "XVPACKOD_H", "XVPICKEV_H", "XVPICKOD_H",
                "XVILVL_H", "XVILVH_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v16i16 LASX256:$xj), (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVSADD_W", "XVSADD_WU", "XVSSUB_W", "XVSSUB_WU",
                "XVHADDW_D_W", "XVHADDW_DU_WU", "XVHSUBW_D_W", "XVHSUBW_DU_WU",
                "XVADDWEV_D_W", "XVADDWOD_D_W", "XVSUBWEV_D_W", "XVSUBWOD_D_W",
                "XVADDWEV_D_WU", "XVADDWOD_D_WU", "XVSUBWEV_D_WU", "XVSUBWOD_D_WU",
                "XVADDWEV_D_WU_W", "XVADDWOD_D_WU_W",
                "XVAVG_W", "XVAVG_WU", "XVAVGR_W", "XVAVGR_WU",
                "XVABSD_W", "XVABSD_WU", "XVADDA_W", "XVMUH_W", "XVMUH_WU",
                "XVMULWEV_D_W", "XVMULWOD_D_W", "XVMULWEV_D_WU", "XVMULWOD_D_WU",
                "XVMULWEV_D_WU_W", "XVMULWOD_D_WU_W", "XVSIGNCOV_W", "XVROTR_W",
                "XVSRLR_W", "XVSRAR_W", "XVSRLN_H_W", "XVSRAN_H_W", "XVSRLRN_H_W",
                "XVSRARN_H_W", "XVSSRLN_H_W", "XVSSRAN_H_W", "XVSSRLN_HU_W",
                "XVSSRAN_HU_W", "XVSSRLRN_H_W", "XVSSRARN_H_W", "XVSSRLRN_HU_W",
                "XVSSRARN_HU_W",
                "XVSEQ_W", "XVSLE_W", "XVSLE_WU", "XVSLT_W", "XVSLT_WU",
                "XVPACKEV_W", "XVPACKOD_W", "XVPICKEV_W", "XVPICKOD_W",
                "XVILVL_W", "XVILVH_W", "XVPERM_W"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v8i32 LASX256:$xj), (v8i32 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVADD_Q", "XVSUB_Q",
                "XVSADD_D", "XVSADD_DU", "XVSSUB_D", "XVSSUB_DU",
                "XVHADDW_Q_D", "XVHADDW_QU_DU", "XVHSUBW_Q_D", "XVHSUBW_QU_DU",
                "XVADDWEV_Q_D", "XVADDWOD_Q_D", "XVSUBWEV_Q_D", "XVSUBWOD_Q_D",
                "XVADDWEV_Q_DU", "XVADDWOD_Q_DU", "XVSUBWEV_Q_DU", "XVSUBWOD_Q_DU",
                "XVADDWEV_Q_DU_D", "XVADDWOD_Q_DU_D",
                "XVAVG_D", "XVAVG_DU", "XVAVGR_D", "XVAVGR_DU",
                "XVABSD_D", "XVABSD_DU", "XVADDA_D", "XVMUH_D", "XVMUH_DU",
                "XVMULWEV_Q_D", "XVMULWOD_Q_D", "XVMULWEV_Q_DU", "XVMULWOD_Q_DU",
                "XVMULWEV_Q_DU_D", "XVMULWOD_Q_DU_D", "XVSIGNCOV_D", "XVROTR_D",
                "XVSRLR_D", "XVSRAR_D", "XVSRLN_W_D", "XVSRAN_W_D", "XVSRLRN_W_D",
                "XVSRARN_W_D", "XVSSRLN_W_D", "XVSSRAN_W_D", "XVSSRLN_WU_D",
                "XVSSRAN_WU_D", "XVSSRLRN_W_D", "XVSSRARN_W_D", "XVSSRLRN_WU_D",
                "XVSSRARN_WU_D", "XVFFINT_S_L",
                "XVSEQ_D", "XVSLE_D", "XVSLE_DU", "XVSLT_D", "XVSLT_DU",
                "XVPACKEV_D", "XVPACKOD_D", "XVPICKEV_D", "XVPICKOD_D",
                "XVILVL_D", "XVILVH_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v4i64 LASX256:$xj), (v4i64 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;
| |
// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xd, vty:$xj, vty:$xk),
//     (LAInst vty:$xd, vty:$xj, vty:$xk)>;
// Widening multiply-accumulate: the accumulator ($xd) is one element width
// wider than the two multiplicands.
foreach Inst = ["XVMADDWEV_H_B", "XVMADDWOD_H_B", "XVMADDWEV_H_BU",
                "XVMADDWOD_H_BU", "XVMADDWEV_H_BU_B", "XVMADDWOD_H_BU_B"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v16i16 LASX256:$xd), (v32i8 LASX256:$xj), (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVMADDWEV_W_H", "XVMADDWOD_W_H", "XVMADDWEV_W_HU",
                "XVMADDWOD_W_HU", "XVMADDWEV_W_HU_H", "XVMADDWOD_W_HU_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v8i32 LASX256:$xd), (v16i16 LASX256:$xj), (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVMADDWEV_D_W", "XVMADDWOD_D_W", "XVMADDWEV_D_WU",
                "XVMADDWOD_D_WU", "XVMADDWEV_D_WU_W", "XVMADDWOD_D_WU_W"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v4i64 LASX256:$xd), (v8i32 LASX256:$xj), (v8i32 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
// The Q_D forms accumulate into 128-bit lanes, typed as v4i64 throughout.
foreach Inst = ["XVMADDWEV_Q_D", "XVMADDWOD_Q_D", "XVMADDWEV_Q_DU",
                "XVMADDWOD_Q_DU", "XVMADDWEV_Q_DU_D", "XVMADDWOD_Q_DU_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v4i64 LASX256:$xd), (v4i64 LASX256:$xj), (v4i64 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
| |
// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xj),
//     (LAInst vty:$xj)>;
// Single-operand intrinsics, grouped by the source element width.
foreach Inst = ["XVEXTH_H_B", "XVEXTH_HU_BU",
                "XVMSKLTZ_B", "XVMSKGEZ_B", "XVMSKNZ_B",
                "XVCLO_B", "VEXT2XV_H_B", "VEXT2XV_HU_BU",
                "VEXT2XV_W_B", "VEXT2XV_WU_BU", "VEXT2XV_D_B",
                "VEXT2XV_DU_BU", "XVREPLVE0_B", "XVREPLVE0_Q"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v32i8 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
foreach Inst = ["XVEXTH_W_H", "XVEXTH_WU_HU", "XVMSKLTZ_H",
                "XVCLO_H", "XVFCVTL_S_H", "XVFCVTH_S_H",
                "VEXT2XV_W_H", "VEXT2XV_WU_HU", "VEXT2XV_D_H",
                "VEXT2XV_DU_HU", "XVREPLVE0_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v16i16 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
foreach Inst = ["XVEXTH_D_W", "XVEXTH_DU_WU", "XVMSKLTZ_W",
                "XVCLO_W", "XVFFINT_S_W", "XVFFINT_S_WU",
                "XVFFINTL_D_W", "XVFFINTH_D_W",
                "VEXT2XV_D_W", "VEXT2XV_DU_WU", "XVREPLVE0_W"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v8i32 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
foreach Inst = ["XVEXTH_Q_D", "XVEXTH_QU_DU", "XVMSKLTZ_D",
                "XVEXTL_Q_D", "XVEXTL_QU_DU",
                "XVCLO_D", "XVFFINT_D_L", "XVFFINT_D_LU",
                "XVREPLVE0_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v4i64 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
| |
// Pat<(Intrinsic timm:$imm)
//     (LAInst timm:$imm)>;
def : Pat<(int_loongarch_lasx_xvldi timm:$imm),
          (XVLDI (to_valid_timm timm:$imm))>;
// The repli intrinsics select to pseudos (note the "Pseudo" name prefix).
foreach Inst = ["XVREPLI_B", "XVREPLI_H", "XVREPLI_W", "XVREPLI_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret timm:$imm),
            (!cast<LAInst>("Pseudo"#Inst) (to_valid_timm timm:$imm))>;
| |
// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xj, timm:$imm)
//     (LAInst vty:$xj, timm:$imm)>;
// Vector + immediate intrinsics; to_valid_timm legalizes the target-constant
// operand for the instruction.
foreach Inst = ["XVSAT_B", "XVSAT_BU", "XVNORI_B", "XVROTRI_B", "XVSLLWIL_H_B",
                "XVSLLWIL_HU_BU", "XVSRLRI_B", "XVSRARI_B",
                "XVSEQI_B", "XVSLEI_B", "XVSLEI_BU", "XVSLTI_B", "XVSLTI_BU",
                "XVREPL128VEI_B", "XVBSLL_V", "XVBSRL_V", "XVSHUF4I_B"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v32i8 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xj, (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSAT_H", "XVSAT_HU", "XVROTRI_H", "XVSLLWIL_W_H",
                "XVSLLWIL_WU_HU", "XVSRLRI_H", "XVSRARI_H",
                "XVSEQI_H", "XVSLEI_H", "XVSLEI_HU", "XVSLTI_H", "XVSLTI_HU",
                "XVREPL128VEI_H", "XVSHUF4I_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v16i16 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xj, (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSAT_W", "XVSAT_WU", "XVROTRI_W", "XVSLLWIL_D_W",
                "XVSLLWIL_DU_WU", "XVSRLRI_W", "XVSRARI_W",
                "XVSEQI_W", "XVSLEI_W", "XVSLEI_WU", "XVSLTI_W", "XVSLTI_WU",
                "XVREPL128VEI_W", "XVSHUF4I_W", "XVPICKVE_W"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v8i32 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xj, (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSAT_D", "XVSAT_DU", "XVROTRI_D", "XVSRLRI_D", "XVSRARI_D",
                "XVSEQI_D", "XVSLEI_D", "XVSLEI_DU", "XVSLTI_D", "XVSLTI_DU",
                "XVPICKVE2GR_D", "XVPICKVE2GR_DU",
                "XVREPL128VEI_D", "XVPERMI_D", "XVPICKVE_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v4i64 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xj, (to_valid_timm timm:$imm))>;
| |
// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xd, vty:$xj, timm:$imm)
//     (LAInst vty:$xd, vty:$xj, timm:$imm)>;
// Intrinsics that read the destination register as an extra source
// (narrowing-insert, insert-extract, bit-select families) plus an immediate.
foreach Inst = ["XVSRLNI_B_H", "XVSRANI_B_H", "XVSRLRNI_B_H", "XVSRARNI_B_H",
                "XVSSRLNI_B_H", "XVSSRANI_B_H", "XVSSRLNI_BU_H", "XVSSRANI_BU_H",
                "XVSSRLRNI_B_H", "XVSSRARNI_B_H", "XVSSRLRNI_BU_H", "XVSSRARNI_BU_H",
                "XVFRSTPI_B", "XVBITSELI_B", "XVEXTRINS_B", "XVPERMI_Q"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v32i8 LASX256:$xd), (v32i8 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj,
                                 (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSRLNI_H_W", "XVSRANI_H_W", "XVSRLRNI_H_W", "XVSRARNI_H_W",
                "XVSSRLNI_H_W", "XVSSRANI_H_W", "XVSSRLNI_HU_W", "XVSSRANI_HU_W",
                "XVSSRLRNI_H_W", "XVSSRARNI_H_W", "XVSSRLRNI_HU_W", "XVSSRARNI_HU_W",
                "XVFRSTPI_H", "XVEXTRINS_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v16i16 LASX256:$xd), (v16i16 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj,
                                 (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSRLNI_W_D", "XVSRANI_W_D", "XVSRLRNI_W_D", "XVSRARNI_W_D",
                "XVSSRLNI_W_D", "XVSSRANI_W_D", "XVSSRLNI_WU_D", "XVSSRANI_WU_D",
                "XVSSRLRNI_W_D", "XVSSRARNI_W_D", "XVSSRLRNI_WU_D", "XVSSRARNI_WU_D",
                "XVPERMI_W", "XVEXTRINS_W", "XVINSVE0_W"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v8i32 LASX256:$xd), (v8i32 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj,
                                 (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSRLNI_D_Q", "XVSRANI_D_Q", "XVSRLRNI_D_Q", "XVSRARNI_D_Q",
                "XVSSRLNI_D_Q", "XVSSRANI_D_Q", "XVSSRLNI_DU_Q", "XVSSRANI_DU_Q",
                "XVSSRLRNI_D_Q", "XVSSRARNI_D_Q", "XVSSRLRNI_DU_Q", "XVSSRARNI_DU_Q",
                "XVSHUF4I_D", "XVEXTRINS_D", "XVINSVE0_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v4i64 LASX256:$xd), (v4i64 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj,
                                 (to_valid_timm timm:$imm))>;
| |
// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xd, vty:$xj, vty:$xk),
//     (LAInst vty:$xd, vty:$xj, vty:$xk)>;
// Three-vector intrinsics where the destination is also a source.
foreach Inst = ["XVFRSTP_B", "XVBITSEL_V", "XVSHUF_B"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v32i8 LASX256:$xd), (v32i8 LASX256:$xj), (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVFRSTP_H", "XVSHUF_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v16i16 LASX256:$xd), (v16i16 LASX256:$xj), (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
def : Pat<(int_loongarch_lasx_xvshuf_w (v8i32 LASX256:$xd), (v8i32 LASX256:$xj),
                                       (v8i32 LASX256:$xk)),
          (XVSHUF_W LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
def : Pat<(int_loongarch_lasx_xvshuf_d (v4i64 LASX256:$xd), (v4i64 LASX256:$xj),
                                       (v4i64 LASX256:$xk)),
          (XVSHUF_D LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
| |
// vty: v8f32/v4f64
// Pat<(Intrinsic vty:$xj, vty:$xk, vty:$xa),
//     (LAInst vty:$xj, vty:$xk, vty:$xa)>;
// Fused multiply-add variants (FMADD itself is selected from the fma node
// patterns above, not from an intrinsic).
foreach Inst = ["XVFMSUB_S", "XVFNMADD_S", "XVFNMSUB_S"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v8f32 LASX256:$xj), (v8f32 LASX256:$xk), (v8f32 LASX256:$xa)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk, LASX256:$xa)>;
foreach Inst = ["XVFMSUB_D", "XVFNMADD_D", "XVFNMSUB_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v4f64 LASX256:$xj), (v4f64 LASX256:$xk), (v4f64 LASX256:$xa)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk, LASX256:$xa)>;

// vty: v8f32/v4f64
// Pat<(Intrinsic vty:$xj, vty:$xk),
//     (LAInst vty:$xj, vty:$xk)>;
// FP min/max, format conversion, and the full quiet (C*) / signaling (S*)
// compare families.
foreach Inst = ["XVFMAX_S", "XVFMIN_S", "XVFMAXA_S", "XVFMINA_S", "XVFCVT_H_S",
                "XVFCMP_CAF_S", "XVFCMP_CUN_S", "XVFCMP_CEQ_S", "XVFCMP_CUEQ_S",
                "XVFCMP_CLT_S", "XVFCMP_CULT_S", "XVFCMP_CLE_S", "XVFCMP_CULE_S",
                "XVFCMP_CNE_S", "XVFCMP_COR_S", "XVFCMP_CUNE_S",
                "XVFCMP_SAF_S", "XVFCMP_SUN_S", "XVFCMP_SEQ_S", "XVFCMP_SUEQ_S",
                "XVFCMP_SLT_S", "XVFCMP_SULT_S", "XVFCMP_SLE_S", "XVFCMP_SULE_S",
                "XVFCMP_SNE_S", "XVFCMP_SOR_S", "XVFCMP_SUNE_S"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v8f32 LASX256:$xj), (v8f32 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVFMAX_D", "XVFMIN_D", "XVFMAXA_D", "XVFMINA_D", "XVFCVT_S_D",
                "XVFTINTRNE_W_D", "XVFTINTRZ_W_D", "XVFTINTRP_W_D", "XVFTINTRM_W_D",
                "XVFTINT_W_D",
                "XVFCMP_CAF_D", "XVFCMP_CUN_D", "XVFCMP_CEQ_D", "XVFCMP_CUEQ_D",
                "XVFCMP_CLT_D", "XVFCMP_CULT_D", "XVFCMP_CLE_D", "XVFCMP_CULE_D",
                "XVFCMP_CNE_D", "XVFCMP_COR_D", "XVFCMP_CUNE_D",
                "XVFCMP_SAF_D", "XVFCMP_SUN_D", "XVFCMP_SEQ_D", "XVFCMP_SUEQ_D",
                "XVFCMP_SLT_D", "XVFCMP_SULT_D", "XVFCMP_SLE_D", "XVFCMP_SULE_D",
                "XVFCMP_SNE_D", "XVFCMP_SOR_D", "XVFCMP_SUNE_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
                 (v4f64 LASX256:$xj), (v4f64 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;
| |
// vty: v8f32/v4f64
// Pat<(Intrinsic vty:$xj),
//     (LAInst vty:$xj)>;
// Single-operand FP intrinsics: classification, sqrt/reciprocal, rounding,
// and FP-to-int conversion in every rounding mode.
foreach Inst = ["XVFLOGB_S", "XVFCLASS_S", "XVFSQRT_S", "XVFRECIP_S", "XVFRSQRT_S",
                "XVFRINT_S", "XVFCVTL_D_S", "XVFCVTH_D_S",
                "XVFRINTRNE_S", "XVFRINTRZ_S", "XVFRINTRP_S", "XVFRINTRM_S",
                "XVFTINTRNE_W_S", "XVFTINTRZ_W_S", "XVFTINTRP_W_S", "XVFTINTRM_W_S",
                "XVFTINT_W_S", "XVFTINTRZ_WU_S", "XVFTINT_WU_S",
                "XVFTINTRNEL_L_S", "XVFTINTRNEH_L_S", "XVFTINTRZL_L_S",
                "XVFTINTRZH_L_S", "XVFTINTRPL_L_S", "XVFTINTRPH_L_S",
                "XVFTINTRML_L_S", "XVFTINTRMH_L_S", "XVFTINTL_L_S",
                "XVFTINTH_L_S"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v8f32 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
foreach Inst = ["XVFLOGB_D", "XVFCLASS_D", "XVFSQRT_D", "XVFRECIP_D", "XVFRSQRT_D",
                "XVFRINT_D",
                "XVFRINTRNE_D", "XVFRINTRZ_D", "XVFRINTRP_D", "XVFRINTRM_D",
                "XVFTINTRNE_L_D", "XVFTINTRZ_L_D", "XVFTINTRP_L_D", "XVFTINTRM_L_D",
                "XVFTINT_L_D", "XVFTINTRZ_LU_D", "XVFTINT_LU_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v4f64 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
| |
// 256-Bit vector FP approximate reciprocal operation
// Gated on the Frecipe feature in addition to LASX; covers both the
// intrinsics and the target estimate nodes.
let Predicates = [HasFrecipe] in {
foreach Inst = ["XVFRECIPE_S", "XVFRSQRTE_S"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v8f32 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
foreach Inst = ["XVFRECIPE_D", "XVFRSQRTE_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v4f64 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;

def : Pat<(loongarch_vfrecipe v8f32:$src),
          (XVFRECIPE_S v8f32:$src)>;
def : Pat<(loongarch_vfrecipe v4f64:$src),
          (XVFRECIPE_D v4f64:$src)>;
def : Pat<(loongarch_vfrsqrte v8f32:$src),
          (XVFRSQRTE_S v8f32:$src)>;
def : Pat<(loongarch_vfrsqrte v4f64:$src),
          (XVFRSQRTE_D v4f64:$src)>;
}
| |
// FP-typed element-pick intrinsics share the integer XVPICKVE encodings.
def : Pat<(int_loongarch_lasx_xvpickve_w_f v8f32:$xj, timm:$imm),
          (XVPICKVE_W v8f32:$xj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvpickve_d_f v4f64:$xj, timm:$imm),
          (XVPICKVE_D v4f64:$xj, (to_valid_timm timm:$imm))>;

// load
def : Pat<(int_loongarch_lasx_xvld GPR:$rj, timm:$imm),
          (XVLD GPR:$rj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvldx GPR:$rj, GPR:$rk),
          (XVLDX GPR:$rj, GPR:$rk)>;

// Load one scalar and replicate it across the whole register.
def : Pat<(int_loongarch_lasx_xvldrepl_b GPR:$rj, timm:$imm),
          (XVLDREPL_B GPR:$rj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvldrepl_h GPR:$rj, timm:$imm),
          (XVLDREPL_H GPR:$rj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvldrepl_w GPR:$rj, timm:$imm),
          (XVLDREPL_W GPR:$rj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvldrepl_d GPR:$rj, timm:$imm),
          (XVLDREPL_D GPR:$rj, (to_valid_timm timm:$imm))>;

// store
def : Pat<(int_loongarch_lasx_xvst LASX256:$xd, GPR:$rj, timm:$imm),
          (XVST LASX256:$xd, GPR:$rj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvstx LASX256:$xd, GPR:$rj, GPR:$rk),
          (XVSTX LASX256:$xd, GPR:$rj, GPR:$rk)>;

// Store a single element ($idx) at base + offset ($imm).
def : Pat<(int_loongarch_lasx_xvstelm_b v32i8:$xd, GPR:$rj, timm:$imm, timm:$idx),
          (XVSTELM_B v32i8:$xd, GPR:$rj, (to_valid_timm timm:$imm),
                     (to_valid_timm timm:$idx))>;
def : Pat<(int_loongarch_lasx_xvstelm_h v16i16:$xd, GPR:$rj, timm:$imm, timm:$idx),
          (XVSTELM_H v16i16:$xd, GPR:$rj, (to_valid_timm timm:$imm),
                     (to_valid_timm timm:$idx))>;
def : Pat<(int_loongarch_lasx_xvstelm_w v8i32:$xd, GPR:$rj, timm:$imm, timm:$idx),
          (XVSTELM_W v8i32:$xd, GPR:$rj, (to_valid_timm timm:$imm),
                     (to_valid_timm timm:$idx))>;
def : Pat<(int_loongarch_lasx_xvstelm_d v4i64:$xd, GPR:$rj, timm:$imm, timm:$idx),
          (XVSTELM_D v4i64:$xd, GPR:$rj, (to_valid_timm timm:$imm),
                     (to_valid_timm timm:$idx))>;
| |
| } // Predicates = [HasExtLASX] |