| ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py |
| ; RUN: llc %s -mtriple=aarch64 -mattr=+v8.3a,+sha3 -o - | FileCheck %s |
| |
; SHA512H takes its first two operands in Q-register form (q0, q1), unlike the
; other SHA-3 instructions below which use V-register .2d syntax.
define <2 x i64> @test_vsha512h(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c) {
; CHECK-LABEL: test_vsha512h:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: sha512h q0, q1, v2.2d
; CHECK-NEXT: ret
entry:
%vsha512h.i = tail call <2 x i64> @llvm.aarch64.crypto.sha512h(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c)
ret <2 x i64> %vsha512h.i
}
| |
; SHA512H2 also uses the Q-register operand form for its first two operands.
define <2 x i64> @test_vsha512h2(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c) {
; CHECK-LABEL: test_vsha512h2:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: sha512h2 q0, q1, v2.2d
; CHECK-NEXT: ret
entry:
%vsha512h2.i = tail call <2 x i64> @llvm.aarch64.crypto.sha512h2(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c)
ret <2 x i64> %vsha512h2.i
}
| |
; SHA512SU0 is the two-operand schedule-update form; the destination is also
; a source (v0 is both input %a and result).
define <2 x i64> @test_vsha512su0(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vsha512su0:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: sha512su0 v0.2d, v1.2d
; CHECK-NEXT: ret
entry:
%vsha512su0.i = tail call <2 x i64> @llvm.aarch64.crypto.sha512su0(<2 x i64> %a, <2 x i64> %b)
ret <2 x i64> %vsha512su0.i
}
| |
; SHA512SU1 is the three-operand schedule-update form (all .2d vector regs).
define <2 x i64> @test_vsha512su1(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c) {
; CHECK-LABEL: test_vsha512su1:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: sha512su1 v0.2d, v1.2d, v2.2d
; CHECK-NEXT: ret
entry:
%vsha512su1.i = tail call <2 x i64> @llvm.aarch64.crypto.sha512su1(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c)
ret <2 x i64> %vsha512su1.i
}
| |
; RAX1 (rotate-and-xor) selects from the rax1 intrinsic with .2d operands.
define <2 x i64> @test_vrax1(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vrax1:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: rax1 v0.2d, v0.2d, v1.2d
; CHECK-NEXT: ret
entry:
%vrax1.i = tail call <2 x i64> @llvm.aarch64.crypto.rax1(<2 x i64> %a, <2 x i64> %b)
ret <2 x i64> %vrax1.i
}
| |
; XAR: the i64 immarg rotate amount (1) must be folded into the instruction's
; immediate field (#1) rather than materialized in a register.
define <2 x i64> @test_vxar(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vxar:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: xar v0.2d, v0.2d, v1.2d, #1
; CHECK-NEXT: ret
entry:
%vxar.i = tail call <2 x i64> @llvm.aarch64.crypto.xar(<2 x i64> %a, <2 x i64> %b, i64 1)
ret <2 x i64> %vxar.i
}
| |
; BCAX is element-type agnostic: all element widths below print as .16b.
define <16 x i8> @test_bcax_8(<16 x i8> %a, <16 x i8> %b, <16 x i8> %c) {
; CHECK-LABEL: test_bcax_8:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: bcax v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%vbcax_8.i = tail call <16 x i8> @llvm.aarch64.crypto.bcaxu.v16i8(<16 x i8> %a, <16 x i8> %b, <16 x i8> %c)
ret <16 x i8> %vbcax_8.i
}
| |
; EOR3 (three-way XOR) is likewise type-agnostic and prints as .16b.
define <16 x i8> @test_eor3_8(<16 x i8> %a, <16 x i8> %b, <16 x i8> %c) {
; CHECK-LABEL: test_eor3_8:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: eor3 v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%veor3_8.i = tail call <16 x i8> @llvm.aarch64.crypto.eor3u.v16i8(<16 x i8> %a, <16 x i8> %b, <16 x i8> %c)
ret <16 x i8> %veor3_8.i
}
| |
; The signed-variant intrinsic (bcaxs) selects the same BCAX instruction as
; the unsigned one above — the operation is bitwise, so sign is irrelevant.
define <16 x i8> @test_bcax_s8(<16 x i8> %a, <16 x i8> %b, <16 x i8> %c) {
; CHECK-LABEL: test_bcax_s8:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: bcax v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%vbcax_8.i = tail call <16 x i8> @llvm.aarch64.crypto.bcaxs.v16i8(<16 x i8> %a, <16 x i8> %b, <16 x i8> %c)
ret <16 x i8> %vbcax_8.i
}
| |
; Signed variant (eor3s) selects the same EOR3 instruction as the unsigned one.
define <16 x i8> @test_eor3_s8(<16 x i8> %a, <16 x i8> %b, <16 x i8> %c) {
; CHECK-LABEL: test_eor3_s8:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: eor3 v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%veor3_8.i = tail call <16 x i8> @llvm.aarch64.crypto.eor3s.v16i8(<16 x i8> %a, <16 x i8> %b, <16 x i8> %c)
ret <16 x i8> %veor3_8.i
}
| |
; i16 elements: BCAX is still emitted with .16b operands (type-agnostic).
define <8 x i16> @test_bcax_16(<8 x i16> %a, <8 x i16> %b, <8 x i16> %c) {
; CHECK-LABEL: test_bcax_16:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: bcax v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%vbcax_16.i = tail call <8 x i16> @llvm.aarch64.crypto.bcaxu.v8i16(<8 x i16> %a, <8 x i16> %b, <8 x i16> %c)
ret <8 x i16> %vbcax_16.i
}
| |
; i16 elements: EOR3 is still emitted with .16b operands.
define <8 x i16> @test_eor3_16(<8 x i16> %a, <8 x i16> %b, <8 x i16> %c) {
; CHECK-LABEL: test_eor3_16:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: eor3 v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%veor3_16.i = tail call <8 x i16> @llvm.aarch64.crypto.eor3u.v8i16(<8 x i16> %a, <8 x i16> %b, <8 x i16> %c)
ret <8 x i16> %veor3_16.i
}
| |
; Signed i16 variant (bcaxs.v8i16): same BCAX selection as the unsigned test.
define <8 x i16> @test_bcax_s16(<8 x i16> %a, <8 x i16> %b, <8 x i16> %c) {
; CHECK-LABEL: test_bcax_s16:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: bcax v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%vbcax_16.i = tail call <8 x i16> @llvm.aarch64.crypto.bcaxs.v8i16(<8 x i16> %a, <8 x i16> %b, <8 x i16> %c)
ret <8 x i16> %vbcax_16.i
}
| |
; Signed i16 variant (eor3s.v8i16): same EOR3 selection as the unsigned test.
define <8 x i16> @test_eor3_s16(<8 x i16> %a, <8 x i16> %b, <8 x i16> %c) {
; CHECK-LABEL: test_eor3_s16:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: eor3 v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%veor3_16.i = tail call <8 x i16> @llvm.aarch64.crypto.eor3s.v8i16(<8 x i16> %a, <8 x i16> %b, <8 x i16> %c)
ret <8 x i16> %veor3_16.i
}
| |
; i32 elements: BCAX is still emitted with .16b operands (type-agnostic).
define <4 x i32> @test_bcax_32(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: test_bcax_32:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: bcax v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%vbcax_32.i = tail call <4 x i32> @llvm.aarch64.crypto.bcaxu.v4i32(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c)
ret <4 x i32> %vbcax_32.i
}
| |
; i32 elements: EOR3 is still emitted with .16b operands.
define <4 x i32> @test_eor3_32(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: test_eor3_32:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: eor3 v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%veor3_32.i = tail call <4 x i32> @llvm.aarch64.crypto.eor3u.v4i32(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c)
ret <4 x i32> %veor3_32.i
}
| |
; Signed i32 variant (bcaxs.v4i32): same BCAX selection as the unsigned test.
define <4 x i32> @test_bcax_s32(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: test_bcax_s32:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: bcax v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%vbcax_32.i = tail call <4 x i32> @llvm.aarch64.crypto.bcaxs.v4i32(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c)
ret <4 x i32> %vbcax_32.i
}
| |
; Signed i32 variant (eor3s.v4i32): same EOR3 selection as the unsigned test.
define <4 x i32> @test_eor3_s32(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c) {
; CHECK-LABEL: test_eor3_s32:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: eor3 v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%veor3_32.i = tail call <4 x i32> @llvm.aarch64.crypto.eor3s.v4i32(<4 x i32> %a, <4 x i32> %b, <4 x i32> %c)
ret <4 x i32> %veor3_32.i
}
| |
; i64 elements: BCAX is still emitted with .16b operands (type-agnostic).
define <2 x i64> @test_bcax_64(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c) {
; CHECK-LABEL: test_bcax_64:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: bcax v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%vbcax_64.i = tail call <2 x i64> @llvm.aarch64.crypto.bcaxu.v2i64(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c)
ret <2 x i64> %vbcax_64.i
}
| |
; i64 elements: EOR3 is still emitted with .16b operands.
define <2 x i64> @test_eor3_64(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c) {
; CHECK-LABEL: test_eor3_64:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: eor3 v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%veor3_64.i = tail call <2 x i64> @llvm.aarch64.crypto.eor3u.v2i64(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c)
ret <2 x i64> %veor3_64.i
}
| |
; Signed i64 variant (bcaxs.v2i64): same BCAX selection as the unsigned test.
define <2 x i64> @test_bcax_s64(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c) {
; CHECK-LABEL: test_bcax_s64:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: bcax v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%vbcax_64.i = tail call <2 x i64> @llvm.aarch64.crypto.bcaxs.v2i64(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c)
ret <2 x i64> %vbcax_64.i
}
| |
; Signed i64 variant (eor3s.v2i64): same EOR3 selection as the unsigned test.
define <2 x i64> @test_eor3_s64(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c) {
; CHECK-LABEL: test_eor3_s64:
; CHECK: // %bb.0: // %entry
; CHECK-NEXT: eor3 v0.16b, v0.16b, v1.16b, v2.16b
; CHECK-NEXT: ret
entry:
%veor3_64.i = tail call <2 x i64> @llvm.aarch64.crypto.eor3s.v2i64(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c)
ret <2 x i64> %veor3_64.i
}
| |
; Declarations of the AArch64 crypto intrinsics exercised by the tests above.
; The xar rotate amount is immarg: it must be a compile-time constant.
declare <2 x i64> @llvm.aarch64.crypto.sha512h(<2 x i64>, <2 x i64>, <2 x i64>)
declare <2 x i64> @llvm.aarch64.crypto.sha512h2(<2 x i64>, <2 x i64>, <2 x i64>)
declare <2 x i64> @llvm.aarch64.crypto.sha512su0(<2 x i64>, <2 x i64>)
declare <2 x i64> @llvm.aarch64.crypto.sha512su1(<2 x i64>, <2 x i64>, <2 x i64>)
declare <2 x i64> @llvm.aarch64.crypto.rax1(<2 x i64>, <2 x i64>)
declare <2 x i64> @llvm.aarch64.crypto.xar(<2 x i64>, <2 x i64>, i64 immarg)
declare <16 x i8> @llvm.aarch64.crypto.bcaxu.v16i8(<16 x i8>, <16 x i8>, <16 x i8>)
declare <8 x i16> @llvm.aarch64.crypto.bcaxu.v8i16(<8 x i16>, <8 x i16>, <8 x i16>)
declare <4 x i32> @llvm.aarch64.crypto.bcaxu.v4i32(<4 x i32>, <4 x i32>, <4 x i32>)
declare <2 x i64> @llvm.aarch64.crypto.bcaxu.v2i64(<2 x i64>, <2 x i64>, <2 x i64>)
declare <16 x i8> @llvm.aarch64.crypto.bcaxs.v16i8(<16 x i8>, <16 x i8>, <16 x i8>)
declare <8 x i16> @llvm.aarch64.crypto.bcaxs.v8i16(<8 x i16>, <8 x i16>, <8 x i16>)
declare <4 x i32> @llvm.aarch64.crypto.bcaxs.v4i32(<4 x i32>, <4 x i32>, <4 x i32>)
declare <2 x i64> @llvm.aarch64.crypto.bcaxs.v2i64(<2 x i64>, <2 x i64>, <2 x i64>)
declare <16 x i8> @llvm.aarch64.crypto.eor3u.v16i8(<16 x i8>, <16 x i8>, <16 x i8>)
declare <8 x i16> @llvm.aarch64.crypto.eor3u.v8i16(<8 x i16>, <8 x i16>, <8 x i16>)
declare <4 x i32> @llvm.aarch64.crypto.eor3u.v4i32(<4 x i32>, <4 x i32>, <4 x i32>)
declare <2 x i64> @llvm.aarch64.crypto.eor3u.v2i64(<2 x i64>, <2 x i64>, <2 x i64>)
declare <16 x i8> @llvm.aarch64.crypto.eor3s.v16i8(<16 x i8>, <16 x i8>, <16 x i8>)
declare <8 x i16> @llvm.aarch64.crypto.eor3s.v8i16(<8 x i16>, <8 x i16>, <8 x i16>)
declare <4 x i32> @llvm.aarch64.crypto.eor3s.v4i32(<4 x i32>, <4 x i32>, <4 x i32>)
declare <2 x i64> @llvm.aarch64.crypto.eor3s.v2i64(<2 x i64>, <2 x i64>, <2 x i64>)
| |