| // NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py |
| // RUN: %clang_cc1 -triple riscv64 -target-feature +experimental-zbc -emit-llvm %s -o - \ |
| // RUN: | FileCheck %s -check-prefix=RV64ZBC |
| |
| // RV64ZBC-LABEL: @clmul( |
| // RV64ZBC-NEXT: entry: |
| // RV64ZBC-NEXT: [[A_ADDR:%.*]] = alloca i64, align 8 |
| // RV64ZBC-NEXT: [[B_ADDR:%.*]] = alloca i64, align 8 |
| // RV64ZBC-NEXT: store i64 [[A:%.*]], i64* [[A_ADDR]], align 8 |
| // RV64ZBC-NEXT: store i64 [[B:%.*]], i64* [[B_ADDR]], align 8 |
| // RV64ZBC-NEXT: [[TMP0:%.*]] = load i64, i64* [[A_ADDR]], align 8 |
| // RV64ZBC-NEXT: [[TMP1:%.*]] = load i64, i64* [[B_ADDR]], align 8 |
| // RV64ZBC-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.clmul.i64(i64 [[TMP0]], i64 [[TMP1]]) |
| // RV64ZBC-NEXT: ret i64 [[TMP2]] |
| // |
// Carry-less multiply, low half: exercises __builtin_riscv_clmul, which the
// CHECK lines above show lowering to the llvm.riscv.clmul.i64 intrinsic.
// Body must stay in sync with the autogenerated assertions — regenerate with
// update_cc_test_checks.py rather than editing by hand.
long clmul(long a, long b) {
  return __builtin_riscv_clmul(a, b);
}
| |
| // RV64ZBC-LABEL: @clmulh( |
| // RV64ZBC-NEXT: entry: |
| // RV64ZBC-NEXT: [[A_ADDR:%.*]] = alloca i64, align 8 |
| // RV64ZBC-NEXT: [[B_ADDR:%.*]] = alloca i64, align 8 |
| // RV64ZBC-NEXT: store i64 [[A:%.*]], i64* [[A_ADDR]], align 8 |
| // RV64ZBC-NEXT: store i64 [[B:%.*]], i64* [[B_ADDR]], align 8 |
| // RV64ZBC-NEXT: [[TMP0:%.*]] = load i64, i64* [[A_ADDR]], align 8 |
| // RV64ZBC-NEXT: [[TMP1:%.*]] = load i64, i64* [[B_ADDR]], align 8 |
| // RV64ZBC-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.clmulh.i64(i64 [[TMP0]], i64 [[TMP1]]) |
| // RV64ZBC-NEXT: ret i64 [[TMP2]] |
| // |
// Carry-less multiply, high half: exercises __builtin_riscv_clmulh, which the
// CHECK lines above show lowering to the llvm.riscv.clmulh.i64 intrinsic.
// Body must stay in sync with the autogenerated assertions — regenerate with
// update_cc_test_checks.py rather than editing by hand.
long clmulh(long a, long b) {
  return __builtin_riscv_clmulh(a, b);
}
| |
| // RV64ZBC-LABEL: @clmulr( |
| // RV64ZBC-NEXT: entry: |
| // RV64ZBC-NEXT: [[A_ADDR:%.*]] = alloca i64, align 8 |
| // RV64ZBC-NEXT: [[B_ADDR:%.*]] = alloca i64, align 8 |
| // RV64ZBC-NEXT: store i64 [[A:%.*]], i64* [[A_ADDR]], align 8 |
| // RV64ZBC-NEXT: store i64 [[B:%.*]], i64* [[B_ADDR]], align 8 |
| // RV64ZBC-NEXT: [[TMP0:%.*]] = load i64, i64* [[A_ADDR]], align 8 |
| // RV64ZBC-NEXT: [[TMP1:%.*]] = load i64, i64* [[B_ADDR]], align 8 |
| // RV64ZBC-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.clmulr.i64(i64 [[TMP0]], i64 [[TMP1]]) |
| // RV64ZBC-NEXT: ret i64 [[TMP2]] |
| // |
// Carry-less multiply, reversed: exercises __builtin_riscv_clmulr, which the
// CHECK lines above show lowering to the llvm.riscv.clmulr.i64 intrinsic.
// Body must stay in sync with the autogenerated assertions — regenerate with
// update_cc_test_checks.py rather than editing by hand.
long clmulr(long a, long b) {
  return __builtin_riscv_clmulr(a, b);
}