| ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py |
| ; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \ |
| ; RUN: | FileCheck -check-prefix=RV64I %s |
| ; RUN: llc -mtriple=riscv64 -mattr=+f -target-abi lp64f -verify-machineinstrs < %s \ |
| ; RUN: | FileCheck -check-prefix=RV64IF %s |
| |
; The test cases check that we use the "si" versions of the conversions from
; double (__fixdfsi and __fixunsdfsi), i.e. the libcalls returning a 32-bit
; integer, rather than the "di" (64-bit result) variants.
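;
; For reference, a minimal non-strict sketch of the same conversions
; (illustrative only, not part of the checked test) would use the plain IR
; instructions:
;   %s = fptosi double %a to i32
;   %u = fptoui double %a to i32
; The constrained intrinsics below express the same conversions while making
; the floating-point exception behavior explicit via the !"fpexcept.strict"
; metadata argument.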
| |
| declare i32 @llvm.experimental.constrained.fptosi.i32.f64(double, metadata) |
| declare i32 @llvm.experimental.constrained.fptoui.i32.f64(double, metadata) |
| |
| define i32 @strict_fp64_to_ui32(double %a) nounwind strictfp { |
| ; RV64I-LABEL: strict_fp64_to_ui32: |
| ; RV64I: # %bb.0: # %entry |
| ; RV64I-NEXT: addi sp, sp, -16 |
| ; RV64I-NEXT: sd ra, 8(sp) # 8-byte Folded Spill |
| ; RV64I-NEXT: call __fixunsdfsi@plt |
| ; RV64I-NEXT: ld ra, 8(sp) # 8-byte Folded Reload |
| ; RV64I-NEXT: addi sp, sp, 16 |
| ; RV64I-NEXT: ret |
| ; |
| ; RV64IF-LABEL: strict_fp64_to_ui32: |
| ; RV64IF: # %bb.0: # %entry |
| ; RV64IF-NEXT: addi sp, sp, -16 |
| ; RV64IF-NEXT: sd ra, 8(sp) # 8-byte Folded Spill |
| ; RV64IF-NEXT: call __fixunsdfsi@plt |
| ; RV64IF-NEXT: ld ra, 8(sp) # 8-byte Folded Reload |
| ; RV64IF-NEXT: addi sp, sp, 16 |
| ; RV64IF-NEXT: ret |
| entry: |
| %conv = tail call i32 @llvm.experimental.constrained.fptoui.i32.f64(double %a, metadata !"fpexcept.strict") |
| ret i32 %conv |
| } |
| |
| define i32 @strict_fp64_to_si32(double %a) nounwind strictfp { |
| ; RV64I-LABEL: strict_fp64_to_si32: |
| ; RV64I: # %bb.0: # %entry |
| ; RV64I-NEXT: addi sp, sp, -16 |
| ; RV64I-NEXT: sd ra, 8(sp) # 8-byte Folded Spill |
| ; RV64I-NEXT: call __fixdfsi@plt |
| ; RV64I-NEXT: ld ra, 8(sp) # 8-byte Folded Reload |
| ; RV64I-NEXT: addi sp, sp, 16 |
| ; RV64I-NEXT: ret |
| ; |
| ; RV64IF-LABEL: strict_fp64_to_si32: |
| ; RV64IF: # %bb.0: # %entry |
| ; RV64IF-NEXT: addi sp, sp, -16 |
| ; RV64IF-NEXT: sd ra, 8(sp) # 8-byte Folded Spill |
| ; RV64IF-NEXT: call __fixdfsi@plt |
| ; RV64IF-NEXT: ld ra, 8(sp) # 8-byte Folded Reload |
| ; RV64IF-NEXT: addi sp, sp, 16 |
| ; RV64IF-NEXT: ret |
| entry: |
| %conv = tail call i32 @llvm.experimental.constrained.fptosi.i32.f64(double %a, metadata !"fpexcept.strict") |
| ret i32 %conv |
| } |
| |