; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc < %s -o - -mtriple=riscv64 -mattr=+v -verify-machineinstrs | FileCheck %s
; We maintain the invariant that materializing a vmv0 use into $v0 never
; clobbers a live $v0 definition that still has users.
; Check that %asm1's result is saved and a $v0 = COPY restores it just before
; %asm2, so that the mask copy needed by %x doesn't clobber the value %asm2
; reads from v0.
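;
; At the MIR level the hazard looks roughly like this (a hand-written,
; simplified sketch; opcode and operand spellings are approximate and the
; virtual register names are illustrative, not taken from real pass output):
;
;   $v0 = INLINEASM &"vadd.vv $0, $1, $2", ...      ; %asm1 defines $v0
;   %saved:vr = COPY $v0                            ; save %asm1's result
;   $v0 = COPY %mask                                ; materialize the vmv0 use
;   %x:vr = PseudoVADD_VV_M1_MASK ..., $v0, ...     ; masked vadd consumes $v0
;   $v0 = COPY %saved                               ; the COPY this test checks for
;   INLINEASM &"vadd.vv $0, $1, $2", ..., $v0       ; %asm2 reads %asm1 via $v0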
define <vscale x 1 x i64> @between_inline_asm(<vscale x 1 x i64> %a, <vscale x 1 x i64> %b, <vscale x 1 x i1> %mask, ptr %p) {
; CHECK-LABEL: between_inline_asm:
; CHECK: # %bb.0:
; CHECK-NEXT: vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT: vmv1r.v v10, v0
; CHECK-NEXT: #APP
; CHECK-NEXT: vadd.vv v0, v8, v9
; CHECK-NEXT: #NO_APP
; CHECK-NEXT: vsetvli a1, zero, e64, m1, ta, ma
; CHECK-NEXT: vmv1r.v v11, v0
; CHECK-NEXT: vmv1r.v v0, v10
; CHECK-NEXT: vadd.vv v9, v8, v9, v0.t
; CHECK-NEXT: vmv1r.v v0, v11
; CHECK-NEXT: #APP
; CHECK-NEXT: vadd.vv v8, v8, v0
; CHECK-NEXT: #NO_APP
; CHECK-NEXT: vs1r.v v9, (a0)
; CHECK-NEXT: ret
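  ; %asm1 forces its result into v0 via the "={v0}" output constraint.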
%asm1 = tail call <vscale x 1 x i64> asm "vadd.vv $0, $1, $2", "={v0},^vr,^vr"(<vscale x 1 x i64> %a, <vscale x 1 x i64> %b)
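  ; The mask for %x must live in v0 when the masked vadd executes,
  ; temporarily clobbering %asm1's result.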
%x = call <vscale x 1 x i64> @llvm.riscv.vadd.mask(<vscale x 1 x i64> poison, <vscale x 1 x i64> %a, <vscale x 1 x i64> %b, <vscale x 1 x i1> %mask, i64 -1, i64 0)
store <vscale x 1 x i64> %x, ptr %p
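  ; %asm2 reads %asm1 from v0 via the "{v0}" input constraint, so %asm1's
  ; value must be restored into v0 first.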
%asm2 = tail call <vscale x 1 x i64> asm "vadd.vv $0, $1, $2", "=^vr,^vr,{v0}"(<vscale x 1 x i64> %a, <vscale x 1 x i64> %asm1)
ret <vscale x 1 x i64> %asm2
}