; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -passes=instcombine -S < %s | FileCheck %s

declare double @llvm.powi.f64.i32(double, i32)
declare double @llvm.powi.f64.i64(double, i64)
declare double @llvm.fabs.f64(double)
declare double @llvm.copysign.f64(double, double)
declare void @use(double)

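; The even exponent lets the fneg be dropped: powi(-x, 4) == powi(x, 4).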
define double @powi_fneg_even_int(double %x) {
; CHECK-LABEL: @powi_fneg_even_int(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 4)
; CHECK-NEXT:    ret double [[R]]
;
entry:
  %fneg = fneg double %x
  %r = tail call double @llvm.powi.f64.i32(double %fneg, i32 4)
  ret double %r
}

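; Likewise, the fabs is dropped for an even exponent.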
define double @powi_fabs_even_int(double %x) {
; CHECK-LABEL: @powi_fabs_even_int(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 4)
; CHECK-NEXT:    ret double [[R]]
;
entry:
  %f = tail call double @llvm.fabs.f64(double %x)
  %r = tail call double @llvm.powi.f64.i32(double %f, i32 4)
  ret double %r
}

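; Likewise, the copysign is dropped for an even exponent.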
define double @powi_copysign_even_int(double %x, double %y) {
; CHECK-LABEL: @powi_copysign_even_int(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 4)
; CHECK-NEXT:    ret double [[R]]
;
entry:
  %cs = tail call double @llvm.copysign.f64(double %x, double %y)
  %r = tail call double @llvm.powi.f64.i32(double %cs, i32 4)
  ret double %r
}

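; Negative test - the exponent is odd, so the fneg must be kept.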
define double @powi_fneg_odd_int(double %x) {
; CHECK-LABEL: @powi_fneg_odd_int(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[FNEG:%.*]] = fneg double [[X:%.*]]
; CHECK-NEXT:    [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[FNEG]], i32 5)
; CHECK-NEXT:    ret double [[R]]
;
entry:
  %fneg = fneg double %x
  %r = tail call double @llvm.powi.f64.i32(double %fneg, i32 5)
  ret double %r
}

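; Negative test - odd exponent, the fabs must be kept.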
define double @powi_fabs_odd_int(double %x) {
; CHECK-LABEL: @powi_fabs_odd_int(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[F:%.*]] = tail call double @llvm.fabs.f64(double [[X:%.*]])
; CHECK-NEXT:    [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[F]], i32 5)
; CHECK-NEXT:    ret double [[R]]
;
entry:
  %f = tail call double @llvm.fabs.f64(double %x)
  %r = tail call double @llvm.powi.f64.i32(double %f, i32 5)
  ret double %r
}

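; Negative test - odd exponent, the copysign must be kept.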
define double @powi_copysign_odd_int(double %x, double %y) {
; CHECK-LABEL: @powi_copysign_odd_int(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[CS:%.*]] = tail call double @llvm.copysign.f64(double [[X:%.*]], double [[Y:%.*]])
; CHECK-NEXT:    [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[CS]], i32 5)
; CHECK-NEXT:    ret double [[R]]
;
entry:
  %cs = tail call double @llvm.copysign.f64(double %x, double %y)
  %r = tail call double @llvm.powi.f64.i32(double %cs, i32 5)
  ret double %r
}

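; Negative test - powi(x, i) * x is not folded without reassoc on the fmul.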
define double @powi_fmul_arg0_no_reassoc(double %x, i32 %i) {
; CHECK-LABEL: @powi_fmul_arg0_no_reassoc(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[POW:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[I:%.*]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul double [[POW]], [[X]]
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %pow = tail call double @llvm.powi.f64.i32(double %x, i32 %i)
  %mul = fmul double %pow, %x
  ret double %mul
}


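; powi(x, i) * x with reassoc on the fmul; the checks currently show no fold.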
define double @powi_fmul_arg0(double %x, i32 %i) {
; CHECK-LABEL: @powi_fmul_arg0(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[POW:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[I:%.*]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul reassoc double [[POW]], [[X]]
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %pow = tail call double @llvm.powi.f64.i32(double %x, i32 %i)
  %mul = fmul reassoc double %pow, %x
  ret double %mul
}

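; Same pattern, but the powi has an extra use; also left unchanged.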
define double @powi_fmul_arg0_use(double %x, i32 %i) {
; CHECK-LABEL: @powi_fmul_arg0_use(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[POW:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[I:%.*]])
; CHECK-NEXT:    tail call void @use(double [[POW]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul reassoc double [[POW]], [[X]]
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %pow = tail call double @llvm.powi.f64.i32(double %x, i32 %i)
  tail call void @use(double %pow)
  %mul = fmul reassoc double %pow, %x
  ret double %mul
}

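; Negative test - powi(x, y) * powi(x, z) is not combined without reassoc.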
define double @powi_fmul_powi_no_reassoc(double %x, i32 %y, i32 %z) {
; CHECK-LABEL: @powi_fmul_powi_no_reassoc(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]])
; CHECK-NEXT:    [[P2:%.*]] = tail call double @llvm.powi.f64.i32(double [[X]], i32 [[Z:%.*]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul double [[P2]], [[P1]]
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
  %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z)
  %mul = fmul double %p2, %p1
  ret double %mul
}


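; powi(x, y) * powi(x, z) with reassoc on the fmul folds to powi(x, y + z).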
define double @powi_fmul_powi(double %x, i32 %y, i32 %z) {
; CHECK-LABEL: @powi_fmul_powi(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = add i32 [[Z:%.*]], [[Y:%.*]]
; CHECK-NEXT:    [[MUL:%.*]] = call reassoc double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[TMP0]])
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
  %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z)
  %mul = fmul reassoc double %p2, %p1
  ret double %mul
}

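; The same fold with fast on the fmul; fast carries over to the combined powi.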
define double @powi_fmul_powi_fast_on_fmul(double %x, i32 %y, i32 %z) {
; CHECK-LABEL: @powi_fmul_powi_fast_on_fmul(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = add i32 [[Z:%.*]], [[Y:%.*]]
; CHECK-NEXT:    [[MUL:%.*]] = call fast double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[TMP0]])
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
  %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z)
  %mul = fmul fast double %p2, %p1
  ret double %mul
}

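; Negative test - fast on the powi calls alone (plain fmul) does not enable the fold.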
define double @powi_fmul_powi_fast_on_powi(double %x, i32 %y, i32 %z) {
; CHECK-LABEL: @powi_fmul_powi_fast_on_powi(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[P1:%.*]] = tail call fast double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]])
; CHECK-NEXT:    [[P2:%.*]] = tail call fast double @llvm.powi.f64.i32(double [[X]], i32 [[Z:%.*]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul double [[P2]], [[P1]]
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %p1 = tail call fast double @llvm.powi.f64.i32(double %x, i32 %y)
  %p2 = tail call fast double @llvm.powi.f64.i32(double %x, i32 %z)
  %mul = fmul double %p2, %p1
  ret double %mul
}

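; powi(x, y) * powi(x, y) folds to powi(x, y << 1).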
define double @powi_fmul_powi_same_power(double %x, i32 %y, i32 %z) {
; CHECK-LABEL: @powi_fmul_powi_same_power(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = shl i32 [[Y:%.*]], 1
; CHECK-NEXT:    [[MUL:%.*]] = call reassoc double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[TMP0]])
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
  %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
  %mul = fmul reassoc double %p2, %p1
  ret double %mul
}

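; The fold still applies when the first powi has an extra use.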
define double @powi_fmul_powi_use_first(double %x, i32 %y, i32 %z) {
; CHECK-LABEL: @powi_fmul_powi_use_first(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]])
; CHECK-NEXT:    tail call void @use(double [[P1]])
; CHECK-NEXT:    [[TMP0:%.*]] = add i32 [[Y]], [[Z:%.*]]
; CHECK-NEXT:    [[MUL:%.*]] = call reassoc double @llvm.powi.f64.i32(double [[X]], i32 [[TMP0]])
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
  tail call void @use(double %p1)
  %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z)
  %mul = fmul reassoc double %p1, %p2
  ret double %mul
}

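; Likewise when the second powi operand of the fmul has an extra use.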
define double @powi_fmul_powi_use_second(double %x, i32 %y, i32 %z) {
; CHECK-LABEL: @powi_fmul_powi_use_second(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Z:%.*]])
; CHECK-NEXT:    tail call void @use(double [[P1]])
; CHECK-NEXT:    [[TMP0:%.*]] = add i32 [[Y:%.*]], [[Z]]
; CHECK-NEXT:    [[MUL:%.*]] = call reassoc double @llvm.powi.f64.i32(double [[X]], i32 [[TMP0]])
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %z)
  tail call void @use(double %p1)
  %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
  %mul = fmul reassoc double %p2, %p1
  ret double %mul
}

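; Negative test - the powi calls have different bases, so they are not combined.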
define double @powi_fmul_different_base(double %x, double %m, i32 %y, i32 %z) {
; CHECK-LABEL: @powi_fmul_different_base(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]])
; CHECK-NEXT:    [[P2:%.*]] = tail call double @llvm.powi.f64.i32(double [[M:%.*]], i32 [[Z:%.*]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul reassoc double [[P2]], [[P1]]
; CHECK-NEXT:    ret double [[MUL]]
;
entry:
  %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
  %p2 = tail call double @llvm.powi.f64.i32(double %m, i32 %z)
  %mul = fmul reassoc double %p2, %p1
  ret double %mul
}

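; Negative test - the powi calls use different exponent types (i32 vs. i64), so they are not combined.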
define double @different_types_powi(double %x, i32 %y, i64 %z) {
; CHECK-LABEL: @different_types_powi(
; CHECK-NEXT:    [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]])
; CHECK-NEXT:    [[P2:%.*]] = tail call double @llvm.powi.f64.i64(double [[X]], i64 [[Z:%.*]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul reassoc double [[P2]], [[P1]]
; CHECK-NEXT:    ret double [[MUL]]
;
  %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
  %p2 = tail call double @llvm.powi.f64.i64(double %x, i64 %z)
  %mul = fmul reassoc double %p2, %p1
  ret double %mul
}