| ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4 |
| ; RUN: llc -global-isel=0 -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 < %s | FileCheck -check-prefixes=GCN,SDAG %s |
| ; RUN: llc -global-isel=1 -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 < %s | FileCheck -check-prefixes=GCN,GISEL %s |
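| ; These tests cover signed and unsigned i128-to-floating-point conversions (f32, f64, f16) on gfx900, checking both the SelectionDAG (SDAG) and GlobalISel (GISEL) lowerings of the itofp expansion. |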
| |
| define float @sitofp_i128_to_f32(i128 %x) { |
| ; SDAG-LABEL: sitofp_i128_to_f32: |
| ; SDAG: ; %bb.0: ; %itofp-entry |
| ; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; SDAG-NEXT: v_or_b32_e32 v5, v1, v3 |
| ; SDAG-NEXT: v_or_b32_e32 v4, v0, v2 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; SDAG-NEXT: v_mov_b32_e32 v4, 0 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB0_14 |
| ; SDAG-NEXT: ; %bb.1: ; %itofp-if-end |
| ; SDAG-NEXT: v_sub_co_u32_e32 v4, vcc, 0, v0 |
| ; SDAG-NEXT: v_subb_co_u32_e32 v5, vcc, 0, v1, vcc |
| ; SDAG-NEXT: v_subb_co_u32_e32 v6, vcc, 0, v2, vcc |
| ; SDAG-NEXT: v_subb_co_u32_e32 v7, vcc, 0, v3, vcc |
| ; SDAG-NEXT: v_cmp_gt_i64_e32 vcc, 0, v[2:3] |
| ; SDAG-NEXT: ; implicit-def: $vgpr8 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v0, v0, v4, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v4, v2, v6, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v1, v1, v5, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v5, v3, v7, vcc |
| ; SDAG-NEXT: v_ffbh_u32_e32 v2, v4 |
| ; SDAG-NEXT: v_add_u32_e32 v2, 32, v2 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v6, v5 |
| ; SDAG-NEXT: v_min_u32_e32 v2, v2, v6 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v6, v0 |
| ; SDAG-NEXT: v_add_u32_e32 v6, 32, v6 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v7, v1 |
| ; SDAG-NEXT: v_min_u32_e32 v6, v6, v7 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; SDAG-NEXT: v_add_u32_e32 v6, 64, v6 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, v6, v2, vcc |
| ; SDAG-NEXT: v_sub_u32_e32 v6, 0x80, v7 |
| ; SDAG-NEXT: v_sub_u32_e32 v2, 0x7f, v7 |
| ; SDAG-NEXT: v_cmp_gt_i32_e32 vcc, 25, v6 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; SDAG-NEXT: ; %bb.2: ; %itofp-if-else |
| ; SDAG-NEXT: v_add_u32_e32 v4, 0xffffff98, v7 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v4, v[0:1] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v4 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, 0, v0, vcc |
| ; SDAG-NEXT: ; implicit-def: $vgpr6 |
| ; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1 |
| ; SDAG-NEXT: ; implicit-def: $vgpr7 |
| ; SDAG-NEXT: ; implicit-def: $vgpr4_vgpr5 |
| ; SDAG-NEXT: ; %bb.3: ; %Flow3 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB0_13 |
| ; SDAG-NEXT: ; %bb.4: ; %NodeBlock |
| ; SDAG-NEXT: v_cmp_lt_i32_e32 vcc, 25, v6 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB0_8 |
| ; SDAG-NEXT: ; %bb.5: ; %LeafBlock |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 26, v6 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB0_7 |
| ; SDAG-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; SDAG-NEXT: v_sub_u32_e32 v12, 0x66, v7 |
| ; SDAG-NEXT: v_sub_u32_e32 v10, 64, v12 |
| ; SDAG-NEXT: v_lshrrev_b64 v[8:9], v12, v[0:1] |
| ; SDAG-NEXT: v_lshlrev_b64 v[10:11], v10, v[4:5] |
| ; SDAG-NEXT: v_sub_u32_e32 v13, 38, v7 |
| ; SDAG-NEXT: v_or_b32_e32 v11, v9, v11 |
| ; SDAG-NEXT: v_or_b32_e32 v10, v8, v10 |
| ; SDAG-NEXT: v_lshrrev_b64 v[8:9], v13, v[4:5] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v12 |
| ; SDAG-NEXT: v_add_u32_e32 v14, 26, v7 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v9, v9, v11, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v12 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, v8, v10, vcc |
| ; SDAG-NEXT: v_lshrrev_b64 v[10:11], v13, v[0:1] |
| ; SDAG-NEXT: v_lshlrev_b64 v[12:13], v14, v[4:5] |
| ; SDAG-NEXT: v_subrev_u32_e32 v7, 38, v7 |
| ; SDAG-NEXT: v_cndmask_b32_e64 v15, v8, v0, s[4:5] |
| ; SDAG-NEXT: v_lshlrev_b64 v[7:8], v7, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v9, v9, v1, s[4:5] |
| ; SDAG-NEXT: v_or_b32_e32 v11, v13, v11 |
| ; SDAG-NEXT: v_or_b32_e32 v10, v12, v10 |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v14 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v14, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, v8, v11, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v14 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, v7, v10, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e64 v5, v8, v5, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v4, v7, v4, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v1, 0, v1, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v1, v1, v5 |
| ; SDAG-NEXT: v_or_b32_e32 v0, v0, v4 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v8, v15, v0 |
| ; SDAG-NEXT: v_mov_b32_e32 v0, v8 |
| ; SDAG-NEXT: v_mov_b32_e32 v1, v9 |
| ; SDAG-NEXT: .LBB0_7: ; %Flow1 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; SDAG-NEXT: .LBB0_8: ; %Flow2 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; SDAG-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], 1, v[0:1] |
| ; SDAG-NEXT: ; %bb.10: ; %itofp-sw-epilog |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: v_lshrrev_b32_e32 v4, 2, v0 |
| ; SDAG-NEXT: v_and_or_b32 v0, v4, 1, v0 |
| ; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 1, v0 |
| ; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc |
| ; SDAG-NEXT: v_and_b32_e32 v4, 0x4000000, v0 |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 0, v4 |
| ; SDAG-NEXT: v_alignbit_b32 v8, v1, v0, 2 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; SDAG-NEXT: v_alignbit_b32 v8, v1, v0, 3 |
| ; SDAG-NEXT: v_mov_b32_e32 v2, v6 |
| ; SDAG-NEXT: ; %bb.12: ; %Flow |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: .LBB0_13: ; %Flow4 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; SDAG-NEXT: v_and_b32_e32 v0, 0x80000000, v3 |
| ; SDAG-NEXT: v_lshl_add_u32 v1, v2, 23, 1.0 |
| ; SDAG-NEXT: v_and_b32_e32 v2, 0x7fffff, v8 |
| ; SDAG-NEXT: v_or3_b32 v4, v2, v0, v1 |
| ; SDAG-NEXT: .LBB0_14: ; %Flow5 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; SDAG-NEXT: v_mov_b32_e32 v0, v4 |
| ; SDAG-NEXT: s_setpc_b64 s[30:31] |
| ; |
| ; GISEL-LABEL: sitofp_i128_to_f32: |
| ; GISEL: ; %bb.0: ; %itofp-entry |
| ; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; GISEL-NEXT: v_or_b32_e32 v4, v0, v2 |
| ; GISEL-NEXT: v_or_b32_e32 v5, v1, v3 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; GISEL-NEXT: s_mov_b32 s4, 0 |
| ; GISEL-NEXT: v_mov_b32_e32 v4, s4 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB0_14 |
| ; GISEL-NEXT: ; %bb.1: ; %itofp-if-end |
| ; GISEL-NEXT: v_ashrrev_i32_e32 v6, 31, v3 |
| ; GISEL-NEXT: v_xor_b32_e32 v0, v6, v0 |
| ; GISEL-NEXT: v_xor_b32_e32 v1, v6, v1 |
| ; GISEL-NEXT: v_sub_co_u32_e32 v0, vcc, v0, v6 |
| ; GISEL-NEXT: v_xor_b32_e32 v2, v6, v2 |
| ; GISEL-NEXT: v_subb_co_u32_e32 v1, vcc, v1, v6, vcc |
| ; GISEL-NEXT: v_xor_b32_e32 v3, v6, v3 |
| ; GISEL-NEXT: v_subb_co_u32_e32 v2, vcc, v2, v6, vcc |
| ; GISEL-NEXT: v_ffbh_u32_e32 v5, v0 |
| ; GISEL-NEXT: v_subb_co_u32_e32 v3, vcc, v3, v6, vcc |
| ; GISEL-NEXT: v_ffbh_u32_e32 v4, v1 |
| ; GISEL-NEXT: v_add_u32_e32 v5, 32, v5 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v7, v2 |
| ; GISEL-NEXT: v_min_u32_e32 v4, v4, v5 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v5, v3 |
| ; GISEL-NEXT: v_add_u32_e32 v7, 32, v7 |
| ; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, 0, v[2:3] |
| ; GISEL-NEXT: v_add_u32_e32 v4, 64, v4 |
| ; GISEL-NEXT: v_min_u32_e32 v5, v5, v7 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v5, v5, v4, vcc |
| ; GISEL-NEXT: v_sub_u32_e32 v8, 0x80, v5 |
| ; GISEL-NEXT: v_sub_u32_e32 v7, 0x7f, v5 |
| ; GISEL-NEXT: v_cmp_ge_i32_e32 vcc, 24, v8 |
| ; GISEL-NEXT: ; implicit-def: $vgpr4 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; GISEL-NEXT: ; %bb.2: ; %itofp-if-else |
| ; GISEL-NEXT: v_add_u32_e32 v2, 0xffffff98, v5 |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], v2, v[0:1] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, 0, v0, vcc |
| ; GISEL-NEXT: ; implicit-def: $vgpr8 |
| ; GISEL-NEXT: ; implicit-def: $vgpr0 |
| ; GISEL-NEXT: ; implicit-def: $vgpr5 |
| ; GISEL-NEXT: ; implicit-def: $vgpr2 |
| ; GISEL-NEXT: ; %bb.3: ; %Flow3 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB0_13 |
| ; GISEL-NEXT: ; %bb.4: ; %NodeBlock |
| ; GISEL-NEXT: v_cmp_le_i32_e32 vcc, 26, v8 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB0_8 |
| ; GISEL-NEXT: ; %bb.5: ; %LeafBlock |
| ; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 26, v8 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB0_7 |
| ; GISEL-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; GISEL-NEXT: v_sub_u32_e32 v4, 0x66, v5 |
| ; GISEL-NEXT: v_sub_u32_e32 v11, 64, v4 |
| ; GISEL-NEXT: v_lshrrev_b64 v[9:10], v4, v[0:1] |
| ; GISEL-NEXT: v_lshlrev_b64 v[11:12], v11, v[2:3] |
| ; GISEL-NEXT: v_add_u32_e32 v13, 0xffffffc0, v4 |
| ; GISEL-NEXT: v_or_b32_e32 v11, v9, v11 |
| ; GISEL-NEXT: v_or_b32_e32 v12, v10, v12 |
| ; GISEL-NEXT: v_lshrrev_b64 v[9:10], v13, v[2:3] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v4 |
| ; GISEL-NEXT: v_add_u32_e32 v14, 26, v5 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, v9, v11, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v10, v10, v12, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e32 vcc, 0, v4 |
| ; GISEL-NEXT: v_sub_u32_e32 v11, 64, v14 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v13, v9, v0, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, v10, v1, vcc |
| ; GISEL-NEXT: v_lshrrev_b64 v[9:10], v14, -1 |
| ; GISEL-NEXT: v_lshlrev_b64 v[11:12], v11, -1 |
| ; GISEL-NEXT: v_add_u32_e32 v5, 0xffffffda, v5 |
| ; GISEL-NEXT: v_or_b32_e32 v15, v9, v11 |
| ; GISEL-NEXT: v_or_b32_e32 v16, v10, v12 |
| ; GISEL-NEXT: v_lshrrev_b64 v[11:12], v5, -1 |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v14 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v5, v11, v15, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v11, v12, v16, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v14 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, 0, v9, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v10, 0, v10, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e64 v5, v5, -1, s[4:5] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v11, v11, -1, s[4:5] |
| ; GISEL-NEXT: v_and_b32_e32 v2, v9, v2 |
| ; GISEL-NEXT: v_and_b32_e32 v3, v10, v3 |
| ; GISEL-NEXT: v_and_or_b32 v0, v5, v0, v2 |
| ; GISEL-NEXT: v_and_or_b32 v1, v11, v1, v3 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; GISEL-NEXT: v_or_b32_e32 v3, v13, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v3 |
| ; GISEL-NEXT: v_mov_b32_e32 v1, v4 |
| ; GISEL-NEXT: v_mov_b32_e32 v2, v5 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, v6 |
| ; GISEL-NEXT: .LBB0_7: ; %Flow1 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; GISEL-NEXT: .LBB0_8: ; %Flow2 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; GISEL-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], 1, v[0:1] |
| ; GISEL-NEXT: ; %bb.10: ; %itofp-sw-epilog |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: v_bfe_u32 v2, v0, 2, 1 |
| ; GISEL-NEXT: v_or_b32_e32 v0, v0, v2 |
| ; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 1, v0 |
| ; GISEL-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc |
| ; GISEL-NEXT: v_and_b32_e32 v2, 0x4000000, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, 0 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], 2, v[0:1] |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[2:3] |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], 3, v[0:1] |
| ; GISEL-NEXT: v_mov_b32_e32 v7, v8 |
| ; GISEL-NEXT: ; %bb.12: ; %Flow |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: .LBB0_13: ; %Flow4 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; GISEL-NEXT: v_and_b32_e32 v0, 0x80000000, v6 |
| ; GISEL-NEXT: v_lshl_add_u32 v1, v7, 23, 1.0 |
| ; GISEL-NEXT: v_and_b32_e32 v2, 0x7fffff, v4 |
| ; GISEL-NEXT: v_or3_b32 v4, v2, v0, v1 |
| ; GISEL-NEXT: .LBB0_14: ; %Flow5 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v4 |
| ; GISEL-NEXT: s_setpc_b64 s[30:31] |
| %cvt = sitofp i128 %x to float |
| ret float %cvt |
| } |
| |
| define float @uitofp_i128_to_f32(i128 %x) { |
| ; SDAG-LABEL: uitofp_i128_to_f32: |
| ; SDAG: ; %bb.0: ; %itofp-entry |
| ; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; SDAG-NEXT: v_or_b32_e32 v5, v1, v3 |
| ; SDAG-NEXT: v_or_b32_e32 v4, v0, v2 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; SDAG-NEXT: v_mov_b32_e32 v4, 0 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB1_14 |
| ; SDAG-NEXT: ; %bb.1: ; %itofp-if-end |
| ; SDAG-NEXT: v_ffbh_u32_e32 v4, v2 |
| ; SDAG-NEXT: v_add_u32_e32 v4, 32, v4 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v5, v3 |
| ; SDAG-NEXT: v_min_u32_e32 v4, v4, v5 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v5, v0 |
| ; SDAG-NEXT: v_add_u32_e32 v5, 32, v5 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v6, v1 |
| ; SDAG-NEXT: v_min_u32_e32 v5, v5, v6 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[2:3] |
| ; SDAG-NEXT: v_add_u32_e32 v5, 64, v5 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v6, v5, v4, vcc |
| ; SDAG-NEXT: v_sub_u32_e32 v5, 0x80, v6 |
| ; SDAG-NEXT: v_sub_u32_e32 v4, 0x7f, v6 |
| ; SDAG-NEXT: v_cmp_gt_i32_e32 vcc, 25, v5 |
| ; SDAG-NEXT: ; implicit-def: $vgpr7 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; SDAG-NEXT: ; %bb.2: ; %itofp-if-else |
| ; SDAG-NEXT: v_add_u32_e32 v2, 0xffffff98, v6 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v2, v[0:1] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, 0, v0, vcc |
| ; SDAG-NEXT: ; implicit-def: $vgpr5 |
| ; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1 |
| ; SDAG-NEXT: ; implicit-def: $vgpr6 |
| ; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3 |
| ; SDAG-NEXT: ; %bb.3: ; %Flow3 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB1_13 |
| ; SDAG-NEXT: ; %bb.4: ; %NodeBlock |
| ; SDAG-NEXT: v_cmp_lt_i32_e32 vcc, 25, v5 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB1_8 |
| ; SDAG-NEXT: ; %bb.5: ; %LeafBlock |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 26, v5 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB1_7 |
| ; SDAG-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; SDAG-NEXT: v_sub_u32_e32 v11, 0x66, v6 |
| ; SDAG-NEXT: v_sub_u32_e32 v9, 64, v11 |
| ; SDAG-NEXT: v_lshrrev_b64 v[7:8], v11, v[0:1] |
| ; SDAG-NEXT: v_lshlrev_b64 v[9:10], v9, v[2:3] |
| ; SDAG-NEXT: v_sub_u32_e32 v12, 38, v6 |
| ; SDAG-NEXT: v_or_b32_e32 v10, v8, v10 |
| ; SDAG-NEXT: v_or_b32_e32 v9, v7, v9 |
| ; SDAG-NEXT: v_lshrrev_b64 v[7:8], v12, v[2:3] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v11 |
| ; SDAG-NEXT: v_add_u32_e32 v13, 26, v6 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, v8, v10, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v11 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, v7, v9, vcc |
| ; SDAG-NEXT: v_lshrrev_b64 v[9:10], v12, v[0:1] |
| ; SDAG-NEXT: v_lshlrev_b64 v[11:12], v13, v[2:3] |
| ; SDAG-NEXT: v_subrev_u32_e32 v6, 38, v6 |
| ; SDAG-NEXT: v_cndmask_b32_e64 v14, v7, v0, s[4:5] |
| ; SDAG-NEXT: v_lshlrev_b64 v[6:7], v6, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v8, v8, v1, s[4:5] |
| ; SDAG-NEXT: v_or_b32_e32 v10, v12, v10 |
| ; SDAG-NEXT: v_or_b32_e32 v9, v11, v9 |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v13 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v13, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, v7, v10, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v13 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v6, v6, v9, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e64 v3, v7, v3, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v2, v6, v2, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v1, 0, v1, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v1, v1, v3 |
| ; SDAG-NEXT: v_or_b32_e32 v0, v0, v2 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v7, v14, v0 |
| ; SDAG-NEXT: v_mov_b32_e32 v0, v7 |
| ; SDAG-NEXT: v_mov_b32_e32 v1, v8 |
| ; SDAG-NEXT: .LBB1_7: ; %Flow1 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; SDAG-NEXT: .LBB1_8: ; %Flow2 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; SDAG-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], 1, v[0:1] |
| ; SDAG-NEXT: ; %bb.10: ; %itofp-sw-epilog |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: v_lshrrev_b32_e32 v2, 2, v0 |
| ; SDAG-NEXT: v_and_or_b32 v0, v2, 1, v0 |
| ; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 1, v0 |
| ; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc |
| ; SDAG-NEXT: v_and_b32_e32 v2, 0x4000000, v0 |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 0, v2 |
| ; SDAG-NEXT: v_alignbit_b32 v7, v1, v0, 2 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; SDAG-NEXT: v_alignbit_b32 v7, v1, v0, 3 |
| ; SDAG-NEXT: v_mov_b32_e32 v4, v5 |
| ; SDAG-NEXT: ; %bb.12: ; %Flow |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: .LBB1_13: ; %Flow4 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; SDAG-NEXT: v_and_b32_e32 v0, 0x7fffff, v7 |
| ; SDAG-NEXT: v_lshl_or_b32 v0, v4, 23, v0 |
| ; SDAG-NEXT: v_add_u32_e32 v4, 1.0, v0 |
| ; SDAG-NEXT: .LBB1_14: ; %Flow5 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; SDAG-NEXT: v_mov_b32_e32 v0, v4 |
| ; SDAG-NEXT: s_setpc_b64 s[30:31] |
| ; |
| ; GISEL-LABEL: uitofp_i128_to_f32: |
| ; GISEL: ; %bb.0: ; %itofp-entry |
| ; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; GISEL-NEXT: v_or_b32_e32 v4, v0, v2 |
| ; GISEL-NEXT: v_or_b32_e32 v5, v1, v3 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; GISEL-NEXT: s_mov_b32 s4, 0 |
| ; GISEL-NEXT: v_mov_b32_e32 v4, s4 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB1_14 |
| ; GISEL-NEXT: ; %bb.1: ; %itofp-if-end |
| ; GISEL-NEXT: v_ffbh_u32_e32 v5, v0 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v4, v1 |
| ; GISEL-NEXT: v_add_u32_e32 v5, 32, v5 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v6, v2 |
| ; GISEL-NEXT: v_min_u32_e32 v4, v4, v5 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v5, v3 |
| ; GISEL-NEXT: v_add_u32_e32 v6, 32, v6 |
| ; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, 0, v[2:3] |
| ; GISEL-NEXT: v_add_u32_e32 v4, 64, v4 |
| ; GISEL-NEXT: v_min_u32_e32 v5, v5, v6 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v5, v5, v4, vcc |
| ; GISEL-NEXT: v_sub_u32_e32 v7, 0x80, v5 |
| ; GISEL-NEXT: v_sub_u32_e32 v6, 0x7f, v5 |
| ; GISEL-NEXT: v_cmp_ge_i32_e32 vcc, 24, v7 |
| ; GISEL-NEXT: ; implicit-def: $vgpr4 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; GISEL-NEXT: ; %bb.2: ; %itofp-if-else |
| ; GISEL-NEXT: v_add_u32_e32 v2, 0xffffff98, v5 |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], v2, v[0:1] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, 0, v0, vcc |
| ; GISEL-NEXT: ; implicit-def: $vgpr7 |
| ; GISEL-NEXT: ; implicit-def: $vgpr0 |
| ; GISEL-NEXT: ; implicit-def: $vgpr5 |
| ; GISEL-NEXT: ; implicit-def: $vgpr2 |
| ; GISEL-NEXT: ; %bb.3: ; %Flow3 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB1_13 |
| ; GISEL-NEXT: ; %bb.4: ; %NodeBlock |
| ; GISEL-NEXT: v_cmp_le_i32_e32 vcc, 26, v7 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB1_8 |
| ; GISEL-NEXT: ; %bb.5: ; %LeafBlock |
| ; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 26, v7 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB1_7 |
| ; GISEL-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; GISEL-NEXT: v_sub_u32_e32 v4, 0x66, v5 |
| ; GISEL-NEXT: v_sub_u32_e32 v10, 64, v4 |
| ; GISEL-NEXT: v_lshrrev_b64 v[8:9], v4, v[0:1] |
| ; GISEL-NEXT: v_lshlrev_b64 v[10:11], v10, v[2:3] |
| ; GISEL-NEXT: v_add_u32_e32 v12, 0xffffffc0, v4 |
| ; GISEL-NEXT: v_or_b32_e32 v10, v8, v10 |
| ; GISEL-NEXT: v_or_b32_e32 v11, v9, v11 |
| ; GISEL-NEXT: v_lshrrev_b64 v[8:9], v12, v[2:3] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v4 |
| ; GISEL-NEXT: v_add_u32_e32 v13, 26, v5 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v8, v8, v10, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, v9, v11, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e32 vcc, 0, v4 |
| ; GISEL-NEXT: v_sub_u32_e32 v10, 64, v13 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v12, v8, v0, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, v9, v1, vcc |
| ; GISEL-NEXT: v_lshrrev_b64 v[8:9], v13, -1 |
| ; GISEL-NEXT: v_lshlrev_b64 v[10:11], v10, -1 |
| ; GISEL-NEXT: v_add_u32_e32 v5, 0xffffffda, v5 |
| ; GISEL-NEXT: v_or_b32_e32 v14, v8, v10 |
| ; GISEL-NEXT: v_or_b32_e32 v15, v9, v11 |
| ; GISEL-NEXT: v_lshrrev_b64 v[10:11], v5, -1 |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v13 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v5, v10, v14, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v10, v11, v15, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v13 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v8, 0, v8, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, 0, v9, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e64 v5, v5, -1, s[4:5] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v10, v10, -1, s[4:5] |
| ; GISEL-NEXT: v_and_b32_e32 v2, v8, v2 |
| ; GISEL-NEXT: v_and_b32_e32 v3, v9, v3 |
| ; GISEL-NEXT: v_and_or_b32 v0, v5, v0, v2 |
| ; GISEL-NEXT: v_and_or_b32 v1, v10, v1, v3 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; GISEL-NEXT: v_or_b32_e32 v3, v12, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v3 |
| ; GISEL-NEXT: v_mov_b32_e32 v1, v4 |
| ; GISEL-NEXT: v_mov_b32_e32 v2, v5 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, v6 |
| ; GISEL-NEXT: .LBB1_7: ; %Flow1 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; GISEL-NEXT: .LBB1_8: ; %Flow2 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; GISEL-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], 1, v[0:1] |
| ; GISEL-NEXT: ; %bb.10: ; %itofp-sw-epilog |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: v_bfe_u32 v2, v0, 2, 1 |
| ; GISEL-NEXT: v_or_b32_e32 v0, v0, v2 |
| ; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 1, v0 |
| ; GISEL-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc |
| ; GISEL-NEXT: v_and_b32_e32 v2, 0x4000000, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, 0 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], 2, v[0:1] |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[2:3] |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], 3, v[0:1] |
| ; GISEL-NEXT: v_mov_b32_e32 v6, v7 |
| ; GISEL-NEXT: ; %bb.12: ; %Flow |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: .LBB1_13: ; %Flow4 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; GISEL-NEXT: v_lshl_add_u32 v0, v6, 23, 1.0 |
| ; GISEL-NEXT: v_mov_b32_e32 v1, 0x7fffff |
| ; GISEL-NEXT: v_and_or_b32 v4, v4, v1, v0 |
| ; GISEL-NEXT: .LBB1_14: ; %Flow5 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v4 |
| ; GISEL-NEXT: s_setpc_b64 s[30:31] |
| %cvt = uitofp i128 %x to float |
| ret float %cvt |
| } |
| |
| define double @sitofp_i128_to_f64(i128 %x) { |
| ; SDAG-LABEL: sitofp_i128_to_f64: |
| ; SDAG: ; %bb.0: ; %itofp-entry |
| ; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; SDAG-NEXT: v_mov_b32_e32 v5, v1 |
| ; SDAG-NEXT: v_mov_b32_e32 v4, v0 |
| ; SDAG-NEXT: v_or_b32_e32 v1, v5, v3 |
| ; SDAG-NEXT: v_or_b32_e32 v0, v4, v2 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; SDAG-NEXT: v_mov_b32_e32 v0, 0 |
| ; SDAG-NEXT: v_mov_b32_e32 v1, 0 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB2_14 |
| ; SDAG-NEXT: ; %bb.1: ; %itofp-if-end |
| ; SDAG-NEXT: v_sub_co_u32_e32 v0, vcc, 0, v4 |
| ; SDAG-NEXT: v_subb_co_u32_e32 v1, vcc, 0, v5, vcc |
| ; SDAG-NEXT: v_subb_co_u32_e32 v6, vcc, 0, v2, vcc |
| ; SDAG-NEXT: v_subb_co_u32_e32 v7, vcc, 0, v3, vcc |
| ; SDAG-NEXT: v_cmp_gt_i64_e32 vcc, 0, v[2:3] |
| ; SDAG-NEXT: ; implicit-def: $vgpr10 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v6, v2, v6, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v4, v4, v0, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, v3, v7, vcc |
| ; SDAG-NEXT: v_ffbh_u32_e32 v0, v6 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v5, v5, v1, vcc |
| ; SDAG-NEXT: v_add_u32_e32 v0, 32, v0 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v1, v7 |
| ; SDAG-NEXT: v_min_u32_e32 v0, v0, v1 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v1, v4 |
| ; SDAG-NEXT: v_add_u32_e32 v1, 32, v1 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v2, v5 |
| ; SDAG-NEXT: v_min_u32_e32 v1, v1, v2 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[6:7] |
| ; SDAG-NEXT: v_add_u32_e32 v1, 64, v1 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v9, v1, v0, vcc |
| ; SDAG-NEXT: v_sub_u32_e32 v8, 0x80, v9 |
| ; SDAG-NEXT: v_sub_u32_e32 v2, 0x7f, v9 |
| ; SDAG-NEXT: v_cmp_gt_i32_e32 vcc, 54, v8 |
| ; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; SDAG-NEXT: ; %bb.2: ; %itofp-if-else |
| ; SDAG-NEXT: v_add_u32_e32 v6, 0xffffffb5, v9 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v6, v[4:5] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v6 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v10, 0, v1, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc |
| ; SDAG-NEXT: ; implicit-def: $vgpr8 |
| ; SDAG-NEXT: ; implicit-def: $vgpr6_vgpr7 |
| ; SDAG-NEXT: ; implicit-def: $vgpr4_vgpr5 |
| ; SDAG-NEXT: ; implicit-def: $vgpr9 |
| ; SDAG-NEXT: ; %bb.3: ; %Flow3 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB2_13 |
| ; SDAG-NEXT: ; %bb.4: ; %NodeBlock |
| ; SDAG-NEXT: v_cmp_lt_i32_e32 vcc, 54, v8 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB2_8 |
| ; SDAG-NEXT: ; %bb.5: ; %LeafBlock |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 55, v8 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB2_7 |
| ; SDAG-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; SDAG-NEXT: v_sub_u32_e32 v12, 0x49, v9 |
| ; SDAG-NEXT: v_sub_u32_e32 v10, 64, v12 |
| ; SDAG-NEXT: v_lshrrev_b64 v[0:1], v12, v[4:5] |
| ; SDAG-NEXT: v_lshlrev_b64 v[10:11], v10, v[6:7] |
| ; SDAG-NEXT: v_sub_u32_e32 v13, 9, v9 |
| ; SDAG-NEXT: v_or_b32_e32 v11, v1, v11 |
| ; SDAG-NEXT: v_or_b32_e32 v10, v0, v10 |
| ; SDAG-NEXT: v_lshrrev_b64 v[0:1], v13, v[6:7] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v12 |
| ; SDAG-NEXT: v_add_u32_e32 v16, 55, v9 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v1, v1, v11, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v12 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v0, v0, v10, vcc |
| ; SDAG-NEXT: v_lshrrev_b64 v[10:11], v12, v[6:7] |
| ; SDAG-NEXT: v_lshrrev_b64 v[12:13], v13, v[4:5] |
| ; SDAG-NEXT: v_lshlrev_b64 v[14:15], v16, v[6:7] |
| ; SDAG-NEXT: v_add_u32_e32 v9, -9, v9 |
| ; SDAG-NEXT: v_or_b32_e32 v15, v15, v13 |
| ; SDAG-NEXT: v_or_b32_e32 v14, v14, v12 |
| ; SDAG-NEXT: v_lshlrev_b64 v[12:13], v9, v[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v11, 0, v11, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v10, 0, v10, vcc |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v16 |
| ; SDAG-NEXT: v_cndmask_b32_e64 v1, v1, v5, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v0, v0, v4, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v9, v13, v15, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v16 |
| ; SDAG-NEXT: v_lshlrev_b64 v[4:5], v16, v[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v7, v9, v7, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v9, v12, v14, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e64 v6, v9, v6, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v5, 0, v5, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v4, 0, v4, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v5, v5, v7 |
| ; SDAG-NEXT: v_or_b32_e32 v4, v4, v6 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; SDAG-NEXT: v_mov_b32_e32 v6, v10 |
| ; SDAG-NEXT: v_cndmask_b32_e64 v4, 0, 1, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v0, v0, v4 |
| ; SDAG-NEXT: v_mov_b32_e32 v5, v1 |
| ; SDAG-NEXT: v_mov_b32_e32 v4, v0 |
| ; SDAG-NEXT: v_mov_b32_e32 v7, v11 |
| ; SDAG-NEXT: .LBB2_7: ; %Flow1 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; SDAG-NEXT: .LBB2_8: ; %Flow2 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; SDAG-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; SDAG-NEXT: v_lshlrev_b64 v[6:7], 1, v[6:7] |
| ; SDAG-NEXT: v_lshrrev_b32_e32 v0, 31, v5 |
| ; SDAG-NEXT: v_lshlrev_b64 v[4:5], 1, v[4:5] |
| ; SDAG-NEXT: v_or_b32_e32 v6, v6, v0 |
| ; SDAG-NEXT: ; %bb.10: ; %itofp-sw-epilog |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: v_lshrrev_b32_e32 v0, 2, v4 |
| ; SDAG-NEXT: v_and_or_b32 v0, v0, 1, v4 |
| ; SDAG-NEXT: v_add_co_u32_e32 v4, vcc, 1, v0 |
| ; SDAG-NEXT: v_addc_co_u32_e32 v5, vcc, 0, v5, vcc |
| ; SDAG-NEXT: v_addc_co_u32_e32 v6, vcc, 0, v6, vcc |
| ; SDAG-NEXT: v_lshrrev_b64 v[0:1], 2, v[4:5] |
| ; SDAG-NEXT: v_lshlrev_b32_e32 v7, 30, v6 |
| ; SDAG-NEXT: v_or_b32_e32 v10, v1, v7 |
| ; SDAG-NEXT: v_and_b32_e32 v1, 0x800000, v5 |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 0, v1 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; SDAG-NEXT: v_lshrrev_b64 v[0:1], 3, v[4:5] |
| ; SDAG-NEXT: v_lshlrev_b32_e32 v2, 29, v6 |
| ; SDAG-NEXT: v_or_b32_e32 v10, v1, v2 |
| ; SDAG-NEXT: v_mov_b32_e32 v2, v8 |
| ; SDAG-NEXT: ; %bb.12: ; %Flow |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: .LBB2_13: ; %Flow4 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; SDAG-NEXT: v_and_b32_e32 v1, 0x80000000, v3 |
| ; SDAG-NEXT: v_mov_b32_e32 v3, 0x3ff00000 |
| ; SDAG-NEXT: v_lshl_add_u32 v2, v2, 20, v3 |
| ; SDAG-NEXT: v_and_b32_e32 v3, 0xfffff, v10 |
| ; SDAG-NEXT: v_or3_b32 v1, v3, v1, v2 |
| ; SDAG-NEXT: .LBB2_14: ; %Flow5 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; SDAG-NEXT: s_setpc_b64 s[30:31] |
| ; |
| ; GISEL-LABEL: sitofp_i128_to_f64: |
| ; GISEL: ; %bb.0: ; %itofp-entry |
| ; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; GISEL-NEXT: v_mov_b32_e32 v4, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v5, v1 |
| ; GISEL-NEXT: s_mov_b64 s[4:5], 0 |
| ; GISEL-NEXT: v_or_b32_e32 v0, v4, v2 |
| ; GISEL-NEXT: v_or_b32_e32 v1, v5, v3 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; GISEL-NEXT: v_mov_b32_e32 v0, s4 |
| ; GISEL-NEXT: v_mov_b32_e32 v1, s5 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB2_14 |
| ; GISEL-NEXT: ; %bb.1: ; %itofp-if-end |
| ; GISEL-NEXT: v_ashrrev_i32_e32 v6, 31, v3 |
| ; GISEL-NEXT: v_xor_b32_e32 v0, v6, v4 |
| ; GISEL-NEXT: v_xor_b32_e32 v1, v6, v5 |
| ; GISEL-NEXT: v_xor_b32_e32 v4, v6, v2 |
| ; GISEL-NEXT: v_sub_co_u32_e32 v2, vcc, v0, v6 |
| ; GISEL-NEXT: v_xor_b32_e32 v5, v6, v3 |
| ; GISEL-NEXT: v_subb_co_u32_e32 v3, vcc, v1, v6, vcc |
| ; GISEL-NEXT: v_subb_co_u32_e32 v4, vcc, v4, v6, vcc |
| ; GISEL-NEXT: v_ffbh_u32_e32 v1, v2 |
| ; GISEL-NEXT: v_subb_co_u32_e32 v5, vcc, v5, v6, vcc |
| ; GISEL-NEXT: v_ffbh_u32_e32 v0, v3 |
| ; GISEL-NEXT: v_add_u32_e32 v1, 32, v1 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v7, v4 |
| ; GISEL-NEXT: v_min_u32_e32 v0, v0, v1 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v1, v5 |
| ; GISEL-NEXT: v_add_u32_e32 v7, 32, v7 |
| ; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, 0, v[4:5] |
| ; GISEL-NEXT: v_add_u32_e32 v0, 64, v0 |
| ; GISEL-NEXT: v_min_u32_e32 v1, v1, v7 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, v1, v0, vcc |
| ; GISEL-NEXT: v_sub_u32_e32 v8, 0x80, v9 |
| ; GISEL-NEXT: v_sub_u32_e32 v7, 0x7f, v9 |
| ; GISEL-NEXT: v_cmp_ge_i32_e32 vcc, 53, v8 |
| ; GISEL-NEXT: ; implicit-def: $vgpr10 |
| ; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; GISEL-NEXT: ; %bb.2: ; %itofp-if-else |
| ; GISEL-NEXT: v_add_u32_e32 v4, 0xffffffb5, v9 |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], v4, v[2:3] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v4 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v10, 0, v1, vcc |
| ; GISEL-NEXT: ; implicit-def: $vgpr8 |
| ; GISEL-NEXT: ; implicit-def: $vgpr2 |
| ; GISEL-NEXT: ; implicit-def: $vgpr9 |
| ; GISEL-NEXT: ; %bb.3: ; %Flow3 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB2_13 |
| ; GISEL-NEXT: ; %bb.4: ; %NodeBlock |
| ; GISEL-NEXT: v_cmp_le_i32_e32 vcc, 55, v8 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB2_8 |
| ; GISEL-NEXT: ; %bb.5: ; %LeafBlock |
| ; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 55, v8 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB2_7 |
| ; GISEL-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; GISEL-NEXT: v_sub_u32_e32 v14, 0x49, v9 |
| ; GISEL-NEXT: v_sub_u32_e32 v10, 64, v14 |
| ; GISEL-NEXT: v_lshrrev_b64 v[0:1], v14, v[2:3] |
| ; GISEL-NEXT: v_lshlrev_b64 v[10:11], v10, v[4:5] |
| ; GISEL-NEXT: v_add_u32_e32 v15, 0xffffffc0, v14 |
| ; GISEL-NEXT: v_lshrrev_b64 v[12:13], v14, v[4:5] |
| ; GISEL-NEXT: v_or_b32_e32 v10, v0, v10 |
| ; GISEL-NEXT: v_or_b32_e32 v11, v1, v11 |
| ; GISEL-NEXT: v_lshrrev_b64 v[0:1], v15, v[4:5] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v14 |
| ; GISEL-NEXT: v_add_u32_e32 v15, 55, v9 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v0, v0, v10, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v1, v1, v11, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v14 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v11, 0, v12, vcc |
| ; GISEL-NEXT: v_sub_u32_e32 v12, 64, v15 |
| ; GISEL-NEXT: v_cndmask_b32_e64 v14, v0, v2, s[4:5] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v10, v1, v3, s[4:5] |
| ; GISEL-NEXT: v_lshrrev_b64 v[0:1], v15, -1 |
| ; GISEL-NEXT: v_lshlrev_b64 v[12:13], v12, -1 |
| ; GISEL-NEXT: v_add_u32_e32 v9, -9, v9 |
| ; GISEL-NEXT: v_or_b32_e32 v16, v0, v12 |
| ; GISEL-NEXT: v_or_b32_e32 v17, v1, v13 |
| ; GISEL-NEXT: v_lshrrev_b64 v[12:13], v9, -1 |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v15 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, v12, v16, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v12, v13, v17, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v15 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v1, 0, v1, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e64 v9, v9, -1, s[4:5] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v12, v12, -1, s[4:5] |
| ; GISEL-NEXT: v_and_b32_e32 v0, v0, v4 |
| ; GISEL-NEXT: v_and_b32_e32 v1, v1, v5 |
| ; GISEL-NEXT: v_and_or_b32 v0, v9, v2, v0 |
| ; GISEL-NEXT: v_and_or_b32 v1, v12, v3, v1 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; GISEL-NEXT: v_or_b32_e32 v9, v14, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v2, v9 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, v10 |
| ; GISEL-NEXT: v_mov_b32_e32 v4, v11 |
| ; GISEL-NEXT: v_mov_b32_e32 v5, v12 |
| ; GISEL-NEXT: .LBB2_7: ; %Flow1 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; GISEL-NEXT: .LBB2_8: ; %Flow2 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; GISEL-NEXT: s_cbranch_execz .LBB2_10 |
| ; GISEL-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; GISEL-NEXT: v_lshlrev_b64 v[4:5], 1, v[4:5] |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], 1, v[2:3] |
| ; GISEL-NEXT: v_lshrrev_b32_e32 v2, 31, v3 |
| ; GISEL-NEXT: v_or_b32_e32 v2, v4, v2 |
| ; GISEL-NEXT: v_mov_b32_e32 v5, v3 |
| ; GISEL-NEXT: v_mov_b32_e32 v4, v2 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, v1 |
| ; GISEL-NEXT: v_mov_b32_e32 v2, v0 |
| ; GISEL-NEXT: .LBB2_10: ; %itofp-sw-epilog |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: v_bfe_u32 v0, v2, 2, 1 |
| ; GISEL-NEXT: v_or_b32_e32 v0, v2, v0 |
| ; GISEL-NEXT: v_add_co_u32_e32 v2, vcc, 1, v0 |
| ; GISEL-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v3, vcc |
| ; GISEL-NEXT: v_addc_co_u32_e32 v4, vcc, 0, v4, vcc |
| ; GISEL-NEXT: v_lshrrev_b64 v[0:1], 2, v[2:3] |
| ; GISEL-NEXT: v_mov_b32_e32 v9, 0 |
| ; GISEL-NEXT: v_and_b32_e32 v10, 0x800000, v3 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[9:10] |
| ; GISEL-NEXT: v_lshl_or_b32 v10, v4, 30, v1 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; GISEL-NEXT: v_lshrrev_b64 v[0:1], 3, v[2:3] |
| ; GISEL-NEXT: v_mov_b32_e32 v7, v8 |
| ; GISEL-NEXT: v_lshl_or_b32 v10, v4, 29, v1 |
| ; GISEL-NEXT: ; %bb.12: ; %Flow |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: .LBB2_13: ; %Flow4 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; GISEL-NEXT: v_and_b32_e32 v1, 0x80000000, v6 |
| ; GISEL-NEXT: v_mov_b32_e32 v2, 0x3ff00000 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, 0xfffff |
| ; GISEL-NEXT: v_lshl_add_u32 v2, v7, 20, v2 |
| ; GISEL-NEXT: v_and_or_b32 v1, v10, v3, v1 |
| ; GISEL-NEXT: v_or3_b32 v1, v1, v2, 0 |
| ; GISEL-NEXT: .LBB2_14: ; %Flow5 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; GISEL-NEXT: s_setpc_b64 s[30:31] |
| %cvt = sitofp i128 %x to double |
| ret double %cvt |
| } |
| |
| define double @uitofp_i128_to_f64(i128 %x) { |
| ; SDAG-LABEL: uitofp_i128_to_f64: |
| ; SDAG: ; %bb.0: ; %itofp-entry |
| ; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; SDAG-NEXT: v_or_b32_e32 v5, v1, v3 |
| ; SDAG-NEXT: v_or_b32_e32 v4, v0, v2 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; SDAG-NEXT: v_mov_b32_e32 v4, 0 |
| ; SDAG-NEXT: v_mov_b32_e32 v5, 0 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB3_14 |
| ; SDAG-NEXT: ; %bb.1: ; %itofp-if-end |
| ; SDAG-NEXT: v_ffbh_u32_e32 v4, v2 |
| ; SDAG-NEXT: v_add_u32_e32 v4, 32, v4 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v5, v3 |
| ; SDAG-NEXT: v_min_u32_e32 v4, v4, v5 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v5, v0 |
| ; SDAG-NEXT: v_add_u32_e32 v5, 32, v5 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v6, v1 |
| ; SDAG-NEXT: v_min_u32_e32 v5, v5, v6 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[2:3] |
| ; SDAG-NEXT: v_add_u32_e32 v5, 64, v5 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, v5, v4, vcc |
| ; SDAG-NEXT: v_sub_u32_e32 v7, 0x80, v8 |
| ; SDAG-NEXT: v_sub_u32_e32 v6, 0x7f, v8 |
| ; SDAG-NEXT: v_cmp_gt_i32_e32 vcc, 54, v7 |
| ; SDAG-NEXT: ; implicit-def: $vgpr9 |
| ; SDAG-NEXT: ; implicit-def: $vgpr4_vgpr5 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; SDAG-NEXT: ; %bb.2: ; %itofp-if-else |
| ; SDAG-NEXT: v_add_u32_e32 v2, 0xffffffb5, v8 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v2, v[0:1] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v9, 0, v1, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v4, 0, v0, vcc |
| ; SDAG-NEXT: ; implicit-def: $vgpr7 |
| ; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3 |
| ; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1 |
| ; SDAG-NEXT: ; implicit-def: $vgpr8 |
| ; SDAG-NEXT: ; %bb.3: ; %Flow3 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB3_13 |
| ; SDAG-NEXT: ; %bb.4: ; %NodeBlock |
| ; SDAG-NEXT: v_cmp_lt_i32_e32 vcc, 54, v7 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB3_8 |
| ; SDAG-NEXT: ; %bb.5: ; %LeafBlock |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 55, v7 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB3_7 |
| ; SDAG-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; SDAG-NEXT: v_sub_u32_e32 v11, 0x49, v8 |
| ; SDAG-NEXT: v_sub_u32_e32 v9, 64, v11 |
| ; SDAG-NEXT: v_lshrrev_b64 v[4:5], v11, v[0:1] |
| ; SDAG-NEXT: v_lshlrev_b64 v[9:10], v9, v[2:3] |
| ; SDAG-NEXT: v_sub_u32_e32 v12, 9, v8 |
| ; SDAG-NEXT: v_or_b32_e32 v10, v5, v10 |
| ; SDAG-NEXT: v_or_b32_e32 v9, v4, v9 |
| ; SDAG-NEXT: v_lshrrev_b64 v[4:5], v12, v[2:3] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v11 |
| ; SDAG-NEXT: v_add_u32_e32 v15, 55, v8 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v5, v5, v10, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v11 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v4, v4, v9, vcc |
| ; SDAG-NEXT: v_lshrrev_b64 v[9:10], v11, v[2:3] |
| ; SDAG-NEXT: v_lshrrev_b64 v[11:12], v12, v[0:1] |
| ; SDAG-NEXT: v_lshlrev_b64 v[13:14], v15, v[2:3] |
| ; SDAG-NEXT: v_add_u32_e32 v8, -9, v8 |
| ; SDAG-NEXT: v_or_b32_e32 v14, v14, v12 |
| ; SDAG-NEXT: v_or_b32_e32 v13, v13, v11 |
| ; SDAG-NEXT: v_lshlrev_b64 v[11:12], v8, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v10, 0, v10, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v9, 0, v9, vcc |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v15 |
| ; SDAG-NEXT: v_cndmask_b32_e64 v5, v5, v1, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v4, v4, v0, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, v12, v14, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v15 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v15, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v3, v8, v3, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, v11, v13, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e64 v2, v8, v2, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v1, 0, v1, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v1, v1, v3 |
| ; SDAG-NEXT: v_or_b32_e32 v0, v0, v2 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; SDAG-NEXT: v_mov_b32_e32 v2, v9 |
| ; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v4, v4, v0 |
| ; SDAG-NEXT: v_mov_b32_e32 v0, v4 |
| ; SDAG-NEXT: v_mov_b32_e32 v1, v5 |
| ; SDAG-NEXT: v_mov_b32_e32 v3, v10 |
| ; SDAG-NEXT: .LBB3_7: ; %Flow1 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; SDAG-NEXT: .LBB3_8: ; %Flow2 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; SDAG-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; SDAG-NEXT: v_lshlrev_b64 v[2:3], 1, v[2:3] |
| ; SDAG-NEXT: v_lshrrev_b32_e32 v3, 31, v1 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], 1, v[0:1] |
| ; SDAG-NEXT: v_or_b32_e32 v2, v2, v3 |
| ; SDAG-NEXT: ; %bb.10: ; %itofp-sw-epilog |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: v_lshrrev_b32_e32 v3, 2, v0 |
| ; SDAG-NEXT: v_and_or_b32 v0, v3, 1, v0 |
| ; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 1, v0 |
| ; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc |
| ; SDAG-NEXT: v_addc_co_u32_e32 v2, vcc, 0, v2, vcc |
| ; SDAG-NEXT: v_lshrrev_b64 v[4:5], 2, v[0:1] |
| ; SDAG-NEXT: v_and_b32_e32 v3, 0x800000, v1 |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 0, v3 |
| ; SDAG-NEXT: v_alignbit_b32 v9, v2, v1, 2 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; SDAG-NEXT: v_lshrrev_b64 v[4:5], 3, v[0:1] |
| ; SDAG-NEXT: v_alignbit_b32 v9, v2, v1, 3 |
| ; SDAG-NEXT: v_mov_b32_e32 v6, v7 |
| ; SDAG-NEXT: ; %bb.12: ; %Flow |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: .LBB3_13: ; %Flow4 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; SDAG-NEXT: v_and_b32_e32 v0, 0xfffff, v9 |
| ; SDAG-NEXT: v_lshl_or_b32 v0, v6, 20, v0 |
| ; SDAG-NEXT: v_add_u32_e32 v5, 0x3ff00000, v0 |
| ; SDAG-NEXT: .LBB3_14: ; %Flow5 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; SDAG-NEXT: v_mov_b32_e32 v0, v4 |
| ; SDAG-NEXT: v_mov_b32_e32 v1, v5 |
| ; SDAG-NEXT: s_setpc_b64 s[30:31] |
| ; |
| ; GISEL-LABEL: uitofp_i128_to_f64: |
| ; GISEL: ; %bb.0: ; %itofp-entry |
| ; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; GISEL-NEXT: s_mov_b64 s[4:5], 0 |
| ; GISEL-NEXT: v_or_b32_e32 v4, v0, v2 |
| ; GISEL-NEXT: v_or_b32_e32 v5, v1, v3 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; GISEL-NEXT: v_mov_b32_e32 v4, s4 |
| ; GISEL-NEXT: v_mov_b32_e32 v5, s5 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB3_14 |
| ; GISEL-NEXT: ; %bb.1: ; %itofp-if-end |
| ; GISEL-NEXT: v_ffbh_u32_e32 v5, v0 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v4, v1 |
| ; GISEL-NEXT: v_add_u32_e32 v5, 32, v5 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v6, v2 |
| ; GISEL-NEXT: v_min_u32_e32 v4, v4, v5 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v5, v3 |
| ; GISEL-NEXT: v_add_u32_e32 v6, 32, v6 |
| ; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, 0, v[2:3] |
| ; GISEL-NEXT: v_add_u32_e32 v4, 64, v4 |
| ; GISEL-NEXT: v_min_u32_e32 v5, v5, v6 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v8, v5, v4, vcc |
| ; GISEL-NEXT: v_sub_u32_e32 v7, 0x80, v8 |
| ; GISEL-NEXT: v_sub_u32_e32 v6, 0x7f, v8 |
| ; GISEL-NEXT: v_cmp_ge_i32_e32 vcc, 53, v7 |
| ; GISEL-NEXT: ; implicit-def: $vgpr9 |
| ; GISEL-NEXT: ; implicit-def: $vgpr4_vgpr5 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; GISEL-NEXT: ; %bb.2: ; %itofp-if-else |
| ; GISEL-NEXT: v_add_u32_e32 v2, 0xffffffb5, v8 |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], v2, v[0:1] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, 0, v0, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, 0, v1, vcc |
| ; GISEL-NEXT: ; implicit-def: $vgpr7 |
| ; GISEL-NEXT: ; implicit-def: $vgpr0 |
| ; GISEL-NEXT: ; implicit-def: $vgpr8 |
| ; GISEL-NEXT: ; %bb.3: ; %Flow3 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB3_13 |
| ; GISEL-NEXT: ; %bb.4: ; %NodeBlock |
| ; GISEL-NEXT: v_cmp_le_i32_e32 vcc, 55, v7 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB3_8 |
| ; GISEL-NEXT: ; %bb.5: ; %LeafBlock |
| ; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 55, v7 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB3_7 |
| ; GISEL-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; GISEL-NEXT: v_sub_u32_e32 v13, 0x49, v8 |
| ; GISEL-NEXT: v_sub_u32_e32 v9, 64, v13 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], v13, v[0:1] |
| ; GISEL-NEXT: v_lshlrev_b64 v[9:10], v9, v[2:3] |
| ; GISEL-NEXT: v_add_u32_e32 v14, 0xffffffc0, v13 |
| ; GISEL-NEXT: v_lshrrev_b64 v[11:12], v13, v[2:3] |
| ; GISEL-NEXT: v_or_b32_e32 v9, v4, v9 |
| ; GISEL-NEXT: v_or_b32_e32 v10, v5, v10 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], v14, v[2:3] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v13 |
| ; GISEL-NEXT: v_add_u32_e32 v15, 55, v8 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, v4, v9, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v5, v5, v10, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v13 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v10, 0, v11, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v11, 0, v12, vcc |
| ; GISEL-NEXT: v_sub_u32_e32 v12, 64, v15 |
| ; GISEL-NEXT: v_cndmask_b32_e64 v14, v4, v0, s[4:5] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v9, v5, v1, s[4:5] |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], v15, -1 |
| ; GISEL-NEXT: v_lshlrev_b64 v[12:13], v12, -1 |
| ; GISEL-NEXT: v_add_u32_e32 v8, -9, v8 |
| ; GISEL-NEXT: v_or_b32_e32 v16, v4, v12 |
| ; GISEL-NEXT: v_or_b32_e32 v17, v5, v13 |
| ; GISEL-NEXT: v_lshrrev_b64 v[12:13], v8, -1 |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v15 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v8, v12, v16, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v12, v13, v17, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v15 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, 0, v4, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v5, 0, v5, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e64 v8, v8, -1, s[4:5] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v12, v12, -1, s[4:5] |
| ; GISEL-NEXT: v_and_b32_e32 v2, v4, v2 |
| ; GISEL-NEXT: v_and_b32_e32 v3, v5, v3 |
| ; GISEL-NEXT: v_and_or_b32 v0, v8, v0, v2 |
| ; GISEL-NEXT: v_and_or_b32 v1, v12, v1, v3 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; GISEL-NEXT: v_or_b32_e32 v8, v14, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v8 |
| ; GISEL-NEXT: v_mov_b32_e32 v1, v9 |
| ; GISEL-NEXT: v_mov_b32_e32 v2, v10 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, v11 |
| ; GISEL-NEXT: .LBB3_7: ; %Flow1 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; GISEL-NEXT: .LBB3_8: ; %Flow2 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; GISEL-NEXT: s_cbranch_execz .LBB3_10 |
| ; GISEL-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; GISEL-NEXT: v_lshlrev_b64 v[8:9], 1, v[0:1] |
| ; GISEL-NEXT: v_lshlrev_b64 v[10:11], 1, v[2:3] |
| ; GISEL-NEXT: v_lshrrev_b32_e32 v0, 31, v1 |
| ; GISEL-NEXT: v_or_b32_e32 v10, v10, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v8 |
| ; GISEL-NEXT: v_mov_b32_e32 v1, v9 |
| ; GISEL-NEXT: v_mov_b32_e32 v2, v10 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, v11 |
| ; GISEL-NEXT: .LBB3_10: ; %itofp-sw-epilog |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: v_bfe_u32 v4, v0, 2, 1 |
| ; GISEL-NEXT: v_or_b32_e32 v0, v0, v4 |
| ; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 1, v0 |
| ; GISEL-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc |
| ; GISEL-NEXT: v_addc_co_u32_e32 v2, vcc, 0, v2, vcc |
| ; GISEL-NEXT: v_addc_co_u32_e32 v3, vcc, 0, v3, vcc |
| ; GISEL-NEXT: v_mov_b32_e32 v8, 0 |
| ; GISEL-NEXT: v_and_b32_e32 v9, 0x800000, v1 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], 2, v[0:1] |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[8:9] |
| ; GISEL-NEXT: v_lshlrev_b64 v[8:9], 30, v[2:3] |
| ; GISEL-NEXT: v_lshrrev_b32_e32 v5, 2, v1 |
| ; GISEL-NEXT: v_or_b32_e32 v9, v5, v8 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; GISEL-NEXT: v_lshlrev_b64 v[2:3], 29, v[2:3] |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], 3, v[0:1] |
| ; GISEL-NEXT: v_lshrrev_b32_e32 v0, 3, v1 |
| ; GISEL-NEXT: v_or_b32_e32 v9, v0, v2 |
| ; GISEL-NEXT: v_mov_b32_e32 v6, v7 |
| ; GISEL-NEXT: ; %bb.12: ; %Flow |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: .LBB3_13: ; %Flow4 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; GISEL-NEXT: v_mov_b32_e32 v0, 0x3ff00000 |
| ; GISEL-NEXT: v_lshl_add_u32 v0, v6, 20, v0 |
| ; GISEL-NEXT: v_and_b32_e32 v1, 0xfffff, v9 |
| ; GISEL-NEXT: v_or3_b32 v5, v1, v0, 0 |
| ; GISEL-NEXT: .LBB3_14: ; %Flow5 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v4 |
| ; GISEL-NEXT: v_mov_b32_e32 v1, v5 |
| ; GISEL-NEXT: s_setpc_b64 s[30:31] |
| %cvt = uitofp i128 %x to double |
| ret double %cvt |
| } |
| |
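| ; In the SDAG output below, the f16 result is formed by building the f32 value and narrowing it with v_cvt_f16_f32_e32; the control flow otherwise mirrors the f32 tests. |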
| define half @sitofp_i128_to_f16(i128 %x) { |
| ; SDAG-LABEL: sitofp_i128_to_f16: |
| ; SDAG: ; %bb.0: ; %itofp-entry |
| ; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; SDAG-NEXT: v_or_b32_e32 v5, v1, v3 |
| ; SDAG-NEXT: v_or_b32_e32 v4, v0, v2 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; SDAG-NEXT: v_mov_b32_e32 v4, 0 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB4_14 |
| ; SDAG-NEXT: ; %bb.1: ; %itofp-if-end |
| ; SDAG-NEXT: v_sub_co_u32_e32 v4, vcc, 0, v0 |
| ; SDAG-NEXT: v_subb_co_u32_e32 v5, vcc, 0, v1, vcc |
| ; SDAG-NEXT: v_subb_co_u32_e32 v6, vcc, 0, v2, vcc |
| ; SDAG-NEXT: v_subb_co_u32_e32 v7, vcc, 0, v3, vcc |
| ; SDAG-NEXT: v_cmp_gt_i64_e32 vcc, 0, v[2:3] |
| ; SDAG-NEXT: ; implicit-def: $vgpr8 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v0, v0, v4, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v4, v2, v6, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v1, v1, v5, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v5, v3, v7, vcc |
| ; SDAG-NEXT: v_ffbh_u32_e32 v2, v4 |
| ; SDAG-NEXT: v_add_u32_e32 v2, 32, v2 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v6, v5 |
| ; SDAG-NEXT: v_min_u32_e32 v2, v2, v6 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v6, v0 |
| ; SDAG-NEXT: v_add_u32_e32 v6, 32, v6 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v7, v1 |
| ; SDAG-NEXT: v_min_u32_e32 v6, v6, v7 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; SDAG-NEXT: v_add_u32_e32 v6, 64, v6 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, v6, v2, vcc |
| ; SDAG-NEXT: v_sub_u32_e32 v6, 0x80, v7 |
| ; SDAG-NEXT: v_sub_u32_e32 v2, 0x7f, v7 |
| ; SDAG-NEXT: v_cmp_gt_i32_e32 vcc, 25, v6 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; SDAG-NEXT: ; %bb.2: ; %itofp-if-else |
| ; SDAG-NEXT: v_add_u32_e32 v4, 0xffffff98, v7 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v4, v[0:1] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v4 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, 0, v0, vcc |
| ; SDAG-NEXT: ; implicit-def: $vgpr6 |
| ; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1 |
| ; SDAG-NEXT: ; implicit-def: $vgpr7 |
| ; SDAG-NEXT: ; implicit-def: $vgpr4_vgpr5 |
| ; SDAG-NEXT: ; %bb.3: ; %Flow3 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB4_13 |
| ; SDAG-NEXT: ; %bb.4: ; %NodeBlock |
| ; SDAG-NEXT: v_cmp_lt_i32_e32 vcc, 25, v6 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB4_8 |
| ; SDAG-NEXT: ; %bb.5: ; %LeafBlock |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 26, v6 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB4_7 |
| ; SDAG-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; SDAG-NEXT: v_sub_u32_e32 v12, 0x66, v7 |
| ; SDAG-NEXT: v_sub_u32_e32 v10, 64, v12 |
| ; SDAG-NEXT: v_lshrrev_b64 v[8:9], v12, v[0:1] |
| ; SDAG-NEXT: v_lshlrev_b64 v[10:11], v10, v[4:5] |
| ; SDAG-NEXT: v_sub_u32_e32 v13, 38, v7 |
| ; SDAG-NEXT: v_or_b32_e32 v11, v9, v11 |
| ; SDAG-NEXT: v_or_b32_e32 v10, v8, v10 |
| ; SDAG-NEXT: v_lshrrev_b64 v[8:9], v13, v[4:5] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v12 |
| ; SDAG-NEXT: v_add_u32_e32 v14, 26, v7 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v9, v9, v11, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v12 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, v8, v10, vcc |
| ; SDAG-NEXT: v_lshrrev_b64 v[10:11], v13, v[0:1] |
| ; SDAG-NEXT: v_lshlrev_b64 v[12:13], v14, v[4:5] |
| ; SDAG-NEXT: v_subrev_u32_e32 v7, 38, v7 |
| ; SDAG-NEXT: v_cndmask_b32_e64 v15, v8, v0, s[4:5] |
| ; SDAG-NEXT: v_lshlrev_b64 v[7:8], v7, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v9, v9, v1, s[4:5] |
| ; SDAG-NEXT: v_or_b32_e32 v11, v13, v11 |
| ; SDAG-NEXT: v_or_b32_e32 v10, v12, v10 |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v14 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v14, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, v8, v11, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v14 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, v7, v10, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e64 v5, v8, v5, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v4, v7, v4, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v1, 0, v1, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v1, v1, v5 |
| ; SDAG-NEXT: v_or_b32_e32 v0, v0, v4 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v8, v15, v0 |
| ; SDAG-NEXT: v_mov_b32_e32 v0, v8 |
| ; SDAG-NEXT: v_mov_b32_e32 v1, v9 |
| ; SDAG-NEXT: .LBB4_7: ; %Flow1 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; SDAG-NEXT: .LBB4_8: ; %Flow2 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; SDAG-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], 1, v[0:1] |
| ; SDAG-NEXT: ; %bb.10: ; %itofp-sw-epilog |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: v_lshrrev_b32_e32 v4, 2, v0 |
| ; SDAG-NEXT: v_and_or_b32 v0, v4, 1, v0 |
| ; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 1, v0 |
| ; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc |
| ; SDAG-NEXT: v_and_b32_e32 v4, 0x4000000, v0 |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 0, v4 |
| ; SDAG-NEXT: v_alignbit_b32 v8, v1, v0, 2 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; SDAG-NEXT: v_alignbit_b32 v8, v1, v0, 3 |
| ; SDAG-NEXT: v_mov_b32_e32 v2, v6 |
| ; SDAG-NEXT: ; %bb.12: ; %Flow |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: .LBB4_13: ; %Flow4 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; SDAG-NEXT: v_and_b32_e32 v0, 0x80000000, v3 |
| ; SDAG-NEXT: v_lshl_add_u32 v1, v2, 23, 1.0 |
| ; SDAG-NEXT: v_and_b32_e32 v2, 0x7fffff, v8 |
| ; SDAG-NEXT: v_or3_b32 v0, v2, v0, v1 |
| ; SDAG-NEXT: v_cvt_f16_f32_e32 v4, v0 |
| ; SDAG-NEXT: .LBB4_14: ; %Flow5 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; SDAG-NEXT: v_mov_b32_e32 v0, v4 |
| ; SDAG-NEXT: s_setpc_b64 s[30:31] |
| ; |
| ; GISEL-LABEL: sitofp_i128_to_f16: |
| ; GISEL: ; %bb.0: ; %itofp-entry |
| ; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; GISEL-NEXT: v_or_b32_e32 v4, v0, v2 |
| ; GISEL-NEXT: v_or_b32_e32 v5, v1, v3 |
| ; GISEL-NEXT: s_mov_b32 s4, 0 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; GISEL-NEXT: v_mov_b32_e32 v4, s4 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB4_14 |
| ; GISEL-NEXT: ; %bb.1: ; %itofp-if-end |
| ; GISEL-NEXT: v_ashrrev_i32_e32 v6, 31, v3 |
| ; GISEL-NEXT: v_xor_b32_e32 v0, v6, v0 |
| ; GISEL-NEXT: v_xor_b32_e32 v1, v6, v1 |
| ; GISEL-NEXT: v_sub_co_u32_e32 v0, vcc, v0, v6 |
| ; GISEL-NEXT: v_xor_b32_e32 v2, v6, v2 |
| ; GISEL-NEXT: v_subb_co_u32_e32 v1, vcc, v1, v6, vcc |
| ; GISEL-NEXT: v_xor_b32_e32 v3, v6, v3 |
| ; GISEL-NEXT: v_subb_co_u32_e32 v2, vcc, v2, v6, vcc |
| ; GISEL-NEXT: v_ffbh_u32_e32 v5, v0 |
| ; GISEL-NEXT: v_subb_co_u32_e32 v3, vcc, v3, v6, vcc |
| ; GISEL-NEXT: v_ffbh_u32_e32 v4, v1 |
| ; GISEL-NEXT: v_add_u32_e32 v5, 32, v5 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v7, v2 |
| ; GISEL-NEXT: v_min_u32_e32 v4, v4, v5 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v5, v3 |
| ; GISEL-NEXT: v_add_u32_e32 v7, 32, v7 |
| ; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, 0, v[2:3] |
| ; GISEL-NEXT: v_add_u32_e32 v4, 64, v4 |
| ; GISEL-NEXT: v_min_u32_e32 v5, v5, v7 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v5, v5, v4, vcc |
| ; GISEL-NEXT: v_sub_u32_e32 v8, 0x80, v5 |
| ; GISEL-NEXT: v_sub_u32_e32 v7, 0x7f, v5 |
| ; GISEL-NEXT: v_cmp_ge_i32_e32 vcc, 24, v8 |
| ; GISEL-NEXT: ; implicit-def: $vgpr4 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; GISEL-NEXT: ; %bb.2: ; %itofp-if-else |
| ; GISEL-NEXT: v_add_u32_e32 v2, 0xffffff98, v5 |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], v2, v[0:1] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, 0, v0, vcc |
| ; GISEL-NEXT: ; implicit-def: $vgpr8 |
| ; GISEL-NEXT: ; implicit-def: $vgpr0 |
| ; GISEL-NEXT: ; implicit-def: $vgpr5 |
| ; GISEL-NEXT: ; implicit-def: $vgpr2 |
| ; GISEL-NEXT: ; %bb.3: ; %Flow3 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB4_13 |
| ; GISEL-NEXT: ; %bb.4: ; %NodeBlock |
| ; GISEL-NEXT: v_cmp_le_i32_e32 vcc, 26, v8 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB4_8 |
| ; GISEL-NEXT: ; %bb.5: ; %LeafBlock |
| ; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 26, v8 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB4_7 |
| ; GISEL-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; GISEL-NEXT: v_sub_u32_e32 v4, 0x66, v5 |
| ; GISEL-NEXT: v_sub_u32_e32 v11, 64, v4 |
| ; GISEL-NEXT: v_lshrrev_b64 v[9:10], v4, v[0:1] |
| ; GISEL-NEXT: v_lshlrev_b64 v[11:12], v11, v[2:3] |
| ; GISEL-NEXT: v_add_u32_e32 v13, 0xffffffc0, v4 |
| ; GISEL-NEXT: v_or_b32_e32 v11, v9, v11 |
| ; GISEL-NEXT: v_or_b32_e32 v12, v10, v12 |
| ; GISEL-NEXT: v_lshrrev_b64 v[9:10], v13, v[2:3] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v4 |
| ; GISEL-NEXT: v_add_u32_e32 v14, 26, v5 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, v9, v11, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v10, v10, v12, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e32 vcc, 0, v4 |
| ; GISEL-NEXT: v_sub_u32_e32 v11, 64, v14 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v13, v9, v0, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, v10, v1, vcc |
| ; GISEL-NEXT: v_lshrrev_b64 v[9:10], v14, -1 |
| ; GISEL-NEXT: v_lshlrev_b64 v[11:12], v11, -1 |
| ; GISEL-NEXT: v_add_u32_e32 v5, 0xffffffda, v5 |
| ; GISEL-NEXT: v_or_b32_e32 v15, v9, v11 |
| ; GISEL-NEXT: v_or_b32_e32 v16, v10, v12 |
| ; GISEL-NEXT: v_lshrrev_b64 v[11:12], v5, -1 |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v14 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v5, v11, v15, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v11, v12, v16, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v14 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, 0, v9, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v10, 0, v10, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e64 v5, v5, -1, s[4:5] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v11, v11, -1, s[4:5] |
| ; GISEL-NEXT: v_and_b32_e32 v2, v9, v2 |
| ; GISEL-NEXT: v_and_b32_e32 v3, v10, v3 |
| ; GISEL-NEXT: v_and_or_b32 v0, v5, v0, v2 |
| ; GISEL-NEXT: v_and_or_b32 v1, v11, v1, v3 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; GISEL-NEXT: v_or_b32_e32 v3, v13, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v3 |
| ; GISEL-NEXT: v_mov_b32_e32 v1, v4 |
| ; GISEL-NEXT: v_mov_b32_e32 v2, v5 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, v6 |
| ; GISEL-NEXT: .LBB4_7: ; %Flow1 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; GISEL-NEXT: .LBB4_8: ; %Flow2 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; GISEL-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], 1, v[0:1] |
| ; GISEL-NEXT: ; %bb.10: ; %itofp-sw-epilog |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: v_bfe_u32 v2, v0, 2, 1 |
| ; GISEL-NEXT: v_or_b32_e32 v0, v0, v2 |
| ; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 1, v0 |
| ; GISEL-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc |
| ; GISEL-NEXT: v_and_b32_e32 v2, 0x4000000, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, 0 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], 2, v[0:1] |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[2:3] |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], 3, v[0:1] |
| ; GISEL-NEXT: v_mov_b32_e32 v7, v8 |
| ; GISEL-NEXT: ; %bb.12: ; %Flow |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: .LBB4_13: ; %Flow4 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; GISEL-NEXT: v_and_b32_e32 v0, 0x80000000, v6 |
| ; GISEL-NEXT: v_lshl_add_u32 v1, v7, 23, 1.0 |
| ; GISEL-NEXT: v_and_b32_e32 v2, 0x7fffff, v4 |
| ; GISEL-NEXT: v_or3_b32 v0, v2, v0, v1 |
| ; GISEL-NEXT: v_cvt_f16_f32_e32 v4, v0 |
| ; GISEL-NEXT: .LBB4_14: ; %Flow5 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v4 |
| ; GISEL-NEXT: s_setpc_b64 s[30:31] |
| %cvt = sitofp i128 %x to half |
| ret half %cvt |
| } |
| |
| define half @uitofp_i128_to_f16(i128 %x) { |
| ; SDAG-LABEL: uitofp_i128_to_f16: |
| ; SDAG: ; %bb.0: ; %itofp-entry |
| ; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; SDAG-NEXT: v_or_b32_e32 v5, v1, v3 |
| ; SDAG-NEXT: v_or_b32_e32 v4, v0, v2 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; SDAG-NEXT: v_mov_b32_e32 v4, 0 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB5_14 |
| ; SDAG-NEXT: ; %bb.1: ; %itofp-if-end |
| ; SDAG-NEXT: v_ffbh_u32_e32 v4, v2 |
| ; SDAG-NEXT: v_add_u32_e32 v4, 32, v4 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v5, v3 |
| ; SDAG-NEXT: v_min_u32_e32 v4, v4, v5 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v5, v0 |
| ; SDAG-NEXT: v_add_u32_e32 v5, 32, v5 |
| ; SDAG-NEXT: v_ffbh_u32_e32 v6, v1 |
| ; SDAG-NEXT: v_min_u32_e32 v5, v5, v6 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[2:3] |
| ; SDAG-NEXT: v_add_u32_e32 v5, 64, v5 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v6, v5, v4, vcc |
| ; SDAG-NEXT: v_sub_u32_e32 v5, 0x80, v6 |
| ; SDAG-NEXT: v_sub_u32_e32 v4, 0x7f, v6 |
| ; SDAG-NEXT: v_cmp_gt_i32_e32 vcc, 25, v5 |
| ; SDAG-NEXT: ; implicit-def: $vgpr7 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; SDAG-NEXT: ; %bb.2: ; %itofp-if-else |
| ; SDAG-NEXT: v_add_u32_e32 v2, 0xffffff98, v6 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v2, v[0:1] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, 0, v0, vcc |
| ; SDAG-NEXT: ; implicit-def: $vgpr5 |
| ; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1 |
| ; SDAG-NEXT: ; implicit-def: $vgpr6 |
| ; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3 |
| ; SDAG-NEXT: ; %bb.3: ; %Flow3 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB5_13 |
| ; SDAG-NEXT: ; %bb.4: ; %NodeBlock |
| ; SDAG-NEXT: v_cmp_lt_i32_e32 vcc, 25, v5 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; SDAG-NEXT: s_cbranch_execz .LBB5_8 |
| ; SDAG-NEXT: ; %bb.5: ; %LeafBlock |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 26, v5 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; SDAG-NEXT: s_cbranch_execz .LBB5_7 |
| ; SDAG-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; SDAG-NEXT: v_sub_u32_e32 v11, 0x66, v6 |
| ; SDAG-NEXT: v_sub_u32_e32 v9, 64, v11 |
| ; SDAG-NEXT: v_lshrrev_b64 v[7:8], v11, v[0:1] |
| ; SDAG-NEXT: v_lshlrev_b64 v[9:10], v9, v[2:3] |
| ; SDAG-NEXT: v_sub_u32_e32 v12, 38, v6 |
| ; SDAG-NEXT: v_or_b32_e32 v10, v8, v10 |
| ; SDAG-NEXT: v_or_b32_e32 v9, v7, v9 |
| ; SDAG-NEXT: v_lshrrev_b64 v[7:8], v12, v[2:3] |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v11 |
| ; SDAG-NEXT: v_add_u32_e32 v13, 26, v6 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v8, v8, v10, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v11 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, v7, v9, vcc |
| ; SDAG-NEXT: v_lshrrev_b64 v[9:10], v12, v[0:1] |
| ; SDAG-NEXT: v_lshlrev_b64 v[11:12], v13, v[2:3] |
| ; SDAG-NEXT: v_subrev_u32_e32 v6, 38, v6 |
| ; SDAG-NEXT: v_cndmask_b32_e64 v14, v7, v0, s[4:5] |
| ; SDAG-NEXT: v_lshlrev_b64 v[6:7], v6, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v8, v8, v1, s[4:5] |
| ; SDAG-NEXT: v_or_b32_e32 v10, v12, v10 |
| ; SDAG-NEXT: v_or_b32_e32 v9, v11, v9 |
| ; SDAG-NEXT: v_cmp_gt_u32_e32 vcc, 64, v13 |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], v13, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v7, v7, v10, vcc |
| ; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v13 |
| ; SDAG-NEXT: v_cndmask_b32_e32 v6, v6, v9, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e64 v3, v7, v3, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v2, v6, v2, s[4:5] |
| ; SDAG-NEXT: v_cndmask_b32_e32 v1, 0, v1, vcc |
| ; SDAG-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v1, v1, v3 |
| ; SDAG-NEXT: v_or_b32_e32 v0, v0, v2 |
| ; SDAG-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; SDAG-NEXT: v_or_b32_e32 v7, v14, v0 |
| ; SDAG-NEXT: v_mov_b32_e32 v0, v7 |
| ; SDAG-NEXT: v_mov_b32_e32 v1, v8 |
| ; SDAG-NEXT: .LBB5_7: ; %Flow1 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; SDAG-NEXT: .LBB5_8: ; %Flow2 |
| ; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; SDAG-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; SDAG-NEXT: v_lshlrev_b64 v[0:1], 1, v[0:1] |
| ; SDAG-NEXT: ; %bb.10: ; %itofp-sw-epilog |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: v_lshrrev_b32_e32 v2, 2, v0 |
| ; SDAG-NEXT: v_and_or_b32 v0, v2, 1, v0 |
| ; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 1, v0 |
| ; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc |
| ; SDAG-NEXT: v_and_b32_e32 v2, 0x4000000, v0 |
| ; SDAG-NEXT: v_cmp_ne_u32_e32 vcc, 0, v2 |
| ; SDAG-NEXT: v_alignbit_b32 v7, v1, v0, 2 |
| ; SDAG-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; SDAG-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; SDAG-NEXT: v_alignbit_b32 v7, v1, v0, 3 |
| ; SDAG-NEXT: v_mov_b32_e32 v4, v5 |
| ; SDAG-NEXT: ; %bb.12: ; %Flow |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; SDAG-NEXT: .LBB5_13: ; %Flow4 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; SDAG-NEXT: v_and_b32_e32 v0, 0x7fffff, v7 |
| ; SDAG-NEXT: v_lshl_or_b32 v0, v4, 23, v0 |
| ; SDAG-NEXT: v_add_u32_e32 v0, 1.0, v0 |
| ; SDAG-NEXT: v_cvt_f16_f32_e32 v4, v0 |
| ; SDAG-NEXT: .LBB5_14: ; %Flow5 |
| ; SDAG-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; SDAG-NEXT: v_mov_b32_e32 v0, v4 |
| ; SDAG-NEXT: s_setpc_b64 s[30:31] |
| ; |
| ; GISEL-LABEL: uitofp_i128_to_f16: |
| ; GISEL: ; %bb.0: ; %itofp-entry |
| ; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| ; GISEL-NEXT: v_or_b32_e32 v4, v0, v2 |
| ; GISEL-NEXT: v_or_b32_e32 v5, v1, v3 |
| ; GISEL-NEXT: s_mov_b32 s4, 0 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[4:5] |
| ; GISEL-NEXT: v_mov_b32_e32 v4, s4 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB5_14 |
| ; GISEL-NEXT: ; %bb.1: ; %itofp-if-end |
| ; GISEL-NEXT: v_ffbh_u32_e32 v5, v0 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v4, v1 |
| ; GISEL-NEXT: v_add_u32_e32 v5, 32, v5 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v6, v2 |
| ; GISEL-NEXT: v_min_u32_e32 v4, v4, v5 |
| ; GISEL-NEXT: v_ffbh_u32_e32 v5, v3 |
| ; GISEL-NEXT: v_add_u32_e32 v6, 32, v6 |
| ; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, 0, v[2:3] |
| ; GISEL-NEXT: v_add_u32_e32 v4, 64, v4 |
| ; GISEL-NEXT: v_min_u32_e32 v5, v5, v6 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v5, v5, v4, vcc |
| ; GISEL-NEXT: v_sub_u32_e32 v7, 0x80, v5 |
| ; GISEL-NEXT: v_sub_u32_e32 v6, 0x7f, v5 |
| ; GISEL-NEXT: v_cmp_ge_i32_e32 vcc, 24, v7 |
| ; GISEL-NEXT: ; implicit-def: $vgpr4 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[4:5], exec, s[4:5] |
| ; GISEL-NEXT: ; %bb.2: ; %itofp-if-else |
| ; GISEL-NEXT: v_add_u32_e32 v2, 0xffffff98, v5 |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], v2, v[0:1] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, 0, v0, vcc |
| ; GISEL-NEXT: ; implicit-def: $vgpr7 |
| ; GISEL-NEXT: ; implicit-def: $vgpr0 |
| ; GISEL-NEXT: ; implicit-def: $vgpr5 |
| ; GISEL-NEXT: ; implicit-def: $vgpr2 |
| ; GISEL-NEXT: ; %bb.3: ; %Flow3 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[8:9], s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB5_13 |
| ; GISEL-NEXT: ; %bb.4: ; %NodeBlock |
| ; GISEL-NEXT: v_cmp_le_i32_e32 vcc, 26, v7 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: s_xor_b64 s[10:11], exec, s[4:5] |
| ; GISEL-NEXT: s_cbranch_execz .LBB5_8 |
| ; GISEL-NEXT: ; %bb.5: ; %LeafBlock |
| ; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 26, v7 |
| ; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc |
| ; GISEL-NEXT: s_cbranch_execz .LBB5_7 |
| ; GISEL-NEXT: ; %bb.6: ; %itofp-sw-default |
| ; GISEL-NEXT: v_sub_u32_e32 v4, 0x66, v5 |
| ; GISEL-NEXT: v_sub_u32_e32 v10, 64, v4 |
| ; GISEL-NEXT: v_lshrrev_b64 v[8:9], v4, v[0:1] |
| ; GISEL-NEXT: v_lshlrev_b64 v[10:11], v10, v[2:3] |
| ; GISEL-NEXT: v_add_u32_e32 v12, 0xffffffc0, v4 |
| ; GISEL-NEXT: v_or_b32_e32 v10, v8, v10 |
| ; GISEL-NEXT: v_or_b32_e32 v11, v9, v11 |
| ; GISEL-NEXT: v_lshrrev_b64 v[8:9], v12, v[2:3] |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v4 |
| ; GISEL-NEXT: v_add_u32_e32 v13, 26, v5 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v8, v8, v10, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, v9, v11, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e32 vcc, 0, v4 |
| ; GISEL-NEXT: v_sub_u32_e32 v10, 64, v13 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v12, v8, v0, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v4, v9, v1, vcc |
| ; GISEL-NEXT: v_lshrrev_b64 v[8:9], v13, -1 |
| ; GISEL-NEXT: v_lshlrev_b64 v[10:11], v10, -1 |
| ; GISEL-NEXT: v_add_u32_e32 v5, 0xffffffda, v5 |
| ; GISEL-NEXT: v_or_b32_e32 v14, v8, v10 |
| ; GISEL-NEXT: v_or_b32_e32 v15, v9, v11 |
| ; GISEL-NEXT: v_lshrrev_b64 v[10:11], v5, -1 |
| ; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v13 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v5, v10, v14, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v10, v11, v15, vcc |
| ; GISEL-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v13 |
| ; GISEL-NEXT: v_cndmask_b32_e32 v8, 0, v8, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e32 v9, 0, v9, vcc |
| ; GISEL-NEXT: v_cndmask_b32_e64 v5, v5, -1, s[4:5] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v10, v10, -1, s[4:5] |
| ; GISEL-NEXT: v_and_b32_e32 v2, v8, v2 |
| ; GISEL-NEXT: v_and_b32_e32 v3, v9, v3 |
| ; GISEL-NEXT: v_and_or_b32 v0, v5, v0, v2 |
| ; GISEL-NEXT: v_and_or_b32 v1, v10, v1, v3 |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[0:1] |
| ; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc |
| ; GISEL-NEXT: v_or_b32_e32 v3, v12, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v3 |
| ; GISEL-NEXT: v_mov_b32_e32 v1, v4 |
| ; GISEL-NEXT: v_mov_b32_e32 v2, v5 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, v6 |
| ; GISEL-NEXT: .LBB5_7: ; %Flow1 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[12:13] |
| ; GISEL-NEXT: .LBB5_8: ; %Flow2 |
| ; GISEL-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11] |
| ; GISEL-NEXT: ; %bb.9: ; %itofp-sw-bb |
| ; GISEL-NEXT: v_lshlrev_b64 v[0:1], 1, v[0:1] |
| ; GISEL-NEXT: ; %bb.10: ; %itofp-sw-epilog |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: v_bfe_u32 v2, v0, 2, 1 |
| ; GISEL-NEXT: v_or_b32_e32 v0, v0, v2 |
| ; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 1, v0 |
| ; GISEL-NEXT: v_addc_co_u32_e32 v1, vcc, 0, v1, vcc |
| ; GISEL-NEXT: v_and_b32_e32 v2, 0x4000000, v0 |
| ; GISEL-NEXT: v_mov_b32_e32 v3, 0 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], 2, v[0:1] |
| ; GISEL-NEXT: v_cmp_ne_u64_e32 vcc, 0, v[2:3] |
| ; GISEL-NEXT: s_and_saveexec_b64 s[4:5], vcc |
| ; GISEL-NEXT: ; %bb.11: ; %itofp-if-then20 |
| ; GISEL-NEXT: v_lshrrev_b64 v[4:5], 3, v[0:1] |
| ; GISEL-NEXT: v_mov_b32_e32 v6, v7 |
| ; GISEL-NEXT: ; %bb.12: ; %Flow |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[4:5] |
| ; GISEL-NEXT: .LBB5_13: ; %Flow4 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[8:9] |
| ; GISEL-NEXT: v_lshl_add_u32 v0, v6, 23, 1.0 |
| ; GISEL-NEXT: v_mov_b32_e32 v1, 0x7fffff |
| ; GISEL-NEXT: v_and_or_b32 v0, v4, v1, v0 |
| ; GISEL-NEXT: v_cvt_f16_f32_e32 v4, v0 |
| ; GISEL-NEXT: .LBB5_14: ; %Flow5 |
| ; GISEL-NEXT: s_or_b64 exec, exec, s[6:7] |
| ; GISEL-NEXT: v_mov_b32_e32 v0, v4 |
| ; GISEL-NEXT: s_setpc_b64 s[30:31] |
| %cvt = uitofp i128 %x to half |
| ret half %cvt |
| } |
| |
| ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line: |
| ; GCN: {{.*}} |