|  | ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py | 
|  | ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx | FileCheck %s --check-prefixes=X86,X86-AVX,X86-AVX1 | 
|  | ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=X86,X86-AVX,X86-AVX2 | 
|  | ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512vl | FileCheck %s --check-prefixes=X86,X86-AVX512 | 
|  | ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512bw,+avx512vl | FileCheck %s --check-prefixes=X86,X86-AVX512 | 
|  | ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512dq,+avx512vl | FileCheck %s --check-prefixes=X86,X86-AVX512 | 
|  | ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX1 | 
|  | ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX2 | 
|  | ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512vl | FileCheck %s --check-prefixes=X64,X64-AVX512 | 
|  | ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw,+avx512vl | FileCheck %s --check-prefixes=X64,X64-AVX512 | 
|  | ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512dq,+avx512vl | FileCheck %s --check-prefixes=X64,X64-AVX512 | 
|  |  | 
|  | ; | 
|  | ; Subvector Load + Broadcast | 
|  | ; | 
|  |  | 
define <4 x double> @test_broadcast_2f64_4f64(ptr%p) nounwind {
; X86-LABEL: test_broadcast_2f64_4f64:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-NEXT:    retl
;
; X64-LABEL: test_broadcast_2f64_4f64:
; X64:       # %bb.0:
; X64-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-NEXT:    retq
; Load a 128-bit <2 x double> and splat it into both 128-bit lanes of a
; 256-bit result; all targets fold load+shuffle into one vbroadcastf128.
%1 = load <2 x double>, ptr%p
%2 = shufflevector <2 x double> %1, <2 x double> undef, <4 x i32> <i32 0, i32 1, i32 0, i32 1>
ret <4 x double> %2
}
|  |  | 
define <8 x double> @test_broadcast_2f64_8f64(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_2f64_8f64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_2f64_8f64:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcastf32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_2f64_8f64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_2f64_8f64:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcastf32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; 128-bit load splatted to 512 bits. AVX512 uses one zmm vbroadcastf32x4;
; AVX/AVX2 split the 512-bit value into two ymms (broadcast + register copy).
%1 = load <2 x double>, ptr%p
%2 = shufflevector <2 x double> %1, <2 x double> undef, <8 x i32> <i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1>
ret <8 x double> %2
}
|  |  | 
define <8 x double> @test_broadcast_4f64_8f64(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_4f64_8f64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vmovaps (%eax), %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_4f64_8f64:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcastf64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_4f64_8f64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps (%rdi), %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_4f64_8f64:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcastf64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; 256-bit load splatted to 512 bits. AVX512 selects vbroadcastf64x4; on
; AVX/AVX2 the splat is just the same ymm load duplicated into both halves.
%1 = load <4 x double>, ptr%p
%2 = shufflevector <4 x double> %1, <4 x double> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x double> %2
}
|  |  | 
define <4 x i64> @test_broadcast_2i64_4i64(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_2i64_4i64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_2i64_4i64:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_2i64_4i64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_2i64_4i64:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    retq
; Integer 128->256 splat: the AVX/AVX2 prefixes use the float-domain
; vbroadcastf128, while the AVX512 targets use the integer vbroadcasti128.
%1 = load <2 x i64>, ptr%p
%2 = shufflevector <2 x i64> %1, <2 x i64> undef, <4 x i32> <i32 0, i32 1, i32 0, i32 1>
ret <4 x i64> %2
}
|  |  | 
define <8 x i64> @test_broadcast_2i64_8i64(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_2i64_8i64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_2i64_8i64:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_2i64_8i64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_2i64_8i64:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; Integer 128->512 splat. AVX512 folds to a single zmm vbroadcasti32x4;
; AVX/AVX2 materialize the two ymm halves via broadcast + register copy.
%1 = load <2 x i64>, ptr%p
%2 = shufflevector <2 x i64> %1, <2 x i64> undef, <8 x i32> <i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1>
ret <8 x i64> %2
}
|  |  | 
define <8 x i64> @test_broadcast_4i64_8i64(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_4i64_8i64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vmovaps (%eax), %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_4i64_8i64:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_4i64_8i64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps (%rdi), %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_4i64_8i64:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; Integer 256->512 splat: vbroadcasti64x4 on AVX512, plain ymm load plus
; copy on AVX/AVX2 (both halves hold the same 256-bit value).
%1 = load <4 x i64>, ptr%p
%2 = shufflevector <4 x i64> %1, <4 x i64> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x i64> %2
}
|  |  | 
define <8 x float> @test_broadcast_4f32_8f32(ptr%p) nounwind {
; X86-LABEL: test_broadcast_4f32_8f32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-NEXT:    retl
;
; X64-LABEL: test_broadcast_4f32_8f32:
; X64:       # %bb.0:
; X64-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-NEXT:    retq
; Float 128->256 splat: a single vbroadcastf128 from memory on all targets.
%1 = load <4 x float>, ptr%p
%2 = shufflevector <4 x float> %1, <4 x float> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x float> %2
}
|  |  | 
define <16 x float> @test_broadcast_4f32_16f32(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_4f32_16f32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_4f32_16f32:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcastf32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_4f32_16f32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_4f32_16f32:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcastf32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; Float 128->512 splat: one zmm vbroadcastf32x4 on AVX512; broadcast into
; ymm0 then copy to ymm1 on AVX/AVX2 (512-bit return split across two ymms).
%1 = load <4 x float>, ptr%p
%2 = shufflevector <4 x float> %1, <4 x float> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <16 x float> %2
}
|  |  | 
define <16 x float> @test_broadcast_8f32_16f32(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_8f32_16f32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vmovaps (%eax), %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_8f32_16f32:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcastf64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_8f32_16f32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps (%rdi), %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_8f32_16f32:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcastf64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; Float 256->512 splat: vbroadcastf64x4 on AVX512 (the 64x4 form covers the
; same 256-bit chunk); load + register copy on AVX/AVX2.
%1 = load <8 x float>, ptr%p
%2 = shufflevector <8 x float> %1, <8 x float> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
ret <16 x float> %2
}
|  |  | 
define <8 x i32> @test_broadcast_4i32_8i32(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_4i32_8i32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_4i32_8i32:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_4i32_8i32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_4i32_8i32:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    retq
; Integer 128->256 splat of <4 x i32>: float-domain vbroadcastf128 on
; AVX/AVX2, integer vbroadcasti128 on the AVX512 targets.
%1 = load <4 x i32>, ptr%p
%2 = shufflevector <4 x i32> %1, <4 x i32> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x i32> %2
}
|  |  | 
define <16 x i32> @test_broadcast_4i32_16i32(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_4i32_16i32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_4i32_16i32:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_4i32_16i32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_4i32_16i32:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; Integer 128->512 splat of <4 x i32>: one zmm vbroadcasti32x4 on AVX512;
; ymm broadcast plus register copy on AVX/AVX2.
%1 = load <4 x i32>, ptr%p
%2 = shufflevector <4 x i32> %1, <4 x i32> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <16 x i32> %2
}
|  |  | 
define <16 x i32> @test_broadcast_8i32_16i32(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_8i32_16i32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vmovaps (%eax), %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_8i32_16i32:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_8i32_16i32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps (%rdi), %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_8i32_16i32:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; Integer 256->512 splat of <8 x i32>: vbroadcasti64x4 on AVX512; ymm load
; plus register copy on AVX/AVX2.
%1 = load <8 x i32>, ptr%p
%2 = shufflevector <8 x i32> %1, <8 x i32> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
ret <16 x i32> %2
}
|  |  | 
define <16 x i16> @test_broadcast_8i16_16i16(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_8i16_16i16:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_8i16_16i16:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_8i16_16i16:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_8i16_16i16:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    retq
; 16-bit-element 128->256 splat: float-domain vbroadcastf128 on AVX/AVX2,
; vbroadcasti128 on the AVX512 targets.
%1 = load <8 x i16>, ptr%p
%2 = shufflevector <8 x i16> %1, <8 x i16> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
ret <16 x i16> %2
}
|  |  | 
define <32 x i16> @test_broadcast_8i16_32i16(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_8i16_32i16:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_8i16_32i16:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_8i16_32i16:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_8i16_32i16:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; 16-bit-element 128->512 splat: one zmm vbroadcasti32x4 on AVX512; ymm
; broadcast plus register copy on AVX/AVX2.
%1 = load <8 x i16>, ptr%p
%2 = shufflevector <8 x i16> %1, <8 x i16> undef, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
ret <32 x i16> %2
}
|  |  | 
define <32 x i16> @test_broadcast_16i16_32i16(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_16i16_32i16:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vmovaps (%eax), %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_16i16_32i16:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_16i16_32i16:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps (%rdi), %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_16i16_32i16:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; 16-bit-element 256->512 splat: vbroadcasti64x4 on AVX512; ymm load plus
; register copy on AVX/AVX2.
%1 = load <16 x i16>, ptr%p
%2 = shufflevector <16 x i16> %1, <16 x i16> undef, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
ret <32 x i16> %2
}
|  |  | 
define <32 x i8> @test_broadcast_16i8_32i8(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_16i8_32i8:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_16i8_32i8:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_16i8_32i8:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_16i8_32i8:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    retq
; 8-bit-element 128->256 splat: float-domain vbroadcastf128 on AVX/AVX2,
; vbroadcasti128 on the AVX512 targets.
%1 = load <16 x i8>, ptr%p
%2 = shufflevector <16 x i8> %1, <16 x i8> undef, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
ret <32 x i8> %2
}
|  |  | 
define <64 x i8> @test_broadcast_16i8_64i8(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_16i8_64i8:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_16i8_64i8:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_16i8_64i8:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_16i8_64i8:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; 8-bit-element 128->512 splat: one zmm vbroadcasti32x4 on AVX512; ymm
; broadcast plus register copy on AVX/AVX2.
%1 = load <16 x i8>, ptr%p
%2 = shufflevector <16 x i8> %1, <16 x i8> undef, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
ret <64 x i8> %2
}
|  |  | 
define <64 x i8> @test_broadcast_32i8_64i8(ptr%p) nounwind {
; X86-AVX-LABEL: test_broadcast_32i8_64i8:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vmovaps (%eax), %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_32i8_64i8:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_32i8_64i8:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps (%rdi), %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_32i8_64i8:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
; 8-bit-element 256->512 splat: vbroadcasti64x4 on AVX512; ymm load plus
; register copy on AVX/AVX2.
%1 = load <32 x i8>, ptr%p
%2 = shufflevector <32 x i8> %1, <32 x i8> undef, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
ret <64 x i8> %2
}
|  |  | 
|  | ; | 
|  | ; Subvector Load + Broadcast + Store | 
|  | ; | 
|  |  | 
define <4 x double> @test_broadcast_2f64_4f64_reuse(ptr %p0, ptr %p1) {
; X86-AVX-LABEL: test_broadcast_2f64_4f64_reuse:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %xmm0, (%eax)
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_2f64_4f64_reuse:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX512-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    vmovdqa %xmm0, (%eax)
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_2f64_4f64_reuse:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %xmm0, (%rsi)
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_2f64_4f64_reuse:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    vmovdqa %xmm0, (%rsi)
; X64-AVX512-NEXT:    retq
; The loaded 128-bit value is both stored to %p1 and splatted: the broadcast
; is still folded with the load, and the store reuses its low xmm half.
%1 = load <2 x double>, ptr %p0
store <2 x double> %1, ptr %p1
%2 = shufflevector <2 x double> %1, <2 x double> undef, <4 x i32> <i32 0, i32 1, i32 0, i32 1>
ret <4 x double> %2
}
|  |  | 
define <4 x i64> @test_broadcast_2i64_4i64_reuse(ptr %p0, ptr %p1) {
; X86-AVX-LABEL: test_broadcast_2i64_4i64_reuse:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %xmm0, (%eax)
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_2i64_4i64_reuse:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    vmovdqa %xmm0, (%eax)
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_2i64_4i64_reuse:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %xmm0, (%rsi)
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_2i64_4i64_reuse:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    vmovdqa %xmm0, (%rsi)
; X64-AVX512-NEXT:    retq
; Integer variant of the reuse pattern: broadcast folded with the load, and
; the store to %p1 reuses the low xmm half of the broadcasted register.
%1 = load <2 x i64>, ptr %p0
store <2 x i64> %1, ptr %p1
%2 = shufflevector <2 x i64> %1, <2 x i64> undef, <4 x i32> <i32 0, i32 1, i32 0, i32 1>
ret <4 x i64> %2
}
|  |  | 
define <8 x float> @test_broadcast_4f32_8f32_reuse(ptr %p0, ptr %p1) {
; X86-AVX-LABEL: test_broadcast_4f32_8f32_reuse:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %xmm0, (%eax)
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_4f32_8f32_reuse:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX512-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    vmovdqa %xmm0, (%eax)
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_4f32_8f32_reuse:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %xmm0, (%rsi)
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_4f32_8f32_reuse:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    vmovdqa %xmm0, (%rsi)
; X64-AVX512-NEXT:    retq
; Float 128->256 reuse: broadcast stays folded with the load; the store to
; %p1 writes the low xmm half of the broadcast result.
%1 = load <4 x float>, ptr %p0
store <4 x float> %1, ptr %p1
%2 = shufflevector <4 x float> %1, <4 x float> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x float> %2
}
|  |  | 
define <8 x i32> @test_broadcast_4i32_8i32_reuse(ptr %p0, ptr %p1) {
; X86-AVX-LABEL: test_broadcast_4i32_8i32_reuse:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %xmm0, (%eax)
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_4i32_8i32_reuse:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    vmovdqa %xmm0, (%eax)
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_4i32_8i32_reuse:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %xmm0, (%rsi)
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_4i32_8i32_reuse:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    vmovdqa %xmm0, (%rsi)
; X64-AVX512-NEXT:    retq
; <4 x i32> 128->256 reuse: broadcast folded with the load (integer form on
; the AVX512 targets); the store reuses the broadcast's low xmm half.
%1 = load <4 x i32>, ptr %p0
store <4 x i32> %1, ptr %p1
%2 = shufflevector <4 x i32> %1, <4 x i32> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x i32> %2
}
|  |  | 
define <16 x i16> @test_broadcast_8i16_16i16_reuse(ptr%p0, ptr%p1) nounwind {
; X86-AVX-LABEL: test_broadcast_8i16_16i16_reuse:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %xmm0, (%eax)
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_8i16_16i16_reuse:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    vmovdqa %xmm0, (%eax)
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_8i16_16i16_reuse:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %xmm0, (%rsi)
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_8i16_16i16_reuse:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    vmovdqa %xmm0, (%rsi)
; X64-AVX512-NEXT:    retq
; <8 x i16> 128->256 reuse: broadcast folded with the load; the store to
; %p1 reuses the broadcast's low xmm half.
%1 = load <8 x i16>, ptr%p0
store <8 x i16> %1, ptr %p1
%2 = shufflevector <8 x i16> %1, <8 x i16> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
ret <16 x i16> %2
}
|  |  | 
; Reuse test: load a <16 x i8> from %p0, store it unchanged to %p1, and also
; broadcast it into both 128-bit halves of the <32 x i8> return value. As with
; the i16 variant above, the expected codegen is one 128-bit subvector
; broadcast from memory plus one xmm store.
define <32 x i8> @test_broadcast_16i8_32i8_reuse(ptr%p0, ptr%p1) nounwind {
; X86-AVX-LABEL: test_broadcast_16i8_32i8_reuse:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %xmm0, (%eax)
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_16i8_32i8_reuse:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    vmovdqa %xmm0, (%eax)
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_16i8_32i8_reuse:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %xmm0, (%rsi)
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_16i8_32i8_reuse:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    vmovdqa %xmm0, (%rsi)
; X64-AVX512-NEXT:    retq
%1 = load <16 x i8>, ptr%p0
store <16 x i8> %1, ptr %p1
%2 = shufflevector <16 x i8> %1, <16 x i8> undef, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
ret <32 x i8> %2
}
|  |  | 
|  | ; | 
|  | ; Subvector Load + Broadcast with Separate Store | 
|  | ; | 
|  |  | 
; Chain test: the broadcast load from %p0 and an unrelated zero store to %p1
; share the memory dependency chain. The load should still fold into the
; 128-bit subvector broadcast, while the zeroed xmm is materialized (vxorps)
; and stored independently.
define <8 x i32> @test_broadcast_4i32_8i32_chain(ptr %p0, ptr %p1) {
; X86-AVX-LABEL: test_broadcast_4i32_8i32_chain:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %xmm1, (%eax)
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_4i32_8i32_chain:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX512-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X86-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX512-NEXT:    vmovaps %xmm1, (%eax)
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_4i32_8i32_chain:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %xmm1, (%rsi)
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_4i32_8i32_chain:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X64-AVX512-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX512-NEXT:    vmovaps %xmm1, (%rsi)
; X64-AVX512-NEXT:    retq
%1 = load <4 x i32>, ptr %p0
store <4 x float> zeroinitializer, ptr %p1
%2 = shufflevector <4 x i32> %1, <4 x i32> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x i32> %2
}
|  |  | 
; Chain test, 512-bit variant: broadcast a loaded <4 x i32> to all four lanes
; of a <16 x i32> while an unrelated zero store sits on the same chain. AVX512
; should fold the load into vbroadcasti32x4; plain AVX only has 256-bit
; registers, so it broadcasts into a ymm and duplicates it for the high half.
define <16 x i32> @test_broadcast_4i32_16i32_chain(ptr %p0, ptr %p1) {
; X86-AVX-LABEL: test_broadcast_4i32_16i32_chain:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %xmm1, (%eax)
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_broadcast_4i32_16i32_chain:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-AVX512-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X86-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    vmovaps %xmm1, (%eax)
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_broadcast_4i32_16i32_chain:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %xmm1, (%rsi)
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_broadcast_4i32_16i32_chain:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X64-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    vmovaps %xmm1, (%rsi)
; X64-AVX512-NEXT:    retq
%1 = load <4 x i32>, ptr %p0
store <4 x float> zeroinitializer, ptr %p1
%2 = shufflevector <4 x i32> %1, <4 x i32> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <16 x i32> %2
}
|  |  | 
|  | ; | 
|  | ; subvector Load with multiple uses + broadcast | 
|  | ; Fallback to the broadcast should be done | 
|  | ; | 
|  |  | 
; 256-bit and 512-bit integer store destinations for the v4i64/v8i64 fallback
; test below. Deliberately under-aligned (align 8) so the stores are unaligned.
@ga4 = dso_local global <4 x i64> zeroinitializer, align 8
@gb4 = dso_local global <8 x i64> zeroinitializer, align 8
|  |  | 
; The splat constant <1,2,3,4> is used at both <4 x i64> and <8 x i64> widths.
; On AVX512 the checks expect it to be materialized once via a 256-bit
; subvector broadcast (vbroadcasti64x4) and reused for both the ymm add and the
; zmm add/and.
define dso_local void @fallback_broadcast_v4i64_to_v8i64(<4 x i64> %a, <8 x i64> %b) {
; X86-AVX1-LABEL: fallback_broadcast_v4i64_to_v8i64:
; X86-AVX1:       # %bb.0: # %entry
; X86-AVX1-NEXT:    vmovdqa {{.*#+}} ymm3 = [1,0,2,0,3,0,4,0]
; X86-AVX1-NEXT:    vpaddq %xmm3, %xmm0, %xmm4
; X86-AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; X86-AVX1-NEXT:    vpmovsxbq {{.*#+}} xmm5 = [3,4]
; X86-AVX1-NEXT:    vpaddq %xmm5, %xmm0, %xmm0
; X86-AVX1-NEXT:    vextractf128 $1, %ymm2, %xmm6
; X86-AVX1-NEXT:    vpaddq %xmm5, %xmm6, %xmm6
; X86-AVX1-NEXT:    vpaddq %xmm3, %xmm2, %xmm2
; X86-AVX1-NEXT:    vinsertf128 $1, %xmm6, %ymm2, %ymm2
; X86-AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm6
; X86-AVX1-NEXT:    vpaddq %xmm5, %xmm6, %xmm5
; X86-AVX1-NEXT:    vpaddq %xmm3, %xmm1, %xmm1
; X86-AVX1-NEXT:    vinsertf128 $1, %xmm5, %ymm1, %ymm1
; X86-AVX1-NEXT:    vandps %ymm3, %ymm1, %ymm1
; X86-AVX1-NEXT:    vandps %ymm3, %ymm2, %ymm2
; X86-AVX1-NEXT:    vmovdqu %xmm0, ga4+16
; X86-AVX1-NEXT:    vmovdqu %xmm4, ga4
; X86-AVX1-NEXT:    vmovups %ymm2, gb4+32
; X86-AVX1-NEXT:    vmovups %ymm1, gb4
; X86-AVX1-NEXT:    vzeroupper
; X86-AVX1-NEXT:    retl
;
; X86-AVX2-LABEL: fallback_broadcast_v4i64_to_v8i64:
; X86-AVX2:       # %bb.0: # %entry
; X86-AVX2-NEXT:    vpmovsxbq {{.*#+}} ymm3 = [1,2,3,4]
; X86-AVX2-NEXT:    vpaddq %ymm3, %ymm0, %ymm0
; X86-AVX2-NEXT:    vpaddq %ymm3, %ymm2, %ymm2
; X86-AVX2-NEXT:    vpaddq %ymm3, %ymm1, %ymm1
; X86-AVX2-NEXT:    vpand %ymm3, %ymm1, %ymm1
; X86-AVX2-NEXT:    vpand %ymm3, %ymm2, %ymm2
; X86-AVX2-NEXT:    vmovdqu %ymm0, ga4
; X86-AVX2-NEXT:    vmovdqu %ymm2, gb4+32
; X86-AVX2-NEXT:    vmovdqu %ymm1, gb4
; X86-AVX2-NEXT:    vzeroupper
; X86-AVX2-NEXT:    retl
;
; X86-AVX512-LABEL: fallback_broadcast_v4i64_to_v8i64:
; X86-AVX512:       # %bb.0: # %entry
; X86-AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [1,0,2,0,3,0,4,0,1,0,2,0,3,0,4,0]
; X86-AVX512-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    vpaddq %ymm2, %ymm0, %ymm0
; X86-AVX512-NEXT:    vpaddq %zmm2, %zmm1, %zmm1
; X86-AVX512-NEXT:    vpandq %zmm2, %zmm1, %zmm1
; X86-AVX512-NEXT:    vmovdqu %ymm0, ga4
; X86-AVX512-NEXT:    vmovdqu64 %zmm1, gb4
; X86-AVX512-NEXT:    vzeroupper
; X86-AVX512-NEXT:    retl
;
; X64-AVX1-LABEL: fallback_broadcast_v4i64_to_v8i64:
; X64-AVX1:       # %bb.0: # %entry
; X64-AVX1-NEXT:    vmovdqa {{.*#+}} ymm3 = [1,2,3,4]
; X64-AVX1-NEXT:    vpaddq %xmm3, %xmm0, %xmm4
; X64-AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm0
; X64-AVX1-NEXT:    vpmovsxbq {{.*#+}} xmm5 = [3,4]
; X64-AVX1-NEXT:    vpaddq %xmm5, %xmm0, %xmm0
; X64-AVX1-NEXT:    vextractf128 $1, %ymm2, %xmm6
; X64-AVX1-NEXT:    vpaddq %xmm5, %xmm6, %xmm6
; X64-AVX1-NEXT:    vpaddq %xmm3, %xmm2, %xmm2
; X64-AVX1-NEXT:    vinsertf128 $1, %xmm6, %ymm2, %ymm2
; X64-AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm6
; X64-AVX1-NEXT:    vpaddq %xmm5, %xmm6, %xmm5
; X64-AVX1-NEXT:    vpaddq %xmm3, %xmm1, %xmm1
; X64-AVX1-NEXT:    vinsertf128 $1, %xmm5, %ymm1, %ymm1
; X64-AVX1-NEXT:    vandps %ymm3, %ymm1, %ymm1
; X64-AVX1-NEXT:    vandps %ymm3, %ymm2, %ymm2
; X64-AVX1-NEXT:    vmovdqu %xmm0, ga4+16(%rip)
; X64-AVX1-NEXT:    vmovdqu %xmm4, ga4(%rip)
; X64-AVX1-NEXT:    vmovups %ymm2, gb4+32(%rip)
; X64-AVX1-NEXT:    vmovups %ymm1, gb4(%rip)
; X64-AVX1-NEXT:    vzeroupper
; X64-AVX1-NEXT:    retq
;
; X64-AVX2-LABEL: fallback_broadcast_v4i64_to_v8i64:
; X64-AVX2:       # %bb.0: # %entry
; X64-AVX2-NEXT:    vpmovsxbq {{.*#+}} ymm3 = [1,2,3,4]
; X64-AVX2-NEXT:    vpaddq %ymm3, %ymm0, %ymm0
; X64-AVX2-NEXT:    vpaddq %ymm3, %ymm2, %ymm2
; X64-AVX2-NEXT:    vpaddq %ymm3, %ymm1, %ymm1
; X64-AVX2-NEXT:    vpand %ymm3, %ymm1, %ymm1
; X64-AVX2-NEXT:    vpand %ymm3, %ymm2, %ymm2
; X64-AVX2-NEXT:    vmovdqu %ymm0, ga4(%rip)
; X64-AVX2-NEXT:    vmovdqu %ymm2, gb4+32(%rip)
; X64-AVX2-NEXT:    vmovdqu %ymm1, gb4(%rip)
; X64-AVX2-NEXT:    vzeroupper
; X64-AVX2-NEXT:    retq
;
; X64-AVX512-LABEL: fallback_broadcast_v4i64_to_v8i64:
; X64-AVX512:       # %bb.0: # %entry
; X64-AVX512-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [1,2,3,4,1,2,3,4]
; X64-AVX512-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    vpaddq %ymm2, %ymm0, %ymm0
; X64-AVX512-NEXT:    vpaddq %zmm2, %zmm1, %zmm1
; X64-AVX512-NEXT:    vpandq %zmm2, %zmm1, %zmm1
; X64-AVX512-NEXT:    vmovdqu %ymm0, ga4(%rip)
; X64-AVX512-NEXT:    vmovdqu64 %zmm1, gb4(%rip)
; X64-AVX512-NEXT:    vzeroupper
; X64-AVX512-NEXT:    retq
entry:
%0 = add <4 x i64> %a, <i64 1, i64 2, i64 3, i64 4>
%1 = add <8 x i64> %b, <i64 1, i64 2, i64 3, i64 4, i64 1, i64 2, i64 3, i64 4>
%2 = and <8 x i64> %1, <i64 1, i64 2, i64 3, i64 4, i64 1, i64 2, i64 3, i64 4>
store <4 x i64> %0, ptr @ga4, align 8
store <8 x i64> %2, ptr @gb4, align 8
ret void
}
|  |  | 
|  |  | 
; 256-bit and 512-bit floating-point store destinations for the v4f64/v8f64
; fallback test below (align 8, so the stores are unaligned).
@ga2 = dso_local global <4 x double> zeroinitializer, align 8
@gb2 = dso_local global <8 x double> zeroinitializer, align 8
|  |  | 
; Floating-point version of the fallback test: the <1.0,2.0,3.0,4.0> constant
; is used at both <4 x double> and <8 x double> widths. AVX512 should
; materialize it once with vbroadcastf64x4 and reuse it for the ymm fadd and
; the zmm fadd/fdiv.
define dso_local void @fallback_broadcast_v4f64_to_v8f64(<4 x double> %a, <8 x double> %b) {
; X86-AVX-LABEL: fallback_broadcast_v4f64_to_v8f64:
; X86-AVX:       # %bb.0: # %entry
; X86-AVX-NEXT:    vmovapd {{.*#+}} ymm3 = [1.0E+0,2.0E+0,3.0E+0,4.0E+0]
; X86-AVX-NEXT:    vaddpd %ymm3, %ymm0, %ymm0
; X86-AVX-NEXT:    vaddpd %ymm3, %ymm2, %ymm2
; X86-AVX-NEXT:    vaddpd %ymm3, %ymm1, %ymm1
; X86-AVX-NEXT:    vdivpd %ymm3, %ymm1, %ymm1
; X86-AVX-NEXT:    vdivpd %ymm3, %ymm2, %ymm2
; X86-AVX-NEXT:    vmovupd %ymm0, ga2
; X86-AVX-NEXT:    vmovupd %ymm2, gb2+32
; X86-AVX-NEXT:    vmovupd %ymm1, gb2
; X86-AVX-NEXT:    vzeroupper
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: fallback_broadcast_v4f64_to_v8f64:
; X86-AVX512:       # %bb.0: # %entry
; X86-AVX512-NEXT:    vbroadcastf64x4 {{.*#+}} zmm2 = [1.0E+0,2.0E+0,3.0E+0,4.0E+0,1.0E+0,2.0E+0,3.0E+0,4.0E+0]
; X86-AVX512-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    vaddpd %ymm2, %ymm0, %ymm0
; X86-AVX512-NEXT:    vaddpd %zmm2, %zmm1, %zmm1
; X86-AVX512-NEXT:    vdivpd %zmm2, %zmm1, %zmm1
; X86-AVX512-NEXT:    vmovupd %ymm0, ga2
; X86-AVX512-NEXT:    vmovupd %zmm1, gb2
; X86-AVX512-NEXT:    vzeroupper
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: fallback_broadcast_v4f64_to_v8f64:
; X64-AVX:       # %bb.0: # %entry
; X64-AVX-NEXT:    vmovapd {{.*#+}} ymm3 = [1.0E+0,2.0E+0,3.0E+0,4.0E+0]
; X64-AVX-NEXT:    vaddpd %ymm3, %ymm0, %ymm0
; X64-AVX-NEXT:    vaddpd %ymm3, %ymm2, %ymm2
; X64-AVX-NEXT:    vaddpd %ymm3, %ymm1, %ymm1
; X64-AVX-NEXT:    vdivpd %ymm3, %ymm1, %ymm1
; X64-AVX-NEXT:    vdivpd %ymm3, %ymm2, %ymm2
; X64-AVX-NEXT:    vmovupd %ymm0, ga2(%rip)
; X64-AVX-NEXT:    vmovupd %ymm2, gb2+32(%rip)
; X64-AVX-NEXT:    vmovupd %ymm1, gb2(%rip)
; X64-AVX-NEXT:    vzeroupper
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: fallback_broadcast_v4f64_to_v8f64:
; X64-AVX512:       # %bb.0: # %entry
; X64-AVX512-NEXT:    vbroadcastf64x4 {{.*#+}} zmm2 = [1.0E+0,2.0E+0,3.0E+0,4.0E+0,1.0E+0,2.0E+0,3.0E+0,4.0E+0]
; X64-AVX512-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    vaddpd %ymm2, %ymm0, %ymm0
; X64-AVX512-NEXT:    vaddpd %zmm2, %zmm1, %zmm1
; X64-AVX512-NEXT:    vdivpd %zmm2, %zmm1, %zmm1
; X64-AVX512-NEXT:    vmovupd %ymm0, ga2(%rip)
; X64-AVX512-NEXT:    vmovupd %zmm1, gb2(%rip)
; X64-AVX512-NEXT:    vzeroupper
; X64-AVX512-NEXT:    retq
entry:
%0 = fadd <4 x double> %a, <double 1.0, double 2.0, double 3.0, double 4.0>
%1 = fadd <8 x double> %b, <double 1.0, double 2.0, double 3.0, double 4.0, double 1.0, double 2.0, double 3.0, double 4.0>
%2 = fdiv <8 x double> %1, <double 1.0, double 2.0, double 3.0, double 4.0, double 1.0, double 2.0, double 3.0, double 4.0>
store <4 x double> %0, ptr @ga2, align 8
store <8 x double> %2, ptr @gb2, align 8
ret void
}
|  |  | 
; 128/256/512-bit store destinations for the three-width v4i32 fallback test
; below (align 8, so the wider stores are unaligned).
@ha4 = dso_local global <4 x i32> zeroinitializer, align 8
@hb4 = dso_local global <8 x i32> zeroinitializer, align 8
@hc4 = dso_local global <16 x i32> zeroinitializer, align 8
|  |  | 
; Three-width version: the <1,2,3,4> i32 splat is used at 128-bit, 256-bit and
; 512-bit widths. The checks expect it to be materialized once per target as a
; subvector broadcast (vbroadcastf128 / vbroadcasti128 / vbroadcasti32x4) and
; reused for every xmm/ymm/zmm add and and.
define dso_local void @fallback_broadcast_v4i32_v8i32_v16i32(<4 x i32> %a, <8 x i32> %b, <16 x i32> %c) nounwind {
; X86-AVX1-LABEL: fallback_broadcast_v4i32_v8i32_v16i32:
; X86-AVX1:       # %bb.0: # %entry
; X86-AVX1-NEXT:    pushl %ebp
; X86-AVX1-NEXT:    movl %esp, %ebp
; X86-AVX1-NEXT:    andl $-32, %esp
; X86-AVX1-NEXT:    subl $32, %esp
; X86-AVX1-NEXT:    vbroadcastf128 {{.*#+}} ymm3 = [1,2,3,4,1,2,3,4]
; X86-AVX1-NEXT:    # ymm3 = mem[0,1,0,1]
; X86-AVX1-NEXT:    vpaddd %xmm3, %xmm0, %xmm0
; X86-AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm4
; X86-AVX1-NEXT:    vpaddd %xmm3, %xmm4, %xmm4
; X86-AVX1-NEXT:    vpaddd %xmm3, %xmm1, %xmm1
; X86-AVX1-NEXT:    vinsertf128 $1, %xmm4, %ymm1, %ymm1
; X86-AVX1-NEXT:    vandps %ymm3, %ymm1, %ymm1
; X86-AVX1-NEXT:    vextractf128 $1, %ymm2, %xmm4
; X86-AVX1-NEXT:    vpaddd %xmm3, %xmm4, %xmm4
; X86-AVX1-NEXT:    vpaddd %xmm3, %xmm2, %xmm2
; X86-AVX1-NEXT:    vinsertf128 $1, %xmm4, %ymm2, %ymm2
; X86-AVX1-NEXT:    vpaddd 8(%ebp), %xmm3, %xmm4
; X86-AVX1-NEXT:    vpaddd 24(%ebp), %xmm3, %xmm5
; X86-AVX1-NEXT:    vinsertf128 $1, %xmm5, %ymm4, %ymm4
; X86-AVX1-NEXT:    vandps %ymm3, %ymm2, %ymm2
; X86-AVX1-NEXT:    vandps %ymm3, %ymm4, %ymm3
; X86-AVX1-NEXT:    vmovdqu %xmm0, ha4
; X86-AVX1-NEXT:    vmovups %ymm1, hb4
; X86-AVX1-NEXT:    vmovups %ymm3, hc4+32
; X86-AVX1-NEXT:    vmovups %ymm2, hc4
; X86-AVX1-NEXT:    movl %ebp, %esp
; X86-AVX1-NEXT:    popl %ebp
; X86-AVX1-NEXT:    vzeroupper
; X86-AVX1-NEXT:    retl
;
; X86-AVX2-LABEL: fallback_broadcast_v4i32_v8i32_v16i32:
; X86-AVX2:       # %bb.0: # %entry
; X86-AVX2-NEXT:    pushl %ebp
; X86-AVX2-NEXT:    movl %esp, %ebp
; X86-AVX2-NEXT:    andl $-32, %esp
; X86-AVX2-NEXT:    subl $32, %esp
; X86-AVX2-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [1,2,3,4,1,2,3,4]
; X86-AVX2-NEXT:    # ymm3 = mem[0,1,0,1]
; X86-AVX2-NEXT:    vpaddd %xmm3, %xmm0, %xmm0
; X86-AVX2-NEXT:    vpaddd %ymm3, %ymm1, %ymm1
; X86-AVX2-NEXT:    vpand %ymm3, %ymm1, %ymm1
; X86-AVX2-NEXT:    vpaddd %ymm3, %ymm2, %ymm2
; X86-AVX2-NEXT:    vpaddd 8(%ebp), %ymm3, %ymm4
; X86-AVX2-NEXT:    vpand %ymm3, %ymm2, %ymm2
; X86-AVX2-NEXT:    vpand %ymm3, %ymm4, %ymm3
; X86-AVX2-NEXT:    vmovdqu %xmm0, ha4
; X86-AVX2-NEXT:    vmovdqu %ymm1, hb4
; X86-AVX2-NEXT:    vmovdqu %ymm3, hc4+32
; X86-AVX2-NEXT:    vmovdqu %ymm2, hc4
; X86-AVX2-NEXT:    movl %ebp, %esp
; X86-AVX2-NEXT:    popl %ebp
; X86-AVX2-NEXT:    vzeroupper
; X86-AVX2-NEXT:    retl
;
; X86-AVX512-LABEL: fallback_broadcast_v4i32_v8i32_v16i32:
; X86-AVX512:       # %bb.0: # %entry
; X86-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [1,2,3,4,1,2,3,4,1,2,3,4,1,2,3,4]
; X86-AVX512-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    vpaddd %xmm3, %xmm0, %xmm0
; X86-AVX512-NEXT:    vpaddd %ymm3, %ymm1, %ymm1
; X86-AVX512-NEXT:    vpand %ymm3, %ymm1, %ymm1
; X86-AVX512-NEXT:    vpaddd %zmm3, %zmm2, %zmm2
; X86-AVX512-NEXT:    vpandd %zmm3, %zmm2, %zmm2
; X86-AVX512-NEXT:    vmovdqu %xmm0, ha4
; X86-AVX512-NEXT:    vmovdqu %ymm1, hb4
; X86-AVX512-NEXT:    vmovdqu64 %zmm2, hc4
; X86-AVX512-NEXT:    vzeroupper
; X86-AVX512-NEXT:    retl
;
; X64-AVX1-LABEL: fallback_broadcast_v4i32_v8i32_v16i32:
; X64-AVX1:       # %bb.0: # %entry
; X64-AVX1-NEXT:    vbroadcastf128 {{.*#+}} ymm4 = [1,2,3,4,1,2,3,4]
; X64-AVX1-NEXT:    # ymm4 = mem[0,1,0,1]
; X64-AVX1-NEXT:    vpaddd %xmm4, %xmm0, %xmm0
; X64-AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm5
; X64-AVX1-NEXT:    vpaddd %xmm4, %xmm5, %xmm5
; X64-AVX1-NEXT:    vpaddd %xmm4, %xmm1, %xmm1
; X64-AVX1-NEXT:    vinsertf128 $1, %xmm5, %ymm1, %ymm1
; X64-AVX1-NEXT:    vandps %ymm4, %ymm1, %ymm1
; X64-AVX1-NEXT:    vextractf128 $1, %ymm3, %xmm5
; X64-AVX1-NEXT:    vpaddd %xmm4, %xmm5, %xmm5
; X64-AVX1-NEXT:    vpaddd %xmm4, %xmm3, %xmm3
; X64-AVX1-NEXT:    vinsertf128 $1, %xmm5, %ymm3, %ymm3
; X64-AVX1-NEXT:    vextractf128 $1, %ymm2, %xmm5
; X64-AVX1-NEXT:    vpaddd %xmm4, %xmm5, %xmm5
; X64-AVX1-NEXT:    vpaddd %xmm4, %xmm2, %xmm2
; X64-AVX1-NEXT:    vinsertf128 $1, %xmm5, %ymm2, %ymm2
; X64-AVX1-NEXT:    vandps %ymm4, %ymm2, %ymm2
; X64-AVX1-NEXT:    vandps %ymm4, %ymm3, %ymm3
; X64-AVX1-NEXT:    vmovdqu %xmm0, ha4(%rip)
; X64-AVX1-NEXT:    vmovups %ymm1, hb4(%rip)
; X64-AVX1-NEXT:    vmovups %ymm3, hc4+32(%rip)
; X64-AVX1-NEXT:    vmovups %ymm2, hc4(%rip)
; X64-AVX1-NEXT:    vzeroupper
; X64-AVX1-NEXT:    retq
;
; X64-AVX2-LABEL: fallback_broadcast_v4i32_v8i32_v16i32:
; X64-AVX2:       # %bb.0: # %entry
; X64-AVX2-NEXT:    vbroadcasti128 {{.*#+}} ymm4 = [1,2,3,4,1,2,3,4]
; X64-AVX2-NEXT:    # ymm4 = mem[0,1,0,1]
; X64-AVX2-NEXT:    vpaddd %xmm4, %xmm0, %xmm0
; X64-AVX2-NEXT:    vpaddd %ymm4, %ymm1, %ymm1
; X64-AVX2-NEXT:    vpand %ymm4, %ymm1, %ymm1
; X64-AVX2-NEXT:    vpaddd %ymm4, %ymm3, %ymm3
; X64-AVX2-NEXT:    vpaddd %ymm4, %ymm2, %ymm2
; X64-AVX2-NEXT:    vpand %ymm4, %ymm2, %ymm2
; X64-AVX2-NEXT:    vpand %ymm4, %ymm3, %ymm3
; X64-AVX2-NEXT:    vmovdqu %xmm0, ha4(%rip)
; X64-AVX2-NEXT:    vmovdqu %ymm1, hb4(%rip)
; X64-AVX2-NEXT:    vmovdqu %ymm3, hc4+32(%rip)
; X64-AVX2-NEXT:    vmovdqu %ymm2, hc4(%rip)
; X64-AVX2-NEXT:    vzeroupper
; X64-AVX2-NEXT:    retq
;
; X64-AVX512-LABEL: fallback_broadcast_v4i32_v8i32_v16i32:
; X64-AVX512:       # %bb.0: # %entry
; X64-AVX512-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [1,2,3,4,1,2,3,4,1,2,3,4,1,2,3,4]
; X64-AVX512-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    vpaddd %xmm3, %xmm0, %xmm0
; X64-AVX512-NEXT:    vpaddd %ymm3, %ymm1, %ymm1
; X64-AVX512-NEXT:    vpand %ymm3, %ymm1, %ymm1
; X64-AVX512-NEXT:    vpaddd %zmm3, %zmm2, %zmm2
; X64-AVX512-NEXT:    vpandd %zmm3, %zmm2, %zmm2
; X64-AVX512-NEXT:    vmovdqu %xmm0, ha4(%rip)
; X64-AVX512-NEXT:    vmovdqu %ymm1, hb4(%rip)
; X64-AVX512-NEXT:    vmovdqu64 %zmm2, hc4(%rip)
; X64-AVX512-NEXT:    vzeroupper
; X64-AVX512-NEXT:    retq
entry:
%0 = add <4 x i32> %a, <i32 1, i32 2, i32 3, i32 4>
%1 = add <8 x i32> %b, <i32 1, i32 2, i32 3, i32 4, i32 1, i32 2, i32 3, i32 4>
%2 = and <8 x i32> %1, <i32 1, i32 2, i32 3, i32 4, i32 1, i32 2, i32 3, i32 4>
%3 = add <16 x i32> %c, <i32 1, i32 2, i32 3, i32 4, i32 1, i32 2, i32 3, i32 4, i32 1, i32 2, i32 3, i32 4, i32 1, i32 2, i32 3, i32 4>
%4 = and <16 x i32> %3, <i32 1, i32 2, i32 3, i32 4, i32 1, i32 2, i32 3, i32 4, i32 1, i32 2, i32 3, i32 4, i32 1, i32 2, i32 3, i32 4>
store <4 x i32> %0, ptr @ha4, align 8
store <8 x i32> %2, ptr @hb4, align 8
store <16 x i32> %4, ptr @hc4, align 8
ret void
}
|  |  | 
|  | ; | 
|  | ; Subvector Broadcast from register | 
|  | ; | 
|  |  | 
; Broadcast a <2 x double> register value into both halves of a <4 x double>;
; expected to lower to a single vinsertf128 of xmm0 into its own ymm.
define <4 x double> @reg_broadcast_2f64_4f64(<2 x double> %a0) nounwind {
; X86-LABEL: reg_broadcast_2f64_4f64:
; X86:       # %bb.0:
; X86-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: reg_broadcast_2f64_4f64:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-NEXT:    retq
%1 = shufflevector <2 x double> %a0, <2 x double> undef, <4 x i32> <i32 0, i32 1, i32 0, i32 1>
ret <4 x double> %1
}
|  |  | 
; Broadcast a <2 x double> register value to all four 128-bit lanes of a
; <8 x double>. AVX512 does it in one vshuff64x2; plain AVX builds one ymm via
; vinsertf128 and copies it to the second return register.
define <8 x double> @reg_broadcast_2f64_8f64(<2 x double> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_2f64_8f64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_2f64_8f64:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X86-AVX512-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_2f64_8f64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_2f64_8f64:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X64-AVX512-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X64-AVX512-NEXT:    retq
%1 = shufflevector <2 x double> %a0, <2 x double> undef, <8 x i32> <i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1>
ret <8 x double> %1
}
|  |  | 
; Broadcast a <4 x double> register value into both halves of a <8 x double>.
; AVX512 uses vinsertf64x4 into a zmm; plain AVX just duplicates ymm0 into the
; second return register.
define <8 x double> @reg_broadcast_4f64_8f64(<4 x double> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_4f64_8f64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_4f64_8f64:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X86-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_4f64_8f64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_4f64_8f64:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X64-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X64-AVX512-NEXT:    retq
%1 = shufflevector <4 x double> %a0, <4 x double> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x double> %1
}
|  |  | 
; Integer analogue of reg_broadcast_2f64_4f64: broadcast a <2 x i64> register
; into both halves of a <4 x i64> via a single vinsertf128.
define <4 x i64> @reg_broadcast_2i64_4i64(<2 x i64> %a0) nounwind {
; X86-LABEL: reg_broadcast_2i64_4i64:
; X86:       # %bb.0:
; X86-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: reg_broadcast_2i64_4i64:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-NEXT:    retq
%1 = shufflevector <2 x i64> %a0, <2 x i64> undef, <4 x i32> <i32 0, i32 1, i32 0, i32 1>
ret <4 x i64> %1
}
|  |  | 
; Broadcast a <2 x i64> register value to all four 128-bit lanes of a
; <8 x i64>. AVX512 uses one vshufi64x2; plain AVX builds one ymm and copies
; it to the second return register.
define <8 x i64> @reg_broadcast_2i64_8i64(<2 x i64> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_2i64_8i64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_2i64_8i64:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X86-AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_2i64_8i64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_2i64_8i64:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X64-AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X64-AVX512-NEXT:    retq
%1 = shufflevector <2 x i64> %a0, <2 x i64> undef, <8 x i32> <i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1>
ret <8 x i64> %1
}
|  |  | 
; Broadcast a <4 x i64> register value into both halves of a <8 x i64>.
; AVX512 uses vinsertf64x4; plain AVX duplicates ymm0 into ymm1.
define <8 x i64> @reg_broadcast_4i64_8i64(<4 x i64> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_4i64_8i64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_4i64_8i64:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X86-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_4i64_8i64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_4i64_8i64:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X64-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X64-AVX512-NEXT:    retq
%1 = shufflevector <4 x i64> %a0, <4 x i64> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x i64> %1
}
|  |  | 
; Broadcast a <4 x float> register value into both halves of a <8 x float>
; via a single vinsertf128.
define <8 x float> @reg_broadcast_4f32_8f32(<4 x float> %a0) nounwind {
; X86-LABEL: reg_broadcast_4f32_8f32:
; X86:       # %bb.0:
; X86-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: reg_broadcast_4f32_8f32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-NEXT:    retq
%1 = shufflevector <4 x float> %a0, <4 x float> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x float> %1
}
|  |  | 
; Broadcast a <4 x float> register value to all four 128-bit lanes of a
; <16 x float>. AVX512 uses one vshuff64x2; plain AVX builds one ymm and
; copies it to the second return register.
define <16 x float> @reg_broadcast_4f32_16f32(<4 x float> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_4f32_16f32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_4f32_16f32:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X86-AVX512-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_4f32_16f32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_4f32_16f32:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X64-AVX512-NEXT:    vshuff64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X64-AVX512-NEXT:    retq
%1 = shufflevector <4 x float> %a0, <4 x float> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <16 x float> %1
}
|  |  | 
; Splat a <8 x float> register into both 256-bit halves of a <16 x float>.
; AVX/AVX2 just duplicate ymm0 into ymm1 (split return); AVX512 uses vinsertf64x4.
define <16 x float> @reg_broadcast_8f32_16f32(<8 x float> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_8f32_16f32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_8f32_16f32:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X86-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_8f32_16f32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_8f32_16f32:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X64-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X64-AVX512-NEXT:    retq
%1 = shufflevector <8 x float> %a0, <8 x float> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
ret <16 x float> %1
}
|  |  | 
; Integer variant of reg_broadcast_4f32_8f32: splat a <4 x i32> register into
; both 128-bit lanes of a <8 x i32>. Lowers to the same vinsertf128 pattern
; (the FP-domain insert is used on all tested subtargets).
define <8 x i32> @reg_broadcast_4i32_8i32(<4 x i32> %a0) nounwind {
; X86-LABEL: reg_broadcast_4i32_8i32:
; X86:       # %bb.0:
; X86-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: reg_broadcast_4i32_8i32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-NEXT:    retq
%1 = shufflevector <4 x i32> %a0, <4 x i32> undef, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <8 x i32> %1
}
|  |  | 
; Splat a <4 x i32> register into all four 128-bit lanes of a <16 x i32>.
; AVX/AVX2 materialize one ymm and copy it; AVX512 uses the integer-domain
; vshufi64x2 lane shuffle.
define <16 x i32> @reg_broadcast_4i32_16i32(<4 x i32> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_4i32_16i32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_4i32_16i32:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X86-AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_4i32_16i32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_4i32_16i32:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X64-AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X64-AVX512-NEXT:    retq
%1 = shufflevector <4 x i32> %a0, <4 x i32> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
ret <16 x i32> %1
}
|  |  | 
; Splat a <8 x i32> register into both 256-bit halves of a <16 x i32>.
; AVX/AVX2 duplicate ymm0 into ymm1; AVX512 uses vinsertf64x4 on zmm0.
define <16 x i32> @reg_broadcast_8i32_16i32(<8 x i32> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_8i32_16i32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_8i32_16i32:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X86-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_8i32_16i32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_8i32_16i32:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X64-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X64-AVX512-NEXT:    retq
%1 = shufflevector <8 x i32> %a0, <8 x i32> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
ret <16 x i32> %1
}
|  |  | 
; Splat a <8 x i16> register into both 128-bit lanes of a <16 x i16>;
; same single-vinsertf128 lowering as the f32/i32 cases above.
define <16 x i16> @reg_broadcast_8i16_16i16(<8 x i16> %a0) nounwind {
; X86-LABEL: reg_broadcast_8i16_16i16:
; X86:       # %bb.0:
; X86-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: reg_broadcast_8i16_16i16:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-NEXT:    retq
%1 = shufflevector <8 x i16> %a0, <8 x i16> undef, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
ret <16 x i16> %1
}
|  |  | 
; Splat a <8 x i16> register into all four 128-bit lanes of a <32 x i16>.
; AVX/AVX2 build one ymm and copy it; AVX512 uses vshufi64x2 on zmm0.
define <32 x i16> @reg_broadcast_8i16_32i16(<8 x i16> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_8i16_32i16:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_8i16_32i16:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X86-AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_8i16_32i16:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_8i16_32i16:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X64-AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X64-AVX512-NEXT:    retq
%1 = shufflevector <8 x i16> %a0, <8 x i16> undef, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
ret <32 x i16> %1
}
|  |  | 
; Splat a <16 x i16> register into both 256-bit halves of a <32 x i16>.
; AVX/AVX2 duplicate ymm0 into ymm1; AVX512 uses vinsertf64x4 on zmm0.
define <32 x i16> @reg_broadcast_16i16_32i16(<16 x i16> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_16i16_32i16:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_16i16_32i16:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X86-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_16i16_32i16:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_16i16_32i16:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X64-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X64-AVX512-NEXT:    retq
%1 = shufflevector <16 x i16> %a0, <16 x i16> undef, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
ret <32 x i16> %1
}
|  |  | 
; Splat a <16 x i8> register into both 128-bit lanes of a <32 x i8>;
; same single-vinsertf128 lowering as the other 128->256 cases.
define <32 x i8> @reg_broadcast_16i8_32i8(<16 x i8> %a0) nounwind {
; X86-LABEL: reg_broadcast_16i8_32i8:
; X86:       # %bb.0:
; X86-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: reg_broadcast_16i8_32i8:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-NEXT:    retq
%1 = shufflevector <16 x i8> %a0, <16 x i8> undef, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
ret <32 x i8> %1
}
|  |  | 
; Splat a <16 x i8> register into all four 128-bit lanes of a <64 x i8>.
; AVX/AVX2 build one ymm and copy it; AVX512 uses vshufi64x2 on zmm0.
define <64 x i8> @reg_broadcast_16i8_64i8(<16 x i8> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_16i8_64i8:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X86-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_16i8_64i8:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X86-AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_16i8_64i8:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    # kill: def $xmm0 killed $xmm0 def $ymm0
; X64-AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_16i8_64i8:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $xmm0 killed $xmm0 def $zmm0
; X64-AVX512-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
; X64-AVX512-NEXT:    retq
%1 = shufflevector <16 x i8> %a0, <16 x i8> undef, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
ret <64 x i8> %1
}
|  |  | 
; Splat a <32 x i8> register into both 256-bit halves of a <64 x i8>.
; AVX/AVX2 duplicate ymm0 into ymm1; AVX512 uses vinsertf64x4 on zmm0.
define <64 x i8> @reg_broadcast_32i8_64i8(<32 x i8> %a0) nounwind {
; X86-AVX-LABEL: reg_broadcast_32i8_64i8:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: reg_broadcast_32i8_64i8:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X86-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: reg_broadcast_32i8_64i8:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: reg_broadcast_32i8_64i8:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    # kill: def $ymm0 killed $ymm0 def $zmm0
; X64-AVX512-NEXT:    vinsertf64x4 $1, %ymm0, %zmm0, %zmm0
; X64-AVX512-NEXT:    retq
%1 = shufflevector <32 x i8> %a0, <32 x i8> undef, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
ret <64 x i8> %1
}
|  |  | 
|  | ; | 
|  | ; PR34394 | 
|  | ; | 
|  |  | 
; PR34394: broadcasting a loaded <2 x i32> (a 64-bit chunk) to <4 x i32> should
; fold into a single vmovddup from memory rather than a load + shuffle.
define <4 x i32> @test_2xi32_to_4xi32_mem(ptr %vp) {
; X86-LABEL: test_2xi32_to_4xi32_mem:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    vmovddup {{.*#+}} xmm0 = mem[0,0]
; X86-NEXT:    retl
;
; X64-LABEL: test_2xi32_to_4xi32_mem:
; X64:       # %bb.0:
; X64-NEXT:    vmovddup {{.*#+}} xmm0 = mem[0,0]
; X64-NEXT:    retq
%vec = load <2 x i32>, ptr %vp
%res = shufflevector <2 x i32> %vec, <2 x i32> undef, <4 x i32> <i32 0, i32 1, i32 0, i32 1>
ret <4 x i32> %res
}
|  |  | 
; PR34394: broadcasting a loaded <2 x i32> to <8 x i32> should fold into a
; single 64-bit vbroadcastsd from memory.
define <8 x i32> @test_2xi32_to_8xi32_mem(ptr %vp) {
; X86-LABEL: test_2xi32_to_8xi32_mem:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    vbroadcastsd (%eax), %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: test_2xi32_to_8xi32_mem:
; X64:       # %bb.0:
; X64-NEXT:    vbroadcastsd (%rdi), %ymm0
; X64-NEXT:    retq
%vec = load <2 x i32>, ptr %vp
%res = shufflevector <2 x i32> %vec, <2 x i32> undef, <8 x i32> <i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1>
ret <8 x i32> %res
}
|  |  | 
; PR34394: broadcasting a loaded <2 x i32> to <16 x i32>. AVX/AVX2 use a ymm
; vbroadcastsd plus a register copy for the upper half; AVX512 gets a single
; zmm vbroadcastsd.
define <16 x i32> @test_2xi32_to_16xi32_mem(ptr %vp) {
; X86-AVX-LABEL: test_2xi32_to_16xi32_mem:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastsd (%eax), %ymm0
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: test_2xi32_to_16xi32_mem:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcastsd (%eax), %zmm0
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: test_2xi32_to_16xi32_mem:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastsd (%rdi), %ymm0
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: test_2xi32_to_16xi32_mem:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcastsd (%rdi), %zmm0
; X64-AVX512-NEXT:    retq
%vec = load <2 x i32>, ptr %vp
%res = shufflevector <2 x i32> %vec, <2 x i32> undef, <16 x i32> <i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1, i32 0, i32 1>
ret <16 x i32> %res
}
|  |  | 
|  | ; | 
|  | ; PR34041 | 
|  | ; | 
|  |  | 
; PR34041: a scalar load inserted at element 0 and shuffled with mask
; <1,0,0,0> (lane 0 of the result reads the undef element 1) should still
; lower to a plain vbroadcastsd from memory.
define <4 x double> @broadcast_v4f64_f64_u000(ptr %p) {
; X86-LABEL: broadcast_v4f64_f64_u000:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    vbroadcastsd (%eax), %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: broadcast_v4f64_f64_u000:
; X64:       # %bb.0:
; X64-NEXT:    vbroadcastsd (%rdi), %ymm0
; X64-NEXT:    retq
%s = load double, ptr %p
%vec = insertelement <2 x double> undef, double %s, i32 0
%res = shufflevector <2 x double> %vec, <2 x double> undef, <4 x i32> <i32 1, i32 0, i32 0, i32 0>
ret <4 x double> %res
}
|  |  | 
; Partial broadcast merged with a passthrough: only result elements 1 and 3
; come from the loaded <2 x double> (via the select mask), the rest keep
; %default. Expected lowering is vinsertf128 of the load plus a vblendps.
define <4 x double> @broadcast_v4f64_v2f64_4u61(ptr %vp, <4 x double> %default) {
; X86-LABEL: broadcast_v4f64_v2f64_4u61:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    vinsertf128 $1, (%eax), %ymm0, %ymm1
; X86-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; X86-NEXT:    retl
;
; X64-LABEL: broadcast_v4f64_v2f64_4u61:
; X64:       # %bb.0:
; X64-NEXT:    vinsertf128 $1, (%rdi), %ymm0, %ymm1
; X64-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
; X64-NEXT:    retq
%vec = load <2 x double>, ptr %vp
%shuf = shufflevector <2 x double> %vec, <2 x double> undef, <4 x i32> <i32 0, i32 3, i32 undef, i32 1>
%res = select <4 x i1> <i1 0, i1 1, i1 0, i1 1>, <4 x double> %shuf, <4 x double> %default
ret <4 x double> %res
}
|  |  | 
|  | ; TODO: prefer vblend vs vunpckh on AVX1 targets | 
; Broadcast a loaded <2 x float> pair and merge element 6 from %default.
; AVX2/AVX512 lower the merge to vblendps; AVX1 currently picks vunpckhpd
; instead (see the TODO above this function).
define <8 x float> @broadcast_v8f32_v2f32_u1uu0uEu(ptr %vp, <8 x float> %default) {
; X86-AVX1-LABEL: broadcast_v8f32_v2f32_u1uu0uEu:
; X86-AVX1:       # %bb.0:
; X86-AVX1-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX1-NEXT:    vbroadcastsd (%eax), %ymm1
; X86-AVX1-NEXT:    vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; X86-AVX1-NEXT:    retl
;
; X86-AVX2-LABEL: broadcast_v8f32_v2f32_u1uu0uEu:
; X86-AVX2:       # %bb.0:
; X86-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX2-NEXT:    vbroadcastsd (%eax), %ymm1
; X86-AVX2-NEXT:    vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; X86-AVX2-NEXT:    retl
;
; X86-AVX512-LABEL: broadcast_v8f32_v2f32_u1uu0uEu:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcastsd (%eax), %ymm1
; X86-AVX512-NEXT:    vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; X86-AVX512-NEXT:    retl
;
; X64-AVX1-LABEL: broadcast_v8f32_v2f32_u1uu0uEu:
; X64-AVX1:       # %bb.0:
; X64-AVX1-NEXT:    vbroadcastsd (%rdi), %ymm1
; X64-AVX1-NEXT:    vunpckhpd {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
; X64-AVX1-NEXT:    retq
;
; X64-AVX2-LABEL: broadcast_v8f32_v2f32_u1uu0uEu:
; X64-AVX2:       # %bb.0:
; X64-AVX2-NEXT:    vbroadcastsd (%rdi), %ymm1
; X64-AVX2-NEXT:    vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; X64-AVX2-NEXT:    retq
;
; X64-AVX512-LABEL: broadcast_v8f32_v2f32_u1uu0uEu:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcastsd (%rdi), %ymm1
; X64-AVX512-NEXT:    vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; X64-AVX512-NEXT:    retq
%vec = load <2 x float>, ptr %vp
%shuf = shufflevector <2 x float> %vec, <2 x float> undef, <8 x i32> <i32 undef, i32 1, i32 undef, i32 undef, i32 0, i32 2, i32 3, i32 undef>
%res = select <8 x i1> <i1 1, i1 1, i1 1, i1 1, i1 1, i1 1, i1 0, i1 1>, <8 x float> %shuf, <8 x float> %default
ret <8 x float> %res
}
|  |  | 
; Broadcast of a loaded <2 x double> to <8 x double> where some result lanes
; are undef (mask element 0 even reads out-of-range index 3); the undefs must
; not block recognizing a full 128-bit subvector broadcast.
define <8 x double> @broadcast_v8f64_v2f64_u1u10101(ptr %vp) {
; X86-AVX-LABEL: broadcast_v8f64_v2f64_u1u10101:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: broadcast_v8f64_v2f64_u1u10101:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcastf32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: broadcast_v8f64_v2f64_u1u10101:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: broadcast_v8f64_v2f64_u1u10101:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcastf32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
%vec = load <2 x double>, ptr %vp
%res = shufflevector <2 x double> %vec, <2 x double> undef, <8 x i32> <i32 3, i32 1, i32 undef, i32 1, i32 0, i32 1, i32 0, i32 1>
ret <8 x double> %res
}
|  |  | 
; As above but with undef lanes in positions 1-3: the shuffle should still be
; matched as a 128-bit subvector broadcast (vbroadcastf128 / vbroadcastf32x4).
define <8 x double> @broadcast_v8f64_v2f64_0uuu0101(ptr %vp) {
; X86-AVX-LABEL: broadcast_v8f64_v2f64_0uuu0101:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X86-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X86-AVX-NEXT:    retl
;
; X86-AVX512-LABEL: broadcast_v8f64_v2f64_0uuu0101:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-AVX512-NEXT:    vbroadcastf32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X86-AVX512-NEXT:    retl
;
; X64-AVX-LABEL: broadcast_v8f64_v2f64_0uuu0101:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
; X64-AVX-NEXT:    vmovaps %ymm0, %ymm1
; X64-AVX-NEXT:    retq
;
; X64-AVX512-LABEL: broadcast_v8f64_v2f64_0uuu0101:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vbroadcastf32x4 {{.*#+}} zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; X64-AVX512-NEXT:    retq
%vec = load <2 x double>, ptr %vp
%res = shufflevector <2 x double> %vec, <2 x double> undef, <8 x i32> <i32 0, i32 undef, i32 undef, i32 undef, i32 0, i32 1, i32 0, i32 1>
ret <8 x double> %res
}
|  |  | 
; PR51226 regression test: zext <4 x i16> -> shl 16 -> bitcast to <4 x float>,
; then a 128->256-bit splat feeding an fcmp/select min-with-zero that is
; stored. The expected code keeps the shift + insert + vminps sequence intact
; (note the select operand order: ogt against zero selecting the splat keeps
; the min semantics).
define void @PR51226() {
; X86-AVX1-LABEL: PR51226:
; X86-AVX1:       # %bb.0:
; X86-AVX1-NEXT:    vpmovzxwd {{.*#+}} xmm0 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
; X86-AVX1-NEXT:    vpslld $16, %xmm0, %xmm0
; X86-AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X86-AVX1-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X86-AVX1-NEXT:    vminps %ymm1, %ymm0, %ymm0
; X86-AVX1-NEXT:    vmovups %ymm0, (%eax)
; X86-AVX1-NEXT:    vzeroupper
; X86-AVX1-NEXT:    retl
;
; X86-AVX2-LABEL: PR51226:
; X86-AVX2:       # %bb.0:
; X86-AVX2-NEXT:    vpmovzxwd {{.*#+}} xmm0 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
; X86-AVX2-NEXT:    vpslld $16, %xmm0, %xmm0
; X86-AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; X86-AVX2-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X86-AVX2-NEXT:    vminps %ymm1, %ymm0, %ymm0
; X86-AVX2-NEXT:    vmovups %ymm0, (%eax)
; X86-AVX2-NEXT:    vzeroupper
; X86-AVX2-NEXT:    retl
;
; X86-AVX512-LABEL: PR51226:
; X86-AVX512:       # %bb.0:
; X86-AVX512-NEXT:    vpmovzxwd {{.*#+}} xmm0 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
; X86-AVX512-NEXT:    vpslld $16, %xmm0, %xmm0
; X86-AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; X86-AVX512-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X86-AVX512-NEXT:    vminps %ymm1, %ymm0, %ymm0
; X86-AVX512-NEXT:    vmovups %ymm0, (%eax)
; X86-AVX512-NEXT:    vzeroupper
; X86-AVX512-NEXT:    retl
;
; X64-AVX1-LABEL: PR51226:
; X64-AVX1:       # %bb.0:
; X64-AVX1-NEXT:    vpmovzxwd {{.*#+}} xmm0 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
; X64-AVX1-NEXT:    vpslld $16, %xmm0, %xmm0
; X64-AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm0, %ymm0
; X64-AVX1-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X64-AVX1-NEXT:    vminps %ymm1, %ymm0, %ymm0
; X64-AVX1-NEXT:    vmovups %ymm0, (%rax)
; X64-AVX1-NEXT:    vzeroupper
; X64-AVX1-NEXT:    retq
;
; X64-AVX2-LABEL: PR51226:
; X64-AVX2:       # %bb.0:
; X64-AVX2-NEXT:    vpmovzxwd {{.*#+}} xmm0 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
; X64-AVX2-NEXT:    vpslld $16, %xmm0, %xmm0
; X64-AVX2-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; X64-AVX2-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X64-AVX2-NEXT:    vminps %ymm1, %ymm0, %ymm0
; X64-AVX2-NEXT:    vmovups %ymm0, (%rax)
; X64-AVX2-NEXT:    vzeroupper
; X64-AVX2-NEXT:    retq
;
; X64-AVX512-LABEL: PR51226:
; X64-AVX512:       # %bb.0:
; X64-AVX512-NEXT:    vpmovzxwd {{.*#+}} xmm0 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero
; X64-AVX512-NEXT:    vpslld $16, %xmm0, %xmm0
; X64-AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
; X64-AVX512-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; X64-AVX512-NEXT:    vminps %ymm1, %ymm0, %ymm0
; X64-AVX512-NEXT:    vmovups %ymm0, (%rax)
; X64-AVX512-NEXT:    vzeroupper
; X64-AVX512-NEXT:    retq
%i = load <4 x i16>, ptr undef, align 8
%i1 = zext <4 x i16> %i to <4 x i32>
%i2 = shl nuw <4 x i32> %i1, <i32 16, i32 16, i32 16, i32 16>
%i3 = bitcast <4 x i32> %i2 to <4 x float>
%shuffle99 = shufflevector <4 x float> %i3, <4 x float> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 0, i32 1, i32 2, i32 3>
%i4 = fcmp reassoc nsz contract ogt <8 x float> zeroinitializer, %shuffle99
%i5 = select <8 x i1> %i4, <8 x float> %shuffle99, <8 x float> zeroinitializer
store <8 x float> %i5, ptr undef, align 16
ret void
}