; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -verify-machineinstrs -mcpu=alderlake -mattr=+false-deps-perm -mtriple=x86_64-unknown-unknown < %s | FileCheck %s --check-prefixes=ENABLE,ENABLE-ADL
; RUN: llc -verify-machineinstrs -mcpu=sapphirerapids -mattr=+false-deps-perm -mtriple=x86_64-unknown-unknown < %s | FileCheck %s --check-prefixes=ENABLE,ENABLE-SPR
; RUN: llc -verify-machineinstrs -mcpu=alderlake -mattr=-false-deps-perm -mtriple=x86_64-unknown-unknown < %s | FileCheck %s --check-prefixes=DISABLE,DISABLE-ADL
; RUN: llc -verify-machineinstrs -mcpu=sapphirerapids -mattr=-false-deps-perm -mtriple=x86_64-unknown-unknown < %s | FileCheck %s --check-prefixes=DISABLE,DISABLE-SPR
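;
; With +false-deps-perm the false dependency on the undefined destination of
; VPERMD/VPERMQ/VPERMPS/VPERMPD is broken by zeroing that register (vxorps)
; before the permute; with -false-deps-perm no extra xor is emitted.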

define <8 x i32> @permd(<8 x i32> %a0, <8 x i32> %a1) {
; ENABLE-ADL-LABEL: permd:
; ENABLE-ADL:       # %bb.0:
; ENABLE-ADL-NEXT:    vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; ENABLE-ADL-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; ENABLE-ADL-NEXT:    #APP
; ENABLE-ADL-NEXT:    nop
; ENABLE-ADL-NEXT:    #NO_APP
; ENABLE-ADL-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; ENABLE-ADL-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; ENABLE-ADL-NEXT:    vxorps %xmm0, %xmm0, %xmm0
; ENABLE-ADL-NEXT:    vpermd %ymm2, %ymm1, %ymm0
; ENABLE-ADL-NEXT:    vpaddd %ymm1, %ymm2, %ymm1
; ENABLE-ADL-NEXT:    vpaddd %ymm1, %ymm0, %ymm0
; ENABLE-ADL-NEXT:    retq
;
; ENABLE-SPR-LABEL: permd:
; ENABLE-SPR:       # %bb.0:
; ENABLE-SPR-NEXT:    vmovdqa64 %ymm1, %ymm16
; ENABLE-SPR-NEXT:    vmovdqa64 %ymm0, %ymm17
; ENABLE-SPR-NEXT:    #APP
; ENABLE-SPR-NEXT:    nop
; ENABLE-SPR-NEXT:    #NO_APP
; ENABLE-SPR-NEXT:    vxorps %xmm0, %xmm0, %xmm0
; ENABLE-SPR-NEXT:    vpermd %ymm17, %ymm16, %ymm0
; ENABLE-SPR-NEXT:    vpaddd %ymm16, %ymm17, %ymm1
; ENABLE-SPR-NEXT:    vpaddd %ymm1, %ymm0, %ymm0
; ENABLE-SPR-NEXT:    retq
;
; DISABLE-ADL-LABEL: permd:
; DISABLE-ADL:       # %bb.0:
; DISABLE-ADL-NEXT:    vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; DISABLE-ADL-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; DISABLE-ADL-NEXT:    #APP
; DISABLE-ADL-NEXT:    nop
; DISABLE-ADL-NEXT:    #NO_APP
; DISABLE-ADL-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; DISABLE-ADL-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; DISABLE-ADL-NEXT:    vpermd %ymm2, %ymm1, %ymm0
; DISABLE-ADL-NEXT:    vpaddd %ymm1, %ymm2, %ymm1
; DISABLE-ADL-NEXT:    vpaddd %ymm1, %ymm0, %ymm0
; DISABLE-ADL-NEXT:    retq
;
; DISABLE-SPR-LABEL: permd:
; DISABLE-SPR:       # %bb.0:
; DISABLE-SPR-NEXT:    vmovdqa64 %ymm1, %ymm16
; DISABLE-SPR-NEXT:    vmovdqa64 %ymm0, %ymm17
; DISABLE-SPR-NEXT:    #APP
; DISABLE-SPR-NEXT:    nop
; DISABLE-SPR-NEXT:    #NO_APP
; DISABLE-SPR-NEXT:    vpermd %ymm17, %ymm16, %ymm0
; DISABLE-SPR-NEXT:    vpaddd %ymm16, %ymm17, %ymm1
; DISABLE-SPR-NEXT:    vpaddd %ymm1, %ymm0, %ymm0
; DISABLE-SPR-NEXT:    retq
  %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
  %2 = call <8 x i32> @llvm.x86.avx2.permd(<8 x i32> %a0, <8 x i32> %a1)
  %3 = add <8 x i32> %a0, %a1
  %res = add <8 x i32> %2, %3
  ret <8 x i32> %res
}

define <8 x i32> @permd_mem(ptr %p0, <8 x i32> %a1) {
; ENABLE-ADL-LABEL: permd_mem:
; ENABLE-ADL:       # %bb.0:
; ENABLE-ADL-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; ENABLE-ADL-NEXT:    #APP
; ENABLE-ADL-NEXT:    nop
; ENABLE-ADL-NEXT:    #NO_APP
; ENABLE-ADL-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; ENABLE-ADL-NEXT:    vxorps %xmm0, %xmm0, %xmm0
; ENABLE-ADL-NEXT:    vpermd (%rdi), %ymm1, %ymm0
; ENABLE-ADL-NEXT:    vpaddd %ymm1, %ymm0, %ymm0
; ENABLE-ADL-NEXT:    retq
;
; ENABLE-SPR-LABEL: permd_mem:
; ENABLE-SPR:       # %bb.0:
; ENABLE-SPR-NEXT:    vmovdqa64 %ymm0, %ymm16
; ENABLE-SPR-NEXT:    #APP
; ENABLE-SPR-NEXT:    nop
; ENABLE-SPR-NEXT:    #NO_APP
; ENABLE-SPR-NEXT:    vxorps %xmm0, %xmm0, %xmm0
; ENABLE-SPR-NEXT:    vpermd (%rdi), %ymm16, %ymm0
; ENABLE-SPR-NEXT:    vpaddd %ymm16, %ymm0, %ymm0
; ENABLE-SPR-NEXT:    retq
;
; DISABLE-ADL-LABEL: permd_mem:
; DISABLE-ADL:       # %bb.0:
; DISABLE-ADL-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; DISABLE-ADL-NEXT:    #APP
; DISABLE-ADL-NEXT:    nop
; DISABLE-ADL-NEXT:    #NO_APP
; DISABLE-ADL-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; DISABLE-ADL-NEXT:    vpermd (%rdi), %ymm1, %ymm0
; DISABLE-ADL-NEXT:    vpaddd %ymm1, %ymm0, %ymm0
; DISABLE-ADL-NEXT:    retq
;
; DISABLE-SPR-LABEL: permd_mem:
; DISABLE-SPR:       # %bb.0:
; DISABLE-SPR-NEXT:    vmovdqa64 %ymm0, %ymm16
; DISABLE-SPR-NEXT:    #APP
; DISABLE-SPR-NEXT:    nop
; DISABLE-SPR-NEXT:    #NO_APP
; DISABLE-SPR-NEXT:    vpermd (%rdi), %ymm16, %ymm0
; DISABLE-SPR-NEXT:    vpaddd %ymm16, %ymm0, %ymm0
; DISABLE-SPR-NEXT:    retq
  %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
  %a0 = load <8 x i32>, ptr %p0, align 64
  %2 = call <8 x i32> @llvm.x86.avx2.permd(<8 x i32> %a0, <8 x i32> %a1)
  %res = add <8 x i32> %2, %a1
  ret <8 x i32> %res
}

declare <8 x i32> @llvm.x86.avx2.permd(<8 x i32>, <8 x i32>) nounwind readonly

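; The immediate-controlled permutes below (vpermq/vpermpd) are checked the
; same way: ENABLE inserts a vxorps on the destination register, DISABLE does
; not.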
define <4 x i64> @permq(<4 x i64> %a0) {
; ENABLE-LABEL: permq:
; ENABLE:       # %bb.0:
; ENABLE-NEXT:    #APP
; ENABLE-NEXT:    nop
; ENABLE-NEXT:    #NO_APP
; ENABLE-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; ENABLE-NEXT:    vpermq {{.*#+}} ymm1 = ymm0[1,2,1,0]
; ENABLE-NEXT:    vpaddq %ymm0, %ymm1, %ymm0
; ENABLE-NEXT:    retq
;
; DISABLE-LABEL: permq:
; DISABLE:       # %bb.0:
; DISABLE-NEXT:    #APP
; DISABLE-NEXT:    nop
; DISABLE-NEXT:    #NO_APP
; DISABLE-NEXT:    vpermq {{.*#+}} ymm1 = ymm0[1,2,1,0]
; DISABLE-NEXT:    vpaddq %ymm0, %ymm1, %ymm0
; DISABLE-NEXT:    retq
  %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
  %2 = shufflevector <4 x i64> %a0, <4 x i64> undef, <4 x i32> <i32 1, i32 2, i32 1, i32 0>
  %res = add <4 x i64> %2, %a0
  ret <4 x i64> %res
}

define <4 x i64> @permq_mem(ptr %p0) {
; ENABLE-LABEL: permq_mem:
; ENABLE:       # %bb.0:
; ENABLE-NEXT:    #APP
; ENABLE-NEXT:    nop
; ENABLE-NEXT:    #NO_APP
; ENABLE-NEXT:    vxorps %xmm0, %xmm0, %xmm0
; ENABLE-NEXT:    vpermpd {{.*#+}} ymm0 = mem[1,2,1,0]
; ENABLE-NEXT:    retq
;
; DISABLE-LABEL: permq_mem:
; DISABLE:       # %bb.0:
; DISABLE-NEXT:    #APP
; DISABLE-NEXT:    nop
; DISABLE-NEXT:    #NO_APP
; DISABLE-NEXT:    vpermpd {{.*#+}} ymm0 = mem[1,2,1,0]
; DISABLE-NEXT:    retq
  %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
  %a0 = load <4 x i64>, ptr %p0, align 64
  %2 = shufflevector <4 x i64> %a0, <4 x i64> undef, <4 x i32> <i32 1, i32 2, i32 1, i32 0>
  ret <4 x i64> %2
}

define <8 x float> @permps(<8 x float> %a0, <8 x i32> %a1) {
; ENABLE-ADL-LABEL: permps:
; ENABLE-ADL:       # %bb.0:
; ENABLE-ADL-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; ENABLE-ADL-NEXT:    #APP
; ENABLE-ADL-NEXT:    nop
; ENABLE-ADL-NEXT:    #NO_APP
; ENABLE-ADL-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; ENABLE-ADL-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; ENABLE-ADL-NEXT:    vpermps %ymm2, %ymm0, %ymm1
; ENABLE-ADL-NEXT:    vcvtdq2ps %ymm0, %ymm0
; ENABLE-ADL-NEXT:    vaddps %ymm2, %ymm0, %ymm0
; ENABLE-ADL-NEXT:    vaddps %ymm0, %ymm1, %ymm0
; ENABLE-ADL-NEXT:    retq
;
; ENABLE-SPR-LABEL: permps:
; ENABLE-SPR:       # %bb.0:
; ENABLE-SPR-NEXT:    vmovaps %ymm0, %ymm16
; ENABLE-SPR-NEXT:    #APP
; ENABLE-SPR-NEXT:    nop
; ENABLE-SPR-NEXT:    #NO_APP
; ENABLE-SPR-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; ENABLE-SPR-NEXT:    vpermps %ymm16, %ymm0, %ymm1
; ENABLE-SPR-NEXT:    vcvtdq2ps %ymm0, %ymm0
; ENABLE-SPR-NEXT:    vaddps %ymm16, %ymm0, %ymm0
; ENABLE-SPR-NEXT:    vaddps %ymm0, %ymm1, %ymm0
; ENABLE-SPR-NEXT:    retq
;
; DISABLE-ADL-LABEL: permps:
; DISABLE-ADL:       # %bb.0:
; DISABLE-ADL-NEXT:    vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; DISABLE-ADL-NEXT:    #APP
; DISABLE-ADL-NEXT:    nop
; DISABLE-ADL-NEXT:    #NO_APP
; DISABLE-ADL-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; DISABLE-ADL-NEXT:    vpermps %ymm2, %ymm0, %ymm1
; DISABLE-ADL-NEXT:    vcvtdq2ps %ymm0, %ymm0
; DISABLE-ADL-NEXT:    vaddps %ymm2, %ymm0, %ymm0
; DISABLE-ADL-NEXT:    vaddps %ymm0, %ymm1, %ymm0
; DISABLE-ADL-NEXT:    retq
;
; DISABLE-SPR-LABEL: permps:
; DISABLE-SPR:       # %bb.0:
; DISABLE-SPR-NEXT:    vmovaps %ymm0, %ymm16
; DISABLE-SPR-NEXT:    #APP
; DISABLE-SPR-NEXT:    nop
; DISABLE-SPR-NEXT:    #NO_APP
; DISABLE-SPR-NEXT:    vpermps %ymm16, %ymm0, %ymm1
; DISABLE-SPR-NEXT:    vcvtdq2ps %ymm0, %ymm0
; DISABLE-SPR-NEXT:    vaddps %ymm16, %ymm0, %ymm0
; DISABLE-SPR-NEXT:    vaddps %ymm0, %ymm1, %ymm0
; DISABLE-SPR-NEXT:    retq
  %1 = tail call <8 x i32> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
  %2 = call <8 x float> @llvm.x86.avx2.permps(<8 x float> %a0, <8 x i32> %1)
  %t = sitofp <8 x i32> %1 to <8 x float>
  %3 = fadd <8 x float> %t, %a0
  %res = fadd <8 x float> %2, %3
  ret <8 x float> %res
}

define <8 x float> @permps_mem(ptr %p0, <8 x i32> %a1) {
; ENABLE-LABEL: permps_mem:
; ENABLE:       # %bb.0:
; ENABLE-NEXT:    #APP
; ENABLE-NEXT:    nop
; ENABLE-NEXT:    #NO_APP
; ENABLE-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; ENABLE-NEXT:    vpermps (%rdi), %ymm0, %ymm1
; ENABLE-NEXT:    vcvtdq2ps %ymm0, %ymm0
; ENABLE-NEXT:    vaddps %ymm0, %ymm1, %ymm0
; ENABLE-NEXT:    retq
;
; DISABLE-LABEL: permps_mem:
; DISABLE:       # %bb.0:
; DISABLE-NEXT:    #APP
; DISABLE-NEXT:    nop
; DISABLE-NEXT:    #NO_APP
; DISABLE-NEXT:    vpermps (%rdi), %ymm0, %ymm1
; DISABLE-NEXT:    vcvtdq2ps %ymm0, %ymm0
; DISABLE-NEXT:    vaddps %ymm0, %ymm1, %ymm0
; DISABLE-NEXT:    retq
  %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
  %a0 = load <8 x float>, ptr %p0, align 64
  %2 = call <8 x float> @llvm.x86.avx2.permps(<8 x float> %a0, <8 x i32> %a1)
  %t = sitofp <8 x i32> %a1 to <8 x float>
  %res = fadd <8 x float> %2, %t
  ret <8 x float> %res
}

declare <8 x float> @llvm.x86.avx2.permps(<8 x float>, <8 x i32>) nounwind readonly

define <4 x double> @permpd(<4 x double> %a0) {
; ENABLE-LABEL: permpd:
; ENABLE:       # %bb.0:
; ENABLE-NEXT:    #APP
; ENABLE-NEXT:    nop
; ENABLE-NEXT:    #NO_APP
; ENABLE-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; ENABLE-NEXT:    vpermpd {{.*#+}} ymm1 = ymm0[1,2,1,0]
; ENABLE-NEXT:    vaddpd %ymm0, %ymm1, %ymm0
; ENABLE-NEXT:    retq
;
; DISABLE-LABEL: permpd:
; DISABLE:       # %bb.0:
; DISABLE-NEXT:    #APP
; DISABLE-NEXT:    nop
; DISABLE-NEXT:    #NO_APP
; DISABLE-NEXT:    vpermpd {{.*#+}} ymm1 = ymm0[1,2,1,0]
; DISABLE-NEXT:    vaddpd %ymm0, %ymm1, %ymm0
; DISABLE-NEXT:    retq
  %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
  %2 = shufflevector <4 x double> %a0, <4 x double> undef, <4 x i32> <i32 1, i32 2, i32 1, i32 0>
  %res = fadd <4 x double> %2, %a0
  ret <4 x double> %res
}

define <4 x double> @permpd_mem(ptr %p0) {
; ENABLE-LABEL: permpd_mem:
; ENABLE:       # %bb.0:
; ENABLE-NEXT:    #APP
; ENABLE-NEXT:    nop
; ENABLE-NEXT:    #NO_APP
; ENABLE-NEXT:    vxorps %xmm0, %xmm0, %xmm0
; ENABLE-NEXT:    vpermpd {{.*#+}} ymm0 = mem[1,2,1,0]
; ENABLE-NEXT:    retq
;
; DISABLE-LABEL: permpd_mem:
; DISABLE:       # %bb.0:
; DISABLE-NEXT:    #APP
; DISABLE-NEXT:    nop
; DISABLE-NEXT:    #NO_APP
; DISABLE-NEXT:    vpermpd {{.*#+}} ymm0 = mem[1,2,1,0]
; DISABLE-NEXT:    retq
  %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
  %a0 = load <4 x double>, ptr %p0, align 64
  %2 = shufflevector <4 x double> %a0, <4 x double> undef, <4 x i32> <i32 1, i32 2, i32 1, i32 0>
  ret <4 x double> %2
}