; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-linux | FileCheck %s --check-prefixes=X86
; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s --check-prefixes=X64

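; Test that `add x, (and (xor x, -1), y)` is folded to `or x, y`, i.e. that
; the backend recognizes x + (~x & y) == x | y, and that the fold is
; rejected when the pattern does not actually match.
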
declare void @use(i8)

define i8 @add_and_xor(i8 %x, i8 %y) {
; X86-LABEL: add_and_xor:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: orb {{[0-9]+}}(%esp), %al
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: orl %esi, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %xor = xor i8 %x, -1
  %and = and i8 %xor, %y
  %add = add i8 %and, %x
  ret i8 %add
}

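; Negative test: the xor constant is -2 rather than -1, so %xor is not ~%x
; and the fold must not fire.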
define i8 @add_and_xor_wrong_const(i8 %x, i8 %y) {
; X86-LABEL: add_and_xor_wrong_const:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %eax
; X86-NEXT: xorb $-2, %al
; X86-NEXT: andb {{[0-9]+}}(%esp), %al
; X86-NEXT: addb %cl, %al
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_wrong_const:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: xorb $-2, %al
; X64-NEXT: andb %sil, %al
; X64-NEXT: addb %dil, %al
; X64-NEXT: retq
  %xor = xor i8 %x, -2
  %and = and i8 %xor, %y
  %add = add i8 %and, %x
  ret i8 %add
}

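; Negative test: the inverted value is %z while the add operand is %x, so
; this is not x + (~x & y).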
define i8 @add_and_xor_wrong_op(i8 %x, i8 %y, i8 %z) {
; X86-LABEL: add_and_xor_wrong_op:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: notb %al
; X86-NEXT: andb {{[0-9]+}}(%esp), %al
; X86-NEXT: addb {{[0-9]+}}(%esp), %al
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_wrong_op:
; X64: # %bb.0:
; X64-NEXT: # kill: def $edx killed $edx def $rdx
; X64-NEXT: # kill: def $edi killed $edi def $rdi
; X64-NEXT: notb %dl
; X64-NEXT: andb %sil, %dl
; X64-NEXT: leal (%rdx,%rdi), %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %xor = xor i8 %z, -1
  %and = and i8 %xor, %y
  %add = add i8 %and, %x
  ret i8 %add
}

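; The next three tests commute the operands of the and and the add; the
; fold must fire for every operand order.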
define i8 @add_and_xor_commuted1(i8 %x, i8 %y) {
; X86-LABEL: add_and_xor_commuted1:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: orb {{[0-9]+}}(%esp), %al
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_commuted1:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: orl %esi, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %xor = xor i8 %x, -1
  %and = and i8 %y, %xor
  %add = add i8 %and, %x
  ret i8 %add
}

define i8 @add_and_xor_commuted2(i8 %x, i8 %y) {
; X86-LABEL: add_and_xor_commuted2:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: orb {{[0-9]+}}(%esp), %al
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_commuted2:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: orl %esi, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %xor = xor i8 %x, -1
  %and = and i8 %xor, %y
  %add = add i8 %x, %and
  ret i8 %add
}

define i8 @add_and_xor_commuted3(i8 %x, i8 %y) {
; X86-LABEL: add_and_xor_commuted3:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: orb {{[0-9]+}}(%esp), %al
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_commuted3:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: orl %esi, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %xor = xor i8 %x, -1
  %and = and i8 %y, %xor
  %add = add i8 %x, %and
  ret i8 %add
}

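; %xor and %and have extra uses and must be kept alive for the calls, but
; the final add is still turned into an or.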
define i8 @add_and_xor_extra_use(i8 %x, i8 %y) nounwind {
; X86-LABEL: add_and_xor_extra_use:
; X86: # %bb.0:
; X86-NEXT: pushl %ebx
; X86-NEXT: subl $8, %esp
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ebx
; X86-NEXT: movb {{[0-9]+}}(%esp), %bh
; X86-NEXT: notb %bh
; X86-NEXT: movzbl %bh, %eax
; X86-NEXT: movl %eax, (%esp)
; X86-NEXT: calll use@PLT
; X86-NEXT: andb %bl, %bh
; X86-NEXT: movzbl %bh, %eax
; X86-NEXT: movl %eax, (%esp)
; X86-NEXT: calll use@PLT
; X86-NEXT: orb {{[0-9]+}}(%esp), %bl
; X86-NEXT: movl %ebx, %eax
; X86-NEXT: addl $8, %esp
; X86-NEXT: popl %ebx
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_extra_use:
; X64: # %bb.0:
; X64-NEXT: pushq %rbp
; X64-NEXT: pushq %r14
; X64-NEXT: pushq %rbx
; X64-NEXT: movl %esi, %ebx
; X64-NEXT: movl %edi, %ebp
; X64-NEXT: movl %ebp, %eax
; X64-NEXT: notb %al
; X64-NEXT: movzbl %al, %r14d
; X64-NEXT: movl %r14d, %edi
; X64-NEXT: callq use@PLT
; X64-NEXT: andb %bl, %r14b
; X64-NEXT: movzbl %r14b, %edi
; X64-NEXT: callq use@PLT
; X64-NEXT: orb %bpl, %bl
; X64-NEXT: movl %ebx, %eax
; X64-NEXT: popq %rbx
; X64-NEXT: popq %r14
; X64-NEXT: popq %rbp
; X64-NEXT: retq
  %xor = xor i8 %x, -1
  call void @use(i8 %xor)
  %and = and i8 %xor, %y
  call void @use(i8 %and)
  %add = add i8 %and, %x
  ret i8 %add
}

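; The fold also applies when y is a constant: x + (~x & 1) == x | 1.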
define i64 @add_and_xor_const(i64 %x) {
; X86-LABEL: add_and_xor_const:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: orl $1, %eax
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_const:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: orq $1, %rax
; X64-NEXT: retq
  %xor = xor i64 %x, -1
  %and = and i64 %xor, 1
  %add = add i64 %and, %x
  ret i64 %add
}

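; Negative test: the inverted value is %y, not the add operand %x.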
define i64 @add_and_xor_const_wrong_op(i64 %x, i64 %y) {
; X86-LABEL: add_and_xor_const_wrong_op:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: notl %eax
; X86-NEXT: andl $1, %eax
; X86-NEXT: addl {{[0-9]+}}(%esp), %eax
; X86-NEXT: adcl $0, %edx
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_const_wrong_op:
; X64: # %bb.0:
; X64-NEXT: notl %esi
; X64-NEXT: andl $1, %esi
; X64-NEXT: leaq (%rsi,%rdi), %rax
; X64-NEXT: retq
  %xor = xor i64 %y, -1
  %and = and i64 %xor, 1
  %add = add i64 %and, %x
  ret i64 %add
}

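; The fold still fires through an explicit trunc/sext of the inverted
; value: bit 0 of sext(~trunc(x)) equals bit 0 of ~x, so the sum is x | 1.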
define i64 @add_and_xor_const_explicit_trunc(i64 %x) {
; X86-LABEL: add_and_xor_const_explicit_trunc:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: orl $1, %eax
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_const_explicit_trunc:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: orq $1, %rax
; X64-NEXT: retq
  %trunc = trunc i64 %x to i32
  %xor = xor i32 %trunc, -1
  %ext = sext i32 %xor to i64
  %and = and i64 %ext, 1
  %add = add i64 %and, %x
  ret i64 %add
}

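; Negative test: mask 0x100000001 also selects bit 32, where
; sext(~trunc(x)) replicates bit 31 of ~x rather than matching bit 32 of
; ~x, so folding to an or would be wrong.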
define i64 @add_and_xor_const_explicit_trunc_wrong_mask(i64 %x) {
; X86-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %eax
; X86-NEXT: notl %eax
; X86-NEXT: movl %eax, %edx
; X86-NEXT: shrl $31, %edx
; X86-NEXT: andl $1, %eax
; X86-NEXT: addl %ecx, %eax
; X86-NEXT: adcl {{[0-9]+}}(%esp), %edx
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: notl %eax
; X64-NEXT: movslq %eax, %rcx
; X64-NEXT: movabsq $4294967297, %rax # imm = 0x100000001
; X64-NEXT: andq %rcx, %rax
; X64-NEXT: addq %rdi, %rax
; X64-NEXT: retq
  %trunc = trunc i64 %x to i32
  %xor = xor i32 %trunc, -1
  %ext = sext i32 %xor to i64
  %and = and i64 %ext, 4294967297
  %add = add i64 %and, %x
  ret i64 %add
}

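; The same fold through ptrtoint/getelementptr: (a & m) ^ m == ~a & m, so
; the gep computes a + (~a & m) == a | m.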
define ptr @gep_and_xor(ptr %a, i64 %m) {
; X86-LABEL: gep_and_xor:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: orl {{[0-9]+}}(%esp), %eax
; X86-NEXT: retl
;
; X64-LABEL: gep_and_xor:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: orq %rsi, %rax
; X64-NEXT: retq
  %old = ptrtoint ptr %a to i64
  %old.not = and i64 %old, %m
  %offset = xor i64 %old.not, %m
  %p = getelementptr i8, ptr %a, i64 %offset
  ret ptr %p
}

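; Constant variant of the previous test: a + ((a & 1) ^ 1) == a | 1.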
define ptr @gep_and_xor_const(ptr %a) {
; X86-LABEL: gep_and_xor_const:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: orl $1, %eax
; X86-NEXT: retl
;
; X64-LABEL: gep_and_xor_const:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: orq $1, %rax
; X64-NEXT: retq
  %old = ptrtoint ptr %a to i64
  %old.not = and i64 %old, 1
  %offset = xor i64 %old.not, 1
  %p = getelementptr i8, ptr %a, i64 %offset
  ret ptr %p
}

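; The fold also fires through trunc/zext: the addend is bit 0 of ~x, which
; is disjoint from the set bits of x, so x + zext(~trunc(x) & 1) == x | 1.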
define i64 @add_and_xor_const_zext_trunc(i64 %x) {
; X86-LABEL: add_and_xor_const_zext_trunc:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: orl $1, %eax
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_const_zext_trunc:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: orq $1, %rax
; X64-NEXT: retq
  %t = trunc i64 %x to i32
  %xor = xor i32 %t, -1
  %and = and i32 %xor, 1
  %ext = zext i32 %and to i64
  %add = add i64 %ext, %x
  ret i64 %add
}

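; Variable variant: zext(trunc(y) & ~trunc(x)) shares no set bits with x,
; so the add cannot carry and simplifies to or'ing the low half of y into
; x.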
define i64 @add_and_xor_const_zext_trunc_var(i64 %x, i64 %y) {
; X86-LABEL: add_and_xor_const_zext_trunc_var:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: orl {{[0-9]+}}(%esp), %eax
; X86-NEXT: retl
;
; X64-LABEL: add_and_xor_const_zext_trunc_var:
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: orq %rdi, %rax
; X64-NEXT: retq
  %tx = trunc i64 %x to i32
  %ty = trunc i64 %y to i32
  %xor = xor i32 %tx, -1
  %and = and i32 %ty, %xor
  %ext = zext i32 %and to i64
  %add = add i64 %ext, %x
  ret i64 %add
}