; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -verify-machineinstrs -fixup-byte-word-insts=1 -mtriple=x86_64-- < %s | FileCheck --check-prefix=X64 %s
; RUN: llc -verify-machineinstrs -fixup-byte-word-insts=0 -mtriple=x86_64-- < %s | FileCheck --check-prefix=X64 %s
; RUN: llc -verify-machineinstrs -fixup-byte-word-insts=1 -mtriple=i386-- < %s | FileCheck --check-prefixes=X86,X86-BWON %s
; RUN: llc -verify-machineinstrs -fixup-byte-word-insts=0 -mtriple=i386-- < %s | FileCheck --check-prefixes=X86,X86-BWOFF %s

target datalayout = "e-m:o-p:32:32-f64:32:64-f80:128-n8:16:32-S128"

define i8 @test_movb(i8 %a0) nounwind {
; X64-LABEL: test_movb:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
;
; X86-BWON-LABEL: test_movb:
; X86-BWON:       # %bb.0:
; X86-BWON-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-BWON-NEXT:    retl
;
; X86-BWOFF-LABEL: test_movb:
; X86-BWOFF:       # %bb.0:
; X86-BWOFF-NEXT:    movb {{[0-9]+}}(%esp), %al
; X86-BWOFF-NEXT:    retl
  ; Identity return of an i8 argument. On 32-bit x86 the checks show the
  ; fixup pass at work: with -fixup-byte-word-insts=1 the stack byte load
  ; is widened to movzbl, with =0 it stays a plain movb. On x86-64 the
  ; argument arrives in %edi, so the output is identical for both modes
  ; (hence a single X64 prefix shared by the first two RUN lines).
  ret i8 %a0
}

define i8 @test_movb_Os(i8 %a0) nounwind optsize {
; X64-LABEL: test_movb_Os:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
;
; X86-LABEL: test_movb_Os:
; X86:       # %bb.0:
; X86-NEXT:    movb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
  ; Same identity i8 return as test_movb, but with optsize. The shared X86
  ; prefix (instead of separate BWON/BWOFF prefixes) shows the byte load is
  ; NOT widened to movzbl even when -fixup-byte-word-insts=1: the fixup is
  ; suppressed when optimizing for size, so both modes emit movb.
  ret i8 %a0
}

define i8 @test_movb_Oz(i8 %a0) nounwind minsize {
; X64-LABEL: test_movb_Oz:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
;
; X86-LABEL: test_movb_Oz:
; X86:       # %bb.0:
; X86-NEXT:    movb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
  ; Same as test_movb_Os but with minsize: the shared X86 prefix again
  ; shows the movb -> movzbl widening is suppressed in both fixup modes
  ; when minimizing code size.
  ret i8 %a0
}

define i16 @test_movw(i16 %a0) {
; X64-LABEL: test_movw:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
;
; X86-BWON-LABEL: test_movw:
; X86-BWON:       # %bb.0:
; X86-BWON-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-BWON-NEXT:    retl
;
; X86-BWOFF-LABEL: test_movw:
; X86-BWOFF:       # %bb.0:
; X86-BWOFF-NEXT:    movw {{[0-9]+}}(%esp), %ax
; X86-BWOFF-NEXT:    retl
  ; Word-sized counterpart of test_movb: with -fixup-byte-word-insts=1 the
  ; 16-bit stack load is widened to movzwl, with =0 it stays movw. As with
  ; the byte case, x86-64 output is mode-independent (arg already in %edi).
  ret i16 %a0
}

; Verify we don't mess with H-reg copies (only generated in 32-bit mode).
define i8 @test_movb_hreg(i16 %a0) {
; X64-LABEL: test_movb_hreg:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrl $8, %eax
; X64-NEXT:    addl %edi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
;
; X86-LABEL: test_movb_hreg:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addb %al, %ah
; X86-NEXT:    movb %ah, %al
; X86-NEXT:    retl
  ; Returns (low byte + high byte) of the i16 argument. On 32-bit x86 the
  ; high byte is accessed via %ah, and the shared X86 prefix shows the
  ; "movb %ah, %al" copy is left untouched in both fixup modes: widening a
  ; copy that reads an H sub-register would change which bits are moved.
  %tmp0 = trunc i16 %a0 to i8
  %tmp1 = lshr i16 %a0, 8
  %tmp2 = trunc i16 %tmp1 to i8
  %tmp3 = add i8 %tmp0, %tmp2
  ret i8 %tmp3
}