# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn-amd-amdhsa -verify-machineinstrs -run-pass=si-i1-copies -o - %s | FileCheck %s

# The strange block ordering visits the use of the vreg_1 copy %1 (in bb.1)
# before its def (in bb.3).
---
name: inserted_cmp_operand_class_rpo
tracksRegLiveness: true
machineFunctionInfo:
  isEntryFunction: true
body:             |
  ; CHECK-LABEL: name: inserted_cmp_operand_class_rpo
  ; CHECK: bb.0:
  ; CHECK-NEXT:   successors: %bb.3(0x80000000)
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT:   S_BRANCH %bb.3
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.1:
  ; CHECK-NEXT:   successors: %bb.2(0x80000000)
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT:   [[COPY:%[0-9]+]]:sreg_64 = COPY %1
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.2:
  ; CHECK-NEXT:   [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; CHECK-NEXT:   [[COPY1:%[0-9]+]]:sreg_64_xexec = COPY [[COPY]]
  ; CHECK-NEXT:   S_ENDPGM 0
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.3:
  ; CHECK-NEXT:   successors: %bb.1(0x80000000)
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT:   [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; CHECK-NEXT:   [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 0
  ; CHECK-NEXT:   [[V_CMP_EQ_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_EQ_U32_e64 killed [[V_MOV_B32_e32_1]], killed [[S_MOV_B32_]], implicit $exec
  ; CHECK-NEXT:   [[COPY2:%[0-9]+]]:sreg_64 = COPY [[V_CMP_EQ_U32_e64_]]
  ; CHECK-NEXT:   S_BRANCH %bb.1
  bb.0:
    successors: %bb.3

    S_BRANCH %bb.3

  bb.1:
    successors: %bb.2

    %0:vreg_1 = COPY %1

  bb.2:
    %2:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %3:sreg_64_xexec = COPY %0
    S_ENDPGM 0

  bb.3:
    successors: %bb.1

    %4:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %5:sreg_32_xm0 = S_MOV_B32 0
    %6:sreg_64 = V_CMP_EQ_U32_e64 killed %4, killed %5, implicit $exec
    %1:vreg_1 = COPY %6
    S_BRANCH %bb.1
...