diff --git a/llvm/lib/Target/AMDGPU/SILowerI1Copies.cpp b/llvm/lib/Target/AMDGPU/SILowerI1Copies.cpp
index d4f0906f020ab..a11f02e7db350 100644
--- a/llvm/lib/Target/AMDGPU/SILowerI1Copies.cpp
+++ b/llvm/lib/Target/AMDGPU/SILowerI1Copies.cpp
@@ -708,6 +708,9 @@ bool SILowerI1Copies::lowerCopiesToI1() {
             .addImm(0);
         MI.getOperand(1).setReg(TmpReg);
         SrcReg = TmpReg;
+      } else {
+        // SrcReg needs to be live beyond copy.
+        MI.getOperand(1).setIsKill(false);
       }
 
       // Defs in a loop that are observed outside the loop must be transformed
diff --git a/llvm/test/CodeGen/AMDGPU/lower-i1-copies-clear-kills.mir b/llvm/test/CodeGen/AMDGPU/lower-i1-copies-clear-kills.mir
new file mode 100644
index 0000000000000..c5e2ba5d8c7cb
--- /dev/null
+++ b/llvm/test/CodeGen/AMDGPU/lower-i1-copies-clear-kills.mir
@@ -0,0 +1,157 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 3
+# RUN: llc -run-pass=si-i1-copies -mtriple=amdgcn--amdpal -mcpu=gfx1030 -verify-machineinstrs -o - %s | FileCheck %s
+
+# Make sure that kill flag is clear on %23 to %0 copy when %23 is reused.
+
+---
+name: _amdgpu_cs_main
+tracksRegLiveness: true
+body: |
+  ; CHECK-LABEL: name: _amdgpu_cs_main
+  ; CHECK: bb.0:
+  ; CHECK-NEXT: successors: %bb.1(0x80000000)
+  ; CHECK-NEXT: liveins: $vgpr0, $vgpr1
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1
+  ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
+  ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 0
+  ; CHECK-NEXT: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_NE_U32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec
+  ; CHECK-NEXT: [[S_MOV_B32_1:%[0-9]+]]:sgpr_32 = S_MOV_B32 0
+  ; CHECK-NEXT: [[V_CMP_GT_F32_e64_:%[0-9]+]]:sreg_32 = nofpexcept V_CMP_GT_F32_e64 0, [[COPY]], 0, killed [[S_MOV_B32_1]], 0, implicit $mode, implicit $exec
+  ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32 = S_AND_B32 killed [[V_CMP_NE_U32_e64_]], killed [[V_CMP_GT_F32_e64_]], implicit-def dead $scc
+  ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_32 = COPY [[S_AND_B32_]]
+  ; CHECK-NEXT: [[S_MOV_B32_2:%[0-9]+]]:sreg_32 = S_MOV_B32 -1
+  ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_32 = COPY [[COPY2]]
+  ; CHECK-NEXT: [[S_XOR_B32_:%[0-9]+]]:sreg_32 = S_XOR_B32 [[COPY3]], killed [[S_MOV_B32_2]], implicit-def dead $scc
+  ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32 = COPY [[S_XOR_B32_]]
+  ; CHECK-NEXT: [[COPY5:%[0-9]+]]:vgpr_32 = COPY [[S_MOV_B32_]], implicit $exec
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: bb.1:
+  ; CHECK-NEXT: successors: %bb.2(0x40000000), %bb.3(0x40000000)
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: [[PHI:%[0-9]+]]:sreg_32 = PHI [[S_MOV_B32_]], %bb.0, %14, %bb.5
+  ; CHECK-NEXT: [[PHI1:%[0-9]+]]:sreg_32 = PHI [[S_MOV_B32_]], %bb.0, %16, %bb.5
+  ; CHECK-NEXT: [[PHI2:%[0-9]+]]:vgpr_32 = PHI [[COPY5]], %bb.0, %18, %bb.5
+  ; CHECK-NEXT: [[COPY6:%[0-9]+]]:sreg_32 = COPY [[COPY4]]
+  ; CHECK-NEXT: [[SI_IF:%[0-9]+]]:sreg_32 = SI_IF [[COPY6]], %bb.3, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+  ; CHECK-NEXT: S_BRANCH %bb.2
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: bb.2:
+  ; CHECK-NEXT: successors: %bb.3(0x80000000)
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: [[V_OR_B32_e64_:%[0-9]+]]:vgpr_32 = V_OR_B32_e64 [[PHI1]], [[PHI2]], implicit $exec
+  ; CHECK-NEXT: [[S_MOV_B32_3:%[0-9]+]]:sreg_32 = S_MOV_B32 -1
+  ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32 = S_OR_B32 [[S_AND_B32_]], $exec_lo, implicit-def $scc
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: bb.3:
+  ; CHECK-NEXT: successors: %bb.4(0x40000000), %bb.5(0x40000000)
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: [[PHI3:%[0-9]+]]:sreg_32 = PHI [[S_AND_B32_]], %bb.1, [[S_OR_B32_]], %bb.2
+  ; CHECK-NEXT: [[PHI4:%[0-9]+]]:vgpr_32 = PHI [[PHI2]], %bb.1, [[V_OR_B32_e64_]], %bb.2
+  ; CHECK-NEXT: SI_END_CF [[SI_IF]], implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+  ; CHECK-NEXT: [[S_MOV_B32_4:%[0-9]+]]:sreg_32 = S_MOV_B32 -1
+  ; CHECK-NEXT: [[DEF:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
+  ; CHECK-NEXT: [[COPY7:%[0-9]+]]:sreg_32 = COPY [[PHI3]]
+  ; CHECK-NEXT: [[COPY8:%[0-9]+]]:vgpr_32 = COPY [[DEF]]
+  ; CHECK-NEXT: [[SI_IF1:%[0-9]+]]:sreg_32 = SI_IF [[COPY7]], %bb.5, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+  ; CHECK-NEXT: S_BRANCH %bb.4
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: bb.4:
+  ; CHECK-NEXT: successors: %bb.5(0x80000000)
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: [[S_MOV_B32_5:%[0-9]+]]:sreg_32 = S_MOV_B32 1
+  ; CHECK-NEXT: [[S_OR_B32_1:%[0-9]+]]:sreg_32 = S_OR_B32 [[PHI1]], killed [[S_MOV_B32_5]], implicit-def dead $scc
+  ; CHECK-NEXT: [[S_MOV_B32_6:%[0-9]+]]:sreg_32 = S_MOV_B32 0
+  ; CHECK-NEXT: [[S_XOR_B32_1:%[0-9]+]]:sreg_32 = S_XOR_B32 $exec_lo, -1, implicit-def $scc
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: bb.5:
+  ; CHECK-NEXT: successors: %bb.6(0x04000000), %bb.1(0x7c000000)
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: [[PHI5:%[0-9]+]]:sreg_32 = PHI [[S_MOV_B32_4]], %bb.3, [[S_XOR_B32_1]], %bb.4
+  ; CHECK-NEXT: [[PHI6:%[0-9]+]]:vgpr_32 = PHI [[COPY8]], %bb.3, [[PHI4]], %bb.4
+  ; CHECK-NEXT: [[PHI7:%[0-9]+]]:sreg_32 = PHI [[DEF]], %bb.3, [[S_OR_B32_1]], %bb.4
+  ; CHECK-NEXT: SI_END_CF [[SI_IF1]], implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+  ; CHECK-NEXT: [[COPY9:%[0-9]+]]:sreg_32 = COPY [[PHI5]]
+  ; CHECK-NEXT: [[SI_IF_BREAK:%[0-9]+]]:sreg_32 = SI_IF_BREAK [[COPY9]], [[PHI]], implicit-def dead $scc
+  ; CHECK-NEXT: SI_LOOP [[SI_IF_BREAK]], %bb.1, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+  ; CHECK-NEXT: S_BRANCH %bb.6
+  ; CHECK-NEXT: {{ $}}
+  ; CHECK-NEXT: bb.6:
+  ; CHECK-NEXT: [[PHI8:%[0-9]+]]:sreg_32 = PHI [[SI_IF_BREAK]], %bb.5
+  ; CHECK-NEXT: SI_END_CF [[PHI8]], implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+  ; CHECK-NEXT: S_ENDPGM 0
+  bb.0:
+    successors: %bb.1(0x80000000)
+    liveins: $vgpr0, $vgpr1
+
+    %0:vgpr_32 = COPY $vgpr1
+    %1:vgpr_32 = COPY $vgpr0
+    %2:sreg_32 = S_MOV_B32 0
+    %3:sreg_32 = V_CMP_NE_U32_e64 %1, %2, implicit $exec
+    %4:sgpr_32 = S_MOV_B32 0
+    %5:sreg_32 = nofpexcept V_CMP_GT_F32_e64 0, %0, 0, killed %4, 0, implicit $mode, implicit $exec
+    %6:sreg_32 = S_AND_B32 killed %3, killed %5, implicit-def dead $scc
+    %7:vreg_1 = COPY killed %6
+    %8:sreg_32 = S_MOV_B32 -1
+    %9:sreg_32 = COPY %7
+    %10:sreg_32 = S_XOR_B32 %9, killed %8, implicit-def dead $scc
+    %11:vreg_1 = COPY %10
+    %12:vgpr_32 = COPY %2, implicit $exec
+
+  bb.1:
+    successors: %bb.2(0x40000000), %bb.3(0x40000000)
+
+    %13:sreg_32 = PHI %2, %bb.0, %14, %bb.5
+    %15:sreg_32 = PHI %2, %bb.0, %16, %bb.5
+    %17:vgpr_32 = PHI %12, %bb.0, %18, %bb.5
+    %19:sreg_32 = COPY %11
+    %20:sreg_32 = SI_IF %19, %bb.3, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+    S_BRANCH %bb.2
+
+  bb.2:
+    successors: %bb.3(0x80000000)
+
+    %21:vgpr_32 = V_OR_B32_e64 %15, %17, implicit $exec
+    %22:sreg_32 = S_MOV_B32 -1
+    %23:vreg_1 = COPY %22, implicit $exec
+
+  bb.3:
+    successors: %bb.4(0x40000000), %bb.5(0x40000000)
+
+    %24:vgpr_32 = PHI %17, %bb.1, %21, %bb.2
+    %25:vreg_1 = PHI %7, %bb.1, %23, %bb.2
+    SI_END_CF %20, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+    %26:sreg_32 = S_MOV_B32 -1
+    %27:sreg_32 = IMPLICIT_DEF
+    %28:sreg_32 = COPY %25
+    %29:vgpr_32 = COPY %27
+    %30:vreg_1 = COPY %26, implicit $exec
+    %31:sreg_32 = SI_IF %28, %bb.5, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+    S_BRANCH %bb.4
+
+  bb.4:
+    successors: %bb.5(0x80000000)
+
+    %32:sreg_32 = S_MOV_B32 1
+    %33:sreg_32 = S_OR_B32 %15, killed %32, implicit-def dead $scc
+    %34:sreg_32 = S_MOV_B32 0
+    %35:vreg_1 = COPY %34, implicit $exec
+
+  bb.5:
+    successors: %bb.6(0x04000000), %bb.1(0x7c000000)
+
+    %18:vgpr_32 = PHI %29, %bb.3, %24, %bb.4
+    %16:sreg_32 = PHI %27, %bb.3, %33, %bb.4
+    %36:vreg_1 = PHI %30, %bb.3, %35, %bb.4
+    SI_END_CF %31, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+    %37:sreg_32 = COPY %36
+    %14:sreg_32 = SI_IF_BREAK %37, %13, implicit-def dead $scc
+    SI_LOOP %14, %bb.1, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+    S_BRANCH %bb.6
+
+  bb.6:
+    %38:sreg_32 = PHI %14, %bb.5
+    SI_END_CF %38, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+    S_ENDPGM 0
+
+...
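
Editorial note (not part of the patch): in the else-branch added above, the copy's source is already a usable lane-mask register, so the pass reuses SrcReg directly instead of materializing a fresh register via V_CMP_NE_U32. Because the lowered mask can gain further uses after the COPY, a stale kill flag on that use operand would no longer match the register's real liveness, which is why the patch clears it. A minimal sketch of that operand-level pattern, using the real MachineOperand::setIsKill API but a hypothetical helper name that is not part of SILowerI1Copies:

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineOperand.h"

    // Illustrative helper: keep a COPY's source register live past the copy by
    // dropping any kill flag on its use operand.
    static void keepCopySourceAlive(llvm::MachineInstr &CopyMI) {
      // Operand 1 of a COPY is its source register use.
      llvm::MachineOperand &Src = CopyMI.getOperand(1);
      assert(Src.isReg() && Src.isUse() && "expected the copy's register source");
      // Clearing the kill flag is conservative: liveness stays correct even if
      // this really was the last use, whereas keeping a stale kill flag would
      // be wrong once the register picks up uses after the copy.
      Src.setIsKill(false);
    }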