Commit cfe5a08

[RISCV] Enable Zbb ANDN/ORN/XNOR for more 64-bit constants (#122698)
This extends PR #120221 to 64-bit constants that don't match the 12-low-bits-set pattern.
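For illustration, consider the andnofff test updated below (all values taken from that diff): the 64-bit mask -72057594037927681 (0xFF000000000000FF) takes three instructions to materialize plus an AND, while its bitwise inverse 0x00FFFFFFFFFFFF00 can be built in two, so ANDN absorbs the inversion and saves one instruction overall:

  # before: materialize 0xFF000000000000FF, then AND (4 instructions)
  li    a1, -1
  slli  a1, a1, 56        # a1 = 0xFF00000000000000
  addi  a1, a1, 255       # a1 = 0xFF000000000000FF
  and   a0, a0, a1
  # after: materialize the inverse 0x00FFFFFFFFFFFF00, then ANDN (3 instructions)
  lui   a1, 1048560       # a1 = 0xFFFFFFFFFFFF0000 (sign-extended)
  srli  a1, a1, 8         # a1 = 0x00FFFFFFFFFFFF00
  andn  a0, a0, a1        # a0 & ~a1 == a0 & 0xFF000000000000FF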
1 parent 99fc649 · commit cfe5a08

File tree: 2 files changed (+13, -15 lines)

llvm/lib/Target/RISCV/RISCVISelDAGToDAG.cpp

Lines changed: 4 additions & 3 deletions
@@ -3216,17 +3216,18 @@ bool RISCVDAGToDAGISel::selectSHXADD_UWOp(SDValue N, unsigned ShAmt,
 bool RISCVDAGToDAGISel::selectInvLogicImm(SDValue N, SDValue &Val) {
   if (!isa<ConstantSDNode>(N))
     return false;
-
   int64_t Imm = cast<ConstantSDNode>(N)->getSExtValue();
-  if ((Imm & 0xfff) != 0xfff || Imm == -1)
+
+  // For 32-bit signed constants, we can only substitute LUI+ADDI with LUI.
+  if (isInt<32>(Imm) && ((Imm & 0xfff) != 0xfff || Imm == -1))
     return false;
 
+  // Abandon this transform if the constant is needed elsewhere.
   for (const SDNode *U : N->users()) {
     if (!ISD::isBitwiseLogicOp(U->getOpcode()))
       return false;
   }
 
-  // For 32-bit signed constants we already know it's a win: LUI+ADDI vs LUI.
   // For 64-bit constants, the instruction sequences get complex,
   // so we select inverted only if it's cheaper.
   if (!isInt<32>(Imm)) {

llvm/test/CodeGen/RISCV/zbb-logic-neg-imm.ll

Lines changed: 9 additions & 12 deletions
@@ -330,10 +330,9 @@ define i64 @andnofff(i64 %x) {
 ;
 ; RV64-LABEL: andnofff:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, -1
-; RV64-NEXT:    slli a1, a1, 56
-; RV64-NEXT:    addi a1, a1, 255
-; RV64-NEXT:    and a0, a0, a1
+; RV64-NEXT:    lui a1, 1048560
+; RV64-NEXT:    srli a1, a1, 8
+; RV64-NEXT:    andn a0, a0, a1
 ; RV64-NEXT:    ret
   %and = and i64 %x, -72057594037927681
   ret i64 %and
@@ -349,10 +348,9 @@ define i64 @ornofff(i64 %x) {
 ;
 ; NOZBS64-LABEL: ornofff:
 ; NOZBS64:       # %bb.0:
-; NOZBS64-NEXT:    li a1, -1
-; NOZBS64-NEXT:    slli a1, a1, 63
-; NOZBS64-NEXT:    addi a1, a1, 2047
-; NOZBS64-NEXT:    or a0, a0, a1
+; NOZBS64-NEXT:    lui a1, 1048575
+; NOZBS64-NEXT:    srli a1, a1, 1
+; NOZBS64-NEXT:    orn a0, a0, a1
 ; NOZBS64-NEXT:    ret
 ;
 ; ZBS32-LABEL: ornofff:
@@ -380,10 +378,9 @@ define i64 @xornofff(i64 %x) {
 ;
 ; RV64-LABEL: xornofff:
 ; RV64:       # %bb.0:
-; RV64-NEXT:    li a1, -1
-; RV64-NEXT:    slli a1, a1, 60
-; RV64-NEXT:    addi a1, a1, 255
-; RV64-NEXT:    xor a0, a0, a1
+; RV64-NEXT:    lui a1, 1048575
+; RV64-NEXT:    srli a1, a1, 4
+; RV64-NEXT:    xnor a0, a0, a1
 ; RV64-NEXT:    ret
   %xor = xor i64 %x, -1152921504606846721
   ret i64 %xor
