[PAC][CodeGen][ELF][AArch64] Support signed TLSDESC #113813
Conversation
@llvm/pr-subscribers-mc @llvm/pr-subscribers-backend-aarch64

Author: Daniil Kovalev (kovdan01)

Changes

Depends on #113716

Support the following relocations and assembly operators:

- `R_AARCH64_AUTH_TLSDESC_ADR_PAGE21` (`:tlsdesc_auth:` for `adrp`)
- `R_AARCH64_AUTH_TLSDESC_LD64_LO12` (`:tlsdesc_auth_lo12:` for `ldr`)
- `R_AARCH64_AUTH_TLSDESC_ADD_LO12` (`:tlsdesc_auth_lo12:` for `add`)
Only SelectionDAG ISel is supported. Tests starting with 'ptrauth-' have corresponding variants w/o this prefix. Patch is 39.19 KiB, truncated to 20.00 KiB below, full version: https://github.com/llvm/llvm-project/pull/113813.diff 12 Files Affected:
diff --git a/llvm/lib/Target/AArch64/AArch64AsmPrinter.cpp b/llvm/lib/Target/AArch64/AArch64AsmPrinter.cpp
index e79457f925db66..8ec785027d23d7 100644
--- a/llvm/lib/Target/AArch64/AArch64AsmPrinter.cpp
+++ b/llvm/lib/Target/AArch64/AArch64AsmPrinter.cpp
@@ -2603,6 +2603,65 @@ void AArch64AsmPrinter::emitInstruction(const MachineInstr *MI) {
EmitToStreamer(*OutStreamer, TmpInstSB);
return;
}
+ case AArch64::TLSDESC_AUTH_CALLSEQ: {
+ /// lower this to:
+ /// adrp x0, :tlsdesc_auth:var
+ /// ldr x16, [x0, #:tlsdesc_auth_lo12:var]
+ /// add x0, x0, #:tlsdesc_auth_lo12:var
+ /// .tlsdesccall var
+ /// blraa x16, x0
+ /// (TPIDR_EL0 offset now in x0)
+ const MachineOperand &MO_Sym = MI->getOperand(0);
+ MachineOperand MO_TLSDESC_LO12(MO_Sym), MO_TLSDESC(MO_Sym);
+ MCOperand Sym, SymTLSDescLo12, SymTLSDesc;
+ MO_TLSDESC_LO12.setTargetFlags(AArch64II::MO_TLS | AArch64II::MO_PAGEOFF);
+ MO_TLSDESC.setTargetFlags(AArch64II::MO_TLS | AArch64II::MO_PAGE);
+ MCInstLowering.lowerOperand(MO_Sym, Sym);
+ MCInstLowering.lowerOperand(MO_TLSDESC_LO12, SymTLSDescLo12);
+ MCInstLowering.lowerOperand(MO_TLSDESC, SymTLSDesc);
+
+ MCInst Adrp;
+ Adrp.setOpcode(AArch64::ADRP);
+ Adrp.addOperand(MCOperand::createReg(AArch64::X0));
+ Adrp.addOperand(SymTLSDesc);
+ EmitToStreamer(*OutStreamer, Adrp);
+
+ MCInst Ldr;
+ Ldr.setOpcode(AArch64::LDRXui);
+ Ldr.addOperand(MCOperand::createReg(AArch64::X16));
+ Ldr.addOperand(MCOperand::createReg(AArch64::X0));
+ Ldr.addOperand(SymTLSDescLo12);
+ Ldr.addOperand(MCOperand::createImm(0));
+ EmitToStreamer(*OutStreamer, Ldr);
+
+ MCInst Add;
+ Add.setOpcode(AArch64::ADDXri);
+ Add.addOperand(MCOperand::createReg(AArch64::X0));
+ Add.addOperand(MCOperand::createReg(AArch64::X0));
+ Add.addOperand(SymTLSDescLo12);
+ Add.addOperand(MCOperand::createImm(AArch64_AM::getShiftValue(0)));
+ EmitToStreamer(*OutStreamer, Add);
+
+ // Emit a relocation-annotation. This expands to no code, but requests
+ // the following instruction gets an R_AARCH64_TLSDESC_CALL.
+ // TODO: we probably don't need that for AUTH TLSDESC. Emit as for now for
+ // consistency with non-AUTH case.
+ MCInst TLSDescCall;
+ TLSDescCall.setOpcode(AArch64::TLSDESCCALL);
+ TLSDescCall.addOperand(Sym);
+ EmitToStreamer(*OutStreamer, TLSDescCall);
+#ifndef NDEBUG
+ --InstsEmitted; // no code emitted
+#endif
+
+ MCInst Blraa;
+ Blraa.setOpcode(AArch64::BLRAA);
+ Blraa.addOperand(MCOperand::createReg(AArch64::X16));
+ Blraa.addOperand(MCOperand::createReg(AArch64::X0));
+ EmitToStreamer(*OutStreamer, Blraa);
+
+ return;
+ }
case AArch64::TLSDESC_CALLSEQ: {
/// lower this to:
/// adrp x0, :tlsdesc:var
diff --git a/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp b/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp
index 884e5fe9f60040..108a01f34e2e81 100644
--- a/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp
+++ b/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp
@@ -2634,6 +2634,7 @@ const char *AArch64TargetLowering::getTargetNodeName(unsigned Opcode) const {
MAKE_CASE(AArch64ISD::CSINC)
MAKE_CASE(AArch64ISD::THREAD_POINTER)
MAKE_CASE(AArch64ISD::TLSDESC_CALLSEQ)
+ MAKE_CASE(AArch64ISD::TLSDESC_AUTH_CALLSEQ)
MAKE_CASE(AArch64ISD::PROBED_ALLOCA)
MAKE_CASE(AArch64ISD::ABDS_PRED)
MAKE_CASE(AArch64ISD::ABDU_PRED)
@@ -9889,8 +9890,11 @@ SDValue AArch64TargetLowering::LowerELFTLSDescCallSeq(SDValue SymAddr,
SDValue Chain = DAG.getEntryNode();
SDVTList NodeTys = DAG.getVTList(MVT::Other, MVT::Glue);
- Chain =
- DAG.getNode(AArch64ISD::TLSDESC_CALLSEQ, DL, NodeTys, {Chain, SymAddr});
+ unsigned Opcode =
+ DAG.getMachineFunction().getInfo<AArch64FunctionInfo>()->hasELFSignedGOT()
+ ? AArch64ISD::TLSDESC_AUTH_CALLSEQ
+ : AArch64ISD::TLSDESC_CALLSEQ;
+ Chain = DAG.getNode(Opcode, DL, NodeTys, {Chain, SymAddr});
SDValue Glue = Chain.getValue(1);
return DAG.getCopyFromReg(Chain, DL, AArch64::X0, PtrVT, Glue);
@@ -9903,7 +9907,13 @@ AArch64TargetLowering::LowerELFGlobalTLSAddress(SDValue Op,
const GlobalAddressSDNode *GA = cast<GlobalAddressSDNode>(Op);
- TLSModel::Model Model = getTargetMachine().getTLSModel(GA->getGlobal());
+ TLSModel::Model Model;
+ if (DAG.getMachineFunction()
+ .getInfo<AArch64FunctionInfo>()
+ ->hasELFSignedGOT())
+ Model = TLSModel::GeneralDynamic;
+ else
+ Model = getTargetMachine().getTLSModel(GA->getGlobal());
if (!EnableAArch64ELFLocalDynamicTLSGeneration) {
if (Model == TLSModel::LocalDynamic)
diff --git a/llvm/lib/Target/AArch64/AArch64ISelLowering.h b/llvm/lib/Target/AArch64/AArch64ISelLowering.h
index 160cd18ca53b32..03c027d7497f96 100644
--- a/llvm/lib/Target/AArch64/AArch64ISelLowering.h
+++ b/llvm/lib/Target/AArch64/AArch64ISelLowering.h
@@ -85,6 +85,7 @@ enum NodeType : unsigned {
// Produces the full sequence of instructions for getting the thread pointer
// offset of a variable into X0, using the TLSDesc model.
TLSDESC_CALLSEQ,
+ TLSDESC_AUTH_CALLSEQ,
ADRP, // Page address of a TargetGlobalAddress operand.
ADR, // ADR
ADDlow, // Add the low 12 bits of a TargetGlobalAddress operand.
diff --git a/llvm/lib/Target/AArch64/AArch64InstrInfo.td b/llvm/lib/Target/AArch64/AArch64InstrInfo.td
index fdaa01598dcba5..c67b3996dc7c67 100644
--- a/llvm/lib/Target/AArch64/AArch64InstrInfo.td
+++ b/llvm/lib/Target/AArch64/AArch64InstrInfo.td
@@ -493,6 +493,8 @@ def SDT_AArch64stnp : SDTypeProfile<0, 3, [SDTCisVT<0, v4i32>, SDTCisSameAs<0, 1
// number of operands (the variable)
def SDT_AArch64TLSDescCallSeq : SDTypeProfile<0,1,
[SDTCisPtrTy<0>]>;
+def SDT_AArch64TLSDescAuthCallSeq : SDTypeProfile<0,1,
+ [SDTCisPtrTy<0>]>;
def SDT_AArch64WrapperLarge : SDTypeProfile<1, 4,
[SDTCisVT<0, i64>, SDTCisVT<1, i32>,
@@ -879,6 +881,10 @@ def AArch64tlsdesc_callseq : SDNode<"AArch64ISD::TLSDESC_CALLSEQ",
[SDNPInGlue, SDNPOutGlue, SDNPHasChain,
SDNPVariadic]>;
+def AArch64tlsdesc_auth_callseq : SDNode<"AArch64ISD::TLSDESC_AUTH_CALLSEQ",
+ SDT_AArch64TLSDescAuthCallSeq,
+ [SDNPInGlue, SDNPOutGlue, SDNPHasChain,
+ SDNPVariadic]>;
def AArch64WrapperLarge : SDNode<"AArch64ISD::WrapperLarge",
SDT_AArch64WrapperLarge>;
@@ -3294,8 +3300,16 @@ def TLSDESC_CALLSEQ
: Pseudo<(outs), (ins i64imm:$sym),
[(AArch64tlsdesc_callseq tglobaltlsaddr:$sym)]>,
Sched<[WriteI, WriteLD, WriteI, WriteBrReg]>;
+let isCall = 1, Defs = [NZCV, LR, X0, X16], hasSideEffects = 1, Size = 16,
+ isCodeGenOnly = 1 in
+def TLSDESC_AUTH_CALLSEQ
+ : Pseudo<(outs), (ins i64imm:$sym),
+ [(AArch64tlsdesc_auth_callseq tglobaltlsaddr:$sym)]>,
+ Sched<[WriteI, WriteLD, WriteI, WriteBrReg]>;
def : Pat<(AArch64tlsdesc_callseq texternalsym:$sym),
(TLSDESC_CALLSEQ texternalsym:$sym)>;
+def : Pat<(AArch64tlsdesc_auth_callseq texternalsym:$sym),
+ (TLSDESC_AUTH_CALLSEQ texternalsym:$sym)>;
//===----------------------------------------------------------------------===//
// Conditional branch (immediate) instruction.
diff --git a/llvm/lib/Target/AArch64/AArch64MCInstLower.cpp b/llvm/lib/Target/AArch64/AArch64MCInstLower.cpp
index 9f234b0f917058..46ce151ca82b64 100644
--- a/llvm/lib/Target/AArch64/AArch64MCInstLower.cpp
+++ b/llvm/lib/Target/AArch64/AArch64MCInstLower.cpp
@@ -194,12 +194,16 @@ MCOperand AArch64MCInstLower::lowerSymbolOperandELF(const MachineOperand &MO,
} else if (MO.getTargetFlags() & AArch64II::MO_TLS) {
TLSModel::Model Model;
if (MO.isGlobal()) {
- const GlobalValue *GV = MO.getGlobal();
- Model = Printer.TM.getTLSModel(GV);
- if (!EnableAArch64ELFLocalDynamicTLSGeneration &&
- Model == TLSModel::LocalDynamic)
+ const MachineFunction *MF = MO.getParent()->getParent()->getParent();
+ if (MF->getInfo<AArch64FunctionInfo>()->hasELFSignedGOT()) {
Model = TLSModel::GeneralDynamic;
-
+ } else {
+ const GlobalValue *GV = MO.getGlobal();
+ Model = Printer.TM.getTLSModel(GV);
+ if (!EnableAArch64ELFLocalDynamicTLSGeneration &&
+ Model == TLSModel::LocalDynamic)
+ Model = TLSModel::GeneralDynamic;
+ }
} else {
assert(MO.isSymbol() &&
StringRef(MO.getSymbolName()) == "_TLS_MODULE_BASE_" &&
@@ -218,10 +222,18 @@ MCOperand AArch64MCInstLower::lowerSymbolOperandELF(const MachineOperand &MO,
case TLSModel::LocalDynamic:
RefFlags |= AArch64MCExpr::VK_DTPREL;
break;
- case TLSModel::GeneralDynamic:
- RefFlags |= AArch64MCExpr::VK_TLSDESC;
+ case TLSModel::GeneralDynamic: {
+ // TODO: it's probably better to introduce MO_TLS_AUTH or smth and avoid
+ // running hasELFSignedGOT() every time, but existing flags already
+ // cover all 12 bits of SubReg_TargetFlags field in MachineOperand, and
+ // making the field wider breaks static assertions.
+ const MachineFunction *MF = MO.getParent()->getParent()->getParent();
+ RefFlags |= MF->getInfo<AArch64FunctionInfo>()->hasELFSignedGOT()
+ ? AArch64MCExpr::VK_TLSDESC_AUTH
+ : AArch64MCExpr::VK_TLSDESC;
break;
}
+ }
} else if (MO.getTargetFlags() & AArch64II::MO_PREL) {
RefFlags |= AArch64MCExpr::VK_PREL;
} else {
diff --git a/llvm/lib/Target/AArch64/AsmParser/AArch64AsmParser.cpp b/llvm/lib/Target/AArch64/AsmParser/AArch64AsmParser.cpp
index b83ca3f7e52db4..b4c37c6d02cfdb 100644
--- a/llvm/lib/Target/AArch64/AsmParser/AArch64AsmParser.cpp
+++ b/llvm/lib/Target/AArch64/AsmParser/AArch64AsmParser.cpp
@@ -903,6 +903,7 @@ class AArch64Operand : public MCParsedAsmOperand {
ELFRefKind == AArch64MCExpr::VK_TPREL_LO12_NC ||
ELFRefKind == AArch64MCExpr::VK_GOTTPREL_LO12_NC ||
ELFRefKind == AArch64MCExpr::VK_TLSDESC_LO12 ||
+ ELFRefKind == AArch64MCExpr::VK_TLSDESC_AUTH_LO12 ||
ELFRefKind == AArch64MCExpr::VK_SECREL_LO12 ||
ELFRefKind == AArch64MCExpr::VK_SECREL_HI12 ||
ELFRefKind == AArch64MCExpr::VK_GOT_PAGE_LO15) {
@@ -1020,6 +1021,7 @@ class AArch64Operand : public MCParsedAsmOperand {
ELFRefKind == AArch64MCExpr::VK_TPREL_LO12 ||
ELFRefKind == AArch64MCExpr::VK_TPREL_LO12_NC ||
ELFRefKind == AArch64MCExpr::VK_TLSDESC_LO12 ||
+ ELFRefKind == AArch64MCExpr::VK_TLSDESC_AUTH_LO12 ||
ELFRefKind == AArch64MCExpr::VK_SECREL_HI12 ||
ELFRefKind == AArch64MCExpr::VK_SECREL_LO12;
}
@@ -3313,7 +3315,8 @@ ParseStatus AArch64AsmParser::tryParseAdrpLabel(OperandVector &Operands) {
ELFRefKind != AArch64MCExpr::VK_GOT_AUTH_PAGE &&
ELFRefKind != AArch64MCExpr::VK_GOT_PAGE_LO15 &&
ELFRefKind != AArch64MCExpr::VK_GOTTPREL_PAGE &&
- ELFRefKind != AArch64MCExpr::VK_TLSDESC_PAGE) {
+ ELFRefKind != AArch64MCExpr::VK_TLSDESC_PAGE &&
+ ELFRefKind != AArch64MCExpr::VK_TLSDESC_AUTH_PAGE) {
// The operand must be an @page or @gotpage qualified symbolref.
return Error(S, "page or gotpage label reference expected");
}
@@ -4390,56 +4393,59 @@ bool AArch64AsmParser::parseSymbolicImmVal(const MCExpr *&ImmVal) {
return TokError("expect relocation specifier in operand after ':'");
std::string LowerCase = getTok().getIdentifier().lower();
- RefKind = StringSwitch<AArch64MCExpr::VariantKind>(LowerCase)
- .Case("lo12", AArch64MCExpr::VK_LO12)
- .Case("abs_g3", AArch64MCExpr::VK_ABS_G3)
- .Case("abs_g2", AArch64MCExpr::VK_ABS_G2)
- .Case("abs_g2_s", AArch64MCExpr::VK_ABS_G2_S)
- .Case("abs_g2_nc", AArch64MCExpr::VK_ABS_G2_NC)
- .Case("abs_g1", AArch64MCExpr::VK_ABS_G1)
- .Case("abs_g1_s", AArch64MCExpr::VK_ABS_G1_S)
- .Case("abs_g1_nc", AArch64MCExpr::VK_ABS_G1_NC)
- .Case("abs_g0", AArch64MCExpr::VK_ABS_G0)
- .Case("abs_g0_s", AArch64MCExpr::VK_ABS_G0_S)
- .Case("abs_g0_nc", AArch64MCExpr::VK_ABS_G0_NC)
- .Case("prel_g3", AArch64MCExpr::VK_PREL_G3)
- .Case("prel_g2", AArch64MCExpr::VK_PREL_G2)
- .Case("prel_g2_nc", AArch64MCExpr::VK_PREL_G2_NC)
- .Case("prel_g1", AArch64MCExpr::VK_PREL_G1)
- .Case("prel_g1_nc", AArch64MCExpr::VK_PREL_G1_NC)
- .Case("prel_g0", AArch64MCExpr::VK_PREL_G0)
- .Case("prel_g0_nc", AArch64MCExpr::VK_PREL_G0_NC)
- .Case("dtprel_g2", AArch64MCExpr::VK_DTPREL_G2)
- .Case("dtprel_g1", AArch64MCExpr::VK_DTPREL_G1)
- .Case("dtprel_g1_nc", AArch64MCExpr::VK_DTPREL_G1_NC)
- .Case("dtprel_g0", AArch64MCExpr::VK_DTPREL_G0)
- .Case("dtprel_g0_nc", AArch64MCExpr::VK_DTPREL_G0_NC)
- .Case("dtprel_hi12", AArch64MCExpr::VK_DTPREL_HI12)
- .Case("dtprel_lo12", AArch64MCExpr::VK_DTPREL_LO12)
- .Case("dtprel_lo12_nc", AArch64MCExpr::VK_DTPREL_LO12_NC)
- .Case("pg_hi21_nc", AArch64MCExpr::VK_ABS_PAGE_NC)
- .Case("tprel_g2", AArch64MCExpr::VK_TPREL_G2)
- .Case("tprel_g1", AArch64MCExpr::VK_TPREL_G1)
- .Case("tprel_g1_nc", AArch64MCExpr::VK_TPREL_G1_NC)
- .Case("tprel_g0", AArch64MCExpr::VK_TPREL_G0)
- .Case("tprel_g0_nc", AArch64MCExpr::VK_TPREL_G0_NC)
- .Case("tprel_hi12", AArch64MCExpr::VK_TPREL_HI12)
- .Case("tprel_lo12", AArch64MCExpr::VK_TPREL_LO12)
- .Case("tprel_lo12_nc", AArch64MCExpr::VK_TPREL_LO12_NC)
- .Case("tlsdesc_lo12", AArch64MCExpr::VK_TLSDESC_LO12)
- .Case("got", AArch64MCExpr::VK_GOT_PAGE)
- .Case("gotpage_lo15", AArch64MCExpr::VK_GOT_PAGE_LO15)
- .Case("got_lo12", AArch64MCExpr::VK_GOT_LO12)
- .Case("got_auth", AArch64MCExpr::VK_GOT_AUTH_PAGE)
- .Case("got_auth_lo12", AArch64MCExpr::VK_GOT_AUTH_LO12)
- .Case("gottprel", AArch64MCExpr::VK_GOTTPREL_PAGE)
- .Case("gottprel_lo12", AArch64MCExpr::VK_GOTTPREL_LO12_NC)
- .Case("gottprel_g1", AArch64MCExpr::VK_GOTTPREL_G1)
- .Case("gottprel_g0_nc", AArch64MCExpr::VK_GOTTPREL_G0_NC)
- .Case("tlsdesc", AArch64MCExpr::VK_TLSDESC_PAGE)
- .Case("secrel_lo12", AArch64MCExpr::VK_SECREL_LO12)
- .Case("secrel_hi12", AArch64MCExpr::VK_SECREL_HI12)
- .Default(AArch64MCExpr::VK_INVALID);
+ RefKind =
+ StringSwitch<AArch64MCExpr::VariantKind>(LowerCase)
+ .Case("lo12", AArch64MCExpr::VK_LO12)
+ .Case("abs_g3", AArch64MCExpr::VK_ABS_G3)
+ .Case("abs_g2", AArch64MCExpr::VK_ABS_G2)
+ .Case("abs_g2_s", AArch64MCExpr::VK_ABS_G2_S)
+ .Case("abs_g2_nc", AArch64MCExpr::VK_ABS_G2_NC)
+ .Case("abs_g1", AArch64MCExpr::VK_ABS_G1)
+ .Case("abs_g1_s", AArch64MCExpr::VK_ABS_G1_S)
+ .Case("abs_g1_nc", AArch64MCExpr::VK_ABS_G1_NC)
+ .Case("abs_g0", AArch64MCExpr::VK_ABS_G0)
+ .Case("abs_g0_s", AArch64MCExpr::VK_ABS_G0_S)
+ .Case("abs_g0_nc", AArch64MCExpr::VK_ABS_G0_NC)
+ .Case("prel_g3", AArch64MCExpr::VK_PREL_G3)
+ .Case("prel_g2", AArch64MCExpr::VK_PREL_G2)
+ .Case("prel_g2_nc", AArch64MCExpr::VK_PREL_G2_NC)
+ .Case("prel_g1", AArch64MCExpr::VK_PREL_G1)
+ .Case("prel_g1_nc", AArch64MCExpr::VK_PREL_G1_NC)
+ .Case("prel_g0", AArch64MCExpr::VK_PREL_G0)
+ .Case("prel_g0_nc", AArch64MCExpr::VK_PREL_G0_NC)
+ .Case("dtprel_g2", AArch64MCExpr::VK_DTPREL_G2)
+ .Case("dtprel_g1", AArch64MCExpr::VK_DTPREL_G1)
+ .Case("dtprel_g1_nc", AArch64MCExpr::VK_DTPREL_G1_NC)
+ .Case("dtprel_g0", AArch64MCExpr::VK_DTPREL_G0)
+ .Case("dtprel_g0_nc", AArch64MCExpr::VK_DTPREL_G0_NC)
+ .Case("dtprel_hi12", AArch64MCExpr::VK_DTPREL_HI12)
+ .Case("dtprel_lo12", AArch64MCExpr::VK_DTPREL_LO12)
+ .Case("dtprel_lo12_nc", AArch64MCExpr::VK_DTPREL_LO12_NC)
+ .Case("pg_hi21_nc", AArch64MCExpr::VK_ABS_PAGE_NC)
+ .Case("tprel_g2", AArch64MCExpr::VK_TPREL_G2)
+ .Case("tprel_g1", AArch64MCExpr::VK_TPREL_G1)
+ .Case("tprel_g1_nc", AArch64MCExpr::VK_TPREL_G1_NC)
+ .Case("tprel_g0", AArch64MCExpr::VK_TPREL_G0)
+ .Case("tprel_g0_nc", AArch64MCExpr::VK_TPREL_G0_NC)
+ .Case("tprel_hi12", AArch64MCExpr::VK_TPREL_HI12)
+ .Case("tprel_lo12", AArch64MCExpr::VK_TPREL_LO12)
+ .Case("tprel_lo12_nc", AArch64MCExpr::VK_TPREL_LO12_NC)
+ .Case("tlsdesc_lo12", AArch64MCExpr::VK_TLSDESC_LO12)
+ .Case("tlsdesc_auth_lo12", AArch64MCExpr::VK_TLSDESC_AUTH_LO12)
+ .Case("got", AArch64MCExpr::VK_GOT_PAGE)
+ .Case("gotpage_lo15", AArch64MCExpr::VK_GOT_PAGE_LO15)
+ .Case("got_lo12", AArch64MCExpr::VK_GOT_LO12)
+ .Case("got_auth", AArch64MCExpr::VK_GOT_AUTH_PAGE)
+ .Case("got_auth_lo12", AArch64MCExpr::VK_GOT_AUTH_LO12)
+ .Case("gottprel", AArch64MCExpr::VK_GOTTPREL_PAGE)
+ .Case("gottprel_lo12", AArch64MCExpr::VK_GOTTPREL_LO12_NC)
+ .Case("gottprel_g1", AArch64MCExpr::VK_GOTTPREL_G1)
+ .Case("gottprel_g0_nc", AArch64MCExpr::VK_GOTTPREL_G0_NC)
+ .Case("tlsdesc", AArch64MCExpr::VK_TLSDESC_PAGE)
+ .Case("tlsdesc_auth", AArch64MCExpr::VK_TLSDESC_AUTH_PAGE)
+ .Case("secrel_lo12", AArch64MCExpr::VK_SECREL_LO12)
+ .Case("secrel_hi12", AArch64MCExpr::VK_SECREL_HI12)
+ .Default(AArch64MCExpr::VK_INVALID);
if (RefKind == AArch64MCExpr::VK_INVALID)
return TokError("expect relocation specifier in operand after ':'");
@@ -5813,6 +5819,7 @@ bool AArch64AsmParser::validateInstruction(MCInst &Inst, SMLoc &IDLoc,
ELFRefKind == AArch64MCExpr::VK_TPREL_LO12 ||
ELFRefKind == AArch64MCExpr::VK_TPREL_LO12_NC ||
ELFRefKind == AArch64MCExpr::VK_TLSDESC_LO12 ||
+ ELFRefKind == AArch64MCExpr::VK_TLSDESC_AUTH_LO12 ||
ELFRefKind == AArch64MCExpr::VK_SECREL_LO12 ||
ELFRefKind == AArch64MCExpr::VK_SECREL_HI12) &&
(Inst.getOpcode() == AArch64::ADDXri ||
diff --git a/llvm/lib/Target/AArch64/MCTargetDesc/AArch64ELFObjectWriter.cpp b/llvm/lib/Target/AArch64/MCTargetDesc/AArch64ELFObjectWriter.cpp
index b5f5a58d96288e..61e92b16762522 100644
--- a/llvm/lib/Target/AArch64/MCTargetDesc/AArch64ELFObjectWriter.cpp
+++ b/llvm/lib/Target/AArch64/MCTargetDesc/AArch64ELFObjectWriter.cpp
@@ -178,6 +178,15 @@ unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
return R_CLS(TLSIE_ADR_GOTTPREL_PAGE21);
if (SymLoc == AArch64MCExpr::VK_TLSDESC && !IsNC)
return R_CLS(TLSDESC_ADR_PAGE21);
+ if (SymLoc == AArch64MCExpr::VK_TLSDESC_AUTH && !IsNC) {
+ if (IsILP32) {
+ Ctx.reportError(Fixup.getLoc(),
+ "ILP32 ADRP AUTH relocation not supported "
+ "(LP64 eqv: AUTH_TLSDESC_ADR_PAGE21)");
+ return ELF::R_AARCH64_NONE;
+ }
+ return ELF::R_AARCH64_AUTH_TLSDESC_ADR_PAGE21;
+ }
Ctx.reportError(Fixup.getLoc(),
"invalid symbol kind for ADRP relocation");
return ELF::R_AARCH64_NONE;
@@ -249,6 +258,15 @@ unsigned AArch64ELFObjectWriter::getRelocType(MCContext &Ctx,
return R_CLS(TLSLE_ADD_TPREL_LO12);
if (RefKind == AArch64MCExpr::VK_TLSDESC_LO12)
return R_CLS(TLSDESC_ADD_LO12);
+ if (RefKind == ...
[truncated]
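For reference, this is roughly the assembly-level pattern the new operators and relocations support, per the PR description and the AsmPrinter lowering above; `var` stands for an arbitrary TLS symbol, and the per-line relocation notes are a sketch rather than checked assembler output:

```
// Signed TLSDESC access to a thread-local variable `var` (sketch).
adrp  x0, :tlsdesc_auth:var               // R_AARCH64_AUTH_TLSDESC_ADR_PAGE21
ldr   x16, [x0, #:tlsdesc_auth_lo12:var]  // R_AARCH64_AUTH_TLSDESC_LD64_LO12
add   x0, x0, #:tlsdesc_auth_lo12:var     // R_AARCH64_AUTH_TLSDESC_ADD_LO12
.tlsdesccall var                          // R_AARCH64_TLSDESC_CALL, kept for consistency with the non-AUTH case
blraa x16, x0                             // authenticated indirect call to the TLS descriptor resolver
// TPIDR_EL0 offset of `var` is now in x0
```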
Would be glad to see everyone's feedback on the changes.
@MaskRay would be glad to see your feedback on this
@kovdan01 Thank you for the update! LGTM with a few minor comments, except that I'm not sure about the definitions of `SDT_AArch64TLSDescAuthCallSeq` and `AArch64tlsdesc_auth_callseq`.
Support the following relocations and assembly operators:

- `R_AARCH64_AUTH_TLSDESC_ADR_PAGE21` (`:tlsdesc_auth:` for `adrp`)
- `R_AARCH64_AUTH_TLSDESC_LD64_LO12` (`:tlsdesc_auth_lo12:` for `ldr`)
- `R_AARCH64_AUTH_TLSDESC_ADD_LO12` (`:tlsdesc_auth_lo12:` for `add`)

A `TLSDESC_AUTH_CALLSEQ` pseudo-instruction is introduced which is later expanded to an actual instruction sequence like the following:

```
adrp x0, :tlsdesc_auth:var
ldr  x16, [x0, #:tlsdesc_auth_lo12:var]
add  x0, x0, #:tlsdesc_auth_lo12:var
blraa x16, x0
(TPIDR_EL0 offset now in x0)
```

Only SelectionDAG ISel is supported.

Tests starting with 'ptrauth-' have corresponding variants w/o this prefix.
LGTM, thank you for the update!
Depends on #120010
A `TLSDESC_AUTH_CALLSEQ` pseudo-instruction is introduced, which is later expanded to an actual instruction sequence like the following:
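```
adrp x0, :tlsdesc_auth:var
ldr  x16, [x0, #:tlsdesc_auth_lo12:var]
add  x0, x0, #:tlsdesc_auth_lo12:var
blraa x16, x0
(TPIDR_EL0 offset now in x0)
```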
Only SelectionDAG ISel is supported.
Tests starting with 'ptrauth-' have corresponding variants w/o this prefix.