Skip to content

Commit 80ae6ed

Browse files
aam authored and commit-bot@chromium.org committed
[vm/aot-switchable-calls] Introduce single runtime entry for all switchable calls
Bug: #37835
Bug: #36097
Change-Id: I0198fd0328945b04e4f2254bacac25b41038e78c
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/138361
Commit-Queue: Alexander Aprelev <[email protected]>
Reviewed-by: Martin Kustermann <[email protected]>
Reviewed-by: Ryan Macnak <[email protected]>
1 parent 2e0bec3 commit 80ae6ed

35 files changed

+1146
-1139
lines changed

runtime/vm/class_finalizer.cc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1757,9 +1757,9 @@ void ClassFinalizer::ClearAllCode(bool including_nonchanging_cids) {
17571757
object_store->set_build_method_extractor_code(null_code);
17581758

17591759
auto& miss_function =
1760-
Function::Handle(zone, object_store->megamorphic_miss_function());
1760+
Function::Handle(zone, object_store->megamorphic_call_miss_function());
17611761
miss_function.ClearCode();
1762-
object_store->SetMegamorphicMissHandler(null_code, miss_function);
1762+
object_store->SetMegamorphicCallMissHandler(null_code, miss_function);
17631763
}
17641764
#endif // !DART_PRECOMPILED_RUNTIME
17651765
}

runtime/vm/class_id.h

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@ namespace dart {
4141
V(SingleTargetCache) \
4242
V(UnlinkedCall) \
4343
V(MonomorphicSmiableCall) \
44+
V(CallSiteData) \
4445
V(ICData) \
4546
V(MegamorphicCache) \
4647
V(SubtypeTestCache) \

runtime/vm/clustered_snapshot.cc

Lines changed: 13 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5442,8 +5442,8 @@ void Serializer::AddVMIsolateBaseObjects() {
54425442

54435443
ClassTable* table = isolate()->class_table();
54445444
for (intptr_t cid = kClassCid; cid < kInstanceCid; cid++) {
5445-
// Error has no class object.
5446-
if (cid != kErrorCid) {
5445+
// Error, CallSiteData has no class object.
5446+
if (cid != kErrorCid && cid != kCallSiteDataCid) {
54475447
ASSERT(table->HasValidClassAt(cid));
54485448
AddBaseObject(table->At(cid), "Class");
54495449
}
@@ -6109,8 +6109,8 @@ void Deserializer::AddVMIsolateBaseObjects() {
61096109

61106110
ClassTable* table = isolate()->class_table();
61116111
for (intptr_t cid = kClassCid; cid <= kUnwindErrorCid; cid++) {
6112-
// Error has no class object.
6113-
if (cid != kErrorCid) {
6112+
// Error, CallSiteData has no class object.
6113+
if (cid != kErrorCid && cid != kCallSiteDataCid) {
61146114
ASSERT(table->HasValidClassAt(cid));
61156115
AddBaseObject(table->At(cid));
61166116
}
@@ -6550,7 +6550,7 @@ RawApiError* FullSnapshotReader::ReadIsolateSnapshot() {
65506550
if (FLAG_use_bare_instructions) {
65516551
// By default, every switchable call site will put (ic_data, code) into the
65526552
// object pool. The [code] is initialized (at AOT compile-time) to be a
6553-
// [StubCode::UnlinkedCall].
6553+
// [StubCode::SwitchableCallMiss].
65546554
//
65556555
// In --use-bare-instruction we reduce the extra indirection via the [code]
65566556
// object and store instead (ic_data, entrypoint) in the object pool.
@@ -6565,9 +6565,15 @@ RawApiError* FullSnapshotReader::ReadIsolateSnapshot() {
65656565
for (intptr_t i = 0; i < pool.Length(); i++) {
65666566
if (pool.TypeAt(i) == ObjectPool::EntryType::kTaggedObject) {
65676567
entry = pool.ObjectAt(i);
6568-
if (entry.raw() == StubCode::UnlinkedCall().raw()) {
6568+
if (entry.raw() == StubCode::SwitchableCallMiss().raw()) {
65696569
smi = Smi::FromAlignedAddress(
6570-
StubCode::UnlinkedCall().MonomorphicEntryPoint());
6570+
StubCode::SwitchableCallMiss().MonomorphicEntryPoint());
6571+
pool.SetTypeAt(i, ObjectPool::EntryType::kImmediate,
6572+
ObjectPool::Patchability::kPatchable);
6573+
pool.SetObjectAt(i, smi);
6574+
} else if (entry.raw() == StubCode::MegamorphicCall().raw()) {
6575+
smi = Smi::FromAlignedAddress(
6576+
StubCode::MegamorphicCall().MonomorphicEntryPoint());
65716577
pool.SetTypeAt(i, ObjectPool::EntryType::kImmediate,
65726578
ObjectPool::Patchability::kPatchable);
65736579
pool.SetObjectAt(i, smi);

runtime/vm/compiler/assembler/assembler_arm.cc

Lines changed: 25 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1592,7 +1592,6 @@ void Assembler::LoadIsolate(Register rd) {
15921592

15931593
bool Assembler::CanLoadFromObjectPool(const Object& object) const {
15941594
ASSERT(IsOriginalObject(object));
1595-
ASSERT(!target::CanLoadFromThread(object));
15961595
if (!constant_pool_allowed()) {
15971596
return false;
15981597
}
@@ -1608,24 +1607,31 @@ void Assembler::LoadObjectHelper(Register rd,
16081607
bool is_unique,
16091608
Register pp) {
16101609
ASSERT(IsOriginalObject(object));
1611-
intptr_t offset = 0;
1612-
if (target::CanLoadFromThread(object, &offset)) {
1613-
// Load common VM constants from the thread. This works also in places where
1614-
// no constant pool is set up (e.g. intrinsic code).
1615-
ldr(rd, Address(THR, offset), cond);
1616-
} else if (target::IsSmi(object)) {
1617-
// Relocation doesn't apply to Smis.
1618-
LoadImmediate(rd, target::ToRawSmi(object), cond);
1619-
} else if (CanLoadFromObjectPool(object)) {
1620-
// Make sure that class CallPattern is able to decode this load from the
1621-
// object pool.
1622-
const auto index = is_unique ? object_pool_builder().AddObject(object)
1623-
: object_pool_builder().FindObject(object);
1624-
const int32_t offset = target::ObjectPool::element_offset(index);
1625-
LoadWordFromPoolOffset(rd, offset - kHeapObjectTag, pp, cond);
1626-
} else {
1610+
// `is_unique == true` effectively means object has to be patchable.
1611+
if (!is_unique) {
1612+
intptr_t offset = 0;
1613+
if (target::CanLoadFromThread(object, &offset)) {
1614+
// Load common VM constants from the thread. This works also in places
1615+
// where no constant pool is set up (e.g. intrinsic code).
1616+
ldr(rd, Address(THR, offset), cond);
1617+
return;
1618+
}
1619+
if (target::IsSmi(object)) {
1620+
// Relocation doesn't apply to Smis.
1621+
LoadImmediate(rd, target::ToRawSmi(object), cond);
1622+
return;
1623+
}
1624+
}
1625+
if (!CanLoadFromObjectPool(object)) {
16271626
UNREACHABLE();
1627+
return;
16281628
}
1629+
// Make sure that class CallPattern is able to decode this load from the
1630+
// object pool.
1631+
const auto index = is_unique ? object_pool_builder().AddObject(object)
1632+
: object_pool_builder().FindObject(object);
1633+
const int32_t offset = target::ObjectPool::element_offset(index);
1634+
LoadWordFromPoolOffset(rd, offset - kHeapObjectTag, pp, cond);
16291635
}
16301636

16311637
void Assembler::LoadObject(Register rd, const Object& object, Condition cond) {
@@ -3459,7 +3465,7 @@ void Assembler::MonomorphicCheckedEntryJIT() {
34593465
LoadClassIdMayBeSmi(IP, R0);
34603466
add(R2, R2, Operand(target::ToRawSmi(1)));
34613467
cmp(R1, Operand(IP, LSL, 1));
3462-
Branch(Address(THR, target::Thread::monomorphic_miss_entry_offset()), NE);
3468+
Branch(Address(THR, target::Thread::switchable_call_miss_entry_offset()), NE);
34633469
str(R2, FieldAddress(R9, count_offset));
34643470
LoadImmediate(R4, 0); // GC-safe for OptimizeInvokedFunction.
34653471

@@ -3488,7 +3494,7 @@ void Assembler::MonomorphicCheckedEntryAOT() {
34883494

34893495
LoadClassId(IP, R0);
34903496
cmp(R9, Operand(IP, LSL, 1));
3491-
Branch(Address(THR, target::Thread::monomorphic_miss_entry_offset()), NE);
3497+
Branch(Address(THR, target::Thread::switchable_call_miss_entry_offset()), NE);
34923498

34933499
// Fall through to unchecked entry.
34943500
ASSERT_EQUAL(CodeSize() - start,

runtime/vm/compiler/assembler/assembler_arm64.cc

Lines changed: 20 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -430,7 +430,6 @@ intptr_t Assembler::FindImmediate(int64_t imm) {
430430

431431
bool Assembler::CanLoadFromObjectPool(const Object& object) const {
432432
ASSERT(IsOriginalObject(object));
433-
ASSERT(!target::CanLoadFromThread(object));
434433
if (!constant_pool_allowed()) {
435434
return false;
436435
}
@@ -464,20 +463,28 @@ void Assembler::LoadObjectHelper(Register dst,
464463
const Object& object,
465464
bool is_unique) {
466465
ASSERT(IsOriginalObject(object));
467-
word offset = 0;
468-
if (IsSameObject(compiler::NullObject(), object)) {
469-
mov(dst, NULL_REG);
470-
} else if (target::CanLoadFromThread(object, &offset)) {
471-
ldr(dst, Address(THR, offset));
472-
} else if (CanLoadFromObjectPool(object)) {
466+
// `is_unique == true` effectively means object has to be patchable.
467+
// (even if the object is null)
468+
if (!is_unique) {
469+
if (IsSameObject(compiler::NullObject(), object)) {
470+
mov(dst, NULL_REG);
471+
return;
472+
}
473+
word offset = 0;
474+
if (target::CanLoadFromThread(object, &offset)) {
475+
ldr(dst, Address(THR, offset));
476+
return;
477+
}
478+
}
479+
if (CanLoadFromObjectPool(object)) {
473480
const int32_t offset = target::ObjectPool::element_offset(
474481
is_unique ? object_pool_builder().AddObject(object)
475482
: object_pool_builder().FindObject(object));
476483
LoadWordFromPoolOffset(dst, offset);
477-
} else {
478-
ASSERT(target::IsSmi(object));
479-
LoadImmediate(dst, target::ToRawSmi(object));
484+
return;
480485
}
486+
ASSERT(target::IsSmi(object));
487+
LoadImmediate(dst, target::ToRawSmi(object));
481488
}
482489

483490
void Assembler::LoadObject(Register dst, const Object& object) {
@@ -1549,7 +1556,7 @@ void Assembler::MonomorphicCheckedEntryJIT() {
15491556

15501557
Label immediate, miss;
15511558
Bind(&miss);
1552-
ldr(IP0, Address(THR, target::Thread::monomorphic_miss_entry_offset()));
1559+
ldr(IP0, Address(THR, target::Thread::switchable_call_miss_entry_offset()));
15531560
br(IP0);
15541561

15551562
Comment("MonomorphicCheckedEntry");
@@ -1567,7 +1574,7 @@ void Assembler::MonomorphicCheckedEntryJIT() {
15671574
cmp(R1, Operand(IP0, LSL, 1));
15681575
b(&miss, NE);
15691576
str(R2, FieldAddress(R5, count_offset));
1570-
LoadImmediate(R4, 0); // GC-safe for OptimizeInvokedFunction.
1577+
LoadImmediate(R4, 0); // GC-safe for OptimizeInvokedFunction
15711578

15721579
// Fall through to unchecked entry.
15731580
ASSERT_EQUAL(CodeSize() - start,
@@ -1587,7 +1594,7 @@ void Assembler::MonomorphicCheckedEntryAOT() {
15871594

15881595
Label immediate, miss;
15891596
Bind(&miss);
1590-
ldr(IP0, Address(THR, target::Thread::monomorphic_miss_entry_offset()));
1597+
ldr(IP0, Address(THR, target::Thread::switchable_call_miss_entry_offset()));
15911598
br(IP0);
15921599

15931600
Comment("MonomorphicCheckedEntry");

runtime/vm/compiler/assembler/assembler_ia32.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2123,7 +2123,7 @@ void Assembler::MonomorphicCheckedEntryJIT() {
21232123
intptr_t start = CodeSize();
21242124
Label have_cid, miss;
21252125
Bind(&miss);
2126-
jmp(Address(THR, target::Thread::monomorphic_miss_entry_offset()));
2126+
jmp(Address(THR, target::Thread::switchable_call_miss_entry_offset()));
21272127

21282128
Comment("MonomorphicCheckedEntry");
21292129
ASSERT(CodeSize() - start ==

runtime/vm/compiler/assembler/assembler_x64.cc

Lines changed: 19 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1208,7 +1208,6 @@ void Assembler::Drop(intptr_t stack_elements, Register tmp) {
12081208

12091209
bool Assembler::CanLoadFromObjectPool(const Object& object) const {
12101210
ASSERT(IsOriginalObject(object));
1211-
ASSERT(!target::CanLoadFromThread(object));
12121211
if (!constant_pool_allowed()) {
12131212
return false;
12141213
}
@@ -1243,18 +1242,23 @@ void Assembler::LoadObjectHelper(Register dst,
12431242
bool is_unique) {
12441243
ASSERT(IsOriginalObject(object));
12451244

1246-
intptr_t offset_from_thread;
1247-
if (target::CanLoadFromThread(object, &offset_from_thread)) {
1248-
movq(dst, Address(THR, offset_from_thread));
1249-
} else if (CanLoadFromObjectPool(object)) {
1250-
const intptr_t idx = is_unique ? object_pool_builder().AddObject(object)
1251-
: object_pool_builder().FindObject(object);
1252-
const int32_t offset = target::ObjectPool::element_offset(idx);
1245+
// `is_unique == true` effectively means object has to be patchable.
1246+
if (!is_unique) {
1247+
intptr_t offset;
1248+
if (target::CanLoadFromThread(object, &offset)) {
1249+
movq(dst, Address(THR, offset));
1250+
return;
1251+
}
1252+
}
1253+
if (CanLoadFromObjectPool(object)) {
1254+
const int32_t offset = target::ObjectPool::element_offset(
1255+
is_unique ? object_pool_builder().AddObject(object)
1256+
: object_pool_builder().FindObject(object));
12531257
LoadWordFromPoolOffset(dst, offset - kHeapObjectTag);
1254-
} else {
1255-
ASSERT(target::IsSmi(object));
1256-
LoadImmediate(dst, Immediate(target::ToRawSmi(object)));
1258+
return;
12571259
}
1260+
ASSERT(target::IsSmi(object));
1261+
LoadImmediate(dst, Immediate(target::ToRawSmi(object)));
12581262
}
12591263

12601264
void Assembler::LoadObject(Register dst, const Object& object) {
@@ -1801,7 +1805,7 @@ void Assembler::MonomorphicCheckedEntryJIT() {
18011805
intptr_t start = CodeSize();
18021806
Label have_cid, miss;
18031807
Bind(&miss);
1804-
jmp(Address(THR, target::Thread::monomorphic_miss_entry_offset()));
1808+
jmp(Address(THR, target::Thread::switchable_call_miss_entry_offset()));
18051809

18061810
// Ensure the monomorphic entry is 2-byte aligned (so GC can see them if we
18071811
// store them in ICData / MegamorphicCache arrays)
@@ -1829,12 +1833,14 @@ void Assembler::MonomorphicCheckedEntryJIT() {
18291833
ASSERT(((CodeSize() - start) & kSmiTagMask) == kSmiTag);
18301834
}
18311835

1836+
// RBX - input: class id smi
1837+
// RDX - input: receiver object
18321838
void Assembler::MonomorphicCheckedEntryAOT() {
18331839
has_monomorphic_entry_ = true;
18341840
intptr_t start = CodeSize();
18351841
Label have_cid, miss;
18361842
Bind(&miss);
1837-
jmp(Address(THR, target::Thread::monomorphic_miss_entry_offset()));
1843+
jmp(Address(THR, target::Thread::switchable_call_miss_entry_offset()));
18381844

18391845
// Ensure the monomorphic entry is 2-byte aligned (so GC can see them if we
18401846
// store them in ICData / MegamorphicCache arrays)

runtime/vm/compiler/backend/flow_graph_compiler_arm.cc

Lines changed: 24 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1132,13 +1132,29 @@ void FlowGraphCompiler::EmitMegamorphicInstanceCall(
11321132
// Load receiver into R0.
11331133
__ LoadFromOffset(kWord, R0, SP,
11341134
(args_desc.Count() - 1) * compiler::target::kWordSize);
1135-
__ LoadObject(R9, cache);
1136-
__ ldr(
1137-
LR,
1138-
compiler::Address(
1139-
THR,
1140-
compiler::target::Thread::megamorphic_call_checked_entry_offset()));
1141-
__ blx(LR);
1135+
// Use same code pattern as instance call so it can be parsed by code patcher.
1136+
if (FLAG_precompiled_mode) {
1137+
if (FLAG_use_bare_instructions) {
1138+
// The AOT runtime will replace the slot in the object pool with the
1139+
// entrypoint address - see clustered_snapshot.cc.
1140+
__ LoadUniqueObject(LR, StubCode::MegamorphicCall());
1141+
} else {
1142+
__ LoadUniqueObject(CODE_REG, StubCode::MegamorphicCall());
1143+
__ ldr(LR, compiler::FieldAddress(
1144+
CODE_REG, compiler::target::Code::entry_point_offset(
1145+
Code::EntryKind::kMonomorphic)));
1146+
}
1147+
__ LoadUniqueObject(R9, cache);
1148+
__ blx(LR);
1149+
1150+
} else {
1151+
__ LoadUniqueObject(R9, cache);
1152+
__ LoadUniqueObject(CODE_REG, StubCode::MegamorphicCall());
1153+
__ ldr(LR, compiler::FieldAddress(
1154+
CODE_REG,
1155+
Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
1156+
__ blx(LR);
1157+
}
11421158

11431159
RecordSafepoint(locs, slow_path_argument_count);
11441160
const intptr_t deopt_id_after = DeoptId::ToDeoptAfter(deopt_id);
@@ -1173,7 +1189,7 @@ void FlowGraphCompiler::EmitInstanceCallAOT(const ICData& ic_data,
11731189
ASSERT(entry_kind == Code::EntryKind::kNormal ||
11741190
entry_kind == Code::EntryKind::kUnchecked);
11751191
ASSERT(ic_data.NumArgsTested() == 1);
1176-
const Code& initial_stub = StubCode::UnlinkedCall();
1192+
const Code& initial_stub = StubCode::SwitchableCallMiss();
11771193
const char* switchable_call_mode = "smiable";
11781194
if (!receiver_can_be_smi) {
11791195
switchable_call_mode = "non-smi";

runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc

Lines changed: 23 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1089,9 +1089,25 @@ void FlowGraphCompiler::EmitMegamorphicInstanceCall(
10891089
// Load receiver into R0.
10901090
__ LoadFromOffset(R0, SP, (args_desc.Count() - 1) * kWordSize);
10911091

1092-
__ LoadObject(R5, cache);
1093-
__ ldr(LR, compiler::Address(
1094-
THR, Thread::megamorphic_call_checked_entry_offset()));
1092+
// Use same code pattern as instance call so it can be parsed by code patcher.
1093+
compiler::ObjectPoolBuilder& op = __ object_pool_builder();
1094+
const intptr_t data_index =
1095+
op.AddObject(cache, ObjectPool::Patchability::kPatchable);
1096+
const intptr_t stub_index = op.AddObject(
1097+
StubCode::MegamorphicCall(), ObjectPool::Patchability::kPatchable);
1098+
ASSERT((data_index + 1) == stub_index);
1099+
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
1100+
// The AOT runtime will replace the slot in the object pool with the
1101+
// entrypoint address - see clustered_snapshot.cc.
1102+
__ LoadDoubleWordFromPoolOffset(R5, LR,
1103+
ObjectPool::element_offset(data_index));
1104+
} else {
1105+
__ LoadDoubleWordFromPoolOffset(R5, CODE_REG,
1106+
ObjectPool::element_offset(data_index));
1107+
__ ldr(LR, compiler::FieldAddress(
1108+
CODE_REG,
1109+
Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
1110+
}
10951111
__ blr(LR);
10961112

10971113
RecordSafepoint(locs, slow_path_argument_count);
@@ -1125,7 +1141,7 @@ void FlowGraphCompiler::EmitInstanceCallAOT(const ICData& ic_data,
11251141
bool receiver_can_be_smi) {
11261142
ASSERT(CanCallDart());
11271143
ASSERT(ic_data.NumArgsTested() == 1);
1128-
const Code& initial_stub = StubCode::UnlinkedCall();
1144+
const Code& initial_stub = StubCode::SwitchableCallMiss();
11291145
const char* switchable_call_mode = "smiable";
11301146
if (!receiver_can_be_smi) {
11311147
switchable_call_mode = "non-smi";
@@ -1137,6 +1153,9 @@ void FlowGraphCompiler::EmitInstanceCallAOT(const ICData& ic_data,
11371153
compiler::ObjectPoolBuilder& op = __ object_pool_builder();
11381154

11391155
__ Comment("InstanceCallAOT (%s)", switchable_call_mode);
1156+
// Clear argument descriptor to keep gc happy when it gets pushed on to
1157+
// the stack.
1158+
__ LoadImmediate(R4, 0);
11401159
__ LoadFromOffset(R0, SP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize);
11411160

11421161
const intptr_t data_index =

runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -982,9 +982,10 @@ void FlowGraphCompiler::EmitMegamorphicInstanceCall(
982982
__ Comment("MegamorphicCall");
983983
// Load receiver into EBX.
984984
__ movl(EBX, compiler::Address(ESP, (args_desc.Count() - 1) * kWordSize));
985-
__ LoadObject(ECX, cache);
986-
__ call(
987-
compiler::Address(THR, Thread::megamorphic_call_checked_entry_offset()));
985+
__ LoadObject(ECX, cache, true);
986+
__ LoadObject(CODE_REG, StubCode::MegamorphicCall(), true);
987+
__ call(compiler::FieldAddress(
988+
CODE_REG, Code::entry_point_offset(Code::EntryKind::kMonomorphic)));
988989

989990
AddCurrentDescriptor(RawPcDescriptors::kOther, DeoptId::kNone, token_pos);
990991
RecordSafepoint(locs, slow_path_argument_count);

0 commit comments

Comments
 (0)