Commit 04e6bb8

sjindel-google authored and commit-bot@chromium.org committed
[vm] Use multiple entrypoints to remove unnecessary checks on calls against "this".
Test Plan: Behavioral correctness should be ensured by existing tests. Tests in vm/dart/entrypoints ensure that the unchecked entrypoint is used in cases where the optimization should trigger.

Bug: dart-lang#31798
Change-Id: I5b880b2dfa6343b4bb0a96ad23562facff73e41f
Cq-Include-Trybots: luci.dart.try:vm-kernel-win-release-x64-try,vm-kernel-optcounter-threshold-linux-release-x64-try,vm-kernel-precomp-linux-debug-x64-try,vm-kernel-precomp-linux-release-simarm-try,vm-kernel-precomp-linux-release-simarm64-try,vm-kernel-precomp-linux-release-x64-try,vm-kernel-precomp-win-release-x64-try
Reviewed-on: https://dart-review.googlesource.com/69741
Commit-Queue: Samir Jindel <[email protected]>
Reviewed-by: Vyacheslav Egorov <[email protected]>
1 parent 95cc715 commit 04e6bb8
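In outline, the mechanism works like this: a compiled Code object exposes more than one entry point, and a call site that the compiler can prove needs no receiver or argument checks (such as a dispatch against "this") is linked against the entry that skips them. Below is a minimal standalone sketch of that selection; the kUnchecked enumerator and the offsets are illustrative assumptions, only kNormal and the entry_point_offset(entry_kind) accessor appear in the diff itself.

#include <cstdint>

// Sketch only: mirrors the shape of Code::EntryKind used in the diff below.
// kNormal appears in this commit; kUnchecked is assumed to name the entry
// placed just past the receiver/argument checks.
enum class EntryKind { kNormal, kUnchecked };

// Placeholder standing in for Code::entry_point_offset(entry_kind): the call
// sequence loads one of the cached entry points from the Code object and
// branches to it.
intptr_t EntryPointOffsetFor(EntryKind kind) {
  switch (kind) {
    case EntryKind::kNormal:
      return 0;                 // entry that performs the usual checks
    case EntryKind::kUnchecked:
      return sizeof(intptr_t);  // entry immediately after the checks
  }
  return 0;
}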

File tree

4 files changed, +40 -20 lines changed

assembler_arm.cc

Lines changed: 13 additions & 8 deletions
@@ -2460,15 +2460,17 @@ void Assembler::Branch(const StubEntry& stub_entry,
   bx(IP, cond);
 }
 
-void Assembler::BranchLink(const Code& target, Patchability patchable) {
+void Assembler::BranchLink(const Code& target,
+                           Patchability patchable,
+                           Code::EntryKind entry_kind) {
   // Make sure that class CallPattern is able to patch the label referred
   // to by this code sequence.
   // For added code robustness, use 'blx lr' in a patchable sequence and
   // use 'blx ip' in a non-patchable sequence (see other BranchLink flavors).
   const int32_t offset = ObjectPool::element_offset(
       object_pool_wrapper_.FindObject(target, patchable));
   LoadWordFromPoolOffset(CODE_REG, offset - kHeapObjectTag, PP, AL);
-  ldr(LR, FieldAddress(CODE_REG, Code::entry_point_offset()));
+  ldr(LR, FieldAddress(CODE_REG, Code::entry_point_offset(entry_kind)));
   blx(LR);  // Use blx instruction so that the return branch prediction works.
 }
 
@@ -2478,8 +2480,9 @@ void Assembler::BranchLink(const StubEntry& stub_entry,
   BranchLink(code, patchable);
 }
 
-void Assembler::BranchLinkPatchable(const Code& target) {
-  BranchLink(target, kPatchable);
+void Assembler::BranchLinkPatchable(const Code& target,
+                                    Code::EntryKind entry_kind) {
+  BranchLink(target, kPatchable, entry_kind);
 }
 
 void Assembler::BranchLinkToRuntime() {
@@ -2498,7 +2501,8 @@ void Assembler::CallNullErrorShared(bool save_fpu_registers) {
 }
 
 void Assembler::BranchLinkWithEquivalence(const StubEntry& stub_entry,
-                                          const Object& equivalence) {
+                                          const Object& equivalence,
+                                          Code::EntryKind entry_kind) {
   const Code& target = Code::ZoneHandle(stub_entry.code());
   // Make sure that class CallPattern is able to patch the label referred
   // to by this code sequence.
@@ -2507,7 +2511,7 @@ void Assembler::BranchLinkWithEquivalence(const StubEntry& stub_entry,
   const int32_t offset = ObjectPool::element_offset(
       object_pool_wrapper_.FindObject(target, equivalence));
   LoadWordFromPoolOffset(CODE_REG, offset - kHeapObjectTag, PP, AL);
-  ldr(LR, FieldAddress(CODE_REG, Code::entry_point_offset()));
+  ldr(LR, FieldAddress(CODE_REG, Code::entry_point_offset(entry_kind)));
   blx(LR);  // Use blx instruction so that the return branch prediction works.
 }
 
@@ -2516,8 +2520,9 @@ void Assembler::BranchLink(const ExternalLabel* label) {
   blx(LR);  // Use blx instruction so that the return branch prediction works.
 }
 
-void Assembler::BranchLinkPatchable(const StubEntry& stub_entry) {
-  BranchLinkPatchable(Code::ZoneHandle(stub_entry.code()));
+void Assembler::BranchLinkPatchable(const StubEntry& stub_entry,
+                                    Code::EntryKind entry_kind) {
+  BranchLinkPatchable(Code::ZoneHandle(stub_entry.code()), entry_kind);
 }
 
 void Assembler::BranchLinkOffset(Register base, int32_t offset) {
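For illustration, a hedged sketch of how an ARM call site could opt into the unchecked entry after this change. The emitter function and the kUnchecked enumerator are assumptions; only the BranchLinkPatchable overload comes from this diff.

// Hypothetical call site (not part of this commit). When the compiler has
// proven the checks redundant for a call against "this", it selects the
// unchecked entry; otherwise it keeps the default kNormal entry.
void EmitInstanceCall(Assembler* assembler,
                      const StubEntry& stub_entry,
                      bool checks_proven_redundant) {
  const Code::EntryKind kind = checks_proven_redundant
                                   ? Code::EntryKind::kUnchecked
                                   : Code::EntryKind::kNormal;
  // Loads Code::entry_point_offset(kind) from the target and emits blx.
  assembler->BranchLinkPatchable(stub_entry, kind);
}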

assembler_arm.h

Lines changed: 14 additions & 5 deletions
@@ -696,19 +696,28 @@ class Assembler : public ValueObject {
 
   void BranchLink(const StubEntry& stub_entry,
                   Patchability patchable = kNotPatchable);
-  void BranchLink(const Code& code, Patchability patchable);
+  void BranchLink(const Code& code,
+                  Patchability patchable,
+                  Code::EntryKind entry_kind = Code::EntryKind::kNormal);
   void BranchLinkToRuntime();
 
   void CallNullErrorShared(bool save_fpu_registers);
 
   // Branch and link to an entry address. Call sequence can be patched.
-  void BranchLinkPatchable(const StubEntry& stub_entry);
-  void BranchLinkPatchable(const Code& code);
+  void BranchLinkPatchable(
+      const StubEntry& stub_entry,
+      Code::EntryKind entry_kind = Code::EntryKind::kNormal);
+
+  void BranchLinkPatchable(
+      const Code& code,
+      Code::EntryKind entry_kind = Code::EntryKind::kNormal);
 
   // Emit a call that shares its object pool entries with other calls
   // that have the same equivalence marker.
-  void BranchLinkWithEquivalence(const StubEntry& stub_entry,
-                                 const Object& equivalence);
+  void BranchLinkWithEquivalence(
+      const StubEntry& stub_entry,
+      const Object& equivalence,
+      Code::EntryKind entry_kind = Code::EntryKind::kNormal);
 
   // Branch and link to [base + offset]. Call sequence is never patched.
   void BranchLinkOffset(Register base, int32_t offset);
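Because every new parameter defaults to Code::EntryKind::kNormal, existing ARM call sites keep compiling and behaving exactly as before. A small hypothetical example:

// Equivalent after this change: the one-argument form still targets the
// normal (checked) entry point.
assembler.BranchLinkPatchable(stub_entry);
assembler.BranchLinkPatchable(stub_entry, Code::EntryKind::kNormal);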

assembler_x64.cc

Lines changed: 6 additions & 4 deletions
@@ -61,26 +61,28 @@ void Assembler::call(const ExternalLabel* label) {
   call(TMP);
 }
 
-void Assembler::CallPatchable(const StubEntry& stub_entry) {
+void Assembler::CallPatchable(const StubEntry& stub_entry,
+                              Code::EntryKind entry_kind) {
   ASSERT(constant_pool_allowed());
   const Code& target = Code::ZoneHandle(stub_entry.code());
   intptr_t call_start = buffer_.GetPosition();
   const intptr_t idx = object_pool_wrapper_.AddObject(target, kPatchable);
   const int32_t offset = ObjectPool::element_offset(idx);
   LoadWordFromPoolOffset(CODE_REG, offset - kHeapObjectTag);
-  movq(TMP, FieldAddress(CODE_REG, Code::entry_point_offset()));
+  movq(TMP, FieldAddress(CODE_REG, Code::entry_point_offset(entry_kind)));
   call(TMP);
   ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
 }
 
 void Assembler::CallWithEquivalence(const StubEntry& stub_entry,
-                                    const Object& equivalence) {
+                                    const Object& equivalence,
+                                    Code::EntryKind entry_kind) {
   ASSERT(constant_pool_allowed());
   const Code& target = Code::ZoneHandle(stub_entry.code());
   const intptr_t idx = object_pool_wrapper_.FindObject(target, equivalence);
   const int32_t offset = ObjectPool::element_offset(idx);
   LoadWordFromPoolOffset(CODE_REG, offset - kHeapObjectTag);
-  movq(TMP, FieldAddress(CODE_REG, Code::entry_point_offset()));
+  movq(TMP, FieldAddress(CODE_REG, Code::entry_point_offset(entry_kind)));
   call(TMP);
 }
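The x64 path mirrors the ARM one: the entry kind only changes which entry-point field is read from the target Code object before the indirect call. A hedged usage sketch follows; the wrapper and the kUnchecked enumerator are assumptions, not part of this diff.

// Hypothetical call site (not from this commit): a call that shares its
// object pool slot with others carrying the same equivalence marker can now
// also target the unchecked entry of the stub's Code object.
void EmitSharedCall(Assembler* assembler,
                    const StubEntry& stub_entry,
                    const Object& equivalence,
                    bool use_unchecked_entry) {
  assembler->CallWithEquivalence(stub_entry, equivalence,
                                 use_unchecked_entry
                                     ? Code::EntryKind::kUnchecked
                                     : Code::EntryKind::kNormal);
}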

assembler_x64.h

Lines changed: 7 additions & 3 deletions
@@ -692,16 +692,20 @@ class Assembler : public ValueObject {
   void JmpPatchable(const StubEntry& stub_entry, Register pp);
   void Jmp(const StubEntry& stub_entry, Register pp = PP);
   void J(Condition condition, const StubEntry& stub_entry, Register pp);
-  void CallPatchable(const StubEntry& stub_entry);
+  void CallPatchable(const StubEntry& stub_entry,
+                     Code::EntryKind entry_kind = Code::EntryKind::kNormal);
   void Call(const StubEntry& stub_entry);
   void CallToRuntime();
 
   void CallNullErrorShared(bool save_fpu_registers);
 
   // Emit a call that shares its object pool entries with other calls
   // that have the same equivalence marker.
-  void CallWithEquivalence(const StubEntry& stub_entry,
-                           const Object& equivalence);
+  void CallWithEquivalence(
+      const StubEntry& stub_entry,
+      const Object& equivalence,
+      Code::EntryKind entry_kind = Code::EntryKind::kNormal);
+
   // Unaware of write barrier (use StoreInto* methods for storing to objects).
   // TODO(koda): Add StackAddress/HeapAddress types to prevent misuse.
   void StoreObject(const Address& dst, const Object& obj);
