@@ -38,31 +38,144 @@ DisplayList::DisplayList(DisplayListStorage&& storage,
       op_count_(op_count),
       nested_byte_count_(nested_byte_count),
       nested_op_count_(nested_op_count),
+      unique_id_(next_unique_id()),
       bounds_(bounds),
       can_apply_group_opacity_(can_apply_group_opacity),
-      rtree_(std::move(rtree)) {
+      rtree_(std::move(rtree)) {}
+
+DisplayList::~DisplayList() {
+  uint8_t* ptr = storage_.get();
+  DisposeOps(ptr, ptr + byte_count_);
+}
+
+uint32_t DisplayList::next_unique_id() {
   static std::atomic<uint32_t> next_id{1};
+  uint32_t id;
   do {
-    unique_id_ = next_id.fetch_add(+1, std::memory_order_relaxed);
-  } while (unique_id_ == 0);
+    id = next_id.fetch_add(+1, std::memory_order_relaxed);
+  } while (id == 0);
+  return id;
 }
 
-DisplayList::~DisplayList() {
+class Culler {
+ public:
+  virtual bool init(DispatchContext& context) = 0;
+  virtual void update(DispatchContext& context) = 0;
+};
+class NopCuller : public Culler {
+ public:
+  static NopCuller instance;
+
+  bool init(DispatchContext& context) override {
+    // Setting next_render_index to 0 means that
+    // all rendering ops will be at or after that
+    // index so they will execute and all restore
+    // indices will be after it as well so all
+    // clip and transform operations will execute.
+    context.next_render_index = 0;
+    return true;
+  }
+  void update(DispatchContext& context) override {}
+};
+NopCuller NopCuller::instance = NopCuller();
+class VectorCuller : public Culler {
+ public:
+  VectorCuller(const DlRTree* rtree, const std::vector<int>& rect_indices)
+      : rtree_(rtree), cur_(rect_indices.begin()), end_(rect_indices.end()) {}
+
+  bool init(DispatchContext& context) override {
+    if (cur_ < end_) {
+      context.next_render_index = rtree_->id(*cur_++);
+      return true;
+    } else {
+      // Setting next_render_index to MAX_INT means that
+      // all rendering ops will be "before" that index and
+      // they will skip themselves and all clip and transform
+      // ops will see that the next render index is not
+      // before the next restore index (even if both are MAX_INT)
+      // and so they will also not execute.
+      // None of this really matters because returning false
+      // here should cause the Dispatch operation to abort,
+      // but this value is conceptually correct if that short
+      // circuit optimization isn't used.
+      context.next_render_index = std::numeric_limits<int>::max();
+      return false;
+    }
+  }
+  void update(DispatchContext& context) override {
+    if (++context.cur_index > context.next_render_index) {
+      while (cur_ < end_) {
+        context.next_render_index = rtree_->id(*cur_++);
+        if (context.next_render_index >= context.cur_index) {
+          // It should be rare that we have duplicate indices
+          // but if we do, then having a while loop is a cheap
+          // insurance for those cases.
+          // The main cause of duplicate indices is when a
+          // DrawDisplayListOp was added to this DisplayList and
+          // both are computing an R-Tree, in which case the
+          // builder method will forward all of the child
+          // DisplayList's rects to this R-Tree with the same
+          // op_index.
+          return;
+        }
+      }
+      context.next_render_index = std::numeric_limits<int>::max();
+    }
+  }
+
+ private:
+  const DlRTree* rtree_;
+  std::vector<int>::const_iterator cur_;
+  std::vector<int>::const_iterator end_;
+};
+
+void DisplayList::Dispatch(Dispatcher& ctx) const {
   uint8_t* ptr = storage_.get();
-  DisposeOps(ptr, ptr + byte_count_);
+  Dispatch(ctx, ptr, ptr + byte_count_, NopCuller::instance);
+}
+void DisplayList::Dispatch(Dispatcher& ctx, const SkRect& cull_rect) const {
+  if (cull_rect.isEmpty()) {
+    return;
+  }
+  if (cull_rect.contains(bounds())) {
+    Dispatch(ctx);
+    return;
+  }
+  const DlRTree* rtree = this->rtree().get();
+  FML_DCHECK(rtree != nullptr);
+  if (rtree == nullptr) {
+    FML_LOG(ERROR) << "dispatched with culling rect on DL with no rtree";
+    Dispatch(ctx);
+    return;
+  }
+  uint8_t* ptr = storage_.get();
+  std::vector<int> rect_indices;
+  rtree->search(cull_rect, &rect_indices);
+  VectorCuller culler(rtree, rect_indices);
+  Dispatch(ctx, ptr, ptr + byte_count_, culler);
 }
 
 void DisplayList::Dispatch(Dispatcher& dispatcher,
                            uint8_t* ptr,
-                           uint8_t* end) const {
+                           uint8_t* end,
+                           Culler& culler) const {
+  DispatchContext context = {
+      .dispatcher = dispatcher,
+      .cur_index = 0,
+      // next_render_index will be initialized by culler.init()
+      .next_restore_index = std::numeric_limits<int>::max(),
+  };
+  if (!culler.init(context)) {
+    return;
+  }
   while (ptr < end) {
     auto op = reinterpret_cast<const DLOp*>(ptr);
     ptr += op->size;
     FML_DCHECK(ptr <= end);
     switch (op->type) {
-#define DL_OP_DISPATCH(name)                                \
-  case DisplayListOpType::k##name:                          \
-    static_cast<const name##Op*>(op)->dispatch(dispatcher); \
+#define DL_OP_DISPATCH(name)                             \
+  case DisplayListOpType::k##name:                       \
+    static_cast<const name##Op*>(op)->dispatch(context); \
     break;
 
       FOR_EACH_DISPLAY_LIST_OP(DL_OP_DISPATCH)
@@ -73,6 +186,7 @@ void DisplayList::Dispatch(Dispatcher& dispatcher,
         FML_DCHECK(false);
         return;
     }
+    culler.update(context);
   }
 }
 
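Note on the DispatchContext used above: its definition is not part of this section. The following is a minimal sketch, assuming only the fields the diff actually touches (dispatcher, cur_index, next_render_index, next_restore_index), plus an illustrative check that a rendering op's dispatch(context) could perform to skip itself when culled; the layout and the ShouldRender helper are assumptions, not the engine's declarations.

class Dispatcher;  // receiver interface, declared elsewhere in the engine

// Sketch only: field names are taken from the diff, the layout is assumed.
struct DispatchContext {
  Dispatcher& dispatcher;   // receiver of the op callbacks
  int cur_index;            // index of the op currently being dispatched
  int next_render_index;    // next op index the active Culler wants rendered
  int next_restore_index;   // index of the restore closing the current save
};

// Illustrative culling test: a rendering op forwards itself only when the
// culler has requested an op at or before its own index; otherwise it lies
// outside the cull rect and does nothing.
inline bool ShouldRender(const DispatchContext& context) {
  return context.cur_index >= context.next_render_index;
}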
@@ -172,12 +286,20 @@ void DisplayList::RenderTo(DisplayListBuilder* builder,
   if (!builder) {
     return;
   }
-  Dispatch(*builder);
+  if (has_rtree()) {
+    Dispatch(*builder, builder->getLocalClipBounds());
+  } else {
+    Dispatch(*builder);
+  }
 }
 
 void DisplayList::RenderTo(SkCanvas* canvas, SkScalar opacity) const {
   DisplayListCanvasDispatcher dispatcher(canvas, opacity);
-  Dispatch(dispatcher);
+  if (has_rtree()) {
+    Dispatch(dispatcher, canvas->getLocalClipBounds());
+  } else {
+    Dispatch(dispatcher);
+  }
 }
 
 bool DisplayList::Equals(const DisplayList* other) const {
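Usage note (not part of the change itself): the culled Dispatch overload only helps when the DisplayList was recorded with an R-Tree, which is why both RenderTo variants check has_rtree() first. A hypothetical caller replaying a dirty region, using only the entry points visible in this diff, might look like:

// Hypothetical helper, not engine code; relies only on has_rtree(),
// Dispatch(Dispatcher&), and Dispatch(Dispatcher&, const SkRect&).
void ReplayDirtyRegion(const DisplayList& list,
                       Dispatcher& dispatcher,
                       const SkRect& dirty_region) {
  if (list.has_rtree()) {
    // Culled replay: only ops whose bounds intersect dirty_region execute.
    list.Dispatch(dispatcher, dirty_region);
  } else {
    // No bounds were recorded, so fall back to replaying every op.
    list.Dispatch(dispatcher);
  }
}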
0 commit comments