@@ -164,15 +164,31 @@ gc_decref(PyObject *op)
 static void
 disable_deferred_refcounting(PyObject *op)
 {
-    if (_PyObject_HasDeferredRefcount(op)) {
-        op->ob_gc_bits &= ~_PyGC_BITS_DEFERRED;
-        op->ob_ref_shared -= _Py_REF_SHARED(_Py_REF_DEFERRED, 0);
-
-        if (PyType_Check(op)) {
-            // Disable thread-local refcounting for heap types
-            PyTypeObject *type = (PyTypeObject *)op;
-            if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
-                _PyType_ReleaseId((PyHeapTypeObject *)op);
+    if (!_PyObject_HasDeferredRefcount(op)) {
+        return;
+    }
+
+    op->ob_gc_bits &= ~_PyGC_BITS_DEFERRED;
+    op->ob_ref_shared -= _Py_REF_SHARED(_Py_REF_DEFERRED, 0);
+
+    if (PyType_Check(op)) {
+        // Disable thread-local refcounting for heap types
+        PyTypeObject *type = (PyTypeObject *)op;
+        if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
+            _PyType_ReleaseId((PyHeapTypeObject *)op);
+        }
+    }
+    else if (PyGen_CheckExact(op) || PyCoro_CheckExact(op) || PyAsyncGen_CheckExact(op)) {
+        // Ensure any non-refcounted pointers in locals are converted to
+        // strong references. This ensures that the generator/coroutine is not
+        // freed before its locals.
+        PyGenObject *gen = (PyGenObject *)op;
+        struct _PyInterpreterFrame *frame = &gen->gi_iframe;
+        assert(frame->stackpointer != NULL);
+        for (_PyStackRef *ref = frame->localsplus; ref < frame->stackpointer; ref++) {
+            if (!PyStackRef_IsNull(*ref) && PyStackRef_IsDeferred(*ref)) {
+                // Convert a deferred reference to a strong reference.
+                *ref = PyStackRef_FromPyObjectSteal(PyStackRef_AsPyObjectSteal(*ref));
+            }
         }
     }
 }
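
Taken in isolation, the conversion in the loop above is just tag manipulation plus a refcount increment: a deferred reference is a tagged pointer that owns no refcount, and "converting" it means clearing the tag and taking a real reference. The sketch below models only that idea; object_t, stackref_t, and STACKREF_DEFERRED_BIT are invented stand-ins, not CPython's PyObject, _PyStackRef, or its tag bits, and it assumes pointer alignment leaves bit 0 free for the tag.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

typedef struct { int refcount; } object_t;       /* toy object header */
typedef struct { uintptr_t bits; } stackref_t;   /* toy tagged pointer */

#define STACKREF_DEFERRED_BIT ((uintptr_t)1)

static object_t *stackref_obj(stackref_t ref) {
    return (object_t *)(ref.bits & ~STACKREF_DEFERRED_BIT);
}

static int stackref_is_deferred(stackref_t ref) {
    return (ref.bits & STACKREF_DEFERRED_BIT) != 0;
}

/* Like the loop body above: replace a deferred (non-owning) reference
 * with a strong (owning) one so the slot keeps the object alive. */
static stackref_t stackref_to_strong(stackref_t ref) {
    if (!stackref_is_deferred(ref)) {
        return ref;                  /* already strong: nothing to do */
    }
    object_t *obj = stackref_obj(ref);
    obj->refcount++;                 /* the slot now owns a reference */
    return (stackref_t){ .bits = (uintptr_t)obj };
}

int main(void) {
    object_t obj = { .refcount = 1 };   /* one strong ref held elsewhere */
    stackref_t slot = { .bits = (uintptr_t)&obj | STACKREF_DEFERRED_BIT };

    slot = stackref_to_strong(slot);
    assert(!stackref_is_deferred(slot));
    printf("refcount after conversion: %d\n", obj.refcount);  /* prints 2 */
    return 0;
}

The steal/re-wrap pair in the real loop (PyStackRef_AsPyObjectSteal followed by PyStackRef_FromPyObjectSteal) plays the same role as stackref_to_strong here: the slot's value is rewritten in place so ownership is explicit from then on.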
@@ -313,6 +329,41 @@ gc_visit_heaps(PyInterpreterState *interp, mi_block_visit_fun *visitor,
     return err;
 }
 
+static inline void
+gc_visit_stackref(_PyStackRef stackref)
+{
+    // Note: we MUST check that it is deferred before checking the rest.
+    // Otherwise we might read into invalid memory due to non-deferred references
+    // being dead already.
+    if (PyStackRef_IsDeferred(stackref) && !PyStackRef_IsNull(stackref)) {
+        PyObject *obj = PyStackRef_AsPyObjectBorrow(stackref);
+        if (_PyObject_GC_IS_TRACKED(obj)) {
+            gc_add_refs(obj, 1);
+        }
+    }
+}
+
+// Add 1 to the gc_refs for every deferred reference on each thread's stack.
+static void
+gc_visit_thread_stacks(PyInterpreterState *interp)
+{
+    HEAD_LOCK(&_PyRuntime);
+    for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) {
+        _PyInterpreterFrame *f = p->current_frame;
+        while (f != NULL) {
+            if (f->f_executable != NULL && PyCode_Check(f->f_executable)) {
+                PyCodeObject *co = (PyCodeObject *)f->f_executable;
+                int max_stack = co->co_nlocalsplus + co->co_stacksize;
+                for (int i = 0; i < max_stack; i++) {
+                    gc_visit_stackref(f->localsplus[i]);
+                }
+            }
+            f = f->previous;
+        }
+    }
+    HEAD_UNLOCK(&_PyRuntime);
+}
+
 static void
 merge_queued_objects(_PyThreadStateImpl *tstate, struct collection_state *state)
 {
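
gc_visit_thread_stacks() is a three-level walk: every thread, then each thread's chain of frames via f->previous, then each frame's locals-plus-evaluation-stack slots. Here is a toy version of that shape; frame_t, thread_t, and visit_slot() are invented for illustration, and it deliberately omits the runtime lock and the deferred/GC-tracked checks the real code performs.

#include <stddef.h>
#include <stdio.h>

typedef struct frame {
    struct frame *previous;   /* caller's frame, like f->previous */
    int nslots;               /* models co_nlocalsplus + co_stacksize */
    void *slots[8];           /* locals + evaluation stack */
} frame_t;

typedef struct thread {
    struct thread *next;      /* like PyThreadState's thread list link */
    frame_t *current_frame;
} thread_t;

/* Stand-in for gc_visit_stackref(): count one reference per live slot. */
static void visit_slot(void *obj, size_t *gc_refs) {
    if (obj != NULL) {
        (*gc_refs)++;
    }
}

/* Walk every frame of every thread, visiting each local/stack slot --
 * the same loop structure as gc_visit_thread_stacks(), minus HEAD_LOCK. */
static size_t visit_thread_stacks(thread_t *threads) {
    size_t gc_refs = 0;
    for (thread_t *t = threads; t != NULL; t = t->next) {
        for (frame_t *f = t->current_frame; f != NULL; f = f->previous) {
            for (int i = 0; i < f->nslots; i++) {
                visit_slot(f->slots[i], &gc_refs);
            }
        }
    }
    return gc_refs;
}

int main(void) {
    int a, b;
    frame_t caller = { .previous = NULL, .nslots = 1, .slots = { &a } };
    frame_t callee = { .previous = &caller, .nslots = 2, .slots = { &b, NULL } };
    thread_t main_thread = { .next = NULL, .current_frame = &callee };
    printf("visited references: %zu\n", visit_thread_stacks(&main_thread)); /* 2 */
    return 0;
}

Note the ordering comment in gc_visit_stackref() above: PyStackRef_IsDeferred must be tested first because a non-deferred slot may already point at freed memory, so nothing else about the slot can be safely inspected until the deferred bit confirms the pointer is still valid.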
@@ -617,6 +668,9 @@ deduce_unreachable_heap(PyInterpreterState *interp,
     gc_visit_heaps(interp, &validate_gc_objects, &state->base);
 #endif
 
+    // Visit the thread stacks to account for any deferred references.
+    gc_visit_thread_stacks(interp);
+
     // Transitively mark reachable objects by clearing the
     // _PyGC_BITS_UNREACHABLE flag.
     if (gc_visit_heaps(interp, &mark_heap_visitor, &state->base) < 0) {
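
The ordering matters here: a deferred stack reference is not reflected in ob_refcnt, so the usual "subtract internal references from the refcount" accounting would misclassify an object kept alive only by a frame. A back-of-the-envelope model of that accounting, with illustrative values rather than real fields:

#include <stdio.h>

/* Toy model of the cycle detector's accounting. gc_refs starts at the
 * refcount, every internal reference subtracts one, and this hunk adds
 * one back per deferred stack reference (which the refcount never saw).
 * An object is externally reachable iff the result stays positive. */
int main(void) {
    int refcount = 1;            /* one strong ref, held by a cycle peer */
    int internal_refs = 1;       /* the peer's reference is internal */
    int deferred_stack_refs = 1; /* a frame holds it without a refcount */

    int gc_refs = refcount - internal_refs;   /* 0: looks unreachable! */
    gc_refs += deferred_stack_refs;           /* 1: the stack keeps it alive */

    printf("reachable: %s\n", gc_refs > 0 ? "yes" : "no");
    return 0;
}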
@@ -897,6 +951,24 @@ visit_decref_unreachable(PyObject *op, void *data)
     return 0;
 }
 
+int
+_PyGC_VisitFrameStack(_PyInterpreterFrame *frame, visitproc visit, void *arg)
+{
+    _PyStackRef *ref = _PyFrame_GetLocalsArray(frame);
+    /* locals and stack */
+    for (; ref < frame->stackpointer; ref++) {
+        // This is a bit tricky! We want to ignore deferred references when
+        // computing the incoming references, but otherwise treat them like
+        // regular references.
+        if (PyStackRef_IsDeferred(*ref) &&
+            (visit == visit_decref || visit == visit_decref_unreachable)) {
+            continue;
+        }
+        Py_VISIT(PyStackRef_AsPyObjectBorrow(*ref));
+    }
+    return 0;
+}
+
 // Handle objects that may have resurrected after a call to 'finalize_garbage'.
 static int
 handle_resurrected_objects(struct collection_state *state)
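
The function-pointer comparison is what lets one traversal serve every visitor while hiding deferred references from the two refcount-adjusting passes only. A compact stand-alone model of that dispatch trick; slot_t and both visitors are invented here, mirroring the shape of _PyGC_VisitFrameStack() rather than reproducing it:

#include <stdio.h>

typedef struct { void *obj; int deferred; } slot_t;
typedef int (*visitproc_t)(void *obj, void *arg);

static int visit_count(void *obj, void *arg)  { (void)obj; ++*(int *)arg; return 0; }
static int visit_decref(void *obj, void *arg) { (void)obj; --*(int *)arg; return 0; }

static int visit_frame_slots(slot_t *slots, int n, visitproc_t visit, void *arg) {
    for (int i = 0; i < n; i++) {
        if (slots[i].obj == NULL) {
            continue;
        }
        /* Deferred references carry no refcount, so the decref pass must
         * not "un-count" them; every other visitor still sees them as
         * ordinary edges. The visitor is identified by pointer equality. */
        if (slots[i].deferred && visit == visit_decref) {
            continue;
        }
        int err = visit(slots[i].obj, arg);
        if (err) {
            return err;
        }
    }
    return 0;
}

int main(void) {
    int x, y;
    slot_t slots[] = { { &x, 0 }, { &y, 1 }, { NULL, 0 } };
    int count = 0, refs = 2;
    visit_frame_slots(slots, 3, visit_count, &count);  /* sees both: count=2 */
    visit_frame_slots(slots, 3, visit_decref, &refs);  /* skips deferred: refs=1 */
    printf("counted=%d refs=%d\n", count, refs);
    return 0;
}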