@@ -757,13 +757,17 @@ func scanstack(gp *g, gcw *gcWork) {
 	}
 	if gp._panic != nil {
 		// Panics are always stack allocated.
-		state.putPtr(uintptr(unsafe.Pointer(gp._panic)))
+		state.putPtr(uintptr(unsafe.Pointer(gp._panic)), false)
 	}
 
 	// Find and scan all reachable stack objects.
+	//
+	// The state's pointer queue prioritizes precise pointers over
+	// conservative pointers so that we'll prefer scanning stack
+	// objects precisely.
 	state.buildIndex()
 	for {
-		p := state.getPtr()
+		p, conservative := state.getPtr()
 		if p == 0 {
 			break
 		}
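The new getPtr/putPtr signatures thread a "conservative" flag through the stack scan. Below is a minimal standalone sketch, not the runtime's actual stackScanState, of a two-level queue with this contract: precise pointers drain first, so a stack object reachable both ways is scanned precisely. All names are illustrative.

	package main

	import "fmt"

	type ptrQueue struct {
		precise      []uintptr // pointers with exact type information
		conservative []uintptr // pointer-like values found by conservative scanning
	}

	func (q *ptrQueue) putPtr(p uintptr, conservative bool) {
		if conservative {
			q.conservative = append(q.conservative, p)
		} else {
			q.precise = append(q.precise, p)
		}
	}

	// getPtr returns the next pointer and whether it was queued
	// conservatively. Precise pointers drain first, so an object
	// reached both ways is scanned precisely.
	func (q *ptrQueue) getPtr() (uintptr, bool) {
		if n := len(q.precise); n > 0 {
			p := q.precise[n-1]
			q.precise = q.precise[:n-1]
			return p, false
		}
		if n := len(q.conservative); n > 0 {
			p := q.conservative[n-1]
			q.conservative = q.conservative[:n-1]
			return p, true
		}
		return 0, false
	}

	func main() {
		var q ptrQueue
		q.putPtr(0xc000100010, true)
		q.putPtr(0xc000100020, false)
		p, conservative := q.getPtr()
		fmt.Printf("%#x conservative=%v\n", p, conservative) // the precise pointer comes out first
	}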
@@ -778,7 +782,13 @@ func scanstack(gp *g, gcw *gcWork) {
 		}
 		obj.setType(nil) // Don't scan it again.
 		if stackTraceDebug {
-			println(" live stkobj at", hex(state.stack.lo+uintptr(obj.off)), "of type", t.string())
+			printlock()
+			print(" live stkobj at", hex(state.stack.lo+uintptr(obj.off)), "of type", t.string())
+			if conservative {
+				print(" (conservative)")
+			}
+			println()
+			printunlock()
 		}
 		gcdata := t.gcdata
 		var s *mspan
@@ -796,7 +806,12 @@ func scanstack(gp *g, gcw *gcWork) {
 			gcdata = (*byte)(unsafe.Pointer(s.startAddr))
 		}
 
-		scanblock(state.stack.lo+uintptr(obj.off), t.ptrdata, gcdata, gcw, &state)
+		b := state.stack.lo + uintptr(obj.off)
+		if conservative {
+			scanConservative(b, t.ptrdata, gcdata, gcw, &state)
+		} else {
+			scanblock(b, t.ptrdata, gcdata, gcw, &state)
+		}
 
 		if s != nil {
 			dematerializeGCProg(s)
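This dispatch is where the queue priority pays off: an object popped with conservative == true was reached only through pointer-like values, may already be dead, and so cannot be trusted to hold valid precise pointers; it is scanned defensively. A tiny sketch of the decision, with hypothetical stub scanners standing in for the runtime's:

	package main

	import "fmt"

	type stackObject struct {
		addr    uintptr
		ptrdata uintptr // bytes of the object that can contain pointers
	}

	func scanPrecise(b, n uintptr)        { fmt.Printf("precise scan of [%#x, %#x)\n", b, b+n) }
	func scanDefensively(b, n uintptr)    { fmt.Printf("conservative scan of [%#x, %#x)\n", b, b+n) }

	func scanStackObject(obj stackObject, conservative bool) {
		// Only the pointer-bearing prefix of the object needs scanning.
		if conservative {
			scanDefensively(obj.addr, obj.ptrdata)
		} else {
			scanPrecise(obj.addr, obj.ptrdata)
		}
	}

	func main() {
		scanStackObject(stackObject{addr: 0xc000040010, ptrdata: 24}, true)
	}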
@@ -820,7 +835,7 @@ func scanstack(gp *g, gcw *gcWork) {
 		x.nobj = 0
 		putempty((*workbuf)(unsafe.Pointer(x)))
 	}
-	if state.buf != nil || state.freeBuf != nil {
+	if state.buf != nil || state.cbuf != nil || state.freeBuf != nil {
 		throw("remaining pointer buffers")
 	}
 }
@@ -832,6 +847,49 @@ func scanframeworker(frame *stkframe, state *stackScanState, gcw *gcWork) {
 		print("scanframe ", funcname(frame.fn), "\n")
 	}
 
+	isAsyncPreempt := frame.fn.valid() && frame.fn.funcID == funcID_asyncPreempt
+	if state.conservative || isAsyncPreempt {
+		if debugScanConservative {
+			println("conservatively scanning function", funcname(frame.fn), "at PC", hex(frame.continpc))
+		}
+
+		// Conservatively scan the frame. Unlike the precise
+		// case, this includes the outgoing argument space
+		// since we may have stopped while this function was
+		// setting up a call.
+		//
+		// TODO: We could narrow this down if the compiler
+		// produced a single map per function of stack slots
+		// and registers that ever contain a pointer.
+		if frame.varp != 0 {
+			size := frame.varp - frame.sp
+			if size > 0 {
+				scanConservative(frame.sp, size, nil, gcw, state)
+			}
+		}
+
+		// Scan arguments to this frame.
+		if frame.arglen != 0 {
+			// TODO: We could pass the entry argument map
+			// to narrow this down further.
+			scanConservative(frame.argp, frame.arglen, nil, gcw, state)
+		}
+
+		if isAsyncPreempt {
+			// This function's frame contained the
+			// registers for the asynchronously stopped
			// parent frame. Scan the parent
+			// conservatively.
+			state.conservative = true
+		} else {
+			// We only wanted to scan those two frames
+			// conservatively. Clear the flag for future
+			// frames.
+			state.conservative = false
+		}
+		return
+	}
+
 	locals, args, objs := getStackMap(frame, &state.cache, false)
 
 	// Scan local variables if stack frame has been allocated.
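The conservative path scans two regions of the frame: [frame.sp, frame.varp), which covers locals, spill slots, and the outgoing argument area, and [frame.argp, frame.argp+frame.arglen) for the incoming arguments. Here is a standalone sketch of those extents using a hypothetical stkframe stand-in; the addresses in main are made up.

	package main

	import "fmt"

	type stkframe struct {
		sp, varp, argp, arglen uintptr
	}

	func conservativeExtents(frame stkframe) (ranges [][2]uintptr) {
		// Everything from sp up to varp: locals, spill slots, and
		// the outgoing argument space (the function may have been
		// stopped while setting up a call).
		if frame.varp != 0 && frame.varp > frame.sp {
			ranges = append(ranges, [2]uintptr{frame.sp, frame.varp})
		}
		// The incoming arguments to this frame.
		if frame.arglen != 0 {
			ranges = append(ranges, [2]uintptr{frame.argp, frame.argp + frame.arglen})
		}
		return ranges
	}

	func main() {
		f := stkframe{sp: 0xc000040000, varp: 0xc000040060, argp: 0xc000040070, arglen: 16}
		for _, r := range conservativeExtents(f) {
			fmt.Printf("scan [%#x, %#x)\n", r[0], r[1])
		}
	}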
@@ -1104,7 +1162,7 @@ func scanblock(b0, n0 uintptr, ptrmask *uint8, gcw *gcWork, stk *stackScanState)
 			if obj, span, objIndex := findObject(p, b, i); obj != 0 {
 				greyobject(obj, b, i, span, gcw, objIndex)
 			} else if stk != nil && p >= stk.stack.lo && p < stk.stack.hi {
-				stk.putPtr(p)
+				stk.putPtr(p, false)
 			}
 		}
 	}
@@ -1214,6 +1272,101 @@ func scanobject(b uintptr, gcw *gcWork) {
 	gcw.scanWork += int64(i)
 }
 
+// scanConservative scans block [b, b+n) conservatively, treating any
+// pointer-like value in the block as a pointer.
+//
+// If ptrmask != nil, only words that are marked in ptrmask are
+// considered as potential pointers.
+//
+// If state != nil, it's assumed that [b, b+n) is a block in the stack
+// and may contain pointers to stack objects.
+func scanConservative(b, n uintptr, ptrmask *uint8, gcw *gcWork, state *stackScanState) {
+	if debugScanConservative {
+		printlock()
+		print("conservatively scanning [", hex(b), ",", hex(b+n), ")\n")
+		hexdumpWords(b, b+n, func(p uintptr) byte {
+			if ptrmask != nil {
+				word := (p - b) / sys.PtrSize
+				bits := *addb(ptrmask, word/8)
+				if (bits>>(word%8))&1 == 0 {
+					return '$'
+				}
+			}
+
+			val := *(*uintptr)(unsafe.Pointer(p))
+			if state != nil && state.stack.lo <= val && val < state.stack.hi {
+				return '@'
+			}
+
+			span := spanOfHeap(val)
+			if span == nil {
+				return ' '
+			}
+			idx := span.objIndex(val)
+			if span.isFree(idx) {
+				return ' '
+			}
+			return '*'
+		})
+		printunlock()
+	}
+
+	for i := uintptr(0); i < n; i += sys.PtrSize {
+		if ptrmask != nil {
+			word := i / sys.PtrSize
+			bits := *addb(ptrmask, word/8)
+			if bits == 0 {
+				// Skip 8 words (the loop increment will do the 8th)
+				//
+				// This must be the first time we've
+				// seen this word of ptrmask, so i
+				// must be 8-word-aligned, but check
+				// our reasoning just in case.
+				if i%(sys.PtrSize*8) != 0 {
+					throw("misaligned mask")
+				}
+				i += sys.PtrSize*8 - sys.PtrSize
+				continue
+			}
+			if (bits>>(word%8))&1 == 0 {
+				continue
+			}
+		}
+
+		val := *(*uintptr)(unsafe.Pointer(b + i))
+
+		// Check if val points into the stack.
+		if state != nil && state.stack.lo <= val && val < state.stack.hi {
+			// val may point to a stack object. This
+			// object may be dead from last cycle and
+			// hence may contain pointers to unallocated
+			// objects, but unlike heap objects we can't
+			// tell if it's already dead. Hence, if all
+			// pointers to this object are from
+			// conservative scanning, we have to scan it
+			// defensively, too.
+			state.putPtr(val, true)
+			continue
+		}
+
+		// Check if val points to a heap span.
+		span := spanOfHeap(val)
+		if span == nil {
+			continue
+		}
+
+		// Check if val points to an allocated object.
+		idx := span.objIndex(val)
+		if span.isFree(idx) {
+			continue
+		}
+
+		// val points to an allocated object. Mark it.
+		obj := span.base() + idx*span.elemsize
+		greyobject(obj, b, i, span, gcw, idx)
+	}
+}
+
 // Shade the object if it isn't already.
 // The object is not nil and known to be in the heap.
 // Preemption must be disabled.
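The ptrmask handling in scanConservative packs one bit per pointer-sized word, eight words per mask byte, and skips a whole byte's worth of words at once when the byte is zero. A standalone sketch of just that indexing, with ptrSize standing in for sys.PtrSize and a made-up mask:

	package main

	import "fmt"

	const ptrSize = 8 // 64-bit word size, as sys.PtrSize on amd64

	// maybePointerWords returns the byte offsets of the words in an
	// n-byte block whose ptrmask bit is set. The runtime additionally
	// throws if a zero mask byte is seen at a misaligned offset; the
	// skip here preserves 8-word alignment, so that cannot happen.
	func maybePointerWords(n uintptr, ptrmask []byte) []uintptr {
		var offsets []uintptr
		for i := uintptr(0); i < n; i += ptrSize {
			word := i / ptrSize
			bits := ptrmask[word/8]
			if bits == 0 {
				// All eight words covered by this mask byte
				// are non-pointers; skip them (the loop
				// increment supplies the 8th).
				i += ptrSize*8 - ptrSize
				continue
			}
			if (bits>>(word%8))&1 == 0 {
				continue // this word is known not to hold a pointer
			}
			offsets = append(offsets, i)
		}
		return offsets
	}

	func main() {
		// 16 words, of which words 1 and 9 may hold pointers.
		mask := []byte{0b0000_0010, 0b0000_0010}
		fmt.Println(maybePointerWords(16*ptrSize, mask)) // [8 72]
	}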