Skip to content

Commit 1c5fa40

Browse files
authored
cmd/devp2p: reduce output of node crawler (#26674)
Our discovery crawler spits out a huge amount of logs, most of which is pretty non-interesting. This change moves the very verbose output to Debug, and adds an 8-second status log message summarizing crawl progress (nodes added, updated, removed, and ignored).
1 parent 03585ed commit 1c5fa40

File tree

1 file changed

+45
-8
lines changed

1 file changed

+45
-8
lines changed

cmd/devp2p/crawl.go

Lines changed: 45 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,14 @@ type crawler struct {
3636
revalidateInterval time.Duration
3737
}
3838

39+
const (
40+
nodeRemoved = iota
41+
nodeSkipRecent
42+
nodeSkipIncompat
43+
nodeAdded
44+
nodeUpdated
45+
)
46+
3947
type resolver interface {
4048
RequestENR(*enode.Node) (*enode.Node, error)
4149
}
@@ -63,19 +71,39 @@ func (c *crawler) run(timeout time.Duration) nodeSet {
6371
var (
6472
timeoutTimer = time.NewTimer(timeout)
6573
timeoutCh <-chan time.Time
74+
statusTicker = time.NewTicker(time.Second * 8)
6675
doneCh = make(chan enode.Iterator, len(c.iters))
6776
liveIters = len(c.iters)
6877
)
6978
defer timeoutTimer.Stop()
79+
defer statusTicker.Stop()
7080
for _, it := range c.iters {
7181
go c.runIterator(doneCh, it)
7282
}
7383

84+
var (
85+
added int
86+
updated int
87+
skipped int
88+
recent int
89+
removed int
90+
)
7491
loop:
7592
for {
7693
select {
7794
case n := <-c.ch:
78-
c.updateNode(n)
95+
switch c.updateNode(n) {
96+
case nodeSkipIncompat:
97+
skipped++
98+
case nodeSkipRecent:
99+
recent++
100+
case nodeRemoved:
101+
removed++
102+
case nodeAdded:
103+
added++
104+
default:
105+
updated++
106+
}
79107
case it := <-doneCh:
80108
if it == c.inputIter {
81109
// Enable timeout when we're done revalidating the input nodes.
@@ -89,6 +117,10 @@ loop:
89117
}
90118
case <-timeoutCh:
91119
break loop
120+
case <-statusTicker.C:
121+
log.Info("Crawling in progress",
122+
"added", added, "updated", updated, "removed", removed,
123+
"ignored(recent)", recent, "ignored(incompatible)", skipped)
92124
}
93125
}
94126

@@ -113,22 +145,25 @@ func (c *crawler) runIterator(done chan<- enode.Iterator, it enode.Iterator) {
113145
}
114146
}
115147

116-
func (c *crawler) updateNode(n *enode.Node) {
148+
// updateNode updates the info about the given node, and returns a status
149+
// about what changed
150+
func (c *crawler) updateNode(n *enode.Node) int {
117151
node, ok := c.output[n.ID()]
118152

119153
// Skip validation of recently-seen nodes.
120154
if ok && time.Since(node.LastCheck) < c.revalidateInterval {
121-
return
155+
return nodeSkipRecent
122156
}
123157

124158
// Request the node record.
125159
nn, err := c.disc.RequestENR(n)
126160
node.LastCheck = truncNow()
161+
status := nodeUpdated
127162
if err != nil {
128163
if node.Score == 0 {
129164
// Node doesn't implement EIP-868.
130165
log.Debug("Skipping node", "id", n.ID())
131-
return
166+
return nodeSkipIncompat
132167
}
133168
node.Score /= 2
134169
} else {
@@ -137,18 +172,20 @@ func (c *crawler) updateNode(n *enode.Node) {
137172
node.Score++
138173
if node.FirstResponse.IsZero() {
139174
node.FirstResponse = node.LastCheck
175+
status = nodeAdded
140176
}
141177
node.LastResponse = node.LastCheck
142178
}
143179

144180
// Store/update node in output set.
145181
if node.Score <= 0 {
146-
log.Info("Removing node", "id", n.ID())
182+
log.Debug("Removing node", "id", n.ID())
147183
delete(c.output, n.ID())
148-
} else {
149-
log.Info("Updating node", "id", n.ID(), "seq", n.Seq(), "score", node.Score)
150-
c.output[n.ID()] = node
184+
return nodeRemoved
151185
}
186+
log.Debug("Updating node", "id", n.ID(), "seq", n.Seq(), "score", node.Score)
187+
c.output[n.ID()] = node
188+
return status
152189
}
153190

154191
func truncNow() time.Time {

0 commit comments

Comments
 (0)