Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions sentry_sdk/_log_batcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,18 @@

class LogBatcher:
MAX_LOGS_BEFORE_FLUSH = 100
MAX_LOGS_BEFORE_DROP = 1_000
FLUSH_WAIT_TIME = 5.0

def __init__(
self,
capture_func, # type: Callable[[Envelope], None]
record_lost_func, # type: Callable[..., None]
):
# type: (...) -> None
self._log_buffer = [] # type: List[Log]
self._capture_func = capture_func
self._record_lost_func = record_lost_func
self._running = True
self._lock = threading.Lock()

Expand Down Expand Up @@ -79,6 +82,14 @@ def add(
return None

with self._lock:
if len(self._log_buffer) >= self.MAX_LOGS_BEFORE_DROP:
self._record_lost_func(
reason="queue_overflow",
data_category="log_item",
quantity=1,
)
return None

self._log_buffer.append(log)
if len(self._log_buffer) >= self.MAX_LOGS_BEFORE_FLUSH:
self._flush_event.set()
Expand Down
5 changes: 4 additions & 1 deletion sentry_sdk/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -386,7 +386,10 @@ def _record_lost_event(
if has_logs_enabled(self.options):
from sentry_sdk._log_batcher import LogBatcher

self.log_batcher = LogBatcher(capture_func=_capture_envelope)
self.log_batcher = LogBatcher(
capture_func=_capture_envelope,
record_lost_func=_record_lost_event,
)

self.metrics_batcher = None
if has_metrics_enabled(self.options):
Expand Down
25 changes: 25 additions & 0 deletions tests/test_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -446,3 +446,28 @@ def test_logs_with_literal_braces(
assert logs[0]["attributes"]["sentry.message.template"] == message
else:
assert "sentry.message.template" not in logs[0]["attributes"]


@minimum_python_37
def test_batcher_drops_logs(sentry_init, monkeypatch):
    """Logs beyond the batcher's hard limit are dropped and reported as lost.

    Flushing is disabled so the internal buffer can only grow; once it
    reaches ``MAX_LOGS_BEFORE_DROP`` (1_000), every additional log must be
    discarded and reported via ``_record_lost_func`` with reason
    ``queue_overflow`` instead of being buffered.
    """
    sentry_init(enable_logs=True)
    client = sentry_sdk.get_client()

    # Disable flushing so the buffer fills all the way to the drop limit.
    def no_op_flush():
        pass

    monkeypatch.setattr(client.log_batcher, "_flush", no_op_flush)

    # Capture every lost-event report the batcher emits.
    lost_event_calls = []

    def record_lost_event(reason, data_category=None, item=None, *, quantity=1):
        lost_event_calls.append((reason, data_category, item, quantity))

    monkeypatch.setattr(client.log_batcher, "_record_lost_func", record_lost_event)

    for _ in range(1_005):  # 5 logs over the hard limit of 1_000
        sentry_sdk.logger.info("This is a 'info' log...")

    # Exactly the 5 overflow logs are reported lost — one report per dropped
    # log, each counted as a single "log_item" with no attached envelope item.
    assert len(lost_event_calls) == 5
    for lost_event_call in lost_event_calls:
        assert lost_event_call == ("queue_overflow", "log_item", None, 1)