|
5 | 5 | import random
|
6 | 6 | import re
|
7 | 7 | import string
|
| 8 | +from ast import Dict |
8 | 9 | from collections import namedtuple
|
9 | 10 | from datetime import datetime, timezone
|
10 |
| -from typing import Iterable |
| 11 | +from typing import Any, Callable, Iterable, List, Optional, Union |
11 | 12 |
|
12 | 13 | import pytest
|
13 | 14 |
|
14 | 15 | from aws_lambda_powertools import Logger, Tracer
|
15 | 16 | from aws_lambda_powertools.logging import correlation_paths
|
16 | 17 | from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError
|
17 |
| -from aws_lambda_powertools.logging.formatter import BasePowertoolsFormatter |
| 18 | +from aws_lambda_powertools.logging.formatter import BasePowertoolsFormatter, LambdaPowertoolsFormatter |
18 | 19 | from aws_lambda_powertools.logging.logger import set_package_logger
|
19 | 20 | from aws_lambda_powertools.shared import constants
|
20 | 21 | from aws_lambda_powertools.utilities.data_classes import S3Event, event_source
|
@@ -564,6 +565,63 @@ def handler(event, context):
|
564 | 565 | assert logger.get_correlation_id() is None
|
565 | 566 |
|
566 | 567 |
|
def test_logger_custom_powertools_formatter_clear_state(stdout, service_name, lambda_context):
    # GIVEN a subclass of LambdaPowertoolsFormatter that forwards every
    # constructor argument to the parent, mirroring a user-provided formatter.
    # NOTE: annotations use the builtin `dict` on purpose — the file-level
    # `from ast import Dict` binds an AST node class, not a mapping type.
    class CustomFormatter(LambdaPowertoolsFormatter):
        def __init__(
            self,
            json_serializer: Optional[Callable[[dict], str]] = None,
            json_deserializer: Optional[Callable[[Union[dict, str, bool, int, float]], str]] = None,
            json_default: Optional[Callable[[Any], Any]] = None,
            datefmt: Optional[str] = None,
            use_datetime_directive: bool = False,
            log_record_order: Optional[List[str]] = None,
            utc: bool = False,
            **kwargs,
        ):
            # Forward by keyword so a future reordering of the parent
            # signature cannot silently swap argument meanings.
            super().__init__(
                json_serializer=json_serializer,
                json_deserializer=json_deserializer,
                json_default=json_default,
                datefmt=datefmt,
                use_datetime_directive=use_datetime_directive,
                log_record_order=log_record_order,
                utc=utc,
                **kwargs,
            )

    custom_formatter = CustomFormatter()

    # GIVEN a Logger is initialized with a custom formatter
    logger = Logger(service=service_name, stream=stdout, logger_formatter=custom_formatter)

    # WHEN a lambda function is decorated with logger
    # and state is to be cleared in the next invocation
    @logger.inject_lambda_context(clear_state=True)
    def handler(event, context):
        if event.get("add_key"):
            logger.append_keys(my_key="value")
        logger.info("Hello")

    handler({"add_key": True}, lambda_context)
    handler({}, lambda_context)

    lambda_context_keys = (
        "function_name",
        "function_memory_size",
        "function_arn",
        "function_request_id",
    )

    first_log, second_log = capture_multiple_logging_statements_output(stdout)

    # THEN my_key should only be present in the first invocation's log
    # and lambda contextual info should be present in both logs
    assert "my_key" in first_log
    assert "my_key" not in second_log
    assert all(k in first_log for k in lambda_context_keys)
    assert all(k in second_log for k in lambda_context_keys)

567 | 625 | def test_logger_custom_handler(lambda_context, service_name, tmp_path):
|
568 | 626 | # GIVEN a Logger is initialized with a FileHandler
|
569 | 627 | log_file = tmp_path / "log.json"
|
|
0 commit comments