diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py
index 2c095782e73..d481f5396db 100644
--- a/aws_lambda_powertools/metrics/metrics.py
+++ b/aws_lambda_powertools/metrics/metrics.py
@@ -2,6 +2,7 @@
import json
import logging
import os
+import warnings
from typing import Any, Callable
from aws_lambda_powertools.metrics.base import MetricManager
@@ -82,7 +83,12 @@ def clear_metrics(self):
self.metric_set.clear()
self.dimension_set.clear()
- def log_metrics(self, lambda_handler: Callable[[Any, Any], Any] = None, capture_cold_start_metric: bool = False):
+ def log_metrics(
+ self,
+ lambda_handler: Callable[[Any, Any], Any] = None,
+ capture_cold_start_metric: bool = False,
+ raise_on_empty_metrics: bool = False,
+ ):
"""Decorator to serialize and publish metrics at the end of a function execution.
Be aware that the log_metrics decorator **does call** the decorated function (e.g. lambda_handler).
@@ -102,6 +108,10 @@ def handler(event, context)
----------
lambda_handler : Callable[[Any, Any], Any], optional
Lambda function handler, by default None
+ capture_cold_start_metric : bool, optional
+ Captures cold start metric, by default False
+ raise_on_empty_metrics : bool, optional
+ Raise exception if no metrics are emitted, by default False
Raises
------
@@ -113,7 +123,11 @@ def handler(event, context)
# Return a partial function with args filled
if lambda_handler is None:
logger.debug("Decorator called with parameters")
- return functools.partial(self.log_metrics, capture_cold_start_metric=capture_cold_start_metric)
+ return functools.partial(
+ self.log_metrics,
+ capture_cold_start_metric=capture_cold_start_metric,
+ raise_on_empty_metrics=raise_on_empty_metrics,
+ )
@functools.wraps(lambda_handler)
def decorate(event, context):
@@ -122,10 +136,13 @@ def decorate(event, context):
if capture_cold_start_metric:
self.__add_cold_start_metric(context=context)
finally:
- metrics = self.serialize_metric_set()
- self.clear_metrics()
- logger.debug("Publishing metrics", {"metrics": metrics})
- print(json.dumps(metrics))
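+                # With the default raise_on_empty_metrics=False, an empty metric set is
+                # skipped with a warning; otherwise serialize_metric_set() validates the
+                # set and raises SchemaValidationError when it is empty.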
+ if not raise_on_empty_metrics and not self.metric_set:
+ warnings.warn("No metrics to publish, skipping")
+ else:
+ metrics = self.serialize_metric_set()
+ self.clear_metrics()
+ logger.debug("Publishing metrics", {"metrics": metrics})
+ print(json.dumps(metrics))
return response
diff --git a/docs/content/core/metrics.mdx b/docs/content/core/metrics.mdx
index bd1de65f88c..23292295f68 100644
--- a/docs/content/core/metrics.mdx
+++ b/docs/content/core/metrics.mdx
@@ -103,15 +103,31 @@ def lambda_handler(evt, ctx):
...
```
-`log_metrics` decorator **validates**, **serializes**, and **flushes** all your metrics. During metrics validation, if any of the following criteria is met, `SchemaValidationError` exception will be raised:
+`log_metrics` decorator **validates**, **serializes**, and **flushes** all your metrics. During metrics validation, if no metrics were added, a warning is emitted and no exception is raised.
-* At least of one Metric and Dimension
+If metrics are provided but any of the following criteria are not met, a `SchemaValidationError` exception will be raised, as shown in the example after this list:
+
+* Minimum of 1 dimension
* Maximum of 9 dimensions
* Namespace is set, and no more than one
* Metric units must be [supported by CloudWatch](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_MetricDatum.html)
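+
+For example, flushing without a namespace fails validation. The snippet below is a minimal sketch; the metric name, value, and service are illustrative:
+
+```python:title=schema_validation_error.py
+from aws_lambda_powertools.metrics import Metrics, MetricUnit
+
+metrics = Metrics(service="booking")  # namespace not set here nor via environment variable
+
+@metrics.log_metrics
+def lambda_handler(evt, ctx):
+    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
+
+# Invoking the handler raises SchemaValidationError at flush time,
+# because no namespace was set
+```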
+If you want to ensure that at least one metric is emitted, you can pass `raise_on_empty_metrics` to the **log_metrics** decorator:
+
+```python:title=lambda_handler.py
+from aws_lambda_powertools.metrics import Metrics
+
+metrics = Metrics()
+
+@metrics.log_metrics(raise_on_empty_metrics=True) # highlight-line
+def lambda_handler(evt, ctx):
+    ...
+```
+
+<Note type="warning">
+  If you expect your function to execute without publishing metrics every time, you can suppress the warning with <strong><code>warnings.filterwarnings("ignore", "No metrics to publish*")</code></strong>.
+</Note><br/>
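+
+For example, a handler that can legitimately finish without emitting metrics might suppress the warning once at import time. This is a minimal sketch; it assumes namespace and service are configured via environment variables as in the earlier examples:
+
+```python:title=suppress_no_metrics_warning.py
+import warnings
+
+from aws_lambda_powertools.metrics import Metrics
+
+# Ignore only the "No metrics to publish, skipping" warning emitted by log_metrics
+warnings.filterwarnings("ignore", "No metrics to publish*")
+
+metrics = Metrics()
+
+@metrics.log_metrics
+def lambda_handler(evt, ctx):
+    ...  # may return without calling add_metric
+```
+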
<Note type="warning">
-  When nesting multiple middlwares, you should use <strong><code>log_metrics</code></strong> as your last decorator wrapping all subsequent ones.
+  When nesting multiple middlewares, you should use <strong><code>log_metrics</code></strong> as your last decorator wrapping all subsequent ones.
</Note>
```python:title=lambda_handler_nested_middlewares.py
@@ -133,6 +149,10 @@ def lambda_handler(evt, ctx):
If you prefer not to use `log_metrics` because you might want to encapsulate additional logic when doing so, you can manually flush and clear metrics as follows:
+
+<Note type="info">
+  Metrics, dimensions and namespace validation still applies.
+</Note><br/>
+
```python:title=manual_metric_serialization.py
import json
from aws_lambda_powertools.metrics import Metrics, MetricUnit
diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py
index 7f3a57c9538..c19ee2b5abd 100644
--- a/tests/functional/test_metrics.py
+++ b/tests/functional/test_metrics.py
@@ -1,4 +1,5 @@
import json
+import warnings
from collections import namedtuple
from typing import Any, Dict, List
@@ -351,13 +352,13 @@ def test_log_no_metrics_error_propagation(capsys, metric, dimension, namespace):
# GIVEN Metrics is initialized
my_metrics = Metrics()
- @my_metrics.log_metrics
+ @my_metrics.log_metrics(raise_on_empty_metrics=True)
def lambda_handler(evt, context):
- # WHEN log_metrics is used despite having no metrics
+        # WHEN log_metrics is used with raise_on_empty_metrics and no metrics have been added
# and the function decorated also raised an exception
raise ValueError("Bubble up")
- # THEN we should first raise SchemaValidationError as the main exception
+    # THEN the raised exception should be SchemaValidationError, not the handler's ValueError
with pytest.raises(SchemaValidationError):
lambda_handler({}, {})
@@ -633,3 +634,19 @@ def lambda_handler(evt, context):
assert "ColdStart" not in output
assert "function_name" not in output
+
+
+def test_log_metrics_decorator_no_metrics(dimensions, namespace):
+ # GIVEN Metrics is initialized
+ my_metrics = Metrics(namespace=namespace["name"], service="test_service")
+
+ # WHEN using the log_metrics decorator and no metrics have been added
+ @my_metrics.log_metrics
+ def lambda_handler(evt, context):
+ pass
+
+    # THEN it should emit a warning instead of raising an exception
+ with warnings.catch_warnings(record=True) as w:
+ lambda_handler({}, {})
+ assert len(w) == 1
+ assert str(w[-1].message) == "No metrics to publish, skipping"