feat(metrics): add Datadog observability provider #2906

Merged 44 commits on Aug 14, 2023
Changes from 9 commits
Commits (44)
c940966
add datadog provider
roger-zhangg Aug 1, 2023
8c21a9b
fix poetry lock
roger-zhangg Aug 1, 2023
98f2f05
merging from develop
leandrodamascena Aug 7, 2023
7602e5e
Refactoring Datadog provider with the new BaseProvider
leandrodamascena Aug 9, 2023
68ed6a3
Adding default tags method
leandrodamascena Aug 9, 2023
7790986
Cleaning tests + adding specific comments
leandrodamascena Aug 9, 2023
4acacb7
Fix small things + improving docstring
leandrodamascena Aug 9, 2023
f60a3ba
Fixing minor bugs
leandrodamascena Aug 9, 2023
ccfc641
Merge branch 'develop' into datadog_provider
leandrodamascena Aug 10, 2023
b681d93
Addressing feedback
leandrodamascena Aug 10, 2023
9785e12
rebasing from upstream
leandrodamascena Aug 10, 2023
64e0bbb
Documentation: adding creating metrics
leandrodamascena Aug 10, 2023
84f38d5
Documentation: adding examples
leandrodamascena Aug 10, 2023
ae95c25
Documentation: fixing highlights
leandrodamascena Aug 10, 2023
cf383ac
Documentation: fixing mypy problems 💀
leandrodamascena Aug 10, 2023
a1405af
merging from develop
leandrodamascena Aug 10, 2023
4850ce5
Merge branch 'develop' into datadog_provider
leandrodamascena Aug 11, 2023
15955e7
Addressing Heitor's feedback + improving DX
leandrodamascena Aug 11, 2023
a1c3754
Fix documentantion and add python3.11 as default runtime
leandrodamascena Aug 11, 2023
228001c
Fix documentantion
leandrodamascena Aug 11, 2023
34e9f08
Merge branch 'develop' into datadog_provider
leandrodamascena Aug 14, 2023
4bdf4ff
Moving internal functions to Provider class
leandrodamascena Aug 14, 2023
d8b84de
Adding more information about partners
leandrodamascena Aug 14, 2023
724a2e7
docs(config): collapse by default given nav size
heitorlessa Aug 14, 2023
5e4593c
docs: fix yaml frontmatter issue
heitorlessa Aug 14, 2023
5e95e56
docs: auto-include abbreviations
heitorlessa Aug 14, 2023
6e841c7
docs(nav): move datadog to its own nav
heitorlessa Aug 14, 2023
0467e50
docs(datadog): provide terminologies; feat cleanup
heitorlessa Aug 14, 2023
7c39ded
docs(metrics): correct typo in terminologies
heitorlessa Aug 14, 2023
ed7a567
shorten word
heitorlessa Aug 14, 2023
eb1ee5d
docs(datadog): shorten install
heitorlessa Aug 14, 2023
de11c5c
docs(datadog): simplify add_metrics
heitorlessa Aug 14, 2023
28673c5
docs(datadog): simplify tags, mention new warning
heitorlessa Aug 14, 2023
7888c38
docs(datadog): cleanup default tags
heitorlessa Aug 14, 2023
340b446
docs(datadog): simplify code snippet
heitorlessa Aug 14, 2023
9085e9e
docs(datadog): move forwarder to advanced; cleanup
heitorlessa Aug 14, 2023
f6f20e8
docs(datadog): cleanup flush
heitorlessa Aug 14, 2023
3ed85ee
docs(datadog): correct typo in cold start
heitorlessa Aug 14, 2023
3b0812f
docs: code annotation, move env vars
heitorlessa Aug 14, 2023
f33a719
docs: recommend using Secrets for API Key
heitorlessa Aug 14, 2023
d4c5f8f
Adding tags validation + tests
leandrodamascena Aug 14, 2023
6fd7945
Reverting changes
leandrodamascena Aug 14, 2023
a78b4fc
docs(metrics): add observability providers section
heitorlessa Aug 14, 2023
400486c
Addressing Heitor's feedback
leandrodamascena Aug 14, 2023
2 changes: 2 additions & 0 deletions aws_lambda_powertools/metrics/metrics.py
@@ -51,6 +51,8 @@ def lambda_handler():
service name to be used as metric dimension, by default "service_undefined"
namespace : str, optional
Namespace for metrics
provider: AmazonCloudWatchEMFProvider, optional
Pre-configured AmazonCloudWatchEMFProvider instance to use instead of the default one

Raises
------
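
For readers skimming the diff, here is a minimal sketch of how the new provider parameter could be used. The import path for AmazonCloudWatchEMFProvider and the constructor arguments are assumptions for illustration, not part of this diff:

from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit
# assumed import path for the CloudWatch EMF provider
from aws_lambda_powertools.metrics.provider.cloudwatch_emf.cloudwatch import AmazonCloudWatchEMFProvider

# pre-configure the provider once, then hand it to Metrics via the new parameter
provider = AmazonCloudWatchEMFProvider(namespace="ServerlessAirline")
metrics = Metrics(provider=provider)

metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
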
9 changes: 7 additions & 2 deletions aws_lambda_powertools/metrics/provider/base.py
@@ -179,8 +179,13 @@ def handler(event, context):
e
Propagate error received
"""
extra_args = {}

default_dimensions = kwargs.get("default_dimensions")
if kwargs.get("default_dimensions"):
extra_args.update({"default_dimensions": kwargs.get("default_dimensions")})

if kwargs.get("default_tags"):
extra_args.update({"default_tags": kwargs.get("default_tags")})

# If handler is None we've been called with parameters
# Return a partial function with args filled
@@ -190,7 +195,7 @@ def handler(event, context):
self.log_metrics,
capture_cold_start_metric=capture_cold_start_metric,
raise_on_empty_metrics=raise_on_empty_metrics,
default_dimensions=default_dimensions,
**extra_args,
)

@functools.wraps(lambda_handler)
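
To illustrate why both keys are collected into extra_args above: the CloudWatch-backed Metrics class accepts default_dimensions while the Datadog provider accepts default_tags, and both now flow through the same decorator. A hedged sketch, with handler bodies and values purely illustrative:

from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics

cw_metrics = Metrics(namespace="ServerlessAirline", service="booking")
dd_metrics = DatadogMetrics(namespace="ServerlessAirline")

# CloudWatch EMF: default_dimensions is forwarded through extra_args
@cw_metrics.log_metrics(default_dimensions={"environment": "prod"})
def cw_handler(event, context):
    return "ok"

# Datadog: default_tags is forwarded through extra_args instead
@dd_metrics.log_metrics(default_tags={"environment": "prod"})
def dd_handler(event, context):
    return "ok"
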
7 changes: 7 additions & 0 deletions aws_lambda_powertools/metrics/provider/datadog/__init__.py
@@ -0,0 +1,7 @@
from aws_lambda_powertools.metrics.provider.datadog.datadog import DatadogProvider
from aws_lambda_powertools.metrics.provider.datadog.metrics import DatadogMetrics

__all__ = [
"DatadogMetrics",
"DatadogProvider",
]
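
A short, hedged sketch of how the two exports relate: DatadogMetrics is the user-facing class, while DatadogProvider can be pre-configured and handed to it. Passing provider= to DatadogMetrics is an assumption that mirrors the Metrics(provider=...) pattern documented above:

from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics, DatadogProvider

# pre-configured provider: flush metrics to logs for the Datadog Log Forwarder to pick up
provider = DatadogProvider(namespace="coffee_house", flush_to_log=True)
metrics = DatadogMetrics(provider=provider)

metrics.add_metric(name="order_value", value=12.45, tags=["product:latte"])
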
291 changes: 291 additions & 0 deletions aws_lambda_powertools/metrics/provider/datadog/datadog.py
@@ -0,0 +1,291 @@
from __future__ import annotations

import json
import logging
import numbers
import os
import time
import warnings
from typing import Any, Callable, Dict, List, Optional

from aws_lambda_powertools.metrics.exceptions import MetricValueError, SchemaValidationError
from aws_lambda_powertools.metrics.provider import BaseProvider
from aws_lambda_powertools.shared import constants
from aws_lambda_powertools.shared.functions import resolve_env_var_choice
from aws_lambda_powertools.utilities.typing import LambdaContext

logger = logging.getLogger(__name__)

# Check whether the datadog_lambda package (shipped with the Datadog Lambda layer) is available
try:
from datadog_lambda.metric import lambda_metric # type: ignore
except ImportError: # pragma: no cover
lambda_metric = None # pragma: no cover

DEFAULT_NAMESPACE = "default"


class DatadogProvider(BaseProvider):
"""
DatadogProvider creates metrics asynchronously via Datadog extension or exporter.

**Use `aws_lambda_powertools.DatadogMetrics` to create and send metrics to Datadog.**

Environment variables
---------------------
POWERTOOLS_METRICS_NAMESPACE : str
metric namespace to be set for all metrics

Raises
------
MetricValueError
When metric value isn't a number
SchemaValidationError
When metric object fails schema validation
"""

def __init__(
self,
metric_set: List | None = None,
namespace: str | None = None,
flush_to_log: bool | None = None,
default_tags: List | None = None,
):
self.metric_set = metric_set if metric_set is not None else []
self.namespace = resolve_env_var_choice(choice=namespace, env=os.getenv(constants.METRICS_NAMESPACE_ENV))
if self.namespace is None:
self.namespace = DEFAULT_NAMESPACE
self.default_tags = default_tags or []
self.flush_to_log = resolve_env_var_choice(choice=flush_to_log, env=os.getenv(constants.DATADOG_FLUSH_TO_LOG))

# Add a metric with name, value, timestamp, and tags
def add_metric(
self,
name: str,
value: float,
timestamp: int | None = None,
tags: List | None = None,
**kwargs: Any,
) -> None:
"""
Add a metric to the metric set; this is the add_metric method used by the metrics class.

Parameters
----------
name: str
Name/key for the metric
value: float
Value for the metric
timestamp: int
Unix timestamp (in seconds) for the metric, by default int(time.time())
tags: List[str]
Tags in the format ["tag:value", "tag2:value2"]
kwargs: Any
Extra keyword arguments are converted into tags, e.g., add_metric(sales="sam") -> tags=['sales:sam']

Examples
--------
>>> provider = DatadogProvider()
>>>
>>> provider.add_metric(
>>> name='coffee_house.order_value',
>>> value=12.45,
>>> tags=['product:latte', 'order:online'],
>>> sales='sam'
>>> )
"""
if not isinstance(value, numbers.Real):
raise MetricValueError(f"{value} is not a valid number")

if tags is None:
tags = []

if not timestamp:
timestamp = int(time.time())

for tag_key, tag_value in kwargs.items():
tags.append(f"{tag_key}:{tag_value}")

self.metric_set.append({"m": name, "v": value, "e": timestamp, "t": tags})

def serialize_metric_set(self, metrics: List | None = None) -> List:
"""Serializes metrics

Example
-------
**Serialize metrics into Datadog format**

metrics = DatadogMetrics()
# ...add metrics, tags, namespace
ret = metrics.serialize_metric_set()

Returns
-------
List
Serialized metrics following Datadog specification

Raises
------
SchemaValidationError
Raised when serialization fails schema validation
"""

if metrics is None: # pragma: no cover
metrics = self.metric_set

if len(metrics) == 0:
raise SchemaValidationError("Must contain at least one metric.")

output_list: List = []

logger.debug({"details": "Serializing metrics", "metrics": metrics})

for single_metric in metrics:
if self.namespace != DEFAULT_NAMESPACE:
metric_name = f"{self.namespace}.{single_metric['m']}"
else:
metric_name = single_metric["m"]
output_list.append(
{
"m": metric_name,
"v": single_metric["v"],
"e": single_metric["e"],
"t": single_metric["t"] or list(self.default_tags),
},
)

return output_list

# flush serialized data to output
def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
"""Manually flushes the metrics. This is normally not necessary,
unless you're running on other runtimes besides Lambda, where the @log_metrics
decorator already handles things for you.

Parameters
----------
raise_on_empty_metrics : bool, optional
raise exception if no metrics are emitted, by default False
"""
if not raise_on_empty_metrics and len(self.metric_set) == 0:
warnings.warn(
"No application metrics to publish. The cold-start metric may be published if enabled. "
"If application metrics should never be empty, consider using 'raise_on_empty_metrics'",
stacklevel=2,
)

else:
metrics = self.serialize_metric_set()
# submit through datadog extension
if lambda_metric and not self.flush_to_log:
# use lambda_metric function from datadog package, submit metrics to datadog
for metric_item in metrics: # pragma: no cover
lambda_metric( # pragma: no cover
metric_name=metric_item["m"],
value=metric_item["v"],
timestamp=metric_item["e"],
tags=metric_item["t"],
)
else:
# Datadog package not found or flush_to_log enabled: print metrics to the log in a format recognized by the Datadog Log Forwarder
# https://github.com/Datadog/datadog-lambda-python/blob/main/datadog_lambda/metric.py#L77
for metric_item in metrics:
print(json.dumps(metric_item, separators=(",", ":")))

self.clear_metrics()

def clear_metrics(self):
logger.debug("Clearing out existing metric set from memory")
self.metric_set.clear()

def add_cold_start_metric(self, context: LambdaContext) -> None:
"""Add cold start metric and function_name dimension

Parameters
----------
context : Any
Lambda context
"""
logger.debug("Adding cold start metric and function_name tagging")
self.add_metric(name="ColdStart", value=1, function_name=context.function_name)

def log_metrics(
self,
lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None,
capture_cold_start_metric: bool = False,
raise_on_empty_metrics: bool = False,
**kwargs,
):
"""Decorator to serialize and publish metrics at the end of a function execution.

Be aware that log_metrics **does call** the decorated function (e.g. lambda_handler).

Example
-------
**Lambda function using tracer and metrics decorators**

from aws_lambda_powertools import DatadogMetrics, Tracer

metrics = DatadogMetrics(namespace="powertools")
tracer = Tracer(service="payment")

@tracer.capture_lambda_handler
@metrics.log_metrics
def handler(event, context):
...

Parameters
----------
lambda_handler : Callable[[Any, Any], Any], optional
lambda function handler, by default None
capture_cold_start_metric : bool, optional
captures cold start metric, by default False
raise_on_empty_metrics : bool, optional
raise exception if no metrics are emitted, by default False
**kwargs
Additional options, e.g., default_tags as a dict of tag keys and values

Raises
------
e
Propagate error received
"""

default_tags = kwargs.get("default_tags")

if default_tags:
self.set_default_tags(**default_tags)

return super().log_metrics(
lambda_handler=lambda_handler,
capture_cold_start_metric=capture_cold_start_metric,
raise_on_empty_metrics=raise_on_empty_metrics,
**kwargs,
)

def set_default_tags(self, **kwargs) -> None:
"""Persist tags across Lambda invocations

Parameters
----------
**kwargs
Tags as key=value pairs

Example
-------
**Sets default tags that will always be added to metrics, across invocations**

from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics

metrics = DatadogMetrics(namespace="ServerlessAirline")
metrics.set_default_tags(environment="demo", another="one")

@metrics.log_metrics()
def lambda_handler():
return True
"""
for tag_key, tag_value in kwargs.items():
tag = f"{tag_key}:{tag_value}"
if tag not in self.default_tags:
self.default_tags.append(tag)
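
Putting the provider together, a hedged end-to-end sketch; the handler, namespace, and tag values are illustrative, and DatadogMetrics is assumed to delegate these methods to DatadogProvider:

from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics

metrics = DatadogMetrics(namespace="coffee_house", flush_to_log=True)
metrics.set_default_tags(environment="demo")

@metrics.log_metrics(capture_cold_start_metric=True)
def lambda_handler(event, context):
    # keyword arguments become tags: product="latte" -> "product:latte"
    metrics.add_metric(name="order_value", value=12.45, product="latte")
    return {"statusCode": 200}

# with flush_to_log=True (or when the datadog_lambda package is absent), each serialized
# metric is printed as a JSON line, for example:
# {"m":"coffee_house.order_value","v":12.45,"e":1692000000,"t":["product:latte"]}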