From a62c5e58a8f4e7ed9795ba2c99f0384983c84906 Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 16:52:59 +0100 Subject: [PATCH 01/13] initial --- .riot/requirements/16cd2e7.txt | 32 ++ .riot/requirements/1d937f5.txt | 34 ++ ddtrace/_monkey.py | 1 + .../contrib/integration_registry/mappings.py | 1 + .../integration_registry/registry.yaml | 6 + .../contrib/internal/playwright/__init__.py | 62 +++ ddtrace/contrib/internal/playwright/patch.py | 237 +++++++++ ddtrace/internal/settings/_config.py | 1 + ddtrace/propagation/http.py | 300 ++++++++--- docker-compose.yml | 8 +- docker/Dockerfile | 27 + riotfile.py | 106 +++- tests/ci_visibility/suitespec.yml | 15 + tests/contrib/playwright/__init__.py | 1 + .../contrib/playwright/test_playwright_e2e.py | 125 +++++ .../playwright/test_playwright_integration.py | 473 ++++++++++++++++++ .../playwright/test_playwright_patch.py | 203 ++++++++ 17 files changed, 1539 insertions(+), 93 deletions(-) create mode 100644 .riot/requirements/16cd2e7.txt create mode 100644 .riot/requirements/1d937f5.txt create mode 100644 ddtrace/contrib/internal/playwright/__init__.py create mode 100644 ddtrace/contrib/internal/playwright/patch.py create mode 100644 tests/contrib/playwright/__init__.py create mode 100644 tests/contrib/playwright/test_playwright_e2e.py create mode 100644 tests/contrib/playwright/test_playwright_integration.py create mode 100644 tests/contrib/playwright/test_playwright_patch.py diff --git a/.riot/requirements/16cd2e7.txt b/.riot/requirements/16cd2e7.txt new file mode 100644 index 00000000000..20aa2c9ee4c --- /dev/null +++ b/.riot/requirements/16cd2e7.txt @@ -0,0 +1,32 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/16cd2e7.in +# +attrs==25.4.0 +certifi==2025.11.12 +charset-normalizer==3.4.4 +coverage[toml]==7.12.0 +greenlet==3.2.4 +hypothesis==6.45.0 +idna==3.11 +iniconfig==2.3.0 +mock==5.2.0 +opentracing==2.4.0 +packaging==25.0 +playwright==1.56.0 +pluggy==1.6.0 +pyee==13.0.0 +pygments==2.19.2 +pytest==9.0.1 +pytest-base-url==2.1.0 +pytest-cov==7.0.0 +pytest-mock==3.15.1 +pytest-playwright==0.7.2 +python-slugify==8.0.4 +requests==2.32.5 +sortedcontainers==2.4.0 +text-unidecode==1.3 +typing-extensions==4.15.0 +urllib3==2.5.0 diff --git a/.riot/requirements/1d937f5.txt b/.riot/requirements/1d937f5.txt new file mode 100644 index 00000000000..1aa9ee6be36 --- /dev/null +++ b/.riot/requirements/1d937f5.txt @@ -0,0 +1,34 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1d937f5.in +# +attrs==25.4.0 +certifi==2025.11.12 +charset-normalizer==3.4.4 +coverage[toml]==7.12.0 +exceptiongroup==1.3.1 +greenlet==3.2.4 +hypothesis==6.45.0 +idna==3.11 +iniconfig==2.3.0 +mock==5.2.0 +opentracing==2.4.0 +packaging==25.0 +playwright==1.56.0 +pluggy==1.6.0 +pyee==13.0.0 +pygments==2.19.2 +pytest==9.0.1 +pytest-base-url==2.1.0 +pytest-cov==7.0.0 +pytest-mock==3.15.1 +pytest-playwright==0.7.2 +python-slugify==8.0.4 +requests==2.32.5 +sortedcontainers==2.4.0 +text-unidecode==1.3 +tomli==2.3.0 +typing-extensions==4.15.0 +urllib3==2.5.0 diff --git a/ddtrace/_monkey.py b/ddtrace/_monkey.py index f437674fe34..f28886bd742 100644 --- a/ddtrace/_monkey.py +++ b/ddtrace/_monkey.py @@ -113,6 +113,7 @@ "unittest": True, "coverage": False, "selenium": True, + "playwright": True, "valkey": True, "openai_agents": True, "ray": False, diff --git 
a/ddtrace/contrib/integration_registry/mappings.py b/ddtrace/contrib/integration_registry/mappings.py index 922ed6c1b79..4d276ac3a7c 100644 --- a/ddtrace/contrib/integration_registry/mappings.py +++ b/ddtrace/contrib/integration_registry/mappings.py @@ -17,6 +17,7 @@ "dbapi", "dbapi_async", "selenium", + "playwright", } DEPENDENCY_TO_INTEGRATION_MAPPING_SPECIAL_CASES = { diff --git a/ddtrace/contrib/integration_registry/registry.yaml b/ddtrace/contrib/integration_registry/registry.yaml index fde6e910528..fbf43f84e2a 100644 --- a/ddtrace/contrib/integration_registry/registry.yaml +++ b/ddtrace/contrib/integration_registry/registry.yaml @@ -819,6 +819,12 @@ integrations: dependency_names: - selenium +- integration_name: playwright + is_external_package: true + is_tested: false + dependency_names: + - playwright + - integration_name: snowflake is_external_package: true is_tested: true diff --git a/ddtrace/contrib/internal/playwright/__init__.py b/ddtrace/contrib/internal/playwright/__init__.py new file mode 100644 index 00000000000..4229020c791 --- /dev/null +++ b/ddtrace/contrib/internal/playwright/__init__.py @@ -0,0 +1,62 @@ +""" +Trace the Playwright browser automation library to trace browser requests and enable distributed tracing. + +Enabling +~~~~~~~~ + +The Playwright integration is enabled by default in test contexts. Use +:func:`patch()` to enable the integration:: + + from ddtrace import patch + patch(playwright=True) + +When using pytest, the `--ddtrace-patch-all` flag is required in order for this integration to +be enabled. + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.playwright['distributed_tracing'] + + Include distributed tracing headers in browser requests sent from Playwright. + This option can also be set with the ``DD_PLAYWRIGHT_DISTRIBUTED_TRACING`` + environment variable. + + Default: ``True`` + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +The integration can be configured per instance:: + + from ddtrace import config + + # Disable distributed tracing globally. + config.playwright['distributed_tracing'] = False + +Headers tracing is supported for this integration. + +How It Works +~~~~~~~~~~~~ + +The Playwright integration automatically injects Datadog distributed tracing headers +into all browser requests made through Playwright. This enables end-to-end tracing +from your application through to browser-initiated backend requests. + +The integration uses a multi-layered approach to ensure headers are injected +regardless of how Playwright is used: + +1. **Context-level injection**: Headers are added to BrowserContext.extra_http_headers +2. **Route interception**: A catch-all route handler intercepts all requests and injects headers +3. **Request-level patching**: Individual request objects are patched as needed + +Headers injected include: +- ``x-datadog-trace-id``: The lower 64-bits of the 128-bit trace-id in decimal format +- ``x-datadog-parent-id``: The 64-bits span-id of the current span in decimal format +- ``x-datadog-sampling-priority``: Sampling decision (optional) +- ``x-datadog-origin``: Origin information (optional, not used for browser requests) +- ``x-datadog-tags``: Supplemental trace state information (optional) + +This integration is particularly useful for E2E testing scenarios where you want to +trace requests from browser automation through to your backend services. 
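+
+As a concrete sketch, a browser request intercepted by the route handler carries
+headers of the following shape (the values shown are illustrative, not real
+identifiers)::
+
+    x-datadog-trace-id: 1234567890123456789
+    x-datadog-parent-id: 987654321098765432
+    x-datadog-sampling-priority: 1
+
+When the request is tied to a recognized test span, this change sets the sampling
+priority to ``114``. A backend service instrumented with ddtrace can continue the
+trace by extracting these headers, for example (``handle_request`` and
+``request_headers`` are placeholders for your framework's handler)::
+
+    from ddtrace.propagation.http import HTTPPropagator
+    from ddtrace.trace import tracer
+
+    def handle_request(request_headers):
+        # Rebuild the propagated context and make it the active parent
+        context = HTTPPropagator.extract(request_headers)
+        if context.trace_id:
+            tracer.context_provider.activate(context)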
+""" diff --git a/ddtrace/contrib/internal/playwright/patch.py b/ddtrace/contrib/internal/playwright/patch.py new file mode 100644 index 00000000000..0822f242845 --- /dev/null +++ b/ddtrace/contrib/internal/playwright/patch.py @@ -0,0 +1,237 @@ +import os +from typing import Dict + +from ddtrace import config +from ddtrace.constants import SPAN_KIND +from ddtrace.ext import SpanTypes +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.formats import asbool +from ddtrace.propagation.http import HTTPPropagator +from ddtrace.trace import tracer + + +log = get_logger(__name__) + +# Configure the playwright integration +config._add( + "playwright", + { + "distributed_tracing": asbool(os.getenv("DD_PLAYWRIGHT_DISTRIBUTED_TRACING", default=True)), + }, +) + + +def get_version() -> str: + """Get the Playwright version.""" + try: + import playwright + + return getattr(playwright, "__version__", "") + except (ImportError, AttributeError): + return "" + + +def _supported_versions() -> Dict[str, str]: + return {"playwright": "*"} + + +def _inject_distributed_tracing_headers(headers: Dict[str, str], context=None, is_test_context=None) -> None: + """ + Inject Datadog distributed tracing headers into the provided headers dict. + + This uses the provided context (if any), otherwise falls back to the current active span context. + If no active span exists, creates a temporary span for header injection. + """ + if not config.playwright.get("distributed_tracing", True): + return + + try: + # Use provided context, or get the current active span + span_context = context + + # Use the explicitly provided is_test_context flag, or try to detect it + if is_test_context is None and span_context is not None: + # Check if the provided context is from a test span by walking up the span hierarchy + # This is needed because the context object itself doesn't store the test.type tag + try: + current_span = tracer.current_span() + while current_span: + if current_span.context == span_context and current_span.get_tag('test.type') == 'test': + is_test_context = True + break + current_span = current_span._parent + except Exception: + pass + elif span_context is None: + current_span = tracer.current_span() + if current_span: + span_context = current_span.context + + if span_context: + # Use the span context to inject headers, passing the test context flag + HTTPPropagator.inject(span_context, headers, is_test_context=is_test_context) + else: + # No active span, create a temporary span for header injection + with tracer.trace("playwright.browser.request", span_type=SpanTypes.HTTP) as span: + span._set_tag_str(SPAN_KIND, "client") + span._set_tag_str("component", config.playwright.integration_name) + HTTPPropagator.inject(span.context, headers, is_test_context=is_test_context) + + except Exception as e: + log.debug("Failed to inject distributed tracing headers: %s", e) + + +def _patch_browser_context_new_context(): + """Patch Browser.new_context to inject headers at the context level.""" + try: + from playwright.sync_api import Browser + except ImportError: + log.debug("Playwright not available for patching") + return + + original_new_context = Browser.new_context + + def _wrapped_new_context(*args, **kwargs): + # Capture the current span context at the time of context creation + # This ensures test context is preserved for async browser requests + current_span = tracer.current_span() + test_context = current_span.context if current_span else None + + # Inject headers into extra_http_headers + headers = 
kwargs.setdefault("extra_http_headers", {}) + is_test_context_flag = current_span and current_span.get_tag('test.type') == 'test' + _inject_distributed_tracing_headers(headers, test_context, is_test_context_flag) + + # Create the context + context = original_new_context(*args, **kwargs) + + # Store the test context and test flag on the context for use by route handlers + context._dd_test_context = test_context + context._dd_is_test_context = current_span and current_span.get_tag('test.type') == 'test' + + # Also install a route handler as a fallback + _install_route_handler(context) + + return context + + Browser.new_context = _wrapped_new_context + + +def _patch_api_request_new_context(): + """Patch playwright.request.new_context for API requests.""" + try: + import playwright + + if not hasattr(playwright, "request"): + return + + original_new_context = playwright.request.new_context + + def _wrapped_api_new_context(*args, **kwargs): + # Inject headers into extra_http_headers for API requests + headers = kwargs.setdefault("extra_http_headers", {}) + _inject_distributed_tracing_headers(headers) + + return original_new_context(*args, **kwargs) + + playwright.request.new_context = _wrapped_api_new_context + + except Exception as e: + log.debug("Failed to patch API request context: %s", e) + + +def _install_route_handler(context) -> None: + """ + Install a catch-all route handler on the context to inject headers into all requests. + + This ensures headers are injected even if the context was created without + extra_http_headers or if individual requests override headers. + """ + if not hasattr(context, "_dd_route_handler_installed"): + try: + + def _inject_headers(route, request): + """Route handler that injects distributed tracing headers into each request.""" + try: + # Get existing headers + headers = dict(getattr(request, "headers", {}) or {}) + + # Use the stored test context and flag from when the browser context was created + test_context = getattr(context, "_dd_test_context", None) + is_test_context = getattr(context, "_dd_is_test_context", None) + + # Inject our distributed tracing headers + _inject_distributed_tracing_headers(headers, test_context, is_test_context) + + # Continue the request with injected headers + route.continue_(headers=headers) + + except Exception as e: + # Fallback: continue without headers if injection fails + log.debug("Failed to inject headers in route handler: %s", e) + try: + route.continue_() + except Exception: + pass + + # Install catch-all route handler + context.route("**/*", _inject_headers) + context._dd_route_handler_installed = True + + except Exception as e: + log.debug("Failed to install route handler: %s", e) + + +def patch() -> None: + """Apply the Playwright integration patch.""" + try: + import playwright + except ImportError: + log.debug("Playwright not available, skipping patch") + return + + if getattr(playwright, "_datadog_patch", False): + return + + try: + # Patch Browser.new_context for browser contexts + _patch_browser_context_new_context() + + # Patch API request context + _patch_api_request_new_context() + + playwright._datadog_patch = True + log.debug("Playwright integration patched successfully") + + except Exception as e: + log.debug("Failed to patch Playwright: %s", e) + + +def unpatch() -> None: + """Remove the Playwright integration patch.""" + try: + import playwright + except ImportError: + return + + if not getattr(playwright, "_datadog_patch", False): + return + + try: + from playwright.sync_api import Browser + + # 
Restore original methods if they were patched + if hasattr(Browser, "_original_new_context"): + Browser.new_context = Browser._original_new_context + delattr(Browser, "_original_new_context") + + # Restore API request context + if hasattr(playwright, "request") and hasattr(playwright.request, "_original_new_context"): + playwright.request.new_context = playwright.request._original_new_context + delattr(playwright.request, "_original_new_context") + + playwright._datadog_patch = False + log.debug("Playwright integration unpatched successfully") + + except Exception as e: + log.debug("Failed to unpatch Playwright: %s", e) diff --git a/ddtrace/internal/settings/_config.py b/ddtrace/internal/settings/_config.py index c133e6a0eb8..b7e832f3004 100644 --- a/ddtrace/internal/settings/_config.py +++ b/ddtrace/internal/settings/_config.py @@ -183,6 +183,7 @@ "asyncpg", "django", "aiobotocore", + "playwright", "pytest_bdd", "starlette", "valkey", diff --git a/ddtrace/propagation/http.py b/ddtrace/propagation/http.py index 3f2fdd2b726..a6cc651b847 100644 --- a/ddtrace/propagation/http.py +++ b/ddtrace/propagation/http.py @@ -24,6 +24,7 @@ from ..constants import AUTO_KEEP from ..constants import AUTO_REJECT from ..constants import USER_KEEP +from ..ext.test import TYPE as TEST_TYPE from ..internal._tagset import TagsetDecodeError from ..internal._tagset import TagsetEncodeError from ..internal._tagset import TagsetMaxSizeDecodeError @@ -37,7 +38,9 @@ from ..internal.constants import BAGGAGE_TAG_PREFIX from ..internal.constants import DD_TRACE_BAGGAGE_MAX_BYTES from ..internal.constants import DD_TRACE_BAGGAGE_MAX_ITEMS -from ..internal.constants import HIGHER_ORDER_TRACE_ID_BITS as _HIGHER_ORDER_TRACE_ID_BITS +from ..internal.constants import ( + HIGHER_ORDER_TRACE_ID_BITS as _HIGHER_ORDER_TRACE_ID_BITS, +) from ..internal.constants import LAST_DD_PARENT_ID_KEY from ..internal.constants import MAX_UINT_64BITS as _MAX_UINT_64BITS from ..internal.constants import PROPAGATION_STYLE_B3_MULTI @@ -54,12 +57,36 @@ log = get_logger(__name__) +def _is_test_context() -> bool: + """ + Check if the current span is part of a test trace. + + This checks if any span in the current trace has the test.type tag set to "test", + indicating we're in a test execution context. + """ + try: + # Check the current span and walk up the trace to find any test span + span = core.tracer.current_span() + while span: + if span.get_tag(TEST_TYPE) == "test": + return True + span = span._parent + + except Exception: + # If we can't access the tracer or current span, assume not in test context + pass + + return False + + # HTTP headers one should set for distributed tracing. 
# These are cross-language (eg: Python, Go and other implementations should honor these) _HTTP_BAGGAGE_PREFIX: Literal["ot-baggage-"] = "ot-baggage-" HTTP_HEADER_TRACE_ID: Literal["x-datadog-trace-id"] = "x-datadog-trace-id" HTTP_HEADER_PARENT_ID: Literal["x-datadog-parent-id"] = "x-datadog-parent-id" -HTTP_HEADER_SAMPLING_PRIORITY: Literal["x-datadog-sampling-priority"] = "x-datadog-sampling-priority" +HTTP_HEADER_SAMPLING_PRIORITY: Literal["x-datadog-sampling-priority"] = ( + "x-datadog-sampling-priority" +) HTTP_HEADER_ORIGIN: Literal["x-datadog-origin"] = "x-datadog-origin" _HTTP_HEADER_B3_SINGLE: Literal["b3"] = "b3" _HTTP_HEADER_B3_TRACE_ID: Literal["x-b3-traceid"] = "x-b3-traceid" @@ -81,9 +108,13 @@ def _possible_header(header): # versions of these headers POSSIBLE_HTTP_HEADER_TRACE_IDS = _possible_header(HTTP_HEADER_TRACE_ID) POSSIBLE_HTTP_HEADER_PARENT_IDS = _possible_header(HTTP_HEADER_PARENT_ID) -POSSIBLE_HTTP_HEADER_SAMPLING_PRIORITIES = _possible_header(HTTP_HEADER_SAMPLING_PRIORITY) +POSSIBLE_HTTP_HEADER_SAMPLING_PRIORITIES = _possible_header( + HTTP_HEADER_SAMPLING_PRIORITY +) POSSIBLE_HTTP_HEADER_ORIGIN = _possible_header(HTTP_HEADER_ORIGIN) -_POSSIBLE_HTTP_HEADER_TAGS = frozenset([_HTTP_HEADER_TAGS, get_wsgi_header(_HTTP_HEADER_TAGS).lower()]) +_POSSIBLE_HTTP_HEADER_TAGS = frozenset( + [_HTTP_HEADER_TAGS, get_wsgi_header(_HTTP_HEADER_TAGS).lower()] +) _POSSIBLE_HTTP_HEADER_B3_SINGLE_HEADER = _possible_header(_HTTP_HEADER_B3_SINGLE) _POSSIBLE_HTTP_HEADER_B3_TRACE_IDS = _possible_header(_HTTP_HEADER_B3_TRACE_ID) _POSSIBLE_HTTP_HEADER_B3_SPAN_IDS = _possible_header(_HTTP_HEADER_B3_SPAN_ID) @@ -214,7 +245,9 @@ def _extract_meta(tags_value): meta = { "_dd.propagation_error": "extract_max_size", } - log.warning("failed to decode x-datadog-tags: %r", tags_value, exc_info=True) + log.warning( + "failed to decode x-datadog-tags: %r", tags_value, exc_info=True + ) except TagsetDecodeError: meta = { "_dd.propagation_error": "decoding_error", @@ -230,7 +263,9 @@ def _put_together_trace_id(trace_id_hob_hex: str, low_64_bits: int) -> int: @staticmethod def _higher_order_is_valid(upper_64_bits: str) -> bool: try: - if len(upper_64_bits) != 16 or not (int(upper_64_bits, 16) or (upper_64_bits.islower())): + if len(upper_64_bits) != 16 or not ( + int(upper_64_bits, 16) or (upper_64_bits.islower()) + ): raise ValueError except ValueError: return False @@ -238,32 +273,44 @@ def _higher_order_is_valid(upper_64_bits: str) -> bool: return True @staticmethod - def _inject(span_context, headers): - # type: (Context, Dict[str, str]) -> None + def _inject(span_context, headers, is_test_context=None): + # type: (Context, Dict[str, str], Optional[bool]) -> None if span_context.trace_id is None or span_context.span_id is None: log.debug("tried to inject invalid context %r", span_context) return # When apm tracing is not enabled, only distributed traces with the `_dd.p.ts` tag # are propagated. If the tag is not present, we should not propagate downstream. - if not asm_config._apm_tracing_enabled and (APPSEC.PROPAGATION_HEADER not in span_context._meta): + if not asm_config._apm_tracing_enabled and ( + APPSEC.PROPAGATION_HEADER not in span_context._meta + ): return if span_context.trace_id > _MAX_UINT_64BITS: # set lower order 64 bits in `x-datadog-trace-id` header. For backwards compatibility these # bits should be converted to a base 10 integer. 
- headers[HTTP_HEADER_TRACE_ID] = str(_get_64_lowest_order_bits_as_int(span_context.trace_id)) + headers[HTTP_HEADER_TRACE_ID] = str( + _get_64_lowest_order_bits_as_int(span_context.trace_id) + ) # set higher order 64 bits in `_dd.p.tid` to propagate the full 128 bit trace id. # Note - The higher order bits must be encoded in hex - span_context._meta[_HIGHER_ORDER_TRACE_ID_BITS] = _get_64_highest_order_bits_as_hex(span_context.trace_id) + span_context._meta[_HIGHER_ORDER_TRACE_ID_BITS] = ( + _get_64_highest_order_bits_as_hex(span_context.trace_id) + ) else: headers[HTTP_HEADER_TRACE_ID] = str(span_context.trace_id) headers[HTTP_HEADER_PARENT_ID] = str(span_context.span_id) sampling_priority = span_context.sampling_priority + + # Use special sampling priority 114 for test contexts + # Check if explicitly marked as test context, otherwise fall back to current span check + if is_test_context is True or (is_test_context is None and _is_test_context()): + sampling_priority = 114 + # Propagate priority only if defined if sampling_priority is not None: - headers[HTTP_HEADER_SAMPLING_PRIORITY] = str(span_context.sampling_priority) + headers[HTTP_HEADER_SAMPLING_PRIORITY] = str(sampling_priority) # Propagate origin only if defined if span_context.dd_origin is not None: headers[HTTP_HEADER_ORIGIN] = ensure_text(span_context.dd_origin) @@ -278,7 +325,9 @@ def _inject(span_context, headers): # Only propagate trace tags which means ignoring the _dd.origin tags_to_encode = { - k: v for k, v in span_context._meta.items() if _DatadogMultiHeader._is_valid_datadog_trace_tag_key(k) + k: v + for k, v in span_context._meta.items() + if _DatadogMultiHeader._is_valid_datadog_trace_tag_key(k) } if tags_to_encode: @@ -297,7 +346,9 @@ def _inject(span_context, headers): log.warning("failed to encode x-datadog-tags", exc_info=True) # Record telemetry for successful injection - _record_http_telemetry("context_header_style.injected", PROPAGATION_STYLE_DATADOG) + _record_http_telemetry( + "context_header_style.injected", PROPAGATION_STYLE_DATADOG + ) @staticmethod def _extract(headers): @@ -312,7 +363,8 @@ def _extract(headers): if trace_id <= 0 or trace_id > _MAX_UINT_64BITS: log.warning( - "Invalid trace id: %r. `x-datadog-trace-id` must be greater than zero and less than 2**64", trace_id_str + "Invalid trace id: %r. `x-datadog-trace-id` must be greater than zero and less than 2**64", + trace_id_str, ) return None @@ -343,11 +395,18 @@ def _extract(headers): trace_id_hob_hex = meta[_HIGHER_ORDER_TRACE_ID_BITS] if _DatadogMultiHeader._higher_order_is_valid(trace_id_hob_hex): if config._128_bit_trace_id_enabled: - trace_id = _DatadogMultiHeader._put_together_trace_id(trace_id_hob_hex, trace_id) + trace_id = _DatadogMultiHeader._put_together_trace_id( + trace_id_hob_hex, trace_id + ) else: - meta["_dd.propagation_error"] = "malformed_tid {}".format(trace_id_hob_hex) + meta["_dd.propagation_error"] = "malformed_tid {}".format( + trace_id_hob_hex + ) del meta[_HIGHER_ORDER_TRACE_ID_BITS] - log.warning("malformed_tid: %s. Failed to decode trace id from http headers", trace_id_hob_hex) + log.warning( + "malformed_tid: %s. 
Failed to decode trace id from http headers", + trace_id_hob_hex, + ) if not meta: meta = {} @@ -431,8 +490,8 @@ class _B3MultiHeader: """ @staticmethod - def _inject(span_context, headers): - # type: (Context, Dict[str, str]) -> None + def _inject(span_context, headers, is_test_context=None): + # type: (Context, Dict[str, str], Optional[bool]) -> None if span_context.trace_id is None or span_context.span_id is None: log.debug("tried to inject invalid context %r", span_context) return @@ -450,7 +509,9 @@ def _inject(span_context, headers): headers[_HTTP_HEADER_B3_FLAGS] = "1" # Record telemetry for successful injection - _record_http_telemetry("context_header_style.injected", PROPAGATION_STYLE_B3_MULTI) + _record_http_telemetry( + "context_header_style.injected", PROPAGATION_STYLE_B3_MULTI + ) @staticmethod def _extract(headers): @@ -551,13 +612,16 @@ class _B3SingleHeader: """ @staticmethod - def _inject(span_context, headers): - # type: (Context, Dict[str, str]) -> None + def _inject(span_context, headers, is_test_context=None): + # type: (Context, Dict[str, str], Optional[bool]) -> None if span_context.trace_id is None or span_context.span_id is None: log.debug("tried to inject invalid context %r", span_context) return - single_header = "{}-{}".format(_dd_id_to_b3_id(span_context.trace_id), _dd_id_to_b3_id(span_context.span_id)) + single_header = "{}-{}".format( + _dd_id_to_b3_id(span_context.trace_id), + _dd_id_to_b3_id(span_context.span_id), + ) sampling_priority = span_context.sampling_priority if sampling_priority is not None: if sampling_priority <= 0: @@ -569,12 +633,16 @@ def _inject(span_context, headers): headers[_HTTP_HEADER_B3_SINGLE] = single_header # Record telemetry for successful injection - _record_http_telemetry("context_header_style.injected", PROPAGATION_STYLE_B3_SINGLE) + _record_http_telemetry( + "context_header_style.injected", PROPAGATION_STYLE_B3_SINGLE + ) @staticmethod def _extract(headers): # type: (Dict[str, str]) -> Optional[Context] - single_header = _extract_header_value(_POSSIBLE_HTTP_HEADER_B3_SINGLE_HEADER, headers) + single_header = _extract_header_value( + _POSSIBLE_HTTP_HEADER_B3_SINGLE_HEADER, headers + ) if not single_header: return None @@ -706,9 +774,14 @@ def _get_traceparent_values(tp): raise ValueError("ff is an invalid traceparent version: %s" % tp) elif version != "00": # currently 00 is the only version format, but if future versions come up we may need to add changes - log.warning("unsupported traceparent version:%r, still attempting to parse", version) + log.warning( + "unsupported traceparent version:%r, still attempting to parse", version + ) elif version == "00" and future_vals is not None: - raise ValueError("Traceparents with the version `00` should contain 4 values delimited by a dash: %s" % tp) + raise ValueError( + "Traceparents with the version `00` should contain 4 values delimited by a dash: %s" + % tp + ) trace_id = _hex_id_to_dd_id(trace_id_hex) span_id = _hex_id_to_dd_id(span_id_hex) @@ -761,7 +834,9 @@ def _get_tracestate_values(ts_l): # need to convert from t. to _dd.p. 
other_propagated_tags = { - "_dd.p.%s" % k[2:]: _TraceContext.decode_tag_val(v) for (k, v) in dd.items() if k.startswith("t.") + "_dd.p.%s" % k[2:]: _TraceContext.decode_tag_val(v) + for (k, v) in dd.items() + if k.startswith("t.") } return sampling_priority_ts_int, other_propagated_tags, origin, lpid @@ -770,7 +845,9 @@ def _get_tracestate_values(ts_l): @staticmethod def _get_sampling_priority( - traceparent_sampled: int, tracestate_sampling_priority: Optional[int], origin: Optional[str] = None + traceparent_sampled: int, + tracestate_sampling_priority: Optional[int], + origin: Optional[str] = None, ): """ When the traceparent sampled flag is set, the Datadog sampling priority is either @@ -814,7 +891,11 @@ def _extract(headers): return None trace_id, span_id, trace_flag = _TraceContext._get_traceparent_values(tp) except (ValueError, AssertionError): - log.exception("received invalid w3c traceparent: %s ", tp, extra={"send_to_telemetry": False}) + log.exception( + "received invalid w3c traceparent: %s ", + tp, + extra={"send_to_telemetry": False}, + ) return None meta = {W3C_TRACEPARENT_KEY: tp} @@ -848,14 +929,20 @@ def _get_context(trace_id, span_id, trace_flag, ts, meta=None): tracestate_values = None if tracestate_values: - sampling_priority_ts, other_propagated_tags, origin, lpid = tracestate_values + sampling_priority_ts, other_propagated_tags, origin, lpid = ( + tracestate_values + ) meta.update(other_propagated_tags.items()) if lpid: meta[LAST_DD_PARENT_ID_KEY] = lpid - sampling_priority = _TraceContext._get_sampling_priority(trace_flag, sampling_priority_ts, origin) + sampling_priority = _TraceContext._get_sampling_priority( + trace_flag, sampling_priority_ts, origin + ) else: - log.debug("no dd list member in tracestate from incoming request: %r", ts) + log.debug( + "no dd list member in tracestate from incoming request: %r", ts + ) return Context( trace_id=trace_id, @@ -866,8 +953,8 @@ def _get_context(trace_id, span_id, trace_flag, ts, meta=None): ) @staticmethod - def _inject(span_context, headers): - # type: (Context, Dict[str, str]) -> None + def _inject(span_context, headers, is_test_context=None): + # type: (Context, Dict[str, str], Optional[bool]) -> None tp = span_context._traceparent if tp: headers[_HTTP_HEADER_TRACEPARENT] = tp @@ -879,27 +966,37 @@ def _inject(span_context, headers): elif LAST_DD_PARENT_ID_KEY in span_context._meta: # Datadog Span is not active, propagate the last datadog span_id span_id = int(span_context._meta[LAST_DD_PARENT_ID_KEY], 16) - headers[_HTTP_HEADER_TRACESTATE] = w3c_tracestate_add_p(span_context._tracestate, span_id) + headers[_HTTP_HEADER_TRACESTATE] = w3c_tracestate_add_p( + span_context._tracestate, span_id + ) else: headers[_HTTP_HEADER_TRACESTATE] = span_context._tracestate # Record telemetry for successful injection - _record_http_telemetry("context_header_style.injected", _PROPAGATION_STYLE_W3C_TRACECONTEXT) + _record_http_telemetry( + "context_header_style.injected", _PROPAGATION_STYLE_W3C_TRACECONTEXT + ) class _BaggageHeader: """Helper class to inject/extract Baggage Headers""" - SAFE_CHARACTERS_KEY = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!#$%&'*+-.^_`|~" + SAFE_CHARACTERS_KEY = ( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!#$%&'*+-.^_`|~" + ) SAFE_CHARACTERS_VALUE = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!#$%&'()*+-./:<>?@[]^_`{|}~" @staticmethod def _encode_key(key: str) -> str: - return urllib.parse.quote(str(key).strip(), 
safe=_BaggageHeader.SAFE_CHARACTERS_KEY) + return urllib.parse.quote( + str(key).strip(), safe=_BaggageHeader.SAFE_CHARACTERS_KEY + ) @staticmethod def _encode_value(value: str) -> str: - return urllib.parse.quote(str(value).strip(), safe=_BaggageHeader.SAFE_CHARACTERS_VALUE) + return urllib.parse.quote( + str(value).strip(), safe=_BaggageHeader.SAFE_CHARACTERS_VALUE + ) @staticmethod def _inject(span_context: Context, headers: Dict[str, str]) -> None: @@ -923,7 +1020,9 @@ def _inject(span_context: Context, headers: Dict[str, str]) -> None: total_size = 0 for key, value in baggage_items: item = f"{_BaggageHeader._encode_key(key)}={_BaggageHeader._encode_value(value)}" - item_size = len(item.encode("utf-8")) + (1 if encoded_items else 0) # +1 for comma if not first item + item_size = len(item.encode("utf-8")) + ( + 1 if encoded_items else 0 + ) # +1 for comma if not first item if total_size + item_size > DD_TRACE_BAGGAGE_MAX_BYTES: log.warning("Baggage header size exceeded, dropping excess items") # Record telemetry for baggage header size exceeding limit @@ -941,7 +1040,9 @@ def _inject(span_context: Context, headers: Dict[str, str]) -> None: headers[_HTTP_HEADER_BAGGAGE] = header_value # Record telemetry for successful baggage injection - _record_http_telemetry("context_header_style.injected", _PROPAGATION_STYLE_BAGGAGE) + _record_http_telemetry( + "context_header_style.injected", _PROPAGATION_STYLE_BAGGAGE + ) except Exception: log.warning("Failed to encode and inject baggage header") @@ -1003,7 +1104,9 @@ def _get_sampled_injection_context( appropriate span and triggers sampling before returning the injection context. """ # Extract context for header injection (non_active_span takes precedence) - injection_context = trace_info.context if isinstance(trace_info, Span) else trace_info + injection_context = ( + trace_info.context if isinstance(trace_info, Span) else trace_info + ) # Find root span for sampling decisions if injection_context.sampling_priority is not None: @@ -1023,19 +1126,27 @@ def _get_sampled_injection_context( elif isinstance(trace_info, Span): # Use span's root for sampling sampling_span = trace_info._local_root - elif (current_root := core.tracer.current_root_span()) and current_root.trace_id == trace_info.trace_id: + elif ( + current_root := core.tracer.current_root_span() + ) and current_root.trace_id == trace_info.trace_id: # Get the local root span for the current trace (if it is active, otherwise we can't sample) sampling_span = current_root # Sample the local root span before injecting headers. 
if sampling_span: core.tracer.sample(sampling_span) - log.debug("%s sampled before propagating trace: span_context=%s", sampling_span, injection_context) + log.debug( + "%s sampled before propagating trace: span_context=%s", + sampling_span, + injection_context, + ) return injection_context @staticmethod - def _extract_configured_contexts_avail(normalized_headers: Dict[str, str]) -> Tuple[List[Context], List[str]]: + def _extract_configured_contexts_avail( + normalized_headers: Dict[str, str], + ) -> Tuple[List[Context], List[str]]: contexts = [] styles_w_ctx = [] if config._propagation_style_extract is not None: @@ -1052,15 +1163,23 @@ def _extract_configured_contexts_avail(normalized_headers: Dict[str, str]) -> Tu return contexts, styles_w_ctx @staticmethod - def _context_to_span_link(context: Context, style: str, reason: str) -> Optional[SpanLink]: + def _context_to_span_link( + context: Context, style: str, reason: str + ) -> Optional[SpanLink]: # encoding expects at least trace_id and span_id if context.span_id and context.trace_id: return SpanLink( context.trace_id, context.span_id, - flags=1 if context.sampling_priority and context.sampling_priority > 0 else 0, + flags=( + 1 + if context.sampling_priority and context.sampling_priority > 0 + else 0 + ), tracestate=( - context._meta.get(W3C_TRACESTATE_KEY, "") if style == _PROPAGATION_STYLE_W3C_TRACECONTEXT else None + context._meta.get(W3C_TRACESTATE_KEY, "") + if style == _PROPAGATION_STYLE_W3C_TRACECONTEXT + else None ), attributes={ "reason": reason, @@ -1089,19 +1208,30 @@ def _resolve_contexts(contexts, styles_w_ctx, normalized_headers): # add the tracestate to the primary context elif style_w_ctx == _PROPAGATION_STYLE_W3C_TRACECONTEXT: # extract and add the raw ts value to the primary_context - ts = _extract_header_value(_POSSIBLE_HTTP_HEADER_TRACESTATE, normalized_headers) + ts = _extract_header_value( + _POSSIBLE_HTTP_HEADER_TRACESTATE, normalized_headers + ) if ts: primary_context._meta[W3C_TRACESTATE_KEY] = ts - if primary_context.trace_id == context.trace_id and primary_context.span_id != context.span_id: + if ( + primary_context.trace_id == context.trace_id + and primary_context.span_id != context.span_id + ): dd_context = None if PROPAGATION_STYLE_DATADOG in styles_w_ctx: - dd_context = contexts[styles_w_ctx.index(PROPAGATION_STYLE_DATADOG)] + dd_context = contexts[ + styles_w_ctx.index(PROPAGATION_STYLE_DATADOG) + ] if LAST_DD_PARENT_ID_KEY in context._meta: # tracecontext headers contain a p value, ensure this value is sent to backend - primary_context._meta[LAST_DD_PARENT_ID_KEY] = context._meta[LAST_DD_PARENT_ID_KEY] + primary_context._meta[LAST_DD_PARENT_ID_KEY] = context._meta[ + LAST_DD_PARENT_ID_KEY + ] elif dd_context: # if p value is not present in tracestate, use the parent id from the datadog headers - primary_context._meta[LAST_DD_PARENT_ID_KEY] = "{:016x}".format(dd_context.span_id) + primary_context._meta[LAST_DD_PARENT_ID_KEY] = "{:016x}".format( + dd_context.span_id + ) # the span_id in tracecontext takes precedence over the first extracted propagation style primary_context.span_id = context.span_id @@ -1109,7 +1239,7 @@ def _resolve_contexts(contexts, styles_w_ctx, normalized_headers): return primary_context @staticmethod - def inject(context: Union[Context, Span], headers: Dict[str, str]) -> None: + def inject(context: Union[Context, Span], headers: Dict[str, str], is_test_context=None) -> None: """Inject Context attributes that have to be propagated as HTTP headers. 
Here is an example using `requests`:: @@ -1143,7 +1273,11 @@ def parent_call(): # Handle sampling and get context for header injection span_context = HTTPPropagator._get_sampled_injection_context(context, None) # Log a warning if we cannot determine a sampling decision before injecting headers. - if span_context.span_id and span_context.trace_id and span_context.sampling_priority is None: + if ( + span_context.span_id + and span_context.trace_id + and span_context.sampling_priority is None + ): log.debug( "Sampling decision not available. Downstream spans will not inherit a sampling priority: " "args=(context=%s, ...) detected span context=%s", @@ -1164,18 +1298,21 @@ def parent_call(): log.debug("tried to inject invalid context %r", span_context) return - if config._propagation_http_baggage_enabled is True and span_context._baggage is not None: + if ( + config._propagation_http_baggage_enabled is True + and span_context._baggage is not None + ): for key in span_context._baggage: headers[_HTTP_BAGGAGE_PREFIX + key] = span_context._baggage[key] if PROPAGATION_STYLE_DATADOG in config._propagation_style_inject: - _DatadogMultiHeader._inject(span_context, headers) + _DatadogMultiHeader._inject(span_context, headers, is_test_context=is_test_context) if PROPAGATION_STYLE_B3_MULTI in config._propagation_style_inject: - _B3MultiHeader._inject(span_context, headers) + _B3MultiHeader._inject(span_context, headers, is_test_context) if PROPAGATION_STYLE_B3_SINGLE in config._propagation_style_inject: - _B3SingleHeader._inject(span_context, headers) + _B3SingleHeader._inject(span_context, headers, is_test_context) if _PROPAGATION_STYLE_W3C_TRACECONTEXT in config._propagation_style_inject: - _TraceContext._inject(span_context, headers) + _TraceContext._inject(span_context, headers, is_test_context) @staticmethod def extract(headers): @@ -1214,20 +1351,28 @@ def my_controller(url, headers): context = propagator._extract(normalized_headers) style = prop_style if context: - _record_http_telemetry("context_header_style.extracted", prop_style) + _record_http_telemetry( + "context_header_style.extracted", prop_style + ) if config._propagation_http_baggage_enabled is True: _attach_baggage_to_context(normalized_headers, context) break # loop through all extract propagation styles else: - contexts, styles_w_ctx = HTTPPropagator._extract_configured_contexts_avail(normalized_headers) + contexts, styles_w_ctx = ( + HTTPPropagator._extract_configured_contexts_avail( + normalized_headers + ) + ) # check that styles_w_ctx is not empty if styles_w_ctx: style = styles_w_ctx[0] if contexts: - context = HTTPPropagator._resolve_contexts(contexts, styles_w_ctx, normalized_headers) + context = HTTPPropagator._resolve_contexts( + contexts, styles_w_ctx, normalized_headers + ) if config._propagation_http_baggage_enabled is True: _attach_baggage_to_context(normalized_headers, context) @@ -1236,14 +1381,18 @@ def my_controller(url, headers): baggage_context = _BaggageHeader._extract(normalized_headers) if baggage_context._baggage != {}: # Record telemetry for successful baggage extraction - _record_http_telemetry("context_header_style.extracted", _PROPAGATION_STYLE_BAGGAGE) + _record_http_telemetry( + "context_header_style.extracted", _PROPAGATION_STYLE_BAGGAGE + ) if context: context._baggage = baggage_context.get_all_baggage_items() else: context = baggage_context if config._baggage_tag_keys: - raw_keys = [k.strip() for k in config._baggage_tag_keys if k.strip()] + raw_keys = [ + k.strip() for k in config._baggage_tag_keys if 
k.strip() + ] # wildcard: tag all baggage keys if "*" in raw_keys: tag_keys = baggage_context.get_all_baggage_items().keys() @@ -1251,17 +1400,26 @@ def my_controller(url, headers): tag_keys = raw_keys for stripped_key in tag_keys: - if (value := baggage_context.get_baggage_item(stripped_key)) is not None: + if ( + value := baggage_context.get_baggage_item(stripped_key) + ) is not None: prefixed_key = BAGGAGE_TAG_PREFIX + stripped_key if prefixed_key not in context._meta: context._meta[prefixed_key] = value if config._propagation_behavior_extract == _PROPAGATION_BEHAVIOR_RESTART: - link = HTTPPropagator._context_to_span_link(context, style, "propagation_behavior_extract") - context = Context(baggage=context.get_all_baggage_items(), span_links=[link] if link else []) + link = HTTPPropagator._context_to_span_link( + context, style, "propagation_behavior_extract" + ) + context = Context( + baggage=context.get_all_baggage_items(), + span_links=[link] if link else [], + ) return context except Exception: - log.debug("error while extracting context propagation headers", exc_info=True) + log.debug( + "error while extracting context propagation headers", exc_info=True + ) return Context() diff --git a/docker-compose.yml b/docker-compose.yml index 0eccebb8040..7d3c79dfdf6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -182,10 +182,10 @@ services: testrunner: # DEV uncomment to test local changes to the Dockerfile - # build: - # context: ./docker - # dockerfile: Dockerfile - image: ghcr.io/datadog/dd-trace-py/testrunner:1717b70091ae646ec3ee29a5cb77158a82209aa3@sha256:aac6cacc68517874da310d5ea78a967f34af8388af5c6621aba77a73c4757657 + build: + context: ./docker + dockerfile: Dockerfile + # image: ghcr.io/datadog/dd-trace-py/testrunner:1717b70091ae646ec3ee29a5cb77158a82209aa3@sha256:aac6cacc68517874da310d5ea78a967f34af8388af5c6621aba77a73c4757657 command: bash environment: DD_FAST_BUILD: "1" diff --git a/docker/Dockerfile b/docker/Dockerfile index eb22648b122..d191ae6bbd8 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -74,6 +74,33 @@ RUN apt-get update \ && apt-get update \ && apt-get install -y --no-install-recommends google-chrome-stable ; \ fi \ + # Playwright system dependencies (available for both amd64 and arm64) + && apt-get install -y --no-install-recommends \ + libnspr4 \ + libnss3 \ + libdbus-1-3 \ + libatk1.0-0 \ + libatk-bridge2.0-0 \ + libcups2 \ + libxcb1 \ + libxkbcommon0 \ + libatspi2.0-0 \ + libx11-6 \ + libxcomposite1 \ + libxdamage1 \ + libxext6 \ + libxfixes3 \ + libxrandr2 \ + libgbm1 \ + libcairo2 \ + libcairo-gobject2 \ + libpango-1.0-0 \ + libpangocairo-1.0-0 \ + libasound2 \ + libxcursor1 \ + libgtk-3-0 \ + libgdk-pixbuf2.0-0 \ + libx11-xcb1 \ # Cleaning up apt cache space && rm -rf /var/lib/apt/lists/* diff --git a/riotfile.py b/riotfile.py index eecb5c8537b..f18887e21d4 100644 --- a/riotfile.py +++ b/riotfile.py @@ -65,7 +65,9 @@ def str_to_version(version: str) -> Tuple[int, int]: MAX_PYTHON_VERSION = version_to_str(max(SUPPORTED_PYTHON_VERSIONS)) -def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYTHON_VERSION) -> List[str]: +def select_pys( + min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYTHON_VERSION +) -> List[str]: """Helper to select python versions from the list of versions we support >>> select_pys() @@ -80,7 +82,11 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT min_version = str_to_version(min_version) max_version = str_to_version(max_version) - return 
[version_to_str(version) for version in SUPPORTED_PYTHON_VERSIONS if min_version <= version <= max_version] + return [ + version_to_str(version) + for version in SUPPORTED_PYTHON_VERSIONS + if min_version <= version <= max_version + ] # Common venv configurations for appsec threats testing @@ -913,7 +919,9 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "daphne": [latest], "requests": [latest], "redis": ">=2.10,<2.11", - "psycopg2-binary": [">=2.8.6"], # We need <2.9.0 for Python 2.7, and >2.9.0 for 3.9+ + "psycopg2-binary": [ + ">=2.8.6" + ], # We need <2.9.0 for Python 2.7, and >2.9.0 for 3.9+ "pytest-django[testing]": "==3.10.0", "pylibmc": latest, "python-memcached": latest, @@ -1050,7 +1058,12 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT venvs=[ Venv( pys=["3.9"], - pkgs={"dramatiq": "~=1.10.0", "pytest": latest, "redis": latest, "pika": latest}, + pkgs={ + "dramatiq": "~=1.10.0", + "pytest": latest, + "redis": latest, + "pika": latest, + }, ), Venv( pys=select_pys(max_version="3.13"), @@ -1101,7 +1114,9 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="elasticsearch:async", command="pytest {cmdargs} tests/contrib/elasticsearch/test_async.py", - env={"AIOHTTP_NO_EXTENSIONS": "1"}, # needed until aiohttp is updated to support python 3.12 + env={ + "AIOHTTP_NO_EXTENSIONS": "1" + }, # needed until aiohttp is updated to support python 3.12 venvs=[ Venv( pys=select_pys(), @@ -1265,7 +1280,10 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( pys=select_pys(min_version="3.9", max_version="3.11"), ), - Venv(pys=select_pys(min_version="3.12", max_version="3.13"), pkgs={"redis": latest}), + Venv( + pys=select_pys(min_version="3.12", max_version="3.13"), + pkgs={"redis": latest}, + ), ], ), ], @@ -1391,8 +1409,12 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ], }, venvs=[ - Venv(command="pytest {cmdargs} --ignore=tests/contrib/pymemcache/autopatch tests/contrib/pymemcache"), - Venv(command="python tests/ddtrace_run.py pytest {cmdargs} tests/contrib/pymemcache/autopatch/"), + Venv( + command="pytest {cmdargs} --ignore=tests/contrib/pymemcache/autopatch tests/contrib/pymemcache" + ), + Venv( + command="python tests/ddtrace_run.py pytest {cmdargs} tests/contrib/pymemcache/autopatch/" + ), ], ), Venv( @@ -1455,12 +1477,18 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( # starlette added support for Python 3.9 in 0.14 pys="3.9", - pkgs={"starlette": ["~=0.14.0", "~=0.20.0", "~=0.33.0"], "httpx": "~=0.22.0"}, + pkgs={ + "starlette": ["~=0.14.0", "~=0.20.0", "~=0.33.0"], + "httpx": "~=0.22.0", + }, ), Venv( # starlette added support for Python 3.10 in 0.15 pys="3.10", - pkgs={"starlette": ["~=0.15.0", "~=0.20.0", "~=0.33.0", latest], "httpx": "~=0.27.0"}, + pkgs={ + "starlette": ["~=0.15.0", "~=0.20.0", "~=0.33.0", latest], + "httpx": "~=0.27.0", + }, ), Venv( # starlette added support for Python 3.11 in 0.21 @@ -1605,7 +1633,11 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ], ), Venv( - pkgs={"vcrpy": "==7.0.0", "botocore": "==1.38.26", "boto3": "==1.38.26"}, + pkgs={ + "vcrpy": "==7.0.0", + "botocore": "==1.38.26", + "boto3": "==1.38.26", + }, venvs=[ Venv( pys=select_pys(), @@ -1642,7 +1674,10 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ], }, ), - 
Venv(pys=select_pys(min_version="3.11"), pkgs={"mariadb": ["~=1.1.2", latest]}), + Venv( + pys=select_pys(min_version="3.11"), + pkgs={"mariadb": ["~=1.1.2", latest]}, + ), ], ), Venv( @@ -2265,7 +2300,10 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT command="pytest {cmdargs} tests/contrib/rediscluster", pkgs={"pytest-randomly": latest}, venvs=[ - Venv(pys=select_pys(max_version="3.11"), pkgs={"redis-py-cluster": [">=2.0,<2.1", latest]}), + Venv( + pys=select_pys(max_version="3.11"), + pkgs={"redis-py-cluster": [">=2.0,<2.1", latest]}, + ), ], ), Venv( @@ -2508,7 +2546,10 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT # sqlite3 is tied to the Python version and is not installable via pip # To test a range of versions without updating Python, we use Linux only pysqlite3-binary package # Remove pysqlite3-binary on Python 3.9+ locally on non-linux machines - Venv(pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={"pysqlite3-binary": [latest]}), + Venv( + pys=select_pys(min_version="3.9", max_version="3.12"), + pkgs={"pysqlite3-binary": [latest]}, + ), ], ), Venv( @@ -2604,7 +2645,9 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT # opentelemetry-api doesn't yet work with Python 3.14 pys=select_pys(min_version="3.9", max_version="3.13"), # Ensure we test against versions of opentelemetry-api that broke compatibility with ddtrace - pkgs={"opentelemetry-api": ["~=1.0.0", "~=1.15.0", "~=1.26.0", latest]}, + pkgs={ + "opentelemetry-api": ["~=1.0.0", "~=1.15.0", "~=1.26.0", latest] + }, ), Venv( # opentelemetry-exporter-otlp doesn't yet work with Python 3.14 @@ -2897,7 +2940,9 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( pys=select_pys(min_version="3.9", max_version="3.13"), ), - Venv(pys=select_pys(min_version="3.14"), pkgs={"ormsgpack": ">=1.11.0"}), + Venv( + pys=select_pys(min_version="3.14"), pkgs={"ormsgpack": ">=1.11.0"} + ), ], ), Venv( @@ -3073,7 +3118,10 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"confluent-kafka": ["~=1.9.2", latest]}, ), # confluent-kafka added support for Python 3.11 in 2.0.2 - Venv(pys=select_pys(min_version="3.11", max_version="3.13"), pkgs={"confluent-kafka": latest}), + Venv( + pys=select_pys(min_version="3.11", max_version="3.13"), + pkgs={"confluent-kafka": latest}, + ), ], ), ], @@ -3097,7 +3145,11 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, command="pytest {cmdargs} tests/contrib/aiokafka/", pys=select_pys(), - pkgs={"pytest-asyncio": [latest], "pytest-randomly": latest, "aiokafka": ["~=0.9.0", latest]}, + pkgs={ + "pytest-asyncio": [latest], + "pytest-randomly": latest, + "aiokafka": ["~=0.9.0", latest], + }, ), Venv( name="azure_eventhubs", @@ -3413,6 +3465,24 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), ], ), + Venv( + name="playwright", + pys=["3.10", "3.12"], + pkgs={ + "playwright": latest, + "pytest-playwright": latest, + }, + command="playwright install && pytest --no-cov {cmdargs} -c /dev/null --no-ddtrace tests/contrib/playwright", + env={ + "DD_AGENT_TRACER_URL": "9126", + }, + venvs=[ + Venv( + name="playwright-pytest", + # command="playwright install && pytest --no-cov {cmdargs} -c /dev/null --no-ddtrace tests/contrib/playwright", + ), + ], + ), Venv( name="appsec_integrations_flask", command="pytest -vvv {cmdargs} tests/appsec/integrations/flask_tests/", 
diff --git a/tests/ci_visibility/suitespec.yml b/tests/ci_visibility/suitespec.yml index b630d6742a0..c5e7d34b544 100644 --- a/tests/ci_visibility/suitespec.yml +++ b/tests/ci_visibility/suitespec.yml @@ -12,6 +12,8 @@ components: - ddtrace/contrib/internal/pytest_benchmark/* selenium: - ddtrace/contrib/internal/selenium/* + playwright: + - ddtrace/contrib/internal/playwright/* unittest: - ddtrace/contrib/internal/unittest/* suites: @@ -73,6 +75,19 @@ suites: - tests/snapshots/test_selenium* runner: riot snapshot: true + playwright: + parallelism: 2 + paths: + - '@bootstrap' + - '@core' + - '@contrib' + - '@tracing' + - '@ci_visibility' + - '@pytest' + - '@playwright' + - tests/contrib/playwright/* + runner: riot + snapshot: false unittest: parallelism: 2 paths: diff --git a/tests/contrib/playwright/__init__.py b/tests/contrib/playwright/__init__.py new file mode 100644 index 00000000000..a9eb08a02c0 --- /dev/null +++ b/tests/contrib/playwright/__init__.py @@ -0,0 +1 @@ +# Playwright integration tests diff --git a/tests/contrib/playwright/test_playwright_e2e.py b/tests/contrib/playwright/test_playwright_e2e.py new file mode 100644 index 00000000000..2f22191033a --- /dev/null +++ b/tests/contrib/playwright/test_playwright_e2e.py @@ -0,0 +1,125 @@ +""" +End-to-end tests for Playwright distributed tracing in pytest context. + +These tests simulate real pytest usage scenarios. +""" +import pytest + +from ddtrace import config +from ddtrace.contrib.internal.playwright import patch +from ddtrace.ext.test import TYPE as TEST_TYPE + + +playwright = pytest.importorskip("playwright") + + +@pytest.mark.skipif(not hasattr(pytest, "mark"), reason="pytest.mark not available") +class TestPlaywrightE2E: + """E2E tests that simulate real pytest + Playwright usage.""" + + def test_pytest_context_with_playwright_tracing(self): + """Test that simulates running Playwright in a pytest test context.""" + from ddtrace.propagation.http import HTTPPropagator + + tracer = pytest.importorskip("ddtrace").tracer + + # Simulate pytest test context + with tracer.trace("pytest_test_function") as test_span: + test_span.set_tag(TEST_TYPE, "test") + test_span.set_tag("test.name", "test_example") + test_span.set_tag("test.framework", "pytest") + + # Enable Playwright distributed tracing + config.playwright["distributed_tracing"] = True + + # Patch Playwright (as would happen in conftest.py or setup) + patch.patch() + + try: + # Simulate a browser request that would inject headers + headers = {} + + # This simulates what happens when Playwright makes requests + HTTPPropagator.inject(test_span.context, headers) + + # Verify test context behavior + assert headers.get("x-datadog-sampling-priority") == "114" + assert "x-datadog-trace-id" in headers + assert "x-datadog-parent-id" in headers + + # Verify the test span information is propagated + trace_id = headers.get("x-datadog-trace-id") + parent_id = headers.get("x-datadog-parent-id") + + assert trace_id is not None + assert parent_id is not None + + finally: + patch.unpatch() + + def test_sampling_priority_inheritance_in_test_context(self): + """Test that child spans in test context inherit the sampling priority override.""" + from ddtrace.propagation.http import HTTPPropagator + + tracer = pytest.importorskip("ddtrace").tracer + + config.playwright["distributed_tracing"] = True + + with tracer.trace("pytest_session") as session_span: + session_span.set_tag(TEST_TYPE, "test") + session_span.set_tag("test.framework", "pytest") + + # Child span (like a test function) + with 
tracer.trace("pytest_test") as test_span: + test_span.set_tag(TEST_TYPE, "test") + + # Grandchild span (like a Playwright browser operation) + with tracer.trace("browser_request") as browser_span: + headers = {} + HTTPPropagator.inject(browser_span.context, headers) + + # Even though browser_span doesn't have test.type directly, + # the parent spans do, so it should still get priority 114 + # (This depends on how we implement the context detection) + + # For now, let's verify basic header injection + assert "x-datadog-trace-id" in headers + assert "x-datadog-parent-id" in headers + + # The sampling priority behavior depends on our implementation + # If we check current_span(), it should be 114 + # If we check the injected context, it might vary + priority = headers.get("x-datadog-sampling-priority") + assert priority is not None, "Sampling priority should be set" + + def test_mixed_contexts_in_test_execution(self): + """Test behavior when mixing test and non-test operations in the same execution.""" + from ddtrace.propagation.http import HTTPPropagator + + tracer = pytest.importorskip("ddtrace").tracer + + config.playwright["distributed_tracing"] = True + + headers_test = {} + headers_regular = {} + + # Test operation in test context + with tracer.trace("test_operation") as test_span: + test_span.set_tag(TEST_TYPE, "test") + HTTPPropagator.inject(test_span.context, headers_test) + + # Regular operation outside test context + with tracer.trace("regular_operation") as regular_span: + regular_span.context.sampling_priority = 2 + HTTPPropagator.inject(regular_span.context, headers_regular) + + # Test context should get priority 114 + assert headers_test.get("x-datadog-sampling-priority") == "114" + + # Regular context should keep its original priority + assert headers_regular.get("x-datadog-sampling-priority") == "2" + + # Both should have trace headers + for headers in [headers_test, headers_regular]: + assert "x-datadog-trace-id" in headers + assert "x-datadog-parent-id" in headers diff --git a/tests/contrib/playwright/test_playwright_integration.py b/tests/contrib/playwright/test_playwright_integration.py new file mode 100644 index 00000000000..e3a062b59b1 --- /dev/null +++ b/tests/contrib/playwright/test_playwright_integration.py @@ -0,0 +1,473 @@ +""" +Integration tests for Playwright distributed tracing. + +These tests verify the actual Playwright integration works correctly. 
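+
+Running these locally requires the Playwright browsers to be installed first; a
+minimal invocation mirroring the riot command added in this change would be::
+
+    playwright install
+    pytest --no-cov --no-ddtrace tests/contrib/playwright/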
+""" +import threading +import time +from http.server import BaseHTTPRequestHandler, HTTPServer +import pytest + +from ddtrace import config +from ddtrace.contrib.internal.playwright import patch +from ddtrace.ext.test import TYPE as TEST_TYPE + + +playwright = pytest.importorskip("playwright") + + +class TestPlaywrightIntegration: + """Integration tests that verify actual Playwright functionality.""" + + @pytest.fixture + def playwright_instance(self): + """Set up a Playwright instance for testing.""" + from playwright.sync_api import sync_playwright + + with sync_playwright() as p: + yield p + + def test_playwright_patch_integration(self): + """Test that Playwright patching integrates properly with HTTP propagation.""" + from ddtrace.propagation.http import HTTPPropagator + + tracer = pytest.importorskip("ddtrace").tracer + config.playwright["distributed_tracing"] = True + + # Patch Playwright + patch.patch() + + try: + # Test that patching doesn't break and integrates with HTTP propagation + with tracer.trace("playwright_operation") as span: + span.set_tag(TEST_TYPE, "test") + + headers = {} + HTTPPropagator.inject(span.context, headers) + + # Verify test context behavior + assert headers.get("x-datadog-sampling-priority") == "114" + assert "x-datadog-trace-id" in headers + assert "x-datadog-parent-id" in headers + + finally: + patch.unpatch() + + def test_playwright_config_integration(self): + """Test that playwright config integrates with the patching system.""" + # Test config access + assert hasattr(config, "playwright") + + # Test config modification + original_value = config.playwright.get("distributed_tracing", True) + config.playwright["distributed_tracing"] = False + assert config.playwright["distributed_tracing"] is False + + # Test patching with disabled tracing + config.playwright["distributed_tracing"] = False + patch.patch() + try: + # Should not raise exceptions + assert True + finally: + patch.unpatch() + + # Restore original value + config.playwright["distributed_tracing"] = original_value + + def test_playwright_actual_browser_context_creation(self, playwright_instance): + """Test that Playwright context creation works with patching.""" + p = playwright_instance + config.playwright["distributed_tracing"] = True + + # Patch Playwright + patch.patch() + + try: + # Try to launch browser - skip if browsers not available + try: + browser = p.chromium.launch(headless=True) + except Exception as e: + error_msg = str(e) + if "Executable doesn't exist" in error_msg or "Host system is missing dependencies" in error_msg: + pytest.skip("Playwright browsers not available - skipping browser test") + raise + + try: + # Create context - this should trigger our patched code + context = browser.new_context() + + # Verify context was created successfully + assert context is not None + + # Create a page to ensure context works + page = context.new_page() + assert page is not None + + # Close resources + page.close() + context.close() + + finally: + browser.close() + + finally: + patch.unpatch() + + def test_playwright_header_injection_with_actual_browser(self, playwright_instance): + """Test that headers are actually injected when making real browser requests.""" + from ddtrace.propagation.http import HTTPPropagator + + p = playwright_instance + tracer = pytest.importorskip("ddtrace").tracer + config.playwright["distributed_tracing"] = True + + # Patch Playwright + patch.patch() + + try: + # Try to launch browser - skip if browsers not available + try: + browser = 
p.chromium.launch(headless=True) + except Exception as e: + error_msg = str(e) + if "Executable doesn't exist" in error_msg or "Host system is missing dependencies" in error_msg: + pytest.skip("Playwright browsers not available - skipping browser test") + raise + + try: + context = browser.new_context() + try: + page = context.new_page() + try: + # Navigate to a simple page that should trigger header injection + page.goto("data:text/html,
<h1>Test</h1>
") + + # Verify the page loaded + assert page.locator("h1").text_content() == "Test" + + # Test that our header injection works in test context + with tracer.trace("test_browser_request") as span: + span.set_tag(TEST_TYPE, "test") + + headers = {} + HTTPPropagator.inject(span.context, headers) + + # Verify test context headers are injected + assert headers.get("x-datadog-sampling-priority") == "114" + assert "x-datadog-trace-id" in headers + assert "x-datadog-parent-id" in headers + + finally: + page.close() + finally: + context.close() + finally: + browser.close() + + finally: + patch.unpatch() + + def test_playwright_context_inheritance(self): + """Test that context inheritance works properly in Playwright scenarios.""" + from ddtrace.propagation.http import HTTPPropagator + + tracer = pytest.importorskip("ddtrace").tracer + config.playwright["distributed_tracing"] = True + + patch.patch() + + try: + headers = {} + + # Test context inheritance: parent span has test tag + with tracer.trace("test_session") as session_span: + session_span.set_tag(TEST_TYPE, "test") + + with tracer.trace("browser_context") as context_span: + HTTPPropagator.inject(context_span.context, headers) + + # Should inherit test context and use priority 114 + assert headers.get("x-datadog-sampling-priority") == "114" + + finally: + patch.unpatch() + + +class HeaderCaptureHandler(BaseHTTPRequestHandler): + """HTTP handler that captures request headers.""" + + captured_headers = [] + + def do_GET(self): + """Handle GET requests and capture headers.""" + # Capture all headers + headers = {} + for header_name, header_value in self.headers.items(): + headers[header_name.lower()] = header_value + + # Store the captured headers + HeaderCaptureHandler.captured_headers.append(headers) + + # Send a simple response + self.send_response(200) + self.send_header('Content-type', 'text/html') + self.end_headers() + self.wfile.write(b'
<html><body>Test Page</body></html>
') + + @classmethod + def clear_captured_headers(cls): + """Clear captured headers for a new test.""" + cls.captured_headers = [] + + +class TestPlaywrightHeaderInjectionE2E: + """End-to-end tests that verify actual HTTP header injection in browser requests.""" + + def test_playwright_injects_headers_in_browser_requests(self, playwright): + """Test that Playwright actually injects Datadog headers into browser HTTP requests.""" + import socket + from ddtrace.ext.test import TYPE as TEST_TYPE + + p = playwright + tracer = pytest.importorskip("ddtrace").tracer + config.playwright["distributed_tracing"] = True + + # Clear any previous captured headers + HeaderCaptureHandler.clear_captured_headers() + + # Find an available port + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(('', 0)) + port = s.getsockname()[1] + + # Start a test HTTP server + server = HTTPServer(('localhost', port), HeaderCaptureHandler) + server_thread = threading.Thread(target=server.serve_forever, daemon=True) + server_thread.start() + + # Give server time to start + time.sleep(0.1) + + try: + # Patch Playwright + patch.patch() + + # Create a test span with test.type tag + with tracer.trace("test_browser_header_injection") as test_span: + test_span.set_tag(TEST_TYPE, "test") + + try: + # Try to launch browser - skip if browsers not available + try: + browser = p.chromium.launch(headless=True) + except Exception as e: + error_msg = str(e) + if "Executable doesn't exist" in error_msg or "Host system is missing dependencies" in error_msg: + pytest.skip("Playwright browsers not available - skipping browser test") + raise + + try: + # Create browser context (this should inject headers at context level) + context = browser.new_context() + try: + page = context.new_page() + try: + # Navigate to our test server - this should trigger header injection + page.goto(f"http://localhost:{port}/") + + # Wait a bit for the request to complete + time.sleep(0.2) + + # Verify that headers were captured + assert len(HeaderCaptureHandler.captured_headers) > 0, "No HTTP requests were captured" + + # Check the headers from the request + request_headers = HeaderCaptureHandler.captured_headers[0] + + # Verify Datadog headers are present + assert "x-datadog-trace-id" in request_headers, "Missing x-datadog-trace-id header" + assert "x-datadog-parent-id" in request_headers, "Missing x-datadog-parent-id header" + assert "x-datadog-sampling-priority" in request_headers, "Missing x-datadog-sampling-priority header" + + # Verify the sampling priority is 114 (special test context value) + sampling_priority = request_headers["x-datadog-sampling-priority"] + assert sampling_priority == "114", f"Expected sampling priority 114, got {sampling_priority}" + + # Verify trace and parent IDs are valid + trace_id = request_headers["x-datadog-trace-id"] + parent_id = request_headers["x-datadog-parent-id"] + assert trace_id != "0", "Trace ID should not be zero" + assert parent_id != "0", "Parent ID should not be zero" + + finally: + page.close() + finally: + context.close() + finally: + browser.close() + + finally: + patch.unpatch() + + finally: + # Clean up server + server.shutdown() + server.server_close() + + def test_playwright_headers_not_injected_when_disabled(self, playwright): + """Test that headers are not injected when distributed_tracing is disabled.""" + import socket + + p = playwright + config.playwright["distributed_tracing"] = False # Disable tracing + + # Clear any previous captured headers + 
HeaderCaptureHandler.clear_captured_headers() + + # Find an available port + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(('', 0)) + port = s.getsockname()[1] + + # Start a test HTTP server + server = HTTPServer(('localhost', port), HeaderCaptureHandler) + server_thread = threading.Thread(target=server.serve_forever, daemon=True) + server_thread.start() + + # Give server time to start + time.sleep(0.1) + + try: + # Patch Playwright + patch.patch() + + try: + # Try to launch browser - skip if browsers not available + try: + browser = p.chromium.launch(headless=True) + except Exception as e: + error_msg = str(e) + if "Executable doesn't exist" in error_msg or "Host system is missing dependencies" in error_msg: + pytest.skip("Playwright browsers not available - skipping browser test") + raise + + try: + context = browser.new_context() + try: + page = context.new_page() + try: + # Navigate to our test server + page.goto(f"http://localhost:{port}/") + + # Wait a bit for the request to complete + time.sleep(0.2) + + # Verify that headers were captured + assert len(HeaderCaptureHandler.captured_headers) > 0, "No HTTP requests were captured" + + # Check the headers from the request + request_headers = HeaderCaptureHandler.captured_headers[0] + + # Verify Datadog headers are NOT present when tracing is disabled + assert "x-datadog-trace-id" not in request_headers, "x-datadog-trace-id should not be present when tracing disabled" + assert "x-datadog-parent-id" not in request_headers, "x-datadog-parent-id should not be present when tracing disabled" + assert "x-datadog-sampling-priority" not in request_headers, "x-datadog-sampling-priority should not be present when tracing disabled" + + finally: + page.close() + finally: + context.close() + finally: + browser.close() + + finally: + patch.unpatch() + + finally: + # Clean up server + server.shutdown() + server.server_close() + + def test_playwright_headers_injected_outside_test_context(self, playwright): + """Test that headers are injected with normal priority outside test context.""" + import socket + + p = playwright + tracer = pytest.importorskip("ddtrace").tracer + config.playwright["distributed_tracing"] = True + + # Clear any previous captured headers + HeaderCaptureHandler.clear_captured_headers() + + # Find an available port + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(('', 0)) + port = s.getsockname()[1] + + # Start a test HTTP server + server = HTTPServer(('localhost', port), HeaderCaptureHandler) + server_thread = threading.Thread(target=server.serve_forever, daemon=True) + server_thread.start() + + # Give server time to start + time.sleep(0.1) + + try: + # Patch Playwright + patch.patch() + + # Create a regular span (not test context) + with tracer.trace("regular_browser_operation") as regular_span: + regular_span.context.sampling_priority = 1 # Set normal priority + + try: + # Try to launch browser - skip if browsers not available + try: + browser = p.chromium.launch(headless=True) + except Exception as e: + error_msg = str(e) + if "Executable doesn't exist" in error_msg or "Host system is missing dependencies" in error_msg: + pytest.skip("Playwright browsers not available - skipping browser test") + raise + + try: + context = browser.new_context() + try: + page = context.new_page() + try: + # Navigate to our test server + page.goto(f"http://localhost:{port}/") + + # Wait a bit for the request to complete + time.sleep(0.2) + + # Verify that headers were captured + assert 
len(HeaderCaptureHandler.captured_headers) > 0, "No HTTP requests were captured" + + # Check the headers from the request + request_headers = HeaderCaptureHandler.captured_headers[0] + + # Verify Datadog headers are present + assert "x-datadog-trace-id" in request_headers, "Missing x-datadog-trace-id header" + assert "x-datadog-parent-id" in request_headers, "Missing x-datadog-parent-id header" + assert "x-datadog-sampling-priority" in request_headers, "Missing x-datadog-sampling-priority header" + + # Verify the sampling priority is 1 (normal priority, not 114) + sampling_priority = request_headers["x-datadog-sampling-priority"] + assert sampling_priority == "1", f"Expected sampling priority 1, got {sampling_priority}" + + finally: + page.close() + finally: + context.close() + finally: + browser.close() + + finally: + patch.unpatch() + + finally: + # Clean up server + server.shutdown() + server.server_close() diff --git a/tests/contrib/playwright/test_playwright_patch.py b/tests/contrib/playwright/test_playwright_patch.py new file mode 100644 index 00000000000..b4ddfcb34d9 --- /dev/null +++ b/tests/contrib/playwright/test_playwright_patch.py @@ -0,0 +1,203 @@ +import pytest + +from ddtrace import config +from ddtrace.contrib.internal.playwright import patch + + +class TestPlaywrightPatch: + def test_patch_unpatch(self): + """Test that patch and unpatch work without errors.""" + # Test patching + patch.patch() + + # Verify patch flag is set + try: + import playwright + + assert getattr(playwright, "_datadog_patch", False) + except ImportError: + pytest.skip("Playwright not available") + + # Test unpatching + patch.unpatch() + + # Verify patch flag is cleared + try: + import playwright + + assert not getattr(playwright, "_datadog_patch", False) + except ImportError: + pass + + def test_get_version(self): + """Test version detection.""" + version = patch.get_version() + assert isinstance(version, str) + + def test_supported_versions(self): + """Test supported versions.""" + versions = patch._supported_versions() + assert isinstance(versions, dict) + assert "playwright" in versions + + def test_config_initialization(self): + """Test that playwright config is properly initialized.""" + # Config should be accessible + assert hasattr(config, "playwright") + assert isinstance(config.playwright, dict) + + # Should have distributed_tracing setting + assert "distributed_tracing" in config.playwright + + @pytest.mark.skipif( + not hasattr(patch, "_inject_distributed_tracing_headers"), + reason="Integration may not be fully loaded", + ) + def test_inject_distributed_tracing_headers(self): + """Test that distributed tracing headers are injected correctly.""" + from ddtrace.contrib.internal.playwright.patch import _inject_distributed_tracing_headers + + # Test with distributed tracing enabled + config.playwright["distributed_tracing"] = True + headers = {} + _inject_distributed_tracing_headers(headers) + + # Should have injected Datadog headers when span is active + # Note: headers may not be present if no active span + assert isinstance(headers, dict) + + def test_inject_distributed_tracing_headers_disabled(self): + """Test that headers are not injected when distributed tracing is disabled.""" + from ddtrace.contrib.internal.playwright.patch import _inject_distributed_tracing_headers + + # Disable distributed tracing + config.playwright["distributed_tracing"] = False + headers = {} + original_count = len(headers) + + _inject_distributed_tracing_headers(headers) + + # No headers should be added + assert 
len(headers) == original_count + + def test_sampling_priority_override_in_test_context(self): + """Test that sampling priority 114 is used in test contexts.""" + from ddtrace.propagation.http import HTTPPropagator + from ddtrace.ext.test import TYPE as TEST_TYPE + + # Import tracer + tracer = pytest.importorskip("ddtrace").tracer + + # Test outside of test context - should use normal sampling priority + headers_normal = {} + with tracer.trace("normal_span") as span: + span.context.sampling_priority = 1 # Set a normal priority + HTTPPropagator.inject(span.context, headers_normal) + + normal_priority = headers_normal.get("x-datadog-sampling-priority") + + # Test in test context - should use priority 114 + headers_test = {} + with tracer.trace("test_span") as span: + span.set_tag(TEST_TYPE, "test") + span.context.sampling_priority = 1 # This should be overridden + HTTPPropagator.inject(span.context, headers_test) + + test_priority = headers_test.get("x-datadog-sampling-priority") + + # Test context should override to priority 114 + assert test_priority == "114", f"Expected priority 114 in test context, got {test_priority}" + + # Normal context should not be 114 (could be 1 or None) + assert normal_priority != "114", f"Normal context should not have priority 114, got {normal_priority}" + + def test_playwright_header_injection_with_test_context(self): + """Test that Playwright header injection works with test context priority override.""" + from ddtrace.contrib.internal.playwright.patch import _inject_distributed_tracing_headers + from ddtrace.ext.test import TYPE as TEST_TYPE + + tracer = pytest.importorskip("ddtrace").tracer + + # Enable distributed tracing + config.playwright["distributed_tracing"] = True + + # Test in regular context + headers_regular = {} + with tracer.trace("regular_operation") as span: + span.context.sampling_priority = 2 + _inject_distributed_tracing_headers(headers_regular) + + # Test in test context + headers_test = {} + with tracer.trace("test_operation") as span: + span.set_tag(TEST_TYPE, "test") + span.context.sampling_priority = 2 # Should be overridden + _inject_distributed_tracing_headers(headers_test) + + # Both should have trace headers + assert "x-datadog-trace-id" in headers_regular + assert "x-datadog-parent-id" in headers_regular + assert "x-datadog-trace-id" in headers_test + assert "x-datadog-parent-id" in headers_test + + # Test context should have priority 114 + assert headers_test.get("x-datadog-sampling-priority") == "114" + + # Regular context should have original priority (2) + assert headers_regular.get("x-datadog-sampling-priority") == "2" + + def test_end_to_end_playwright_with_test_context(self): + """End-to-end test of Playwright integration with test context sampling priority.""" + from ddtrace.contrib.internal.playwright.patch import _inject_distributed_tracing_headers + from ddtrace.propagation.http import HTTPPropagator + from ddtrace.ext.test import TYPE as TEST_TYPE + + tracer = pytest.importorskip("ddtrace").tracer + config.playwright["distributed_tracing"] = True + + # Test 1: Playwright headers in regular context + headers_regular = {} + with tracer.trace("regular_browser_op") as span: + span.context.sampling_priority = 3 + _inject_distributed_tracing_headers(headers_regular) + + # Test 2: Playwright headers in test context + headers_test = {} + with tracer.trace("test_browser_op") as span: + span.set_tag(TEST_TYPE, "test") + span.context.sampling_priority = 3 # Should be overridden + _inject_distributed_tracing_headers(headers_test) 
+ + # Test 3: Direct HTTPPropagator.inject in test context + headers_direct = {} + with tracer.trace("direct_test_op") as span: + span.set_tag(TEST_TYPE, "test") + span.context.sampling_priority = 7 # Should be overridden + HTTPPropagator.inject(span.context, headers_direct) + + # Verify all contexts have basic trace headers + for name, headers in [("regular", headers_regular), ("test", headers_test), ("direct", headers_direct)]: + assert "x-datadog-trace-id" in headers, f"{name} context missing trace-id" + assert "x-datadog-parent-id" in headers, f"{name} context missing parent-id" + + # Verify sampling priorities + assert headers_regular.get("x-datadog-sampling-priority") == "3", "Regular context should keep original priority" + assert headers_test.get("x-datadog-sampling-priority") == "114", "Test context should override to 114" + assert headers_direct.get("x-datadog-sampling-priority") == "114", "Direct injection in test context should override to 114" + + def test_playwright_config_isolation(self): + """Test that playwright config changes don't affect global state.""" + original_value = config.playwright.get("distributed_tracing", True) + + try: + # Change config + config.playwright["distributed_tracing"] = False + assert config.playwright["distributed_tracing"] is False + + # Change back + config.playwright["distributed_tracing"] = True + assert config.playwright["distributed_tracing"] is True + + finally: + # Restore original value + config.playwright["distributed_tracing"] = original_value From 3a451b3f1d4b67fd05575ce0936e212e5f9287fd Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 17:43:27 +0100 Subject: [PATCH 02/13] simplify --- .../contrib/internal/playwright/__init__.py | 9 +- ddtrace/contrib/internal/playwright/patch.py | 161 +++++------ ddtrace/propagation/http.py | 252 ++++++------------ .../playwright/test_playwright_integration.py | 62 +++-- .../playwright/test_playwright_js_requests.py | 158 +++++++++++ .../playwright/test_playwright_patch.py | 55 ++-- 6 files changed, 376 insertions(+), 321 deletions(-) create mode 100644 tests/contrib/playwright/test_playwright_js_requests.py diff --git a/ddtrace/contrib/internal/playwright/__init__.py b/ddtrace/contrib/internal/playwright/__init__.py index 4229020c791..669e4448057 100644 --- a/ddtrace/contrib/internal/playwright/__init__.py +++ b/ddtrace/contrib/internal/playwright/__init__.py @@ -43,12 +43,9 @@ into all browser requests made through Playwright. This enables end-to-end tracing from your application through to browser-initiated backend requests. -The integration uses a multi-layered approach to ensure headers are injected -regardless of how Playwright is used: - -1. **Context-level injection**: Headers are added to BrowserContext.extra_http_headers -2. **Route interception**: A catch-all route handler intercepts all requests and injects headers -3. **Request-level patching**: Individual request objects are patched as needed +The integration uses a dual approach to ensure headers are injected: +1. **Context-level injection**: Headers added to BrowserContext.extra_http_headers (navigation) +2. 
**Route interception**: A catch-all route handler for JavaScript-initiated requests (fetch, XHR) Headers injected include: - ``x-datadog-trace-id``: The lower 64-bits of the 128-bit trace-id in decimal format diff --git a/ddtrace/contrib/internal/playwright/patch.py b/ddtrace/contrib/internal/playwright/patch.py index 0822f242845..1aaffc8361e 100644 --- a/ddtrace/contrib/internal/playwright/patch.py +++ b/ddtrace/contrib/internal/playwright/patch.py @@ -35,54 +35,35 @@ def _supported_versions() -> Dict[str, str]: return {"playwright": "*"} -def _inject_distributed_tracing_headers(headers: Dict[str, str], context=None, is_test_context=None) -> None: +def _get_tracing_headers() -> Dict[str, str]: """ - Inject Datadog distributed tracing headers into the provided headers dict. + Get distributed tracing headers for the current span context. - This uses the provided context (if any), otherwise falls back to the current active span context. - If no active span exists, creates a temporary span for header injection. + Returns a dictionary of headers to inject into HTTP requests. + If no span is active, creates a temporary span. """ if not config.playwright.get("distributed_tracing", True): - return + return {} + headers = {} try: - # Use provided context, or get the current active span - span_context = context - - # Use the explicitly provided is_test_context flag, or try to detect it - if is_test_context is None and span_context is not None: - # Check if the provided context is from a test span by walking up the span hierarchy - # This is needed because the context object itself doesn't store the test.type tag - try: - current_span = tracer.current_span() - while current_span: - if current_span.context == span_context and current_span.get_tag('test.type') == 'test': - is_test_context = True - break - current_span = current_span._parent - except Exception: - pass - elif span_context is None: - current_span = tracer.current_span() - if current_span: - span_context = current_span.context - - if span_context: - # Use the span context to inject headers, passing the test context flag - HTTPPropagator.inject(span_context, headers, is_test_context=is_test_context) + current_span = tracer.current_span() + if current_span: + HTTPPropagator.inject(current_span.context, headers) else: # No active span, create a temporary span for header injection with tracer.trace("playwright.browser.request", span_type=SpanTypes.HTTP) as span: span._set_tag_str(SPAN_KIND, "client") span._set_tag_str("component", config.playwright.integration_name) - HTTPPropagator.inject(span.context, headers, is_test_context=is_test_context) - + HTTPPropagator.inject(span.context, headers) except Exception as e: - log.debug("Failed to inject distributed tracing headers: %s", e) + log.debug("Failed to get distributed tracing headers: %s", e) + + return headers def _patch_browser_context_new_context(): - """Patch Browser.new_context to inject headers at the context level.""" + """Patch Browser.new_context to inject distributed tracing headers.""" try: from playwright.sync_api import Browser except ImportError: @@ -92,31 +73,65 @@ def _patch_browser_context_new_context(): original_new_context = Browser.new_context def _wrapped_new_context(*args, **kwargs): - # Capture the current span context at the time of context creation - # This ensures test context is preserved for async browser requests - current_span = tracer.current_span() - test_context = current_span.context if current_span else None + # Get distributed tracing headers for current 
context + dd_headers = _get_tracing_headers() - # Inject headers into extra_http_headers - headers = kwargs.setdefault("extra_http_headers", {}) - is_test_context_flag = current_span and current_span.get_tag('test.type') == 'test' - _inject_distributed_tracing_headers(headers, test_context, is_test_context_flag) + # Add headers to extra_http_headers (for navigation requests) + if dd_headers: + extra_headers = kwargs.setdefault("extra_http_headers", {}) + extra_headers.update(dd_headers) - # Create the context + # Create the browser context context = original_new_context(*args, **kwargs) - # Store the test context and test flag on the context for use by route handlers - context._dd_test_context = test_context - context._dd_is_test_context = current_span and current_span.get_tag('test.type') == 'test' - - # Also install a route handler as a fallback - _install_route_handler(context) + # Store headers on context for route handler to reuse + if dd_headers: + context._dd_tracing_headers = dd_headers + _install_route_handler(context) return context Browser.new_context = _wrapped_new_context +def _install_route_handler(context) -> None: + """ + Install route handler to inject headers into JavaScript-initiated requests. + + JavaScript fetch() and XHR requests don't inherit extra_http_headers, + so we intercept them via route handler and inject headers manually. + """ + if hasattr(context, "_dd_route_handler_installed"): + return + + try: + + def _inject_headers_handler(route, request): + """Inject distributed tracing headers into the request.""" + try: + # Get request headers and merge in our tracing headers + headers = dict(getattr(request, "headers", {}) or {}) + headers.update(getattr(context, "_dd_tracing_headers", {})) + + # Continue request with merged headers + route.continue_(headers=headers) + + except Exception as e: + # Fallback: continue without modification if injection fails + log.debug("Failed to inject headers in route handler: %s", e) + try: + route.continue_() + except Exception: + pass + + # Install catch-all route handler + context.route("**/*", _inject_headers_handler) + context._dd_route_handler_installed = True + + except Exception as e: + log.debug("Failed to install route handler: %s", e) + + def _patch_api_request_new_context(): """Patch playwright.request.new_context for API requests.""" try: @@ -128,9 +143,11 @@ def _patch_api_request_new_context(): original_new_context = playwright.request.new_context def _wrapped_api_new_context(*args, **kwargs): - # Inject headers into extra_http_headers for API requests - headers = kwargs.setdefault("extra_http_headers", {}) - _inject_distributed_tracing_headers(headers) + # Get and inject distributed tracing headers + dd_headers = _get_tracing_headers() + if dd_headers: + extra_headers = kwargs.setdefault("extra_http_headers", {}) + extra_headers.update(dd_headers) return original_new_context(*args, **kwargs) @@ -140,48 +157,6 @@ def _wrapped_api_new_context(*args, **kwargs): log.debug("Failed to patch API request context: %s", e) -def _install_route_handler(context) -> None: - """ - Install a catch-all route handler on the context to inject headers into all requests. - - This ensures headers are injected even if the context was created without - extra_http_headers or if individual requests override headers. 
- """ - if not hasattr(context, "_dd_route_handler_installed"): - try: - - def _inject_headers(route, request): - """Route handler that injects distributed tracing headers into each request.""" - try: - # Get existing headers - headers = dict(getattr(request, "headers", {}) or {}) - - # Use the stored test context and flag from when the browser context was created - test_context = getattr(context, "_dd_test_context", None) - is_test_context = getattr(context, "_dd_is_test_context", None) - - # Inject our distributed tracing headers - _inject_distributed_tracing_headers(headers, test_context, is_test_context) - - # Continue the request with injected headers - route.continue_(headers=headers) - - except Exception as e: - # Fallback: continue without headers if injection fails - log.debug("Failed to inject headers in route handler: %s", e) - try: - route.continue_() - except Exception: - pass - - # Install catch-all route handler - context.route("**/*", _inject_headers) - context._dd_route_handler_installed = True - - except Exception as e: - log.debug("Failed to install route handler: %s", e) - - def patch() -> None: """Apply the Playwright integration patch.""" try: diff --git a/ddtrace/propagation/http.py b/ddtrace/propagation/http.py index a6cc651b847..afba0c81b61 100644 --- a/ddtrace/propagation/http.py +++ b/ddtrace/propagation/http.py @@ -38,9 +38,7 @@ from ..internal.constants import BAGGAGE_TAG_PREFIX from ..internal.constants import DD_TRACE_BAGGAGE_MAX_BYTES from ..internal.constants import DD_TRACE_BAGGAGE_MAX_ITEMS -from ..internal.constants import ( - HIGHER_ORDER_TRACE_ID_BITS as _HIGHER_ORDER_TRACE_ID_BITS, -) +from ..internal.constants import HIGHER_ORDER_TRACE_ID_BITS as _HIGHER_ORDER_TRACE_ID_BITS from ..internal.constants import LAST_DD_PARENT_ID_KEY from ..internal.constants import MAX_UINT_64BITS as _MAX_UINT_64BITS from ..internal.constants import PROPAGATION_STYLE_B3_MULTI @@ -57,16 +55,27 @@ log = get_logger(__name__) -def _is_test_context() -> bool: +def _is_test_context(span_context=None) -> bool: """ - Check if the current span is part of a test trace. + Check if the current span or provided context is part of a test trace. This checks if any span in the current trace has the test.type tag set to "test", indicating we're in a test execution context. + + Args: + span_context: Optional context to check. If provided and the current span + is in the same trace (same trace_id), checks if it's a test trace. 
""" try: - # Check the current span and walk up the trace to find any test span span = core.tracer.current_span() + if span is None: + return False + + # If span_context provided, only check if they're in the same trace + if span_context is not None and span.trace_id != span_context.trace_id: + return False + + # Walk up the span hierarchy looking for test.type == "test" while span: if span.get_tag(TEST_TYPE) == "test": return True @@ -84,9 +93,7 @@ def _is_test_context() -> bool: _HTTP_BAGGAGE_PREFIX: Literal["ot-baggage-"] = "ot-baggage-" HTTP_HEADER_TRACE_ID: Literal["x-datadog-trace-id"] = "x-datadog-trace-id" HTTP_HEADER_PARENT_ID: Literal["x-datadog-parent-id"] = "x-datadog-parent-id" -HTTP_HEADER_SAMPLING_PRIORITY: Literal["x-datadog-sampling-priority"] = ( - "x-datadog-sampling-priority" -) +HTTP_HEADER_SAMPLING_PRIORITY: Literal["x-datadog-sampling-priority"] = "x-datadog-sampling-priority" HTTP_HEADER_ORIGIN: Literal["x-datadog-origin"] = "x-datadog-origin" _HTTP_HEADER_B3_SINGLE: Literal["b3"] = "b3" _HTTP_HEADER_B3_TRACE_ID: Literal["x-b3-traceid"] = "x-b3-traceid" @@ -108,13 +115,9 @@ def _possible_header(header): # versions of these headers POSSIBLE_HTTP_HEADER_TRACE_IDS = _possible_header(HTTP_HEADER_TRACE_ID) POSSIBLE_HTTP_HEADER_PARENT_IDS = _possible_header(HTTP_HEADER_PARENT_ID) -POSSIBLE_HTTP_HEADER_SAMPLING_PRIORITIES = _possible_header( - HTTP_HEADER_SAMPLING_PRIORITY -) +POSSIBLE_HTTP_HEADER_SAMPLING_PRIORITIES = _possible_header(HTTP_HEADER_SAMPLING_PRIORITY) POSSIBLE_HTTP_HEADER_ORIGIN = _possible_header(HTTP_HEADER_ORIGIN) -_POSSIBLE_HTTP_HEADER_TAGS = frozenset( - [_HTTP_HEADER_TAGS, get_wsgi_header(_HTTP_HEADER_TAGS).lower()] -) +_POSSIBLE_HTTP_HEADER_TAGS = frozenset([_HTTP_HEADER_TAGS, get_wsgi_header(_HTTP_HEADER_TAGS).lower()]) _POSSIBLE_HTTP_HEADER_B3_SINGLE_HEADER = _possible_header(_HTTP_HEADER_B3_SINGLE) _POSSIBLE_HTTP_HEADER_B3_TRACE_IDS = _possible_header(_HTTP_HEADER_B3_TRACE_ID) _POSSIBLE_HTTP_HEADER_B3_SPAN_IDS = _possible_header(_HTTP_HEADER_B3_SPAN_ID) @@ -245,9 +248,7 @@ def _extract_meta(tags_value): meta = { "_dd.propagation_error": "extract_max_size", } - log.warning( - "failed to decode x-datadog-tags: %r", tags_value, exc_info=True - ) + log.warning("failed to decode x-datadog-tags: %r", tags_value, exc_info=True) except TagsetDecodeError: meta = { "_dd.propagation_error": "decoding_error", @@ -263,9 +264,7 @@ def _put_together_trace_id(trace_id_hob_hex: str, low_64_bits: int) -> int: @staticmethod def _higher_order_is_valid(upper_64_bits: str) -> bool: try: - if len(upper_64_bits) != 16 or not ( - int(upper_64_bits, 16) or (upper_64_bits.islower()) - ): + if len(upper_64_bits) != 16 or not (int(upper_64_bits, 16) or (upper_64_bits.islower())): raise ValueError except ValueError: return False @@ -273,30 +272,24 @@ def _higher_order_is_valid(upper_64_bits: str) -> bool: return True @staticmethod - def _inject(span_context, headers, is_test_context=None): - # type: (Context, Dict[str, str], Optional[bool]) -> None + def _inject(span_context, headers): + # type: (Context, Dict[str, str]) -> None if span_context.trace_id is None or span_context.span_id is None: log.debug("tried to inject invalid context %r", span_context) return # When apm tracing is not enabled, only distributed traces with the `_dd.p.ts` tag # are propagated. If the tag is not present, we should not propagate downstream. 
- if not asm_config._apm_tracing_enabled and ( - APPSEC.PROPAGATION_HEADER not in span_context._meta - ): + if not asm_config._apm_tracing_enabled and (APPSEC.PROPAGATION_HEADER not in span_context._meta): return if span_context.trace_id > _MAX_UINT_64BITS: # set lower order 64 bits in `x-datadog-trace-id` header. For backwards compatibility these # bits should be converted to a base 10 integer. - headers[HTTP_HEADER_TRACE_ID] = str( - _get_64_lowest_order_bits_as_int(span_context.trace_id) - ) + headers[HTTP_HEADER_TRACE_ID] = str(_get_64_lowest_order_bits_as_int(span_context.trace_id)) # set higher order 64 bits in `_dd.p.tid` to propagate the full 128 bit trace id. # Note - The higher order bits must be encoded in hex - span_context._meta[_HIGHER_ORDER_TRACE_ID_BITS] = ( - _get_64_highest_order_bits_as_hex(span_context.trace_id) - ) + span_context._meta[_HIGHER_ORDER_TRACE_ID_BITS] = _get_64_highest_order_bits_as_hex(span_context.trace_id) else: headers[HTTP_HEADER_TRACE_ID] = str(span_context.trace_id) @@ -304,8 +297,8 @@ def _inject(span_context, headers, is_test_context=None): sampling_priority = span_context.sampling_priority # Use special sampling priority 114 for test contexts - # Check if explicitly marked as test context, otherwise fall back to current span check - if is_test_context is True or (is_test_context is None and _is_test_context()): + # Pass the span_context so we can check if it's from a test span + if _is_test_context(span_context): sampling_priority = 114 # Propagate priority only if defined @@ -325,9 +318,7 @@ def _inject(span_context, headers, is_test_context=None): # Only propagate trace tags which means ignoring the _dd.origin tags_to_encode = { - k: v - for k, v in span_context._meta.items() - if _DatadogMultiHeader._is_valid_datadog_trace_tag_key(k) + k: v for k, v in span_context._meta.items() if _DatadogMultiHeader._is_valid_datadog_trace_tag_key(k) } if tags_to_encode: @@ -346,9 +337,7 @@ def _inject(span_context, headers, is_test_context=None): log.warning("failed to encode x-datadog-tags", exc_info=True) # Record telemetry for successful injection - _record_http_telemetry( - "context_header_style.injected", PROPAGATION_STYLE_DATADOG - ) + _record_http_telemetry("context_header_style.injected", PROPAGATION_STYLE_DATADOG) @staticmethod def _extract(headers): @@ -395,13 +384,9 @@ def _extract(headers): trace_id_hob_hex = meta[_HIGHER_ORDER_TRACE_ID_BITS] if _DatadogMultiHeader._higher_order_is_valid(trace_id_hob_hex): if config._128_bit_trace_id_enabled: - trace_id = _DatadogMultiHeader._put_together_trace_id( - trace_id_hob_hex, trace_id - ) + trace_id = _DatadogMultiHeader._put_together_trace_id(trace_id_hob_hex, trace_id) else: - meta["_dd.propagation_error"] = "malformed_tid {}".format( - trace_id_hob_hex - ) + meta["_dd.propagation_error"] = "malformed_tid {}".format(trace_id_hob_hex) del meta[_HIGHER_ORDER_TRACE_ID_BITS] log.warning( "malformed_tid: %s. 
Failed to decode trace id from http headers", @@ -490,8 +475,8 @@ class _B3MultiHeader: """ @staticmethod - def _inject(span_context, headers, is_test_context=None): - # type: (Context, Dict[str, str], Optional[bool]) -> None + def _inject(span_context, headers): + # type: (Context, Dict[str, str]) -> None if span_context.trace_id is None or span_context.span_id is None: log.debug("tried to inject invalid context %r", span_context) return @@ -509,9 +494,7 @@ def _inject(span_context, headers, is_test_context=None): headers[_HTTP_HEADER_B3_FLAGS] = "1" # Record telemetry for successful injection - _record_http_telemetry( - "context_header_style.injected", PROPAGATION_STYLE_B3_MULTI - ) + _record_http_telemetry("context_header_style.injected", PROPAGATION_STYLE_B3_MULTI) @staticmethod def _extract(headers): @@ -612,8 +595,8 @@ class _B3SingleHeader: """ @staticmethod - def _inject(span_context, headers, is_test_context=None): - # type: (Context, Dict[str, str], Optional[bool]) -> None + def _inject(span_context, headers): + # type: (Context, Dict[str, str]) -> None if span_context.trace_id is None or span_context.span_id is None: log.debug("tried to inject invalid context %r", span_context) return @@ -633,16 +616,12 @@ def _inject(span_context, headers, is_test_context=None): headers[_HTTP_HEADER_B3_SINGLE] = single_header # Record telemetry for successful injection - _record_http_telemetry( - "context_header_style.injected", PROPAGATION_STYLE_B3_SINGLE - ) + _record_http_telemetry("context_header_style.injected", PROPAGATION_STYLE_B3_SINGLE) @staticmethod def _extract(headers): # type: (Dict[str, str]) -> Optional[Context] - single_header = _extract_header_value( - _POSSIBLE_HTTP_HEADER_B3_SINGLE_HEADER, headers - ) + single_header = _extract_header_value(_POSSIBLE_HTTP_HEADER_B3_SINGLE_HEADER, headers) if not single_header: return None @@ -774,14 +753,9 @@ def _get_traceparent_values(tp): raise ValueError("ff is an invalid traceparent version: %s" % tp) elif version != "00": # currently 00 is the only version format, but if future versions come up we may need to add changes - log.warning( - "unsupported traceparent version:%r, still attempting to parse", version - ) + log.warning("unsupported traceparent version:%r, still attempting to parse", version) elif version == "00" and future_vals is not None: - raise ValueError( - "Traceparents with the version `00` should contain 4 values delimited by a dash: %s" - % tp - ) + raise ValueError("Traceparents with the version `00` should contain 4 values delimited by a dash: %s" % tp) trace_id = _hex_id_to_dd_id(trace_id_hex) span_id = _hex_id_to_dd_id(span_id_hex) @@ -834,9 +808,7 @@ def _get_tracestate_values(ts_l): # need to convert from t. to _dd.p. 
other_propagated_tags = { - "_dd.p.%s" % k[2:]: _TraceContext.decode_tag_val(v) - for (k, v) in dd.items() - if k.startswith("t.") + "_dd.p.%s" % k[2:]: _TraceContext.decode_tag_val(v) for (k, v) in dd.items() if k.startswith("t.") } return sampling_priority_ts_int, other_propagated_tags, origin, lpid @@ -929,20 +901,14 @@ def _get_context(trace_id, span_id, trace_flag, ts, meta=None): tracestate_values = None if tracestate_values: - sampling_priority_ts, other_propagated_tags, origin, lpid = ( - tracestate_values - ) + sampling_priority_ts, other_propagated_tags, origin, lpid = tracestate_values meta.update(other_propagated_tags.items()) if lpid: meta[LAST_DD_PARENT_ID_KEY] = lpid - sampling_priority = _TraceContext._get_sampling_priority( - trace_flag, sampling_priority_ts, origin - ) + sampling_priority = _TraceContext._get_sampling_priority(trace_flag, sampling_priority_ts, origin) else: - log.debug( - "no dd list member in tracestate from incoming request: %r", ts - ) + log.debug("no dd list member in tracestate from incoming request: %r", ts) return Context( trace_id=trace_id, @@ -953,8 +919,8 @@ def _get_context(trace_id, span_id, trace_flag, ts, meta=None): ) @staticmethod - def _inject(span_context, headers, is_test_context=None): - # type: (Context, Dict[str, str], Optional[bool]) -> None + def _inject(span_context, headers): + # type: (Context, Dict[str, str]) -> None tp = span_context._traceparent if tp: headers[_HTTP_HEADER_TRACEPARENT] = tp @@ -966,37 +932,27 @@ def _inject(span_context, headers, is_test_context=None): elif LAST_DD_PARENT_ID_KEY in span_context._meta: # Datadog Span is not active, propagate the last datadog span_id span_id = int(span_context._meta[LAST_DD_PARENT_ID_KEY], 16) - headers[_HTTP_HEADER_TRACESTATE] = w3c_tracestate_add_p( - span_context._tracestate, span_id - ) + headers[_HTTP_HEADER_TRACESTATE] = w3c_tracestate_add_p(span_context._tracestate, span_id) else: headers[_HTTP_HEADER_TRACESTATE] = span_context._tracestate # Record telemetry for successful injection - _record_http_telemetry( - "context_header_style.injected", _PROPAGATION_STYLE_W3C_TRACECONTEXT - ) + _record_http_telemetry("context_header_style.injected", _PROPAGATION_STYLE_W3C_TRACECONTEXT) class _BaggageHeader: """Helper class to inject/extract Baggage Headers""" - SAFE_CHARACTERS_KEY = ( - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!#$%&'*+-.^_`|~" - ) + SAFE_CHARACTERS_KEY = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!#$%&'*+-.^_`|~" SAFE_CHARACTERS_VALUE = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!#$%&'()*+-./:<>?@[]^_`{|}~" @staticmethod def _encode_key(key: str) -> str: - return urllib.parse.quote( - str(key).strip(), safe=_BaggageHeader.SAFE_CHARACTERS_KEY - ) + return urllib.parse.quote(str(key).strip(), safe=_BaggageHeader.SAFE_CHARACTERS_KEY) @staticmethod def _encode_value(value: str) -> str: - return urllib.parse.quote( - str(value).strip(), safe=_BaggageHeader.SAFE_CHARACTERS_VALUE - ) + return urllib.parse.quote(str(value).strip(), safe=_BaggageHeader.SAFE_CHARACTERS_VALUE) @staticmethod def _inject(span_context: Context, headers: Dict[str, str]) -> None: @@ -1020,9 +976,7 @@ def _inject(span_context: Context, headers: Dict[str, str]) -> None: total_size = 0 for key, value in baggage_items: item = f"{_BaggageHeader._encode_key(key)}={_BaggageHeader._encode_value(value)}" - item_size = len(item.encode("utf-8")) + ( - 1 if encoded_items else 0 - ) # +1 for comma if not first item + item_size = 
len(item.encode("utf-8")) + (1 if encoded_items else 0) # +1 for comma if not first item if total_size + item_size > DD_TRACE_BAGGAGE_MAX_BYTES: log.warning("Baggage header size exceeded, dropping excess items") # Record telemetry for baggage header size exceeding limit @@ -1040,9 +994,7 @@ def _inject(span_context: Context, headers: Dict[str, str]) -> None: headers[_HTTP_HEADER_BAGGAGE] = header_value # Record telemetry for successful baggage injection - _record_http_telemetry( - "context_header_style.injected", _PROPAGATION_STYLE_BAGGAGE - ) + _record_http_telemetry("context_header_style.injected", _PROPAGATION_STYLE_BAGGAGE) except Exception: log.warning("Failed to encode and inject baggage header") @@ -1104,9 +1056,7 @@ def _get_sampled_injection_context( appropriate span and triggers sampling before returning the injection context. """ # Extract context for header injection (non_active_span takes precedence) - injection_context = ( - trace_info.context if isinstance(trace_info, Span) else trace_info - ) + injection_context = trace_info.context if isinstance(trace_info, Span) else trace_info # Find root span for sampling decisions if injection_context.sampling_priority is not None: @@ -1126,9 +1076,7 @@ def _get_sampled_injection_context( elif isinstance(trace_info, Span): # Use span's root for sampling sampling_span = trace_info._local_root - elif ( - current_root := core.tracer.current_root_span() - ) and current_root.trace_id == trace_info.trace_id: + elif (current_root := core.tracer.current_root_span()) and current_root.trace_id == trace_info.trace_id: # Get the local root span for the current trace (if it is active, otherwise we can't sample) sampling_span = current_root @@ -1163,23 +1111,15 @@ def _extract_configured_contexts_avail( return contexts, styles_w_ctx @staticmethod - def _context_to_span_link( - context: Context, style: str, reason: str - ) -> Optional[SpanLink]: + def _context_to_span_link(context: Context, style: str, reason: str) -> Optional[SpanLink]: # encoding expects at least trace_id and span_id if context.span_id and context.trace_id: return SpanLink( context.trace_id, context.span_id, - flags=( - 1 - if context.sampling_priority and context.sampling_priority > 0 - else 0 - ), + flags=(1 if context.sampling_priority and context.sampling_priority > 0 else 0), tracestate=( - context._meta.get(W3C_TRACESTATE_KEY, "") - if style == _PROPAGATION_STYLE_W3C_TRACECONTEXT - else None + context._meta.get(W3C_TRACESTATE_KEY, "") if style == _PROPAGATION_STYLE_W3C_TRACECONTEXT else None ), attributes={ "reason": reason, @@ -1208,30 +1148,19 @@ def _resolve_contexts(contexts, styles_w_ctx, normalized_headers): # add the tracestate to the primary context elif style_w_ctx == _PROPAGATION_STYLE_W3C_TRACECONTEXT: # extract and add the raw ts value to the primary_context - ts = _extract_header_value( - _POSSIBLE_HTTP_HEADER_TRACESTATE, normalized_headers - ) + ts = _extract_header_value(_POSSIBLE_HTTP_HEADER_TRACESTATE, normalized_headers) if ts: primary_context._meta[W3C_TRACESTATE_KEY] = ts - if ( - primary_context.trace_id == context.trace_id - and primary_context.span_id != context.span_id - ): + if primary_context.trace_id == context.trace_id and primary_context.span_id != context.span_id: dd_context = None if PROPAGATION_STYLE_DATADOG in styles_w_ctx: - dd_context = contexts[ - styles_w_ctx.index(PROPAGATION_STYLE_DATADOG) - ] + dd_context = contexts[styles_w_ctx.index(PROPAGATION_STYLE_DATADOG)] if LAST_DD_PARENT_ID_KEY in context._meta: # tracecontext headers contain a 
p value, ensure this value is sent to backend - primary_context._meta[LAST_DD_PARENT_ID_KEY] = context._meta[ - LAST_DD_PARENT_ID_KEY - ] + primary_context._meta[LAST_DD_PARENT_ID_KEY] = context._meta[LAST_DD_PARENT_ID_KEY] elif dd_context: # if p value is not present in tracestate, use the parent id from the datadog headers - primary_context._meta[LAST_DD_PARENT_ID_KEY] = "{:016x}".format( - dd_context.span_id - ) + primary_context._meta[LAST_DD_PARENT_ID_KEY] = "{:016x}".format(dd_context.span_id) # the span_id in tracecontext takes precedence over the first extracted propagation style primary_context.span_id = context.span_id @@ -1239,7 +1168,7 @@ def _resolve_contexts(contexts, styles_w_ctx, normalized_headers): return primary_context @staticmethod - def inject(context: Union[Context, Span], headers: Dict[str, str], is_test_context=None) -> None: + def inject(context: Union[Context, Span], headers: Dict[str, str]) -> None: """Inject Context attributes that have to be propagated as HTTP headers. Here is an example using `requests`:: @@ -1273,11 +1202,7 @@ def parent_call(): # Handle sampling and get context for header injection span_context = HTTPPropagator._get_sampled_injection_context(context, None) # Log a warning if we cannot determine a sampling decision before injecting headers. - if ( - span_context.span_id - and span_context.trace_id - and span_context.sampling_priority is None - ): + if span_context.span_id and span_context.trace_id and span_context.sampling_priority is None: log.debug( "Sampling decision not available. Downstream spans will not inherit a sampling priority: " "args=(context=%s, ...) detected span context=%s", @@ -1298,21 +1223,18 @@ def parent_call(): log.debug("tried to inject invalid context %r", span_context) return - if ( - config._propagation_http_baggage_enabled is True - and span_context._baggage is not None - ): + if config._propagation_http_baggage_enabled is True and span_context._baggage is not None: for key in span_context._baggage: headers[_HTTP_BAGGAGE_PREFIX + key] = span_context._baggage[key] if PROPAGATION_STYLE_DATADOG in config._propagation_style_inject: - _DatadogMultiHeader._inject(span_context, headers, is_test_context=is_test_context) + _DatadogMultiHeader._inject(span_context, headers) if PROPAGATION_STYLE_B3_MULTI in config._propagation_style_inject: - _B3MultiHeader._inject(span_context, headers, is_test_context) + _B3MultiHeader._inject(span_context, headers) if PROPAGATION_STYLE_B3_SINGLE in config._propagation_style_inject: - _B3SingleHeader._inject(span_context, headers, is_test_context) + _B3SingleHeader._inject(span_context, headers) if _PROPAGATION_STYLE_W3C_TRACECONTEXT in config._propagation_style_inject: - _TraceContext._inject(span_context, headers, is_test_context) + _TraceContext._inject(span_context, headers) @staticmethod def extract(headers): @@ -1351,28 +1273,20 @@ def my_controller(url, headers): context = propagator._extract(normalized_headers) style = prop_style if context: - _record_http_telemetry( - "context_header_style.extracted", prop_style - ) + _record_http_telemetry("context_header_style.extracted", prop_style) if config._propagation_http_baggage_enabled is True: _attach_baggage_to_context(normalized_headers, context) break # loop through all extract propagation styles else: - contexts, styles_w_ctx = ( - HTTPPropagator._extract_configured_contexts_avail( - normalized_headers - ) - ) + contexts, styles_w_ctx = HTTPPropagator._extract_configured_contexts_avail(normalized_headers) # check that styles_w_ctx is 
not empty if styles_w_ctx: style = styles_w_ctx[0] if contexts: - context = HTTPPropagator._resolve_contexts( - contexts, styles_w_ctx, normalized_headers - ) + context = HTTPPropagator._resolve_contexts(contexts, styles_w_ctx, normalized_headers) if config._propagation_http_baggage_enabled is True: _attach_baggage_to_context(normalized_headers, context) @@ -1381,18 +1295,14 @@ def my_controller(url, headers): baggage_context = _BaggageHeader._extract(normalized_headers) if baggage_context._baggage != {}: # Record telemetry for successful baggage extraction - _record_http_telemetry( - "context_header_style.extracted", _PROPAGATION_STYLE_BAGGAGE - ) + _record_http_telemetry("context_header_style.extracted", _PROPAGATION_STYLE_BAGGAGE) if context: context._baggage = baggage_context.get_all_baggage_items() else: context = baggage_context if config._baggage_tag_keys: - raw_keys = [ - k.strip() for k in config._baggage_tag_keys if k.strip() - ] + raw_keys = [k.strip() for k in config._baggage_tag_keys if k.strip()] # wildcard: tag all baggage keys if "*" in raw_keys: tag_keys = baggage_context.get_all_baggage_items().keys() @@ -1400,17 +1310,13 @@ def my_controller(url, headers): tag_keys = raw_keys for stripped_key in tag_keys: - if ( - value := baggage_context.get_baggage_item(stripped_key) - ) is not None: + if (value := baggage_context.get_baggage_item(stripped_key)) is not None: prefixed_key = BAGGAGE_TAG_PREFIX + stripped_key if prefixed_key not in context._meta: context._meta[prefixed_key] = value if config._propagation_behavior_extract == _PROPAGATION_BEHAVIOR_RESTART: - link = HTTPPropagator._context_to_span_link( - context, style, "propagation_behavior_extract" - ) + link = HTTPPropagator._context_to_span_link(context, style, "propagation_behavior_extract") context = Context( baggage=context.get_all_baggage_items(), span_links=[link] if link else [], @@ -1419,7 +1325,5 @@ def my_controller(url, headers): return context except Exception: - log.debug( - "error while extracting context propagation headers", exc_info=True - ) + log.debug("error while extracting context propagation headers", exc_info=True) return Context() diff --git a/tests/contrib/playwright/test_playwright_integration.py b/tests/contrib/playwright/test_playwright_integration.py index e3a062b59b1..c41691cab58 100644 --- a/tests/contrib/playwright/test_playwright_integration.py +++ b/tests/contrib/playwright/test_playwright_integration.py @@ -3,9 +3,12 @@ These tests verify the actual Playwright integration works correctly. """ + +from http.server import BaseHTTPRequestHandler +from http.server import HTTPServer import threading import time -from http.server import BaseHTTPRequestHandler, HTTPServer + import pytest from ddtrace import config @@ -211,9 +214,9 @@ def do_GET(self): # Send a simple response self.send_response(200) - self.send_header('Content-type', 'text/html') + self.send_header("Content-type", "text/html") self.end_headers() - self.wfile.write(b'
<html><body>Test Page</body></html>
') + self.wfile.write(b"
<html><body>Test Page</body></html>
") @classmethod def clear_captured_headers(cls): @@ -227,6 +230,7 @@ class TestPlaywrightHeaderInjectionE2E: def test_playwright_injects_headers_in_browser_requests(self, playwright): """Test that Playwright actually injects Datadog headers into browser HTTP requests.""" import socket + from ddtrace.ext.test import TYPE as TEST_TYPE p = playwright @@ -238,11 +242,11 @@ def test_playwright_injects_headers_in_browser_requests(self, playwright): # Find an available port with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(('', 0)) + s.bind(("", 0)) port = s.getsockname()[1] # Start a test HTTP server - server = HTTPServer(('localhost', port), HeaderCaptureHandler) + server = HTTPServer(("localhost", port), HeaderCaptureHandler) server_thread = threading.Thread(target=server.serve_forever, daemon=True) server_thread.start() @@ -253,7 +257,7 @@ def test_playwright_injects_headers_in_browser_requests(self, playwright): # Patch Playwright patch.patch() - # Create a test span with test.type tag + # Create a test span with test.type tag with tracer.trace("test_browser_header_injection") as test_span: test_span.set_tag(TEST_TYPE, "test") @@ -263,7 +267,10 @@ def test_playwright_injects_headers_in_browser_requests(self, playwright): browser = p.chromium.launch(headless=True) except Exception as e: error_msg = str(e) - if "Executable doesn't exist" in error_msg or "Host system is missing dependencies" in error_msg: + if ( + "Executable doesn't exist" in error_msg + or "Host system is missing dependencies" in error_msg + ): pytest.skip("Playwright browsers not available - skipping browser test") raise @@ -288,11 +295,15 @@ def test_playwright_injects_headers_in_browser_requests(self, playwright): # Verify Datadog headers are present assert "x-datadog-trace-id" in request_headers, "Missing x-datadog-trace-id header" assert "x-datadog-parent-id" in request_headers, "Missing x-datadog-parent-id header" - assert "x-datadog-sampling-priority" in request_headers, "Missing x-datadog-sampling-priority header" + assert "x-datadog-sampling-priority" in request_headers, ( + "Missing x-datadog-sampling-priority header" + ) # Verify the sampling priority is 114 (special test context value) sampling_priority = request_headers["x-datadog-sampling-priority"] - assert sampling_priority == "114", f"Expected sampling priority 114, got {sampling_priority}" + assert sampling_priority == "114", ( + f"Expected sampling priority 114, got {sampling_priority}" + ) # Verify trace and parent IDs are valid trace_id = request_headers["x-datadog-trace-id"] @@ -327,11 +338,11 @@ def test_playwright_headers_not_injected_when_disabled(self, playwright): # Find an available port with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(('', 0)) + s.bind(("", 0)) port = s.getsockname()[1] # Start a test HTTP server - server = HTTPServer(('localhost', port), HeaderCaptureHandler) + server = HTTPServer(("localhost", port), HeaderCaptureHandler) server_thread = threading.Thread(target=server.serve_forever, daemon=True) server_thread.start() @@ -370,9 +381,15 @@ def test_playwright_headers_not_injected_when_disabled(self, playwright): request_headers = HeaderCaptureHandler.captured_headers[0] # Verify Datadog headers are NOT present when tracing is disabled - assert "x-datadog-trace-id" not in request_headers, "x-datadog-trace-id should not be present when tracing disabled" - assert "x-datadog-parent-id" not in request_headers, "x-datadog-parent-id should not be present when tracing disabled" - assert 
"x-datadog-sampling-priority" not in request_headers, "x-datadog-sampling-priority should not be present when tracing disabled" + assert "x-datadog-trace-id" not in request_headers, ( + "x-datadog-trace-id should not be present when tracing disabled" + ) + assert "x-datadog-parent-id" not in request_headers, ( + "x-datadog-parent-id should not be present when tracing disabled" + ) + assert "x-datadog-sampling-priority" not in request_headers, ( + "x-datadog-sampling-priority should not be present when tracing disabled" + ) finally: page.close() @@ -402,11 +419,11 @@ def test_playwright_headers_injected_outside_test_context(self, playwright): # Find an available port with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(('', 0)) + s.bind(("", 0)) port = s.getsockname()[1] # Start a test HTTP server - server = HTTPServer(('localhost', port), HeaderCaptureHandler) + server = HTTPServer(("localhost", port), HeaderCaptureHandler) server_thread = threading.Thread(target=server.serve_forever, daemon=True) server_thread.start() @@ -427,7 +444,10 @@ def test_playwright_headers_injected_outside_test_context(self, playwright): browser = p.chromium.launch(headless=True) except Exception as e: error_msg = str(e) - if "Executable doesn't exist" in error_msg or "Host system is missing dependencies" in error_msg: + if ( + "Executable doesn't exist" in error_msg + or "Host system is missing dependencies" in error_msg + ): pytest.skip("Playwright browsers not available - skipping browser test") raise @@ -451,11 +471,15 @@ def test_playwright_headers_injected_outside_test_context(self, playwright): # Verify Datadog headers are present assert "x-datadog-trace-id" in request_headers, "Missing x-datadog-trace-id header" assert "x-datadog-parent-id" in request_headers, "Missing x-datadog-parent-id header" - assert "x-datadog-sampling-priority" in request_headers, "Missing x-datadog-sampling-priority header" + assert "x-datadog-sampling-priority" in request_headers, ( + "Missing x-datadog-sampling-priority header" + ) # Verify the sampling priority is 1 (normal priority, not 114) sampling_priority = request_headers["x-datadog-sampling-priority"] - assert sampling_priority == "1", f"Expected sampling priority 1, got {sampling_priority}" + assert sampling_priority == "1", ( + f"Expected sampling priority 1, got {sampling_priority}" + ) finally: page.close() diff --git a/tests/contrib/playwright/test_playwright_js_requests.py b/tests/contrib/playwright/test_playwright_js_requests.py new file mode 100644 index 00000000000..9aa3c7797af --- /dev/null +++ b/tests/contrib/playwright/test_playwright_js_requests.py @@ -0,0 +1,158 @@ +""" +Test that Playwright integration works with JavaScript-initiated requests (fetch, XMLHttpRequest). + +This is critical because extra_http_headers should apply to ALL requests from the browser, +not just navigation requests like page.goto(). 
+""" + +from http.server import BaseHTTPRequestHandler +from http.server import HTTPServer +import socket +import threading +import time + +import pytest + +from ddtrace import config +from ddtrace.contrib.internal.playwright import patch +from ddtrace.ext.test import TYPE as TEST_TYPE + + +playwright = pytest.importorskip("playwright") + + +class HeaderCaptureHandler(BaseHTTPRequestHandler): + """HTTP handler that captures request headers.""" + + captured_headers = [] + + def do_GET(self): + """Handle GET requests and capture headers.""" + # Capture all headers + headers = {} + for header_name, header_value in self.headers.items(): + headers[header_name.lower()] = header_value + + # Store the captured headers + HeaderCaptureHandler.captured_headers.append(headers) + + # Send a simple response with CORS headers + self.send_response(200) + self.send_header("Content-type", "text/plain") + self.send_header("Access-Control-Allow-Origin", "*") + self.end_headers() + self.wfile.write(b"OK") + + def log_message(self, fmt, *args): + """Suppress log messages.""" + pass + + @classmethod + def clear_captured_headers(cls): + """Clear captured headers for a new test.""" + cls.captured_headers = [] + + +class TestPlaywrightJavaScriptRequests: + """Test that Playwright injects headers into JavaScript-initiated requests.""" + + def test_fetch_requests_include_tracing_headers(self, playwright): + """Test that JavaScript fetch() requests include Datadog tracing headers.""" + p = playwright + tracer = pytest.importorskip("ddtrace").tracer + config.playwright["distributed_tracing"] = True + + # Clear any previous captured headers + HeaderCaptureHandler.clear_captured_headers() + + # Find an available port + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(("", 0)) + port = s.getsockname()[1] + + # Start a test HTTP server + server = HTTPServer(("localhost", port), HeaderCaptureHandler) + server_thread = threading.Thread(target=server.serve_forever, daemon=True) + server_thread.start() + + # Give server time to start + time.sleep(0.1) + + try: + # Patch Playwright + patch.patch() + + try: + # Create a test span with test.type tag + with tracer.trace("test_js_fetch") as test_span: + test_span.set_tag(TEST_TYPE, "test") + + # Try to launch browser + try: + browser = p.chromium.launch(headless=True) + except Exception as e: + error_msg = str(e) + if ( + "Executable doesn't exist" in error_msg + or "Host system is missing dependencies" in error_msg + ): + pytest.skip("Playwright browsers not available") + raise + + try: + # Create browser context with headers + context = browser.new_context() + try: + page = context.new_page() + try: + # Navigate to a simple HTML page first + page.goto(f"http://localhost:{port}/") + + # Clear captured headers so we only see the fetch request + HeaderCaptureHandler.clear_captured_headers() + + # Execute JavaScript to make a fetch request + page.evaluate(f""" + fetch('http://localhost:{port}/api/data') + .then(r => r.text()) + .catch(err => console.error(err)); + """) + + # Wait for the fetch to complete + time.sleep(0.5) + + # Verify the fetch request was captured + assert len(HeaderCaptureHandler.captured_headers) >= 1, ( + f"Expected at least 1 fetch request, got {len(HeaderCaptureHandler.captured_headers)}" + ) + + # Check the fetch request headers (should be the first/only one now) + fetch_headers = HeaderCaptureHandler.captured_headers[0] + + # Verify Datadog headers + assert "x-datadog-trace-id" in fetch_headers, ( + f"Missing trace-id in fetch. 
Headers: {list(fetch_headers.keys())}" + ) + assert "x-datadog-parent-id" in fetch_headers, "Missing parent-id in fetch" + assert "x-datadog-sampling-priority" in fetch_headers, ( + "Missing sampling-priority in fetch" + ) + + # Verify test context sampling priority + priority = fetch_headers["x-datadog-sampling-priority"] + assert priority == "114", f"Expected priority 114, got {priority}" + + finally: + page.close() + finally: + context.close() + finally: + browser.close() + + finally: + patch.unpatch() + + finally: + # Clean up server + server.shutdown() + server.server_close() diff --git a/tests/contrib/playwright/test_playwright_patch.py b/tests/contrib/playwright/test_playwright_patch.py index b4ddfcb34d9..444c450c171 100644 --- a/tests/contrib/playwright/test_playwright_patch.py +++ b/tests/contrib/playwright/test_playwright_patch.py @@ -50,40 +50,35 @@ def test_config_initialization(self): assert "distributed_tracing" in config.playwright @pytest.mark.skipif( - not hasattr(patch, "_inject_distributed_tracing_headers"), + not hasattr(patch, "_get_tracing_headers"), reason="Integration may not be fully loaded", ) - def test_inject_distributed_tracing_headers(self): - """Test that distributed tracing headers are injected correctly.""" - from ddtrace.contrib.internal.playwright.patch import _inject_distributed_tracing_headers + def test_get_tracing_headers(self): + """Test that distributed tracing headers are retrieved correctly.""" + from ddtrace.contrib.internal.playwright.patch import _get_tracing_headers # Test with distributed tracing enabled config.playwright["distributed_tracing"] = True - headers = {} - _inject_distributed_tracing_headers(headers) + headers = _get_tracing_headers() - # Should have injected Datadog headers when span is active - # Note: headers may not be present if no active span + # Should return a dict (may be empty if no active span) assert isinstance(headers, dict) - def test_inject_distributed_tracing_headers_disabled(self): - """Test that headers are not injected when distributed tracing is disabled.""" - from ddtrace.contrib.internal.playwright.patch import _inject_distributed_tracing_headers + def test_get_tracing_headers_disabled(self): + """Test that no headers are returned when distributed tracing is disabled.""" + from ddtrace.contrib.internal.playwright.patch import _get_tracing_headers # Disable distributed tracing config.playwright["distributed_tracing"] = False - headers = {} - original_count = len(headers) + headers = _get_tracing_headers() - _inject_distributed_tracing_headers(headers) - - # No headers should be added - assert len(headers) == original_count + # Should return empty dict + assert headers == {} def test_sampling_priority_override_in_test_context(self): """Test that sampling priority 114 is used in test contexts.""" - from ddtrace.propagation.http import HTTPPropagator from ddtrace.ext.test import TYPE as TEST_TYPE + from ddtrace.propagation.http import HTTPPropagator # Import tracer tracer = pytest.importorskip("ddtrace").tracer @@ -113,7 +108,7 @@ def test_sampling_priority_override_in_test_context(self): def test_playwright_header_injection_with_test_context(self): """Test that Playwright header injection works with test context priority override.""" - from ddtrace.contrib.internal.playwright.patch import _inject_distributed_tracing_headers + from ddtrace.contrib.internal.playwright.patch import _get_tracing_headers from ddtrace.ext.test import TYPE as TEST_TYPE tracer = pytest.importorskip("ddtrace").tracer @@ -125,14 +120,14 
@@ def test_playwright_header_injection_with_test_context(self): headers_regular = {} with tracer.trace("regular_operation") as span: span.context.sampling_priority = 2 - _inject_distributed_tracing_headers(headers_regular) + headers_regular = _get_tracing_headers() # Test in test context headers_test = {} with tracer.trace("test_operation") as span: span.set_tag(TEST_TYPE, "test") span.context.sampling_priority = 2 # Should be overridden - _inject_distributed_tracing_headers(headers_test) + headers_test = _get_tracing_headers() # Both should have trace headers assert "x-datadog-trace-id" in headers_regular @@ -148,25 +143,23 @@ def test_playwright_header_injection_with_test_context(self): def test_end_to_end_playwright_with_test_context(self): """End-to-end test of Playwright integration with test context sampling priority.""" - from ddtrace.contrib.internal.playwright.patch import _inject_distributed_tracing_headers - from ddtrace.propagation.http import HTTPPropagator + from ddtrace.contrib.internal.playwright.patch import _get_tracing_headers from ddtrace.ext.test import TYPE as TEST_TYPE + from ddtrace.propagation.http import HTTPPropagator tracer = pytest.importorskip("ddtrace").tracer config.playwright["distributed_tracing"] = True # Test 1: Playwright headers in regular context - headers_regular = {} with tracer.trace("regular_browser_op") as span: span.context.sampling_priority = 3 - _inject_distributed_tracing_headers(headers_regular) + headers_regular = _get_tracing_headers() # Test 2: Playwright headers in test context - headers_test = {} with tracer.trace("test_browser_op") as span: span.set_tag(TEST_TYPE, "test") span.context.sampling_priority = 3 # Should be overridden - _inject_distributed_tracing_headers(headers_test) + headers_test = _get_tracing_headers() # Test 3: Direct HTTPPropagator.inject in test context headers_direct = {} @@ -181,9 +174,13 @@ def test_end_to_end_playwright_with_test_context(self): assert "x-datadog-parent-id" in headers, f"{name} context missing parent-id" # Verify sampling priorities - assert headers_regular.get("x-datadog-sampling-priority") == "3", "Regular context should keep original priority" + assert headers_regular.get("x-datadog-sampling-priority") == "3", ( + "Regular context should keep original priority" + ) assert headers_test.get("x-datadog-sampling-priority") == "114", "Test context should override to 114" - assert headers_direct.get("x-datadog-sampling-priority") == "114", "Direct injection in test context should override to 114" + assert headers_direct.get("x-datadog-sampling-priority") == "114", ( + "Direct injection in test context should override to 114" + ) def test_playwright_config_isolation(self): """Test that playwright config changes don't affect global state.""" From 755bd2e4cadc18acf18fb7682d9177e0337d7cf9 Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 17:47:11 +0100 Subject: [PATCH 03/13] small revert --- ddtrace/propagation/http.py | 40 +++++++++---------------------------- 1 file changed, 9 insertions(+), 31 deletions(-) diff --git a/ddtrace/propagation/http.py b/ddtrace/propagation/http.py index afba0c81b61..254c7029bff 100644 --- a/ddtrace/propagation/http.py +++ b/ddtrace/propagation/http.py @@ -352,8 +352,7 @@ def _extract(headers): if trace_id <= 0 or trace_id > _MAX_UINT_64BITS: log.warning( - "Invalid trace id: %r. `x-datadog-trace-id` must be greater than zero and less than 2**64", - trace_id_str, + "Invalid trace id: %r. 
`x-datadog-trace-id` must be greater than zero and less than 2**64", trace_id_str ) return None @@ -388,10 +387,7 @@ def _extract(headers): else: meta["_dd.propagation_error"] = "malformed_tid {}".format(trace_id_hob_hex) del meta[_HIGHER_ORDER_TRACE_ID_BITS] - log.warning( - "malformed_tid: %s. Failed to decode trace id from http headers", - trace_id_hob_hex, - ) + log.warning("malformed_tid: %s. Failed to decode trace id from http headers", trace_id_hob_hex) if not meta: meta = {} @@ -601,10 +597,7 @@ def _inject(span_context, headers): log.debug("tried to inject invalid context %r", span_context) return - single_header = "{}-{}".format( - _dd_id_to_b3_id(span_context.trace_id), - _dd_id_to_b3_id(span_context.span_id), - ) + single_header = "{}-{}".format(_dd_id_to_b3_id(span_context.trace_id), _dd_id_to_b3_id(span_context.span_id)) sampling_priority = span_context.sampling_priority if sampling_priority is not None: if sampling_priority <= 0: @@ -817,9 +810,7 @@ def _get_tracestate_values(ts_l): @staticmethod def _get_sampling_priority( - traceparent_sampled: int, - tracestate_sampling_priority: Optional[int], - origin: Optional[str] = None, + traceparent_sampled: int, tracestate_sampling_priority: Optional[int], origin: Optional[str] = None ): """ When the traceparent sampled flag is set, the Datadog sampling priority is either @@ -863,11 +854,7 @@ def _extract(headers): return None trace_id, span_id, trace_flag = _TraceContext._get_traceparent_values(tp) except (ValueError, AssertionError): - log.exception( - "received invalid w3c traceparent: %s ", - tp, - extra={"send_to_telemetry": False}, - ) + log.exception("received invalid w3c traceparent: %s ", tp, extra={"send_to_telemetry": False}) return None meta = {W3C_TRACEPARENT_KEY: tp} @@ -1083,18 +1070,12 @@ def _get_sampled_injection_context( # Sample the local root span before injecting headers. 
if sampling_span: core.tracer.sample(sampling_span) - log.debug( - "%s sampled before propagating trace: span_context=%s", - sampling_span, - injection_context, - ) + log.debug("%s sampled before propagating trace: span_context=%s", sampling_span, injection_context) return injection_context @staticmethod - def _extract_configured_contexts_avail( - normalized_headers: Dict[str, str], - ) -> Tuple[List[Context], List[str]]: + def _extract_configured_contexts_avail(normalized_headers: Dict[str, str]) -> Tuple[List[Context], List[str]]: contexts = [] styles_w_ctx = [] if config._propagation_style_extract is not None: @@ -1117,7 +1098,7 @@ def _context_to_span_link(context: Context, style: str, reason: str) -> Optional return SpanLink( context.trace_id, context.span_id, - flags=(1 if context.sampling_priority and context.sampling_priority > 0 else 0), + flags=1 if context.sampling_priority and context.sampling_priority > 0 else 0, tracestate=( context._meta.get(W3C_TRACESTATE_KEY, "") if style == _PROPAGATION_STYLE_W3C_TRACECONTEXT else None ), @@ -1317,10 +1298,7 @@ def my_controller(url, headers): if config._propagation_behavior_extract == _PROPAGATION_BEHAVIOR_RESTART: link = HTTPPropagator._context_to_span_link(context, style, "propagation_behavior_extract") - context = Context( - baggage=context.get_all_baggage_items(), - span_links=[link] if link else [], - ) + context = Context(baggage=context.get_all_baggage_items(), span_links=[link] if link else []) return context From 3d687f88a5a7647057e9719c345a205a6b7516b3 Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 17:49:13 +0100 Subject: [PATCH 04/13] small revert --- riotfile.py | 88 +++++++++++------------------------------------------ 1 file changed, 18 insertions(+), 70 deletions(-) diff --git a/riotfile.py b/riotfile.py index f18887e21d4..80e07f5a532 100644 --- a/riotfile.py +++ b/riotfile.py @@ -65,9 +65,7 @@ def str_to_version(version: str) -> Tuple[int, int]: MAX_PYTHON_VERSION = version_to_str(max(SUPPORTED_PYTHON_VERSIONS)) -def select_pys( - min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYTHON_VERSION -) -> List[str]: +def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYTHON_VERSION) -> List[str]: """Helper to select python versions from the list of versions we support >>> select_pys() @@ -82,11 +80,7 @@ def select_pys( min_version = str_to_version(min_version) max_version = str_to_version(max_version) - return [ - version_to_str(version) - for version in SUPPORTED_PYTHON_VERSIONS - if min_version <= version <= max_version - ] + return [version_to_str(version) for version in SUPPORTED_PYTHON_VERSIONS if min_version <= version <= max_version] # Common venv configurations for appsec threats testing @@ -919,9 +913,7 @@ def select_pys( "daphne": [latest], "requests": [latest], "redis": ">=2.10,<2.11", - "psycopg2-binary": [ - ">=2.8.6" - ], # We need <2.9.0 for Python 2.7, and >2.9.0 for 3.9+ + "psycopg2-binary": [">=2.8.6"], # We need <2.9.0 for Python 2.7, and >2.9.0 for 3.9+ "pytest-django[testing]": "==3.10.0", "pylibmc": latest, "python-memcached": latest, @@ -1058,12 +1050,7 @@ def select_pys( venvs=[ Venv( pys=["3.9"], - pkgs={ - "dramatiq": "~=1.10.0", - "pytest": latest, - "redis": latest, - "pika": latest, - }, + pkgs={"dramatiq": "~=1.10.0", "pytest": latest, "redis": latest, "pika": latest}, ), Venv( pys=select_pys(max_version="3.13"), @@ -1114,9 +1101,7 @@ def select_pys( Venv( name="elasticsearch:async", command="pytest {cmdargs} 
tests/contrib/elasticsearch/test_async.py", - env={ - "AIOHTTP_NO_EXTENSIONS": "1" - }, # needed until aiohttp is updated to support python 3.12 + env={"AIOHTTP_NO_EXTENSIONS": "1"}, # needed until aiohttp is updated to support python 3.12 venvs=[ Venv( pys=select_pys(), @@ -1280,10 +1265,7 @@ def select_pys( Venv( pys=select_pys(min_version="3.9", max_version="3.11"), ), - Venv( - pys=select_pys(min_version="3.12", max_version="3.13"), - pkgs={"redis": latest}, - ), + Venv(pys=select_pys(min_version="3.12", max_version="3.13"), pkgs={"redis": latest}), ], ), ], @@ -1409,12 +1391,8 @@ def select_pys( ], }, venvs=[ - Venv( - command="pytest {cmdargs} --ignore=tests/contrib/pymemcache/autopatch tests/contrib/pymemcache" - ), - Venv( - command="python tests/ddtrace_run.py pytest {cmdargs} tests/contrib/pymemcache/autopatch/" - ), + Venv(command="pytest {cmdargs} --ignore=tests/contrib/pymemcache/autopatch tests/contrib/pymemcache"), + Venv(command="python tests/ddtrace_run.py pytest {cmdargs} tests/contrib/pymemcache/autopatch/"), ], ), Venv( @@ -1477,18 +1455,12 @@ def select_pys( Venv( # starlette added support for Python 3.9 in 0.14 pys="3.9", - pkgs={ - "starlette": ["~=0.14.0", "~=0.20.0", "~=0.33.0"], - "httpx": "~=0.22.0", - }, + pkgs={"starlette": ["~=0.14.0", "~=0.20.0", "~=0.33.0"], "httpx": "~=0.22.0"}, ), Venv( # starlette added support for Python 3.10 in 0.15 pys="3.10", - pkgs={ - "starlette": ["~=0.15.0", "~=0.20.0", "~=0.33.0", latest], - "httpx": "~=0.27.0", - }, + pkgs={"starlette": ["~=0.15.0", "~=0.20.0", "~=0.33.0", latest], "httpx": "~=0.27.0"}, ), Venv( # starlette added support for Python 3.11 in 0.21 @@ -1633,11 +1605,7 @@ def select_pys( ], ), Venv( - pkgs={ - "vcrpy": "==7.0.0", - "botocore": "==1.38.26", - "boto3": "==1.38.26", - }, + pkgs={"vcrpy": "==7.0.0", "botocore": "==1.38.26", "boto3": "==1.38.26"}, venvs=[ Venv( pys=select_pys(), @@ -1674,10 +1642,7 @@ def select_pys( ], }, ), - Venv( - pys=select_pys(min_version="3.11"), - pkgs={"mariadb": ["~=1.1.2", latest]}, - ), + Venv(pys=select_pys(min_version="3.11"), pkgs={"mariadb": ["~=1.1.2", latest]}), ], ), Venv( @@ -2300,10 +2265,7 @@ def select_pys( command="pytest {cmdargs} tests/contrib/rediscluster", pkgs={"pytest-randomly": latest}, venvs=[ - Venv( - pys=select_pys(max_version="3.11"), - pkgs={"redis-py-cluster": [">=2.0,<2.1", latest]}, - ), + Venv(pys=select_pys(max_version="3.11"), pkgs={"redis-py-cluster": [">=2.0,<2.1", latest]}), ], ), Venv( @@ -2546,10 +2508,7 @@ def select_pys( # sqlite3 is tied to the Python version and is not installable via pip # To test a range of versions without updating Python, we use Linux only pysqlite3-binary package # Remove pysqlite3-binary on Python 3.9+ locally on non-linux machines - Venv( - pys=select_pys(min_version="3.9", max_version="3.12"), - pkgs={"pysqlite3-binary": [latest]}, - ), + Venv(pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={"pysqlite3-binary": [latest]}), ], ), Venv( @@ -2645,9 +2604,7 @@ def select_pys( # opentelemetry-api doesn't yet work with Python 3.14 pys=select_pys(min_version="3.9", max_version="3.13"), # Ensure we test against versions of opentelemetry-api that broke compatibility with ddtrace - pkgs={ - "opentelemetry-api": ["~=1.0.0", "~=1.15.0", "~=1.26.0", latest] - }, + pkgs={"opentelemetry-api": ["~=1.0.0", "~=1.15.0", "~=1.26.0", latest]}, ), Venv( # opentelemetry-exporter-otlp doesn't yet work with Python 3.14 @@ -2940,9 +2897,7 @@ def select_pys( Venv( pys=select_pys(min_version="3.9", max_version="3.13"), ), - 
Venv( - pys=select_pys(min_version="3.14"), pkgs={"ormsgpack": ">=1.11.0"} - ), + Venv(pys=select_pys(min_version="3.14"), pkgs={"ormsgpack": ">=1.11.0"}), ], ), Venv( @@ -3118,10 +3073,7 @@ def select_pys( pkgs={"confluent-kafka": ["~=1.9.2", latest]}, ), # confluent-kafka added support for Python 3.11 in 2.0.2 - Venv( - pys=select_pys(min_version="3.11", max_version="3.13"), - pkgs={"confluent-kafka": latest}, - ), + Venv(pys=select_pys(min_version="3.11", max_version="3.13"), pkgs={"confluent-kafka": latest}), ], ), ], @@ -3145,11 +3097,7 @@ def select_pys( }, command="pytest {cmdargs} tests/contrib/aiokafka/", pys=select_pys(), - pkgs={ - "pytest-asyncio": [latest], - "pytest-randomly": latest, - "aiokafka": ["~=0.9.0", latest], - }, + pkgs={"pytest-asyncio": [latest], "pytest-randomly": latest, "aiokafka": ["~=0.9.0", latest]}, ), Venv( name="azure_eventhubs", From 824312129955240c87c4cf3ab8ff2f32fa9c4d12 Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 18:07:52 +0100 Subject: [PATCH 05/13] fmt --- ddtrace/contrib/internal/playwright/patch.py | 9 +++++++-- tests/contrib/playwright/test_playwright_e2e.py | 1 + 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/ddtrace/contrib/internal/playwright/patch.py b/ddtrace/contrib/internal/playwright/patch.py index 1aaffc8361e..f59eabdbdb3 100644 --- a/ddtrace/contrib/internal/playwright/patch.py +++ b/ddtrace/contrib/internal/playwright/patch.py @@ -39,8 +39,11 @@ def _get_tracing_headers() -> Dict[str, str]: """ Get distributed tracing headers for the current span context. - Returns a dictionary of headers to inject into HTTP requests. - If no span is active, creates a temporary span. + Returns a dictionary with headers like: + - x-datadog-trace-id + - x-datadog-parent-id + - x-datadog-sampling-priority (114 for test contexts) + - x-datadog-tags (optional) """ if not config.playwright.get("distributed_tracing", True): return {} @@ -49,12 +52,14 @@ def _get_tracing_headers() -> Dict[str, str]: try: current_span = tracer.current_span() if current_span: + # HTTPPropagator.inject mutates the headers dict in place HTTPPropagator.inject(current_span.context, headers) else: # No active span, create a temporary span for header injection with tracer.trace("playwright.browser.request", span_type=SpanTypes.HTTP) as span: span._set_tag_str(SPAN_KIND, "client") span._set_tag_str("component", config.playwright.integration_name) + # HTTPPropagator.inject mutates the headers dict in place HTTPPropagator.inject(span.context, headers) except Exception as e: log.debug("Failed to get distributed tracing headers: %s", e) diff --git a/tests/contrib/playwright/test_playwright_e2e.py b/tests/contrib/playwright/test_playwright_e2e.py index 2f22191033a..2bfa1f28dee 100644 --- a/tests/contrib/playwright/test_playwright_e2e.py +++ b/tests/contrib/playwright/test_playwright_e2e.py @@ -3,6 +3,7 @@ These tests simulate real pytest usage scenarios. """ + import pytest from ddtrace import config From 1f3442f56cee4e7f4110fb1b2e468d68e948ed23 Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 18:08:50 +0100 Subject: [PATCH 06/13] Potential fix for code scanning alert no. 
486: Binding a socket to all network interfaces Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com> --- tests/contrib/playwright/test_playwright_js_requests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/contrib/playwright/test_playwright_js_requests.py b/tests/contrib/playwright/test_playwright_js_requests.py index 9aa3c7797af..0240612bac7 100644 --- a/tests/contrib/playwright/test_playwright_js_requests.py +++ b/tests/contrib/playwright/test_playwright_js_requests.py @@ -67,7 +67,7 @@ def test_fetch_requests_include_tracing_headers(self, playwright): # Find an available port with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(("", 0)) + s.bind(("localhost", 0)) port = s.getsockname()[1] # Start a test HTTP server From 0541265d37b2763b76ce92d101587fc9a75ed6fe Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 18:09:06 +0100 Subject: [PATCH 07/13] Potential fix for code scanning alert no. 485: Binding a socket to all network interfaces Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com> --- tests/contrib/playwright/test_playwright_integration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/contrib/playwright/test_playwright_integration.py b/tests/contrib/playwright/test_playwright_integration.py index c41691cab58..420357ff125 100644 --- a/tests/contrib/playwright/test_playwright_integration.py +++ b/tests/contrib/playwright/test_playwright_integration.py @@ -338,7 +338,7 @@ def test_playwright_headers_not_injected_when_disabled(self, playwright): # Find an available port with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(("", 0)) + s.bind(("localhost", 0)) port = s.getsockname()[1] # Start a test HTTP server From 662a1b22cf440c14907f37fc2844b3e363112f7f Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 18:11:06 +0100 Subject: [PATCH 08/13] change to localhost --- tests/contrib/playwright/test_playwright_integration.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/contrib/playwright/test_playwright_integration.py b/tests/contrib/playwright/test_playwright_integration.py index 420357ff125..ee7f4ef0ee4 100644 --- a/tests/contrib/playwright/test_playwright_integration.py +++ b/tests/contrib/playwright/test_playwright_integration.py @@ -242,7 +242,7 @@ def test_playwright_injects_headers_in_browser_requests(self, playwright): # Find an available port with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(("", 0)) + s.bind(("localhost", 0)) port = s.getsockname()[1] # Start a test HTTP server @@ -419,7 +419,7 @@ def test_playwright_headers_injected_outside_test_context(self, playwright): # Find an available port with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(("", 0)) + s.bind(("localhost", 0)) port = s.getsockname()[1] # Start a test HTTP server From 9b416c89aba85cf1003b23f7d654e353d6b47dba Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 19:06:07 +0100 Subject: [PATCH 09/13] fmt --- riotfile.py | 8 +++++--- tests/contrib/playwright/test_playwright_js_requests.py | 3 ++- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/riotfile.py b/riotfile.py index 80e07f5a532..d56d0d31c85 100644 --- a/riotfile.py +++ b/riotfile.py @@ -3371,7 +3371,8 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="profile-v2-memalloc", 
command="python -m tests.profiling.run pytest -v --no-cov --capture=no --benchmark-disable {cmdargs} tests/profiling/collector/test_memalloc.py", # noqa: E501 - # skipping v3.14 for now due to an unstable `lz4 ` lib issue: https://gitlab.ddbuild.io/DataDog/apm-reliability/dd-trace-py/-/jobs/1163312347 + # skipping v3.14 for now due to an unstable `lz4 ` lib issue: + # https://gitlab.ddbuild.io/DataDog/apm-reliability/dd-trace-py/-/jobs/1163312347 pys=select_pys(max_version="3.13"), pkgs={ "protobuf": latest, @@ -3420,14 +3421,15 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "playwright": latest, "pytest-playwright": latest, }, - command="playwright install && pytest --no-cov {cmdargs} -c /dev/null --no-ddtrace tests/contrib/playwright", + command=( + "playwright install && pytest --no-cov {cmdargs} -c /dev/null --no-ddtrace tests/contrib/playwright" + ), env={ "DD_AGENT_TRACER_URL": "9126", }, venvs=[ Venv( name="playwright-pytest", - # command="playwright install && pytest --no-cov {cmdargs} -c /dev/null --no-ddtrace tests/contrib/playwright", ), ], ), diff --git a/tests/contrib/playwright/test_playwright_js_requests.py b/tests/contrib/playwright/test_playwright_js_requests.py index 0240612bac7..e7c60aa5abb 100644 --- a/tests/contrib/playwright/test_playwright_js_requests.py +++ b/tests/contrib/playwright/test_playwright_js_requests.py @@ -123,7 +123,8 @@ def test_fetch_requests_include_tracing_headers(self, playwright): # Verify the fetch request was captured assert len(HeaderCaptureHandler.captured_headers) >= 1, ( - f"Expected at least 1 fetch request, got {len(HeaderCaptureHandler.captured_headers)}" + f"Expected at least 1 fetch request, got " + f"{len(HeaderCaptureHandler.captured_headers)}" ) # Check the fetch request headers (should be the first/only one now) From 2a11b71fc10cc8e05a9287ebd5e6857bc6696472 Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 19:13:05 +0100 Subject: [PATCH 10/13] mypy --- ddtrace/propagation/http.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ddtrace/propagation/http.py b/ddtrace/propagation/http.py index 254c7029bff..f669d4c9fb6 100644 --- a/ddtrace/propagation/http.py +++ b/ddtrace/propagation/http.py @@ -67,6 +67,9 @@ def _is_test_context(span_context=None) -> bool: is in the same trace (same trace_id), checks if it's a test trace. 
""" try: + if core.tracer is None: + return False + span = core.tracer.current_span() if span is None: return False From 9e2da942895cba219a67381159769661406ad527 Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 27 Nov 2025 19:36:17 +0100 Subject: [PATCH 11/13] remove sec warnings --- ddtrace/contrib/internal/playwright/patch.py | 4 ++-- ddtrace/propagation/http.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ddtrace/contrib/internal/playwright/patch.py b/ddtrace/contrib/internal/playwright/patch.py index f59eabdbdb3..51c36680517 100644 --- a/ddtrace/contrib/internal/playwright/patch.py +++ b/ddtrace/contrib/internal/playwright/patch.py @@ -126,8 +126,8 @@ def _inject_headers_handler(route, request): log.debug("Failed to inject headers in route handler: %s", e) try: route.continue_() - except Exception: - pass + except Exception as continue_error: + log.debug("Failed to continue route after header injection failure: %s", continue_error) # Install catch-all route handler context.route("**/*", _inject_headers_handler) diff --git a/ddtrace/propagation/http.py b/ddtrace/propagation/http.py index f669d4c9fb6..3c9c575c25a 100644 --- a/ddtrace/propagation/http.py +++ b/ddtrace/propagation/http.py @@ -84,9 +84,9 @@ def _is_test_context(span_context=None) -> bool: return True span = span._parent - except Exception: + except Exception as e: # If we can't access the tracer or current span, assume not in test context - pass + log.debug("Failed to check test context: %s", e) return False From 563ab63a9c7c645954c13d25562166cc12d25ec7 Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Fri, 28 Nov 2025 17:34:03 +0100 Subject: [PATCH 12/13] auto instrument playwright --- .../pytest/_auto_instrumented_integrations.py | 79 +++++++++++++++++++ ddtrace/contrib/internal/pytest/_plugin_v2.py | 44 +++++++++++ 2 files changed, 123 insertions(+) create mode 100644 ddtrace/contrib/internal/pytest/_auto_instrumented_integrations.py diff --git a/ddtrace/contrib/internal/pytest/_auto_instrumented_integrations.py b/ddtrace/contrib/internal/pytest/_auto_instrumented_integrations.py new file mode 100644 index 00000000000..8f5022374dc --- /dev/null +++ b/ddtrace/contrib/internal/pytest/_auto_instrumented_integrations.py @@ -0,0 +1,79 @@ +""" +Auto-instrumentation for pytest plugins. + +This module provides automatic detection and patching of supported +integrations when running tests with ddtrace. +""" + +import os + +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +# Plugin name constants - single source of truth +PLUGIN_PLAYWRIGHT = "playwright" +# Future plugins can be added here: +# PLUGIN_SELENIUM = "selenium" +# PLUGIN_HTTPX = "httpx" + + +def auto_instrument_playwright(config, detected_plugins): + """ + Automatically instrument Playwright if detected in the plugin set. + + This provides a zero-configuration experience for Playwright users. + Users can opt-out by setting DD_TRACE_PLAYWRIGHT_ENABLED=false. 
+
+    Args:
+        config: pytest Config object
+        detected_plugins: Set of detected plugin names to instrument
+    """
+    # Only proceed if Playwright is in the detected plugins
+    if PLUGIN_PLAYWRIGHT not in detected_plugins:
+        return
+
+    # Check if user explicitly disabled auto-instrumentation
+    if os.getenv("DD_TRACE_PLAYWRIGHT_ENABLED", "").lower() == "false":
+        log.debug(
+            "Playwright auto-instrumentation disabled via DD_TRACE_PLAYWRIGHT_ENABLED"
+        )
+        return
+
+    # Patch Playwright
+    try:
+        from ddtrace import patch
+
+        # Note: The playwright integration has its own double-patch protection
+        # via playwright._datadog_patch flag, so we can safely call this
+        patch(playwright=True)
+
+        log.debug("Enabled Playwright instrumentation")
+    except Exception as e:
+        # Don't fail tests if patching fails
+        log.warning("Failed to auto-instrument Playwright: %s", e, exc_info=True)
+
+
+def auto_instrument_integrations(config, detected_plugins):
+    """
+    Auto-instrument all supported integrations.
+
+    This is called from pytest_configure and will automatically detect
+    and patch supported test integrations like Playwright.
+
+    Note: This should only be called when --ddtrace-patch-all is NOT used,
+    since that already patches everything. The caller is responsible for
+    checking this condition.
+
+    Args:
+        config: pytest Config object
+        detected_plugins: Set of detected plugin names to instrument
+    """
+    # Auto-instrument Playwright
+    auto_instrument_playwright(config, detected_plugins)
+
+    # Future: Add auto-instrumentation for other integrations here
+    # auto_instrument_selenium(config, detected_plugins)
+    # auto_instrument_httpx(config, detected_plugins)
diff --git a/ddtrace/contrib/internal/pytest/_plugin_v2.py b/ddtrace/contrib/internal/pytest/_plugin_v2.py
index 24fc6e96513..f9cf2e3ceb6 100644
--- a/ddtrace/contrib/internal/pytest/_plugin_v2.py
+++ b/ddtrace/contrib/internal/pytest/_plugin_v2.py
@@ -113,6 +113,33 @@ _current_coverage_collector = None
+def _should_auto_instrument(config):
+    """
+    Quick check to determine if we should attempt auto-instrumentation.
+
+    This is called before importing the _auto_instrumented_integrations module
+    to avoid import overhead when no auto-instrumentable plugins are present.
+ + Returns: + set: Set of detected plugin names that can be auto-instrumented, + or empty set if none found + """ + # Import plugin name constants + from ddtrace.contrib.internal.pytest._auto_instrumented_integrations import ( + PLUGIN_PLAYWRIGHT, + ) + + # List of plugins that we can auto-instrument + AUTO_INSTRUMENTABLE_PLUGINS = (PLUGIN_PLAYWRIGHT,) + + # Collect plugins that are actually present + return { + plugin_name + for plugin_name in AUTO_INSTRUMENTABLE_PLUGINS + if config.pluginmanager.hasplugin(plugin_name) + } + + def _handle_itr_should_skip(item, test_id) -> bool: """Checks whether a test should be skipped @@ -423,6 +450,23 @@ def pytest_configure(config: pytest_Config) -> None: # Main process pytest.global_worker_itr_results = 0 + # Auto-instrument supported integrations (e.g., Playwright) + # Only if --ddtrace-patch-all is not used, since that already patches everything + # Lazy-load to avoid overhead when no auto-instrumentable plugins are present + if not ( + config.getoption("ddtrace-patch-all") + or config.getini("ddtrace-patch-all") + ): + # Quick check: detect which plugins we can auto-instrument + detected_plugins = _should_auto_instrument(config) + if detected_plugins: + # Only import if we found plugins to instrument + from ddtrace.contrib.internal.pytest._auto_instrumented_integrations import ( + auto_instrument_integrations, + ) + + auto_instrument_integrations(config, detected_plugins) + else: # If the pytest ddtrace plugin is not enabled, we should disable CI Visibility, as it was enabled during # pytest_load_initial_conftests From 87d0b229f9f337b50ad40294312604f047d1fedc Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Mon, 1 Dec 2025 10:32:22 +0100 Subject: [PATCH 13/13] update docs --- ddtrace/contrib/internal/playwright/__init__.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/ddtrace/contrib/internal/playwright/__init__.py b/ddtrace/contrib/internal/playwright/__init__.py index 669e4448057..10b3f92b5da 100644 --- a/ddtrace/contrib/internal/playwright/__init__.py +++ b/ddtrace/contrib/internal/playwright/__init__.py @@ -4,15 +4,18 @@ Enabling ~~~~~~~~ -The Playwright integration is enabled by default in test contexts. Use -:func:`patch()` to enable the integration:: +The Playwright integration is **automatically enabled** when using pytest with the ``--ddtrace`` flag +if the ``pytest-playwright`` plugin is detected. No manual patching is required. + +To disable automatic instrumentation, set the environment variable:: + + DD_TRACE_PLAYWRIGHT_ENABLED=false + +For manual patching (outside of pytest contexts), use :func:`patch()`:: from ddtrace import patch patch(playwright=True) -When using pytest, the `--ddtrace-patch-all` flag is required in order for this integration to -be enabled. - Global Configuration ~~~~~~~~~~~~~~~~~~~~