From 1b0a7e8645598498dbf8c889745a85d5697fdfa9 Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Sat, 26 Jul 2025 11:26:44 +0200
Subject: [PATCH 01/86] refactor: Instana module init.

Signed-off-by: Paulo Vital
---
 src/instana/__init__.py | 43 +++++++++++++++++++----------------------
 1 file changed, 20 insertions(+), 23 deletions(-)

diff --git a/src/instana/__init__.py b/src/instana/__init__.py
index 7a9ec0b1..afda3ab1 100644
--- a/src/instana/__init__.py
+++ b/src/instana/__init__.py
@@ -11,6 +11,7 @@
 """

 import importlib
+import importlib.util
 import os
 import sys
 from typing import Tuple
@@ -70,7 +71,7 @@ def load(_: object) -> None:
 def apply_gevent_monkey_patch() -> None:
     from gevent import monkey

-    if os.environ.get("INSTANA_GEVENT_MONKEY_OPTIONS"):
+    if provided_options := os.environ.get("INSTANA_GEVENT_MONKEY_OPTIONS"):

         def short_key(k: str) -> str:
             return k[3:] if k.startswith("no-") else k
@@ -81,12 +82,8 @@ def key_to_bool(k: str) -> bool:
         import inspect

         all_accepted_patch_all_args = inspect.getfullargspec(monkey.patch_all)[0]
-        provided_options = (
-            os.environ.get("INSTANA_GEVENT_MONKEY_OPTIONS")
-            .replace(" ", "")
-            .replace("--", "")
-            .split(",")
-        )
+        provided_options = provided_options.replace(" ", "").replace("--", "").split(",")
+
         provided_options = [
             k for k in provided_options if short_key(k) in all_accepted_patch_all_args
         ]
@@ -115,9 +112,7 @@ def get_aws_lambda_handler() -> Tuple[str, str]:
     handler_function = "lambda_handler"

     try:
-        handler = os.environ.get("LAMBDA_HANDLER", False)
-
-        if handler:
+        if handler := os.environ.get("LAMBDA_HANDLER", None):
             parts = handler.split(".")
             handler_function = parts.pop().strip()
             handler_module = ".".join(parts).strip()
@@ -159,13 +154,10 @@ def boot_agent() -> None:

     import instana.singletons  # noqa: F401

-    # Instrumentation
+    # Import & initialize instrumentation
     if "INSTANA_DISABLE_AUTO_INSTR" not in os.environ:
-        # TODO: remove the following entries as the migration of the
-        # instrumentation codes are finalised.
-
-        # Import & initialize instrumentation
         from instana.instrumentation import (
+            aio_pika,  # noqa: F401
             aioamqp,  # noqa: F401
             asyncio,  # noqa: F401
             cassandra,  # noqa: F401
@@ -173,7 +165,6 @@ def boot_agent() -> None:
             couchbase,  # noqa: F401
             fastapi,  # noqa: F401
             flask,  # noqa: F401
-            # gevent_inst,  # noqa: F401
             grpcio,  # noqa: F401
             httpx,  # noqa: F401
             logging,  # noqa: F401
@@ -186,11 +177,10 @@ def boot_agent() -> None:
             pyramid,  # noqa: F401
             redis,  # noqa: F401
             sanic,  # noqa: F401
+            spyne,  # noqa: F401
             sqlalchemy,  # noqa: F401
             starlette,  # noqa: F401
             urllib3,  # noqa: F401
-            spyne,  # noqa: F401
-            aio_pika,  # noqa: F401
         )
         from instana.instrumentation.aiohttp import (
             client as aiohttp_client,  # noqa: F401
@@ -218,6 +208,8 @@ def boot_agent() -> None:
             server as tornado_server,  # noqa: F401
         )

+        # from instana.instrumentation import gevent_inst  # noqa: F401
+
         # Hooks
         from instana.hooks import (
             hook_gunicorn,  # noqa: F401
@@ -225,6 +217,14 @@ def boot_agent() -> None:
         )


+def _start_profiler() -> None:
+    """Start the Instana Auto Profile."""
+    from instana.singletons import get_profiler
+
+    if profiler := get_profiler():
+        profiler.start()
+
+
 if "INSTANA_DISABLE" not in os.environ:
     # There are cases when sys.argv may not be defined at load time. Seems to happen in embedded Python,
     # and some Pipenv installs. If this is the case, it's best effort.
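    # (Editorial example, hedged — refers to apply_gevent_monkey_patch() above):
    # INSTANA_GEVENT_MONKEY_OPTIONS accepts a comma-separated list of gevent
    # monkey.patch_all() flags, e.g.
    #   INSTANA_GEVENT_MONKEY_OPTIONS="--thread,--no-socket"
    # Spaces and "--" prefixes are stripped, flags unknown to patch_all() are
    # dropped, and a "no-" prefix maps the remaining key to a False keyword argument.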
@@ -246,12 +246,9 @@ def boot_agent() -> None:
         and importlib.util.find_spec("gevent")
     ):
         apply_gevent_monkey_patch()
+
     # AutoProfile
     if "INSTANA_AUTOPROFILE" in os.environ:
-        from instana.singletons import get_profiler
-
-        profiler = get_profiler()
-        if profiler:
-            profiler.start()
+        _start_profiler()

     boot_agent()

From 1992162d650a289b5a2eba42e99cb7f6cf5c469e Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Tue, 29 Jul 2025 17:19:08 +0200
Subject: [PATCH 02/86] feat: Add Span Disabling.

Span disabling allows excluding specific traces or calls from tracing
based on the span category (technology) or the span type (framework,
library, instrumentation).

This commit adds support for reading this configuration from
INSTANA_TRACING_DISABLE, INSTANA_CONFIG_PATH, and the agent-provided
configuration.

Signed-off-by: Paulo Vital
---
 src/instana/__init__.py              |   5 +-
 src/instana/options.py               | 103 +++++++++++++++++-
 src/instana/util/config.py           | 157 +++++++++++++++++++++++++--
 tests/test_options.py                | 153 +++++++++++++++++++++++++-
 tests/test_span_disabling.py         |  79 ++++++++++++++
 tests/util/test_config_reader.py     |  23 +++-
 tests/util/test_configuration-1.yaml |   5 +
 tests/util/test_configuration-2.yaml |   4 +
 8 files changed, 510 insertions(+), 19 deletions(-)
 create mode 100644 tests/test_span_disabling.py

diff --git a/src/instana/__init__.py b/src/instana/__init__.py
index afda3ab1..6b91824d 100644
--- a/src/instana/__init__.py
+++ b/src/instana/__init__.py
@@ -20,6 +20,7 @@
     is_autowrapt_instrumented,
     is_webhook_instrumented,
 )
+from instana.util.config import is_truthy
 from instana.version import VERSION

 __author__ = "Instana Inc."
@@ -225,7 +226,9 @@ def _start_profiler() -> None:
         profiler.start()


-if "INSTANA_DISABLE" not in os.environ:
+if "INSTANA_DISABLE" not in os.environ and not is_truthy(
+    os.environ.get("INSTANA_TRACING_DISABLE", None)
+):
     # There are cases when sys.argv may not be defined at load time. Seems to happen in embedded Python,
     # and some Pipenv installs. If this is the case, it's best effort.
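    # (Editorial sketch, hedged) How this guard behaves with the new variable:
    #   INSTANA_TRACING_DISABLE=true          -> is_truthy() is True, bootstrapping is skipped entirely
    #   INSTANA_TRACING_DISABLE=logging,redis -> is_truthy() is False, the agent still boots and the
    #                                            listed categories/types are disabled later via BaseOptions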
if (
diff --git a/src/instana/options.py b/src/instana/options.py
index 356ea961..affaa266 100644
--- a/src/instana/options.py
+++ b/src/instana/options.py
@@ -16,12 +16,20 @@

 import logging
 import os
-from typing import Any, Dict
+from typing import Any, Dict, Sequence

 from instana.configurator import config
 from instana.log import logger
-from instana.util.config import (is_truthy, parse_ignored_endpoints,
-                                 parse_ignored_endpoints_from_yaml)
+from instana.util.config import (
+    SPAN_TYPE_TO_CATEGORY,
+    get_disable_trace_configurations_from_env,
+    get_disable_trace_configurations_from_local,
+    get_disable_trace_configurations_from_yaml,
+    is_truthy,
+    parse_ignored_endpoints,
+    parse_ignored_endpoints_from_yaml,
+    parse_span_disabling,
+)
 from instana.util.runtime import determine_service_name


@@ -37,6 +45,11 @@ def __init__(self, **kwds: Dict[str, Any]) -> None:
         self.ignore_endpoints = []
         self.kafka_trace_correlation = True

+        # disabled_spans lists all categories and types that should be disabled
+        self.disabled_spans = []
+        # enabled_spans lists all categories and types that should be enabled, taking precedence over disabled_spans
+        self.enabled_spans = []
+
         self.set_trace_configurations()

         # Defaults
@@ -75,8 +88,9 @@ def set_trace_configurations(self) -> None:
             )

         # Check if either of the environment variables is truthy
-        if is_truthy(os.environ.get("INSTANA_ALLOW_EXIT_AS_ROOT", None)) or \
-           is_truthy(os.environ.get("INSTANA_ALLOW_ROOT_EXIT_SPAN", None)):
+        if is_truthy(os.environ.get("INSTANA_ALLOW_EXIT_AS_ROOT", None)) or is_truthy(
+            os.environ.get("INSTANA_ALLOW_ROOT_EXIT_SPAN", None)
+        ):
             self.allow_exit_as_root = True

         # The priority is as follows:
@@ -99,12 +113,69 @@ def set_trace_configurations(self) -> None:
             )

         if "INSTANA_KAFKA_TRACE_CORRELATION" in os.environ:
-            self.kafka_trace_correlation = is_truthy(os.environ["INSTANA_KAFKA_TRACE_CORRELATION"])
+            self.kafka_trace_correlation = is_truthy(
+                os.environ["INSTANA_KAFKA_TRACE_CORRELATION"]
+            )
         elif isinstance(config.get("tracing"), dict) and "kafka" in config["tracing"]:
             self.kafka_trace_correlation = config["tracing"]["kafka"].get(
                 "trace_correlation", True
             )

+        self.set_disable_trace_configurations()
+
+    def set_disable_trace_configurations(self) -> None:
+        disabled_spans = []
+        enabled_spans = []
+
+        # The precedence is as follows:
+        # environment variables > in-code (local) config > agent config (configuration.yaml)
+        # For the env vars: INSTANA_TRACING_DISABLE > INSTANA_CONFIG_PATH
+        if "INSTANA_TRACING_DISABLE" in os.environ:
+            disabled_spans, enabled_spans = get_disable_trace_configurations_from_env()
+        elif "INSTANA_CONFIG_PATH" in os.environ:
+            disabled_spans, enabled_spans = get_disable_trace_configurations_from_yaml()
+        else:
+            # In-code (local) config
+            # The agent config (configuration.yaml) is handled in StandardOptions.set_disable_tracing()
+            disabled_spans, enabled_spans = (
+                get_disable_trace_configurations_from_local()
+            )
+
+        self.disabled_spans.extend(disabled_spans)
+        self.enabled_spans.extend(enabled_spans)
+
+    def is_span_disabled(self, category=None, span_type=None) -> bool:
+        """
+        Check if a span is disabled based on its category and type.
+ + Args: + category (str): The span category (e.g., "logging", "databases") + span_type (str): The span type (e.g., "redis", "kafka") + + Returns: + bool: True if the span is disabled, False otherwise + """ + # If span_type is provided, check if it's disabled + if span_type and span_type in self.disabled_spans: + return True + + # If category is provided directly, check if it's disabled + if category and category in self.disabled_spans: + return True + + # If span_type is provided but not explicitly configured, + # check if its parent category is disabled. Also check for the precedence rules + if span_type and span_type in SPAN_TYPE_TO_CATEGORY: + parent_category = SPAN_TYPE_TO_CATEGORY[span_type] + if ( + parent_category in self.disabled_spans + and span_type not in self.enabled_spans + ): + return True + + # Default: not disabled + return False + class StandardOptions(BaseOptions): """The options class used when running directly on a host/node with an Instana agent""" @@ -177,6 +248,26 @@ def set_tracing(self, tracing: Dict[str, Any]) -> None: if "extra-http-headers" in tracing: self.extra_http_headers = tracing["extra-http-headers"] + # Handle span disabling configuration + if "disable" in tracing: + self.set_disable_tracing(tracing["disable"]) + + def set_disable_tracing(self, tracing_config: Sequence[Dict[str, Any]]) -> None: + # The precedence is as follows: + # environment variables > in-code (local) config > agent config (configuration.yaml) + if ( + "INSTANA_TRACING_DISABLE" not in os.environ + and "INSTANA_CONFIG_PATH" not in os.environ + and not ( + isinstance(config.get("tracing"), dict) + and "disable" in config["tracing"] + ) + ): + # agent config (configuration.yaml) + disabled_spans, enabled_spans = parse_span_disabling(tracing_config) + self.disabled_spans.extend(disabled_spans) + self.enabled_spans.extend(enabled_spans) + def set_from(self, res_data: Dict[str, Any]) -> None: """ Set the source identifiers given to use by the Instana Host agent. diff --git a/src/instana/util/config.py b/src/instana/util/config.py index 887c2292..6cc1e109 100644 --- a/src/instana/util/config.py +++ b/src/instana/util/config.py @@ -2,11 +2,43 @@ import itertools import os -from typing import Any, Dict, List, Union +from typing import Any, Dict, List, Sequence, Tuple, Union +from instana.configurator import config from instana.log import logger from instana.util.config_reader import ConfigReader +# List of supported span categories (technology or protocol) +SPAN_CATEGORIES = [ + "logging", + "databases", + "messaging", + "protocols", # http, grpc, etc. +] + +# Mapping of span type calls (framework, library name, instrumentation name) to categories +SPAN_TYPE_TO_CATEGORY = { + # Database types + "redis": "databases", + "mysql": "databases", + "postgresql": "databases", + "mongodb": "databases", + "cassandra": "databases", + "couchbase": "databases", + "dynamodb": "databases", + "sqlalchemy": "databases", + # Messaging types + "kafka": "messaging", + "rabbitmq": "messaging", + "pika": "messaging", + "aio_pika": "messaging", + "aioamqp": "messaging", + # Protocol types + "http": "protocols", + "grpc": "protocols", + "graphql": "protocols", +} + def parse_service_pair(pair: str) -> List[str]: """ @@ -151,10 +183,10 @@ def parse_ignored_endpoints_from_yaml(file_path: str) -> List[str]: def is_truthy(value: Any) -> bool: """ Check if a value is truthy, accepting various formats. 
-    
+
     @param value: The value to check
     @return: True if the value is considered truthy, False otherwise
-    
+
     Accepts the following as True:
     - True (Python boolean)
     - "True", "true" (case-insensitive string)
@@ -163,17 +195,128 @@ def is_truthy(value: Any) -> bool:
     """
     if value is None:
         return False
-    
+
     if isinstance(value, bool):
         return value
-    
+
     if isinstance(value, int):
         return value == 1
-    
+
     if isinstance(value, str):
         value_lower = value.lower()
         return value_lower == "true" or value == "1"
-    
+
     return False


+def parse_span_disabling(
+    disable_list: Sequence[Union[str, Dict[str, Any]]],
+) -> Tuple[List[str], List[str]]:
+    """
+    Process a list of span disabling configurations and return lists of disabled and enabled spans.
+
+    @param disable_list: List of span disabling configurations
+    @return: Tuple of (disabled_spans, enabled_spans)
+    """
+    if not isinstance(disable_list, list):
+        logger.debug(
+            f"parse_span_disabling: Invalid disable_list type: {type(disable_list)}"
+        )
+        return [], []
+
+    disabled_spans = []
+    enabled_spans = []
+
+    for item in disable_list:
+        if isinstance(item, str):
+            disabled = parse_span_disabling_str(item)
+            disabled_spans.extend(disabled)
+        elif isinstance(item, dict):
+            disabled, enabled = parse_span_disabling_dict(item)
+            disabled_spans.extend(disabled)
+            enabled_spans.extend(enabled)
+        else:
+            logger.debug(
+                f"parse_span_disabling: Invalid disable_list item type: {type(item)}"
+            )
+
+    return disabled_spans, enabled_spans
+
+
+def parse_span_disabling_str(item: str) -> List[str]:
+    """
+    Process a string span disabling configuration and return a list of disabled spans.
+
+    @param item: String span disabling configuration
+    @return: List of disabled spans
+    """
+    if item.lower() in SPAN_CATEGORIES or item.lower() in SPAN_TYPE_TO_CATEGORY.keys():
+        return [item.lower()]
+    else:
+        logger.debug(f"parse_span_disabling_str: Invalid span category/type: {item}")
+        return []
+
+
+def parse_span_disabling_dict(items: Dict[str, bool]) -> Tuple[List[str], List[str]]:
+    """
+    Process a dictionary span disabling configuration and return lists of disabled and enabled spans.
+
+    @param items: Dictionary span disabling configuration
+    @return: Tuple of (disabled_spans, enabled_spans)
+    """
+    disabled_spans = []
+    enabled_spans = []
+
+    for key, value in items.items():
+        if key in SPAN_CATEGORIES or key in SPAN_TYPE_TO_CATEGORY.keys():
+            if is_truthy(value):
+                disabled_spans.append(key)
+            else:
+                enabled_spans.append(key)
+        else:
+            logger.debug(f"parse_span_disabling_dict: Invalid span category/type: {key}")
+
+    return disabled_spans, enabled_spans
+
+
+def get_disable_trace_configurations_from_env() -> Tuple[List[str], List[str]]:
+    # Read INSTANA_TRACING_DISABLE environment variable
+    if tracing_disable := os.environ.get("INSTANA_TRACING_DISABLE", None):
+        if is_truthy(tracing_disable):
+            # INSTANA_TRACING_DISABLE is True/true/1: disable all span categories
+            return list(SPAN_CATEGORIES), []
+        else:
+            # INSTANA_TRACING_DISABLE is a comma-separated list of span categories/types
+            tracing_disable_list = [x.strip() for x in tracing_disable.split(",")]
+            return parse_span_disabling(tracing_disable_list)
+    return [], []
+
+
+def get_disable_trace_configurations_from_yaml() -> Tuple[List[str], List[str]]:
+    config_reader = ConfigReader(os.environ.get("INSTANA_CONFIG_PATH", ""))
+
+    if "tracing" in config_reader.data:
+        root_key = "tracing"
+    elif "com.instana.tracing" in config_reader.data:
+        logger.warning(
+            'Please use "tracing" instead of "com.instana.tracing" for local configuration file.'
+        )
+        root_key = "com.instana.tracing"
+    else:
+        return [], []
+
+    tracing_disable_config = config_reader.data[root_key].get("disable", [])
+    return parse_span_disabling(tracing_disable_config)
+
+
+def get_disable_trace_configurations_from_local() -> Tuple[List[str], List[str]]:
+    if "tracing" in config:
+        if tracing_disable_config := config["tracing"].get("disable", None):
+            return parse_span_disabling(tracing_disable_config)
+    return [], []
+
+
+# Made with Bob
diff --git a/tests/test_options.py b/tests/test_options.py
index a2130c38..4c3d3869 100644
--- a/tests/test_options.py
+++ b/tests/test_options.py
@@ -4,8 +4,8 @@
 import os
 from typing import Generator

-from mock import patch
 import pytest
+from mock import patch

 from instana.configurator import config
 from instana.options import (
@@ -41,6 +41,8 @@ def test_base_options(self) -> None:
         assert self.base_options.secrets_matcher == "contains-ignore-case"
         assert self.base_options.secrets_list == ["key", "pass", "secret"]
         assert not self.base_options.secrets
+        assert self.base_options.disabled_spans == []
+        assert self.base_options.enabled_spans == []

     def test_base_options_with_config(self) -> None:
         config["tracing"] = {
@@ -62,6 +64,7 @@ def test_base_options_with_config(self) -> None:
             "INSTANA_EXTRA_HTTP_HEADERS": "SOMETHING;HERE",
             "INSTANA_IGNORE_ENDPOINTS": "service1;service2:method1,method2",
             "INSTANA_SECRETS": "secret1:username,password",
+            "INSTANA_TRACING_DISABLE": "logging, redis,kafka",
         },
     )
     def test_base_options_with_env_vars(self) -> None:
@@ -80,6 +83,11 @@ def test_base_options_with_env_vars(self) -> None:
         assert self.base_options.secrets_matcher == "secret1"
         assert self.base_options.secrets_list == ["username", "password"]

+        assert "logging" in self.base_options.disabled_spans
+        assert "redis" in self.base_options.disabled_spans
+        assert "kafka" in self.base_options.disabled_spans
+        assert len(self.base_options.enabled_spans) == 0
+
     @patch.dict(
         os.environ,
         {"INSTANA_IGNORE_ENDPOINTS_PATH":
"tests/util/test_configuration-1.yaml"}, @@ -108,17 +116,29 @@ def test_base_options_with_endpoint_file(self) -> None: "INSTANA_IGNORE_ENDPOINTS": "env_service1;env_service2:method1,method2", "INSTANA_KAFKA_TRACE_CORRELATION": "false", "INSTANA_IGNORE_ENDPOINTS_PATH": "tests/util/test_configuration-1.yaml", + "INSTANA_TRACING_DISABLE": "logging,redis, kafka", }, ) def test_set_trace_configurations_by_env_variable(self) -> None: # The priority is as follows: # environment variables > in-code configuration > # > agent config (configuration.yaml) > default value + + # in-code configuration + config["tracing"] = {} config["tracing"]["ignore_endpoints"] = ( "config_service1;config_service2:method1,method2" ) config["tracing"]["kafka"] = {"trace_correlation": True} - test_tracing = {"ignore-endpoints": "service1;service2:method1,method2"} + config["tracing"]["disable"] = [{"databases": True}] + + # agent config (configuration.yaml) + test_tracing = { + "ignore-endpoints": "service1;service2:method1,method2", + "disable": [ + {"messaging": True}, + ], + } # Setting by env variable self.base_options = StandardOptions() @@ -131,6 +151,14 @@ def test_set_trace_configurations_by_env_variable(self) -> None: ] assert not self.base_options.kafka_trace_correlation + # Check disabled_spans list + assert "logging" in self.base_options.disabled_spans + assert "redis" in self.base_options.disabled_spans + assert "kafka" in self.base_options.disabled_spans + assert "databases" not in self.base_options.disabled_spans + assert "messaging" not in self.base_options.disabled_spans + assert len(self.base_options.enabled_spans) == 0 + @patch.dict( os.environ, { @@ -138,12 +166,25 @@ def test_set_trace_configurations_by_env_variable(self) -> None: "INSTANA_IGNORE_ENDPOINTS_PATH": "tests/util/test_configuration-1.yaml", }, ) - def test_set_trace_configurations_by_local_configuration_file(self) -> None: + def test_set_trace_configurations_by_in_code_configuration(self) -> None: + # The priority is as follows: + # in-code configuration > agent config (configuration.yaml) > default value + + # in-code configuration + config["tracing"] = {} config["tracing"]["ignore_endpoints"] = ( "config_service1;config_service2:method1,method2" ) config["tracing"]["kafka"] = {"trace_correlation": True} - test_tracing = {"ignore-endpoints": "service1;service2:method1,method2"} + config["tracing"]["disable"] = [{"databases": True}] + + # agent config (configuration.yaml) + test_tracing = { + "ignore-endpoints": "service1;service2:method1,method2", + "disable": [ + {"messaging": True}, + ], + } self.base_options = StandardOptions() self.base_options.set_tracing(test_tracing) @@ -163,7 +204,16 @@ def test_set_trace_configurations_by_local_configuration_file(self) -> None: "kafka.*.topic4", ] + # Check disabled_spans list + assert "databases" in self.base_options.disabled_spans + assert "logging" not in self.base_options.disabled_spans + assert "redis" not in self.base_options.disabled_spans + assert "kafka" not in self.base_options.disabled_spans + assert "messaging" not in self.base_options.disabled_spans + assert len(self.base_options.enabled_spans) == 0 + def test_set_trace_configurations_by_in_code_variable(self) -> None: + config["tracing"] = {} config["tracing"]["ignore_endpoints"] = ( "config_service1;config_service2:method1,method2" ) @@ -184,6 +234,13 @@ def test_set_trace_configurations_by_agent_configuration(self) -> None: test_tracing = { "ignore-endpoints": "service1;service2:method1,method2", "trace-correlation": True, + 
"disable": [ + { + "messaging": True, + "logging": True, + "kafka": False, + }, + ], } self.base_options = StandardOptions() @@ -196,12 +253,78 @@ def test_set_trace_configurations_by_agent_configuration(self) -> None: ] assert self.base_options.kafka_trace_correlation + # Check disabled_spans list + assert "databases" not in self.base_options.disabled_spans + assert "logging" in self.base_options.disabled_spans + assert "messaging" in self.base_options.disabled_spans + assert "kafka" in self.base_options.enabled_spans + def test_set_trace_configurations_by_default(self) -> None: self.base_options = StandardOptions() self.base_options.set_tracing({}) assert not self.base_options.ignore_endpoints assert self.base_options.kafka_trace_correlation + assert len(self.base_options.disabled_spans) == 0 + assert len(self.base_options.enabled_spans) == 0 + + @patch.dict( + os.environ, + {"INSTANA_TRACING_DISABLE": "true"}, + ) + def test_set_trace_configurations_disable_all_tracing(self) -> None: + self.base_options = BaseOptions() + + # All categories should be disabled + assert "logging" in self.base_options.disabled_spans + assert "databases" in self.base_options.disabled_spans + assert "messaging" in self.base_options.disabled_spans + assert "protocols" in self.base_options.disabled_spans + + # Check is_span_disabled method + assert self.base_options.is_span_disabled(category="logging") + assert self.base_options.is_span_disabled(category="databases") + assert self.base_options.is_span_disabled(span_type="redis") + + @patch.dict( + os.environ, + { + "INSTANA_CONFIG_PATH": "tests/util/test_configuration-1.yaml", + }, + ) + def test_set_trace_configurations_disable_local_yaml(self) -> None: + self.base_options = BaseOptions() + + # All categories should be disabled + assert "logging" in self.base_options.disabled_spans + assert "databases" in self.base_options.disabled_spans + assert "redis" not in self.base_options.disabled_spans + assert "redis" in self.base_options.enabled_spans + + # Check is_span_disabled method + assert self.base_options.is_span_disabled(category="logging") + assert self.base_options.is_span_disabled(category="databases") + assert not self.base_options.is_span_disabled(span_type="redis") + + def test_is_span_disabled_method(self) -> None: + self.base_options = BaseOptions() + + # Default behavior - nothing disabled + assert not self.base_options.is_span_disabled(category="logging") + assert not self.base_options.is_span_disabled(span_type="redis") + + # Disable a category + self.base_options.disabled_spans = ["databases"] + assert not self.base_options.is_span_disabled(category="logging") + assert self.base_options.is_span_disabled(category="databases") + assert self.base_options.is_span_disabled(span_type="redis") + assert self.base_options.is_span_disabled(span_type="mysql") + + # Test precedence rules + self.base_options.enabled_spans = ["redis"] + assert self.base_options.is_span_disabled(category="databases") + assert self.base_options.is_span_disabled(span_type="mysql") + assert not self.base_options.is_span_disabled(span_type="redis") class TestStandardOptions: @@ -258,6 +381,25 @@ def test_set_tracing( ) assert not self.standart_options.extra_http_headers + def test_set_tracing_with_span_disabling(self) -> None: + self.standart_options = StandardOptions() + + test_tracing = { + "disable": [{"logging": True}, {"redis": False}, {"databases": True}] + } + self.standart_options.set_tracing(test_tracing) + + # Check disabled_spans and enabled_spans lists + assert 
"logging" in self.standart_options.disabled_spans + assert "databases" in self.standart_options.disabled_spans + assert "redis" in self.standart_options.enabled_spans + + # Check is_span_disabled method + assert self.standart_options.is_span_disabled(category="logging") + assert self.standart_options.is_span_disabled(category="databases") + assert self.standart_options.is_span_disabled(span_type="mysql") + assert not self.standart_options.is_span_disabled(span_type="redis") + def test_set_from(self) -> None: self.standart_options = StandardOptions() test_res_data = { @@ -493,3 +635,6 @@ def test_gcr_options_with_env_vars(self) -> None: assert self.gcr_options.endpoint_proxy == {"https": "proxy1"} assert self.gcr_options.timeout == 3 assert self.gcr_options.log_level == logging.INFO + + +# Made with Bob diff --git a/tests/test_span_disabling.py b/tests/test_span_disabling.py new file mode 100644 index 00000000..e1e1cbf5 --- /dev/null +++ b/tests/test_span_disabling.py @@ -0,0 +1,79 @@ +# (c) Copyright IBM Corp. 2025 + +import pytest + +from instana.options import BaseOptions, StandardOptions +from instana.singletons import agent + + +class TestSpanDisabling: + @pytest.fixture(autouse=True) + def setup(self): + # Save original options + self.original_options = agent.options + yield + # Restore original options + agent.options = self.original_options + + def test_is_span_disabled_default(self): + options = BaseOptions() + assert not options.is_span_disabled(category="logging") + assert not options.is_span_disabled(category="databases") + assert not options.is_span_disabled(span_type="redis") + + def test_disable_category(self): + options = BaseOptions() + options.disabled_spans = ["logging"] + assert options.is_span_disabled(category="logging") + assert not options.is_span_disabled(category="databases") + + def test_disable_type(self): + options = BaseOptions() + options.disabled_spans = ["redis"] + assert options.is_span_disabled(span_type="redis") + assert not options.is_span_disabled(span_type="mysql") + + def test_type_category_relationship(self): + options = BaseOptions() + options.disabled_spans = ["databases"] + assert options.is_span_disabled(span_type="redis") + assert options.is_span_disabled(span_type="mysql") + + def test_precedence_rules(self): + options = BaseOptions() + options.disabled_spans = ["databases"] + options.enabled_spans = ["redis"] + assert options.is_span_disabled(category="databases") + assert options.is_span_disabled(span_type="mysql") + assert not options.is_span_disabled(span_type="redis") + + @pytest.mark.parametrize("value", ["True", "true", "1"]) + def test_env_var_disable_all(self, value, monkeypatch): + monkeypatch.setenv("INSTANA_TRACING_DISABLE", value) + options = BaseOptions() + assert options.is_span_disabled(category="logging") is True + assert options.is_span_disabled(category="databases") is True + assert options.is_span_disabled(category="messaging") is True + assert options.is_span_disabled(category="protocols") is True + + def test_env_var_disable_specific(self, monkeypatch): + monkeypatch.setenv("INSTANA_TRACING_DISABLE", "logging, redis") + options = BaseOptions() + assert options.is_span_disabled(category="logging") is True + assert options.is_span_disabled(category="databases") is False + assert options.is_span_disabled(span_type="redis") is True + assert options.is_span_disabled(span_type="mysql") is False + + def test_yaml_config(self): + options = StandardOptions() + tracing_config = { + "disable": [{"logging": True}, {"redis": False}, 
{"databases": True}]
+        }
+        options.set_tracing(tracing_config)
+        assert options.is_span_disabled(category="logging")
+        assert options.is_span_disabled(category="databases")
+        assert options.is_span_disabled(span_type="mysql")
+        assert not options.is_span_disabled(span_type="redis")
+
+
+# Made with Bob
diff --git a/tests/util/test_config_reader.py b/tests/util/test_config_reader.py
index b9bb063d..7957efd4 100644
--- a/tests/util/test_config_reader.py
+++ b/tests/util/test_config_reader.py
@@ -1,10 +1,14 @@
 # (c) Copyright IBM Corp. 2025

 import logging
+import os

 import pytest

-from instana.util.config import parse_ignored_endpoints_from_yaml
+from instana.util.config import (
+    get_disable_trace_configurations_from_yaml,
+    parse_ignored_endpoints_from_yaml,
+)


 class TestConfigReader:
@@ -32,6 +36,14 @@ def test_load_configuration_with_tracing(
             "kafka.*.topic4",
         ]

+        os.environ["INSTANA_CONFIG_PATH"] = "tests/util/test_configuration-1.yaml"
+        disabled_spans, enabled_spans = get_disable_trace_configurations_from_yaml()
+        # Check disabled_spans list
+        assert "logging" in disabled_spans
+        assert "databases" in disabled_spans
+        assert "redis" not in disabled_spans
+        assert "redis" in enabled_spans
+
         assert (
             'Please use "tracing" instead of "com.instana.tracing" for local configuration file.'
             not in caplog.messages
@@ -58,6 +70,15 @@ def test_load_configuration_legacy(self, caplog: pytest.LogCaptureFixture) -> No
             "kafka.*.span-topic",
             "kafka.*.topic4",
         ]
+
+        os.environ["INSTANA_CONFIG_PATH"] = "tests/util/test_configuration-2.yaml"
+        disabled_spans, enabled_spans = get_disable_trace_configurations_from_yaml()
+        # Check disabled_spans list
+        assert "logging" in disabled_spans
+        assert "databases" in disabled_spans
+        assert "redis" not in disabled_spans
+        assert "redis" in enabled_spans
+
         assert (
             'Please use "tracing" instead of "com.instana.tracing" for local configuration file.'
             in caplog.messages
diff --git a/tests/util/test_configuration-1.yaml b/tests/util/test_configuration-1.yaml
index af890a35..ac61d362 100644
--- a/tests/util/test_configuration-1.yaml
+++ b/tests/util/test_configuration-1.yaml
@@ -17,3 +17,8 @@ tracing:
       endpoints: ["span-topic", "topic4"]
 #    - methods: ["consume", "send"]
 #      endpoints: ["*"] # Applied to all topics
+  disable:
+    - "logging": true
+    - "databases": true
+    - "redis": false
+
\ No newline at end of file
diff --git a/tests/util/test_configuration-2.yaml b/tests/util/test_configuration-2.yaml
index b418cd55..5ed83ec1 100644
--- a/tests/util/test_configuration-2.yaml
+++ b/tests/util/test_configuration-2.yaml
@@ -18,3 +18,7 @@ com.instana.tracing:
       endpoints: ["span-topic", "topic4"]
 #    - methods: ["consume", "send"]
 #      endpoints: ["*"] # Applied to all topics
+  disable:
+    - "logging": true
+    - "databases": true
+    - "redis": false

From d4b2149e8e25dae0b7096816c1625697da6fd5b0 Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Tue, 29 Jul 2025 18:10:14 +0200
Subject: [PATCH 03/86] feat: Add support to disable log collection.

This disables log span collection at the tracer level to prevent
duplication in the backend when both the application tracer and an
OpenTelemetry collector are running on the same system.
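A minimal usage sketch (editorial and hedged; it relies only on the
span-disabling options API introduced in the previous commit):

    import os

    os.environ["INSTANA_TRACING_DISABLE"] = "logging"

    from instana.options import BaseOptions

    # With the category disabled, the logging instrumentation only forwards
    # the record to the wrapped logger and skips span creation.
    assert BaseOptions().is_span_disabled(category="logging")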
Signed-off-by: Paulo Vital
---
 src/instana/instrumentation/logging.py | 13 ++++-
 tests/clients/test_logging.py          | 77 +++++++++++++++++++++++++-
 2 files changed, 86 insertions(+), 4 deletions(-)

diff --git a/src/instana/instrumentation/logging.py b/src/instana/instrumentation/logging.py
index 9bb58885..8bc9acd6 100644
--- a/src/instana/instrumentation/logging.py
+++ b/src/instana/instrumentation/logging.py
@@ -10,6 +10,7 @@
 import wrapt

 from instana.log import logger
+from instana.singletons import agent
 from instana.util.runtime import get_runtime_env_info
 from instana.util.traceutils import get_tracer_tuple, tracing_is_off

@@ -27,12 +28,18 @@ def log_with_instana(
     # We take into consideration if `stacklevel` is already present in `kwargs`.
     # This prevents the error `_log() got multiple values for keyword argument 'stacklevel'`
-    stacklevel_in = kwargs.pop("stacklevel", 1 if get_runtime_env_info()[0] not in ["ppc64le", "s390x"] else 2)
+    stacklevel_in = kwargs.pop(
+        "stacklevel", 1 if get_runtime_env_info()[0] not in ["ppc64le", "s390x"] else 2
+    )
     stacklevel = stacklevel_in + 1 + (sys.version_info >= (3, 14))

     try:
-        # Only needed if we're tracing and serious log
-        if tracing_is_off() or argv[0] < logging.WARN:
+        # Only needed if we're tracing, the log level is serious (WARN or above),
+        # and logging spans are not disabled
+        if (
+            tracing_is_off()
+            or argv[0] < logging.WARN
+            or agent.options.is_span_disabled(category="logging")
+        ):
             return wrapped(*argv, **kwargs, stacklevel=stacklevel)

         tracer, parent_span, _ = get_tracer_tuple()
diff --git a/tests/clients/test_logging.py b/tests/clients/test_logging.py
index e924ac1c..0fa5d2dc 100644
--- a/tests/clients/test_logging.py
+++ b/tests/clients/test_logging.py
@@ -70,7 +70,7 @@ def test_parameters(self) -> None:
         try:
             a = 42
             b = 0
-            c = a / b
+            c = a / b  # noqa: F841
         except Exception as e:
             self.logger.exception("Exception: %s", str(e))

@@ -168,3 +168,78 @@ def main():

         assert spans[0].k is SpanKind.CLIENT
         assert spans[0].data["log"].get("message") == "foo bar"
+
+
+class TestLoggingDisabling:
+    @pytest.fixture(autouse=True)
+    def _resource(self) -> Generator[None, None, None]:
+        # Setup
+        self.recorder = tracer.span_processor
+        self.recorder.clear_spans()
+        self.logger = logging.getLogger("unit test")
+
+        # Save original options
+        self.original_options = agent.options
+
+        yield
+
+        # Teardown
+        agent.options = self.original_options
+        agent.options.allow_exit_as_root = False
+
+    def test_logging_enabled(self) -> None:
+        with tracer.start_as_current_span("test"):
+            self.logger.warning("test message")
+
+        spans = self.recorder.queued_spans()
+        assert len(spans) == 2
+        assert spans[0].k is SpanKind.CLIENT
+        assert spans[0].data["log"].get("message") == "test message"
+
+    def test_logging_disabled(self) -> None:
+        # Disable logging spans
+        agent.options.disabled_spans = ["logging"]
+
+        with tracer.start_as_current_span("test"):
+            self.logger.warning("test message")
+
+        spans = self.recorder.queued_spans()
+        assert len(spans) == 1  # Only the parent span, no logging span
+
+    def test_logging_disabled_via_env_var(self, monkeypatch):
+        # Disable logging spans via environment variable
+        monkeypatch.setenv("INSTANA_TRACING_DISABLE", "logging")
+
+        # Create new options to read from environment
+        original_options = agent.options
+        agent.options = type(original_options)()
+
+        with tracer.start_as_current_span("test"):
+            self.logger.warning("test message")
+
+        spans = self.recorder.queued_spans()
+        assert len(spans) == 1  # Only the parent span, no logging span
+
+        # Restore original
options
+        agent.options = original_options
+
+    def test_logging_disabled_via_yaml(self) -> None:
+        # Disable logging spans via YAML configuration
+        original_options = agent.options
+        agent.options = type(original_options)()
+
+        # Simulate YAML configuration
+        tracing_config = {"disable": [{"logging": True}]}
+        agent.options.set_tracing(tracing_config)
+
+        with tracer.start_as_current_span("test"):
+            self.logger.warning("test message")
+
+        spans = self.recorder.queued_spans()
+        assert len(spans) == 1  # Only the parent span, no logging span
+
+        # Restore original options
+        agent.options = original_options
+
+
+# Made with Bob

From 5f7b80cbc55f6ba2d8646dee9fc268a1081228ec Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Tue, 29 Jul 2025 23:58:59 +0200
Subject: [PATCH 04/86] refactor: ConfigReader

Signed-off-by: Paulo Vital
---
 src/instana/util/config_reader.py | 19 +++++----
 tests/util/test_config_reader.py  | 64 +++++++++++++++++++++++++++++--
 2 files changed, 73 insertions(+), 10 deletions(-)

diff --git a/src/instana/util/config_reader.py b/src/instana/util/config_reader.py
index ddec31ec..87b5f8c1 100644
--- a/src/instana/util/config_reader.py
+++ b/src/instana/util/config_reader.py
@@ -1,15 +1,18 @@
 # (c) Copyright IBM Corp. 2025

-from typing import Union
-from instana.log import logger
 import yaml

+from instana.log import logger
+

 class ConfigReader:
-    def __init__(self, file_path: Union[str]) -> None:
+    def __init__(self, file_path: str) -> None:
         self.file_path = file_path
-        self.data = None
-        self.load_file()
+        self.data = {}
+        if file_path:
+            self.load_file()
+        else:
+            logger.warning("ConfigReader: No configuration file specified")

     def load_file(self) -> None:
         """Loads and parses the YAML file"""
         try:
             with open(self.file_path, "r") as file:
                 self.data = yaml.safe_load(file)
         except FileNotFoundError:
-            logger.error(f"Configuration file has not found: {self.file_path}")
+            logger.error(
+                f"ConfigReader: Configuration file was not found: {self.file_path}"
+            )
         except yaml.YAMLError as e:
-            logger.error(f"Error parsing YAML file: {e}")
+            logger.error(f"ConfigReader: Error parsing YAML file: {e}")
diff --git a/tests/util/test_config_reader.py b/tests/util/test_config_reader.py
index 7957efd4..c5753f8e 100644
--- a/tests/util/test_config_reader.py
+++ b/tests/util/test_config_reader.py
@@ -2,19 +2,77 @@

 import logging
 import os
+from typing import TYPE_CHECKING, Generator

 import pytest
+from yaml import YAMLError

 from instana.util.config import (
     get_disable_trace_configurations_from_yaml,
     parse_ignored_endpoints_from_yaml,
 )
+from instana.util.config_reader import ConfigReader
+
+if TYPE_CHECKING:
+    from pytest import LogCaptureFixture
+    from pytest_mock import MockerFixture


 class TestConfigReader:
+    @pytest.fixture(autouse=True)
+    def _resource(
+        self,
+        caplog: "LogCaptureFixture",
+    ) -> Generator[None, None, None]:
+        yield
+        caplog.clear()
+        if "INSTANA_CONFIG_PATH" in os.environ:
+            os.environ.pop("INSTANA_CONFIG_PATH")
+
+    def test_config_reader_null(self, caplog: "LogCaptureFixture") -> None:
+        config_reader = ConfigReader(os.environ.get("INSTANA_CONFIG_PATH", ""))
+        assert config_reader.file_path == ""
+        assert config_reader.data == {}
+        assert "ConfigReader: No configuration file specified" in caplog.messages
+
+    def test_config_reader_default(self) -> None:
+        filename = "tests/util/test_configuration-1.yaml"
+        os.environ["INSTANA_CONFIG_PATH"] = filename
+
config_reader = ConfigReader(os.environ.get("INSTANA_CONFIG_PATH", ""))
+        assert config_reader.file_path == filename
+        assert "tracing" in config_reader.data
+        assert len(config_reader.data["tracing"]) == 2
+
+    def test_config_reader_file_not_found_error(
+        self, caplog: "LogCaptureFixture"
+    ) -> None:
+        filename = "tests/util/test_configuration-3.yaml"
+        os.environ["INSTANA_CONFIG_PATH"] = filename
+        config_reader = ConfigReader(os.environ.get("INSTANA_CONFIG_PATH", ""))
+        assert config_reader.file_path == filename
+        assert config_reader.data == {}
+        assert (
+            f"ConfigReader: Configuration file was not found: {filename}"
+            in caplog.messages
+        )
+
+    def test_config_reader_yaml_error(
+        self, caplog: "LogCaptureFixture", mocker: "MockerFixture"
+    ) -> None:
+        filename = "tests/util/test_configuration-1.yaml"
+        exception_message = "BLAH"
+        mocker.patch(
+            "instana.util.config_reader.yaml.safe_load",
+            side_effect=YAMLError(exception_message),
+        )
+
+        config_reader = ConfigReader(filename)  # noqa: F841
+        assert (
+            f"ConfigReader: Error parsing YAML file: {exception_message}"
+            in caplog.messages
+        )
+
+    def test_load_configuration_with_tracing(self, caplog: "LogCaptureFixture") -> None:
-    def test_load_configuration_with_tracing(
-        self, caplog: pytest.LogCaptureFixture
-    ) -> None:
         caplog.set_level(logging.DEBUG, logger="instana")

         ignore_endpoints = parse_ignored_endpoints_from_yaml(
@@ -49,7 +107,7 @@ def test_load_configuration_with_tracing(
             not in caplog.messages
         )

-    def test_load_configuration_legacy(self, caplog: pytest.LogCaptureFixture) -> None:
+    def test_load_configuration_legacy(self, caplog: "LogCaptureFixture") -> None:
         caplog.set_level(logging.DEBUG, logger="instana")

         ignore_endpoints = parse_ignored_endpoints_from_yaml(

From 4beed5f78390e929ae5daec0f3d96d1c3ae989ff Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Wed, 30 Jul 2025 12:21:20 +0200
Subject: [PATCH 05/86] refactor: Remove legacy `INSTANA_DISABLE` handling.

`INSTANA_DISABLE` is not one of Instana's general environment variables,
and the newly adopted `INSTANA_TRACING_DISABLE` achieves the same effect,
so this commit removes the legacy variable.

Signed-off-by: Paulo Vital
---
 src/instana/__init__.py | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/src/instana/__init__.py b/src/instana/__init__.py
index 6b91824d..7add8c29 100644
--- a/src/instana/__init__.py
+++ b/src/instana/__init__.py
@@ -11,9 +11,9 @@
 """

 import importlib
-import importlib.util
 import os
 import sys
+from importlib import util as importlib_util
 from typing import Tuple

 from instana.collector.helpers.runtime import (
@@ -226,9 +226,15 @@ def _start_profiler() -> None:
         profiler.start()


-if "INSTANA_DISABLE" not in os.environ and not is_truthy(
-    os.environ.get("INSTANA_TRACING_DISABLE", None)
-):
+if "INSTANA_DISABLE" in os.environ:  # pragma: no cover
+    import warnings
+
+    message = "Instana: The INSTANA_DISABLE environment variable is deprecated. Please use INSTANA_TRACING_DISABLE=True instead."
+    warnings.simplefilter("always")
+    warnings.warn(message, DeprecationWarning)
+
+
+if not is_truthy(os.environ.get("INSTANA_TRACING_DISABLE", None)):
     # There are cases when sys.argv may not be defined at load time. Seems to happen in embedded Python,
     # and some Pipenv installs. If this is the case, it's best effort.
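    # (Editorial note, hedged): after this change, INSTANA_DISABLE on its own
    # no longer prevents bootstrapping; it only emits the DeprecationWarning
    # above. Set INSTANA_TRACING_DISABLE=True to actually disable the tracer.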
if ( @@ -246,7 +252,7 @@ def _start_profiler() -> None: if ( (is_autowrapt_instrumented() or is_webhook_instrumented()) and "INSTANA_DISABLE_AUTO_INSTR" not in os.environ - and importlib.util.find_spec("gevent") + and importlib_util.find_spec("gevent") ): apply_gevent_monkey_patch() From b6e50d19809f7d916c237d3901af91c5260ce6d3 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Thu, 31 Jul 2025 11:33:44 +0530 Subject: [PATCH 06/86] Remove `six` from project dependencies Signed-off-by: Varsha GS --- pyproject.toml | 1 - src/instana/span/base_span.py | 5 ++--- tests/clients/test_google-cloud-pubsub.py | 7 +++---- tests/clients/test_google-cloud-storage.py | 2 +- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 19ca2507..bcd86863 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,6 @@ dependencies = [ "autowrapt>=1.0", "fysom>=2.1.2", "requests>=2.6.0", - "six>=1.12.0", "urllib3>=1.26.5", "opentelemetry-api>=1.27.0", "opentelemetry-semantic-conventions>=0.48b0", diff --git a/src/instana/span/base_span.py b/src/instana/span/base_span.py index 0d8491c2..b0c58080 100644 --- a/src/instana/span/base_span.py +++ b/src/instana/span/base_span.py @@ -1,7 +1,6 @@ # (c) Copyright IBM Corp. 2024 from typing import TYPE_CHECKING, Type -import six from instana.log import logger from instana.util import DictionaryOfStan @@ -83,12 +82,12 @@ def _validate_attribute(self, key, value): try: # Attribute keys must be some type of text or string type - if isinstance(key, (six.text_type, six.string_types)): + if isinstance(key, str): validated_key = key[0:1024] # Max key length of 1024 characters if isinstance( value, - (bool, float, int, list, dict, six.text_type, six.string_types), + (bool, float, int, list, dict, str), ): validated_value = value else: diff --git a/tests/clients/test_google-cloud-pubsub.py b/tests/clients/test_google-cloud-pubsub.py index 678fc64d..db262e70 100644 --- a/tests/clients/test_google-cloud-pubsub.py +++ b/tests/clients/test_google-cloud-pubsub.py @@ -7,7 +7,6 @@ from typing import Generator import pytest -import six from google.api_core.exceptions import AlreadyExists from google.cloud.pubsub_v1 import PublisherClient, SubscriberClient from google.cloud.pubsub_v1.publisher import exceptions @@ -51,7 +50,7 @@ def test_publish(self) -> None: ) time.sleep(2.0) # for sanity result = future.result() - assert isinstance(result, six.string_types) + assert isinstance(result, str) spans = self.recorder.queued_spans() gcps_span, test_span = spans[0], spans[1] @@ -80,7 +79,7 @@ def test_publish_as_root_exit_span(self) -> None: ) time.sleep(2.0) # for sanity result = future.result() - assert isinstance(result, six.string_types) + assert isinstance(result, str) spans = self.recorder.queued_spans() assert len(spans) == 1 @@ -161,7 +160,7 @@ def test_subscribe(self) -> None: future = self.publisher.publish( self.topic_path, b"Test Message to PubSub", origin="instana" ) - assert isinstance(future.result(), six.string_types) + assert isinstance(future.result(), str) time.sleep(2.0) # for sanity diff --git a/tests/clients/test_google-cloud-storage.py b/tests/clients/test_google-cloud-storage.py index 15ce2e22..51b560ba 100644 --- a/tests/clients/test_google-cloud-storage.py +++ b/tests/clients/test_google-cloud-storage.py @@ -14,7 +14,7 @@ from opentelemetry.trace import SpanKind from mock import patch, Mock -from six.moves import http_client +from http import client as http_client from google.cloud import storage from google.api_core import 
iam, page_iterator

From d0b43f192b149a16ae58919a7d5a4e76f7209bb6 Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Fri, 1 Aug 2025 10:05:17 +0200
Subject: [PATCH 07/86] chore(version): Bump version to 3.7.0

Signed-off-by: Paulo Vital
---
 src/instana/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/instana/version.py b/src/instana/version.py
index b28f22de..a72121ca 100644
--- a/src/instana/version.py
+++ b/src/instana/version.py
@@ -3,4 +3,4 @@

 # Module version file.  Used by setup.py and snapshot reporting.

-VERSION = "3.6.0"
+VERSION = "3.7.0"

From 3aa22f5af17b51443ae2e61a08d7e99abcc3d5c3 Mon Sep 17 00:00:00 2001
From: Michael Honaker
Date: Wed, 30 Jul 2025 12:09:14 -0400
Subject: [PATCH 08/86] Update AWS wrapper to not hide exceptions

Signed-off-by: Michael Honaker
---
 src/instana/instrumentation/aws/s3.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/src/instana/instrumentation/aws/s3.py b/src/instana/instrumentation/aws/s3.py
index 932d902a..7e237957 100644
--- a/src/instana/instrumentation/aws/s3.py
+++ b/src/instana/instrumentation/aws/s3.py
@@ -2,6 +2,7 @@
 # (c) Copyright Instana Inc. 2020

 try:
+    import contextlib
     from typing import TYPE_CHECKING, Any, Callable, Dict, Sequence, Type

     from instana.span_context import SpanContext
@@ -57,16 +58,21 @@ def collect_s3_injected_attributes(
         with tracer.start_as_current_span("s3", span_context=parent_context) as span:
             try:
                 span.set_attribute("s3.op", operations[wrapped.__name__])
-                if wrapped.__name__ in ["download_file", "download_fileobj"]:
-                    span.set_attribute("s3.bucket", args[0])
-                else:
-                    span.set_attribute("s3.bucket", args[1])
+                # Suppress key/index errors to allow the function call to still happen
+                with contextlib.suppress(IndexError, KeyError):
+                    if "Bucket" in kwargs:
+                        span.set_attribute("s3.bucket", kwargs["Bucket"])
+                    elif wrapped.__name__ in ["download_file", "download_fileobj"]:
+                        span.set_attribute("s3.bucket", args[0])
+                    else:
+                        span.set_attribute("s3.bucket", args[1])
+                return wrapped(*args, **kwargs)
             except Exception as exc:
                 span.record_exception(exc)
                 logger.debug(
                     "collect_s3_injected_attributes: collect error", exc_info=True
                 )
+                raise exc

     for method in [
         "upload_file",

From ee42830adc78e9a32d1a99fc2c9328e22014707a Mon Sep 17 00:00:00 2001
From: Michael Honaker
Date: Thu, 31 Jul 2025 18:57:02 -0400
Subject: [PATCH 09/86] Cleanup exception handling code

Signed-off-by: Michael Honaker
---
 src/instana/instrumentation/aws/s3.py | 19 ++++++++-----------
 1 file changed, 8 insertions(+), 11 deletions(-)

diff --git a/src/instana/instrumentation/aws/s3.py b/src/instana/instrumentation/aws/s3.py
index 7e237957..2b7fc30f 100644
--- a/src/instana/instrumentation/aws/s3.py
+++ b/src/instana/instrumentation/aws/s3.py
@@ -2,7 +2,6 @@
 # (c) Copyright Instana Inc.
2020

 try:
-    import contextlib
     from typing import TYPE_CHECKING, Any, Callable, Dict, Sequence, Type

     from instana.span_context import SpanContext
@@ -58,21 +57,19 @@ def collect_s3_injected_attributes(
         with tracer.start_as_current_span("s3", span_context=parent_context) as span:
             try:
                 span.set_attribute("s3.op", operations[wrapped.__name__])
-                # Suppress key/index errors to allow the function call to still happen
-                with contextlib.suppress(IndexError, KeyError):
-                    if "Bucket" in kwargs:
-                        span.set_attribute("s3.bucket", kwargs["Bucket"])
-                    elif wrapped.__name__ in ["download_file", "download_fileobj"]:
-                        span.set_attribute("s3.bucket", args[0])
-                    else:
-                        span.set_attribute("s3.bucket", args[1])
-                return wrapped(*args, **kwargs)
+                if "Bucket" in kwargs:
+                    span.set_attribute("s3.bucket", kwargs["Bucket"])
+                elif wrapped.__name__ in ["download_file", "download_fileobj"]:
+                    span.set_attribute("s3.bucket", args[0])
+                else:
+                    span.set_attribute("s3.bucket", args[1])
             except Exception as exc:
                 span.record_exception(exc)
                 logger.debug(
                     "collect_s3_injected_attributes: collect error", exc_info=True
                 )
-                raise exc
+
+            return wrapped(*args, **kwargs)

     for method in [
         "upload_file",

From 1541e9ffb844580fb495df8e768347b4afdccf0a Mon Sep 17 00:00:00 2001
From: Varsha GS
Date: Fri, 1 Aug 2025 13:42:45 +0530
Subject: [PATCH 10/86] tests(s3): Add new testcase to verify
 boto3.resource().Bucket().upload_fileobj()

Signed-off-by: Varsha GS
---
 tests/clients/boto3/test_boto3_s3.py | 127 +++++++++++++++++----------
 1 file changed, 81 insertions(+), 46 deletions(-)

diff --git a/tests/clients/boto3/test_boto3_s3.py b/tests/clients/boto3/test_boto3_s3.py
index b772ab42..d20b51cd 100644
--- a/tests/clients/boto3/test_boto3_s3.py
+++ b/tests/clients/boto3/test_boto3_s3.py
@@ -2,10 +2,12 @@
 # (c) Copyright Instana Inc.
2020 import os +from io import BytesIO + import pytest +import boto3 from typing import Generator from moto import mock_aws -import boto3 from instana.singletons import tracer, agent from tests.helpers import get_first_span_by_filter @@ -18,13 +20,18 @@ class TestS3: + @classmethod + def setup_class(cls) -> None: + cls.bucket_name = "aws_bucket_name" + cls.object_name = "aws_key_name" + cls.recorder = tracer.span_processor + cls.mock = mock_aws() + @pytest.fixture(autouse=True) def _resource(self) -> Generator[None, None, None]: """Setup and Teardown""" # Clear all spans before a test run - self.recorder = tracer.span_processor self.recorder.clear_spans() - self.mock = mock_aws() self.mock.start() self.s3 = boto3.client("s3", region_name="us-east-1") yield @@ -33,19 +40,19 @@ def _resource(self) -> Generator[None, None, None]: agent.options.allow_exit_as_root = False def test_vanilla_create_bucket(self) -> None: - self.s3.create_bucket(Bucket="aws_bucket_name") + self.s3.create_bucket(Bucket=self.bucket_name) result = self.s3.list_buckets() assert len(result["Buckets"]) == 1 - assert result["Buckets"][0]["Name"] == "aws_bucket_name" + assert result["Buckets"][0]["Name"] == self.bucket_name def test_s3_create_bucket(self) -> None: with tracer.start_as_current_span("test"): - self.s3.create_bucket(Bucket="aws_bucket_name") + self.s3.create_bucket(Bucket=self.bucket_name) result = self.s3.list_buckets() assert len(result["Buckets"]) == 1 - assert result["Buckets"][0]["Name"] == "aws_bucket_name" + assert result["Buckets"][0]["Name"] == self.bucket_name spans = self.recorder.queued_spans() assert len(spans) == 2 @@ -65,11 +72,11 @@ def test_s3_create_bucket(self) -> None: assert not s3_span.ec assert s3_span.data["s3"]["op"] == "CreateBucket" - assert s3_span.data["s3"]["bucket"] == "aws_bucket_name" + assert s3_span.data["s3"]["bucket"] == self.bucket_name def test_s3_create_bucket_as_root_exit_span(self) -> None: agent.options.allow_exit_as_root = True - self.s3.create_bucket(Bucket="aws_bucket_name") + self.s3.create_bucket(Bucket=self.bucket_name) agent.options.allow_exit_as_root = False self.s3.list_buckets() @@ -83,7 +90,7 @@ def test_s3_create_bucket_as_root_exit_span(self) -> None: assert not s3_span.ec assert s3_span.data["s3"]["op"] == "CreateBucket" - assert s3_span.data["s3"]["bucket"] == "aws_bucket_name" + assert s3_span.data["s3"]["bucket"] == self.bucket_name def test_s3_list_buckets(self) -> None: with tracer.start_as_current_span("test"): @@ -113,21 +120,15 @@ def test_s3_list_buckets(self) -> None: assert not s3_span.data["s3"]["bucket"] def test_s3_vanilla_upload_file(self) -> None: - object_name = "aws_key_name" - bucket_name = "aws_bucket_name" - - self.s3.create_bucket(Bucket=bucket_name) - result = self.s3.upload_file(upload_filename, bucket_name, object_name) + self.s3.create_bucket(Bucket=self.bucket_name) + result = self.s3.upload_file(upload_filename, self.bucket_name, self.object_name) assert not result def test_s3_upload_file(self) -> None: - object_name = "aws_key_name" - bucket_name = "aws_bucket_name" - - self.s3.create_bucket(Bucket=bucket_name) + self.s3.create_bucket(Bucket=self.bucket_name) with tracer.start_as_current_span("test"): - self.s3.upload_file(upload_filename, bucket_name, object_name) + self.s3.upload_file(upload_filename, self.bucket_name, self.object_name) spans = self.recorder.queued_spans() assert len(spans) == 2 @@ -147,17 +148,14 @@ def test_s3_upload_file(self) -> None: assert not s3_span.ec assert s3_span.data["s3"]["op"] == "UploadFile" 
- assert s3_span.data["s3"]["bucket"] == "aws_bucket_name" + assert s3_span.data["s3"]["bucket"] == self.bucket_name def test_s3_upload_file_obj(self) -> None: - object_name = "aws_key_name" - bucket_name = "aws_bucket_name" - - self.s3.create_bucket(Bucket=bucket_name) + self.s3.create_bucket(Bucket=self.bucket_name) with tracer.start_as_current_span("test"): with open(upload_filename, "rb") as fd: - self.s3.upload_fileobj(fd, bucket_name, object_name) + self.s3.upload_fileobj(fd, self.bucket_name, self.object_name) spans = self.recorder.queued_spans() assert len(spans) == 2 @@ -177,17 +175,14 @@ def test_s3_upload_file_obj(self) -> None: assert not s3_span.ec assert s3_span.data["s3"]["op"] == "UploadFileObj" - assert s3_span.data["s3"]["bucket"] == "aws_bucket_name" + assert s3_span.data["s3"]["bucket"] == self.bucket_name def test_s3_download_file(self) -> None: - object_name = "aws_key_name" - bucket_name = "aws_bucket_name" - - self.s3.create_bucket(Bucket=bucket_name) - self.s3.upload_file(upload_filename, bucket_name, object_name) + self.s3.create_bucket(Bucket=self.bucket_name) + self.s3.upload_file(upload_filename, self.bucket_name, self.object_name) with tracer.start_as_current_span("test"): - self.s3.download_file(bucket_name, object_name, download_target_filename) + self.s3.download_file(self.bucket_name, self.object_name, download_target_filename) spans = self.recorder.queued_spans() assert len(spans) == 2 @@ -207,18 +202,15 @@ def test_s3_download_file(self) -> None: assert not s3_span.ec assert s3_span.data["s3"]["op"] == "DownloadFile" - assert s3_span.data["s3"]["bucket"] == "aws_bucket_name" + assert s3_span.data["s3"]["bucket"] == self.bucket_name def test_s3_download_file_obj(self) -> None: - object_name = "aws_key_name" - bucket_name = "aws_bucket_name" - - self.s3.create_bucket(Bucket=bucket_name) - self.s3.upload_file(upload_filename, bucket_name, object_name) + self.s3.create_bucket(Bucket=self.bucket_name) + self.s3.upload_file(upload_filename, self.bucket_name, self.object_name) with tracer.start_as_current_span("test"): with open(download_target_filename, "wb") as fd: - self.s3.download_fileobj(bucket_name, object_name, fd) + self.s3.download_fileobj(self.bucket_name, self.object_name, fd) spans = self.recorder.queued_spans() assert len(spans) == 2 @@ -238,15 +230,13 @@ def test_s3_download_file_obj(self) -> None: assert not s3_span.ec assert s3_span.data["s3"]["op"] == "DownloadFileObj" - assert s3_span.data["s3"]["bucket"] == "aws_bucket_name" + assert s3_span.data["s3"]["bucket"] == self.bucket_name def test_s3_list_obj(self) -> None: - bucket_name = "aws_bucket_name" - - self.s3.create_bucket(Bucket=bucket_name) + self.s3.create_bucket(Bucket=self.bucket_name) with tracer.start_as_current_span("test"): - self.s3.list_objects(Bucket=bucket_name) + self.s3.list_objects(Bucket=self.bucket_name) spans = self.recorder.queued_spans() assert len(spans) == 2 @@ -266,4 +256,49 @@ def test_s3_list_obj(self) -> None: assert not s3_span.ec assert s3_span.data["s3"]["op"] == "ListObjects" - assert s3_span.data["s3"]["bucket"] == "aws_bucket_name" + assert s3_span.data["s3"]["bucket"] == self.bucket_name + + def test_s3_resource_bucket_upload_fileobj(self) -> None: + """ + Verify boto3.resource().Bucket().upload_fileobj() works correctly with BytesIO objects + """ + test_data = b"somedata" + + # Create a bucket using the client first + self.s3.create_bucket(Bucket=self.bucket_name) + + s3_resource = boto3.resource( + "s3", + region_name="us-east-1" + ) + bucket = 
s3_resource.Bucket(name=self.bucket_name) + + with tracer.start_as_current_span("test"): + bucket.upload_fileobj(BytesIO(test_data), self.object_name) + + # Verify the upload was successful by retrieving the object + response = bucket.Object(self.object_name).get() + file_content = response["Body"].read() + + # Assert the content matches what we uploaded + assert file_content == test_data + + # Verify the spans were created correctly + spans = self.recorder.queued_spans() + assert len(spans) >= 2 + + filter = lambda span: span.n == "sdk" # noqa: E731 + test_span = get_first_span_by_filter(spans, filter) + assert test_span + + filter = lambda span: span.n == "s3" and span.data["s3"]["op"] == "UploadFileObj" # noqa: E731 + s3_span = get_first_span_by_filter(spans, filter) + assert s3_span + + assert s3_span.t == test_span.t + assert s3_span.p == test_span.s + + assert not test_span.ec + assert not s3_span.ec + + assert s3_span.data["s3"]["bucket"] == self.bucket_name From 89dd1027bc4c6365f5de7130837f5e810b420f8a Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Wed, 6 Aug 2025 10:49:11 +0530 Subject: [PATCH 11/86] refactor(s3): Fix exception handling and logging Signed-off-by: Varsha GS --- src/instana/instrumentation/aws/s3.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/src/instana/instrumentation/aws/s3.py b/src/instana/instrumentation/aws/s3.py index 2b7fc30f..d13b8bff 100644 --- a/src/instana/instrumentation/aws/s3.py +++ b/src/instana/instrumentation/aws/s3.py @@ -59,17 +59,24 @@ def collect_s3_injected_attributes( span.set_attribute("s3.op", operations[wrapped.__name__]) if "Bucket" in kwargs: span.set_attribute("s3.bucket", kwargs["Bucket"]) - elif wrapped.__name__ in ["download_file", "download_fileobj"]: - span.set_attribute("s3.bucket", args[0]) - else: - span.set_attribute("s3.bucket", args[1]) + elif len(args) > 1: + if wrapped.__name__ in ["download_file", "download_fileobj"]: + span.set_attribute("s3.bucket", args[0]) + else: + span.set_attribute("s3.bucket", args[1]) + except Exception: + logger.debug( + f"collect_s3_injected_attributes collect error: {wrapped.__name__}", exc_info=True + ) + + try: + return wrapped(*args, **kwargs) except Exception as exc: span.record_exception(exc) logger.debug( - "collect_s3_injected_attributes: collect error", exc_info=True + f"collect_s3_injected_attributes error: {wrapped.__name__}", exc_info=True ) - - return wrapped(*args, **kwargs) + raise for method in [ "upload_file", From d13442a20467e463f053248f366b7df03218bb2b Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Fri, 8 Aug 2025 08:22:25 +0200 Subject: [PATCH 12/86] chore(version): Bump version to 3.7.1 Signed-off-by: Paulo Vital --- src/instana/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/instana/version.py b/src/instana/version.py index a72121ca..29c92543 100644 --- a/src/instana/version.py +++ b/src/instana/version.py @@ -3,4 +3,4 @@ # Module version file. Used by setup.py and snapshot reporting. -VERSION = "3.7.0" +VERSION = "3.7.1" From 425ae70bfa9862c0918f73571893e61650296812 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Fri, 1 Aug 2025 16:07:35 +0200 Subject: [PATCH 13/86] chore: Update Slack announcement GH Action. 
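The script moves from bin/ to .github/scripts/ and now posts through a
Slack incoming webhook with httpx instead of calling chat.postMessage
with requests. A minimal sketch of the webhook call, with placeholder
team/service/token IDs standing in for the values kept in repository
secrets:

    import httpx

    # Placeholder IDs; the real values come from repository secrets.
    url = "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXX"
    with httpx.Client() as client:
        response = client.post(url, json={"text": "Hello from CI"})
        response.raise_for_status()
        # Incoming webhooks answer with the plain-text body "ok".
        print(response.text)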
Signed-off-by: Paulo Vital --- .github/scripts/announce_release_on_slack.py | 92 +++++++++++++++++++ .../release-notification-on-slack.yml | 71 ++++++++++---- bin/announce_release_on_slack.py | 73 --------------- 3 files changed, 143 insertions(+), 93 deletions(-) create mode 100755 .github/scripts/announce_release_on_slack.py delete mode 100755 bin/announce_release_on_slack.py diff --git a/.github/scripts/announce_release_on_slack.py b/.github/scripts/announce_release_on_slack.py new file mode 100755 index 00000000..a54ac6de --- /dev/null +++ b/.github/scripts/announce_release_on_slack.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python3 + +import logging +import os +import sys + +import httpx +from github import Github + + +def ensure_environment_variables_are_present() -> None: + required_env_vars = ( + "GITHUB_RELEASE_TAG", + "GITHUB_TOKEN", + "SLACK_TOKEN", + "SLACK_SERVICE", + "SLACK_TEAM", + ) + + for env_var in required_env_vars: + if env_var not in os.environ: + logging.fatal(f"❌ A required environment variable is missing: {env_var}") + sys.exit(1) + + +def get_gh_release_info_text_with_token(release_tag: str, access_token: str) -> str: + gh = Github(access_token) + repo_name = "instana/python-sensor" + repo = gh.get_repo(repo_name) + release = repo.get_release(release_tag) + + logging.info("GH Release fetched successfully %s", release) + + msg = ( + f":mega: Oyez! Oyez! Oyez!\n" + f":package: A new version of the Python Tracer has been released.\n" + f"Name: Instana Python Tracer {release.title}\n" + f"Tag: {release.tag_name}\n" + f"Created at: {release.created_at}\n" + f"Published at: {release.published_at}\n" + f"{release.body}\n" + ) + + logging.info(msg) + return msg + + +def post_on_slack_channel( + slack_team: str, slack_service: str, slack_token: str, message_text: str +) -> None: + """Send a message to Slack channel.""" + + url = ( + f"https://hooks.slack.com/services/T%7Bslack_team%7D/B%7Bslack_service%7D/%7Bslack_token%7D" + ) + + headers = { + "Content-Type": "application/json", + } + body = {"text": message_text} + + with httpx.Client() as client: + response = client.post(url, headers=headers, json=body) + response.raise_for_status() + + result = response.text + if "ok" in result: + print("✅ Slack message sent successfully") + else: + print(f"❌ Slack API error: {result}") + + +def main() -> None: + # Setting this globally to DEBUG will also debug PyGithub, + # which will produce even more log output + logging.basicConfig(level=logging.INFO) + ensure_environment_variables_are_present() + + msg = get_gh_release_info_text_with_token( + os.environ["GITHUB_RELEASE_TAG"], os.environ["GITHUB_TOKEN"] + ) + + post_on_slack_channel( + os.environ["SLACK_TEAM"], + os.environ["SLACK_SERVICE"], + os.environ["SLACK_TOKEN"], + msg, + ) + + +if __name__ == "__main__": + main() diff --git a/.github/workflows/release-notification-on-slack.yml b/.github/workflows/release-notification-on-slack.yml index 186466e1..3952dc8e 100644 --- a/.github/workflows/release-notification-on-slack.yml +++ b/.github/workflows/release-notification-on-slack.yml @@ -9,26 +9,57 @@ on: # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#release release: - types: [published] + types: [published, released] jobs: - build: - name: Slack Post + notify-slack: runs-on: ubuntu-latest + steps: - - name: 'Checkout the needed file only ./bin/announce_release_on_slack.py' - uses: actions/checkout@v3 - - run: | - if [[ ${{ github.event_name == 'workflow_dispatch' }} == true ]]; then - export 
GITHUB_RELEASE_TAG=${{ inputs.github_ref }} - else # release event - export GITHUB_RELEASE_TAG=$(basename ${GITHUB_REF}) - fi - echo "New release published ${GITHUB_RELEASE_TAG}" - pip3 install PyGithub - echo $PWD - ls -lah - ./bin/announce_release_on_slack.py - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - SLACK_CHANNEL_ID_RELEASES: ${{ secrets.SLACK_CHANNEL_ID_RELEASES }} + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Fetch all history to access commit messages + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.13' + + - name: Install dependencies + run: | + pip install httpx PyGithub + + # Set environment variables safely + - name: Set event name + id: set-event-name + env: + EVENT_NAME: ${{ github.event_name }} + run: echo "EVENT_NAME=$EVENT_NAME" >> $GITHUB_ENV + + # Handle workflow_dispatch event + - name: Set GitHub ref for workflow dispatch + if: ${{ github.event_name == 'workflow_dispatch' }} + env: + INPUT_REF: ${{ inputs.github_ref }} + run: echo "GITHUB_RELEASE_TAG=$INPUT_REF" >> $GITHUB_ENV + + # Handle release event + - name: Set GitHub ref for release event + if: ${{ github.event_name != 'workflow_dispatch' }} + env: + GH_REF: ${{ github.ref }} + run: | + REF_NAME=$(basename "$GH_REF") + echo "GITHUB_RELEASE_TAG=$REF_NAME" >> $GITHUB_ENV + + # Send notification using the safely set environment variables + - name: Send Slack notification + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SLACK_TOKEN: ${{ secrets.RUPY_TRACER_RELEASES_TOKEN }} + SLACK_SERVICE: ${{ secrets.RUPY_TRACER_RELEASES_CHANNEL_ID }} + SLACK_TEAM: ${{ secrets.RUPY_TOWN_CRIER_SERVICE_ID }} + run: | + echo "New release published ${GITHUB_RELEASE_TAG}" + python .github/scripts/announce_release_on_slack.py + \ No newline at end of file diff --git a/bin/announce_release_on_slack.py b/bin/announce_release_on_slack.py deleted file mode 100755 index 2c6625dd..00000000 --- a/bin/announce_release_on_slack.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python3 - -import json -import logging -import os -import requests -import sys - -from github import Github - - -def ensure_environment_variables_are_present(): - required_env_vars = ('GITHUB_RELEASE_TAG', 'GITHUB_TOKEN', - 'SLACK_BOT_TOKEN', 'SLACK_CHANNEL_ID_RELEASES') - - for v in required_env_vars: - if not os.environ.get(v): - logging.fatal("A required environment variable is missing: %s", v) - sys.exit(1) - - -def get_gh_release_info_text_with_token(release_tag, access_token): - g = Github(access_token) - repo_name = "instana/python-sensor" - repo = g.get_repo(repo_name) - release = repo.get_release(release_tag) - - logging.info("GH Release fetched successfully %s", release) - - msg = ( - f":mega: :package: A new version is released in {repo_name}\n" - f"Name: {release.title}\n" - f"Tag: {release.tag_name}\n" - f"Created at: {release.created_at}\n" - f"Published at: {release.published_at}\n" - f"{release.body}\n") - - logging.info(msg) - return msg - - -def post_on_slack_channel(slack_token, slack_channel_id, message_text): - api_url = "https://slack.com/api/chat.postMessage" - - headers = {"Authorization": f"Bearer {slack_token}", - "Content-Type": "application/json"} - body = {"channel": slack_channel_id, "text": message_text} - - response = requests.post(api_url, headers=headers, data=json.dumps(body)) - response_data = json.loads(response.text) - - if response_data["ok"]: - logging.info("Message sent successfully!") - 
else: - logging.fatal("Error sending message: %s", response_data['error']) - - -def main(): - # Setting this globally to DEBUG will also debug PyGithub, - # which will produce even more log output - logging.basicConfig(level=logging.INFO) - ensure_environment_variables_are_present() - - msg = get_gh_release_info_text_with_token(os.environ['GITHUB_RELEASE_TAG'], - os.environ['GITHUB_TOKEN']) - - post_on_slack_channel(os.environ['SLACK_BOT_TOKEN'], - os.environ['SLACK_CHANNEL_ID_RELEASES'], - msg) - - -if __name__ == "__main__": - main() From 232b381be4a512988b5b9393828cf6dc4a46a1aa Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Wed, 6 Aug 2025 22:42:07 +0200 Subject: [PATCH 14/86] chore: New PR announcement GH Action. Add a new GH Action to announce to specific Slack channel every PR that is opened, reopened, and review_requested. Signed-off-by: Paulo Vital --- .github/scripts/announce_pr_on_slack.py | 153 ++++++++++++++++++ .../opened-pr-notification-on-slack.yml | 41 +++++ 2 files changed, 194 insertions(+) create mode 100644 .github/scripts/announce_pr_on_slack.py create mode 100644 .github/workflows/opened-pr-notification-on-slack.yml diff --git a/.github/scripts/announce_pr_on_slack.py b/.github/scripts/announce_pr_on_slack.py new file mode 100644 index 00000000..71ae75f2 --- /dev/null +++ b/.github/scripts/announce_pr_on_slack.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python3 +""" +GitHub Actions script to send Slack notifications for new pull requests. +""" + +import os +import sys +from typing import Tuple + +import httpx + + +def send_slack_message( + slack_team: str, slack_service: str, slack_token: str, message: str +) -> bool: + """Send a message to Slack channel.""" + + url = ( + f"https://hooks.slack.com/services/T%7Bslack_team%7D/B%7Bslack_service%7D/%7Bslack_token%7D" + ) + + headers = { + "Content-Type": "application/json", + } + + data = {"text": message} + + try: + with httpx.Client() as client: + response = client.post(url, headers=headers, json=data) + response.raise_for_status() + + result = response.text + if "ok" in result: + print("✅ Slack message sent successfully") + return True + else: + print(f"❌ Slack API error: {result}") + return False + + except httpx.HTTPError as e: + print(f"❌ Request error: {e}") + return False + + +def ensure_environment_variables_are_present() -> ( + Tuple[str, str, str, str, str, str, str, str] +): + """ + Ensures that all necessary environment variables are present for the application to run. + + This function checks for the presence of required environment variables related to Slack bot token, + Pull Request (PR) details, and repository name. It also validates that the Slack channel is set. + + Raises: + SystemExit: If any of the required environment variables are missing. + + Returns: + A tuple containing the values of the following environment variables: + - SLACK_TOKEN: The token for the Slack bot. + - SLACK_TEAM: The ID of the Slack team. + - SLACK_SERVICE: The ID of the Slack service. + - PR_NUMBER: The number of the Pull Request. + - PR_TITLE: The title of the Pull Request. + - PR_URL: The URL of the Pull Request. + - PR_AUTHOR: The author of the Pull Request. + - REPO_NAME: The name of the repository. 
+ """ + # Get environment variables + slack_token = os.getenv("SLACK_TOKEN") + slack_team = os.getenv("SLACK_TEAM") + slack_service = os.getenv("SLACK_SERVICE") + pr_number = os.getenv("PR_NUMBER") + pr_title = os.getenv("PR_TITLE") + pr_url = os.getenv("PR_URL") + pr_author = os.getenv("PR_AUTHOR") + repo_name = os.getenv("REPO_NAME") + + # Validate required environment variables + if not slack_token: + print("❌ SLACK_TOKEN environment variable is required") + sys.exit(1) + + if not slack_team: + print("❌ SLACK_TEAM environment variable is required") + sys.exit(1) + + if not slack_service: + print("❌ SLACK_SERVICE environment variable is required") + sys.exit(1) + + if not all([pr_number, pr_title, pr_url, pr_author, repo_name]): + print( + "❌ Missing required PR information (PR_NUMBER, PR_TITLE, PR_URL, PR_AUTHOR, REPO_NAME)" + ) + sys.exit(1) + + # Since we're validating these variables, we can assert they're not None + assert pr_number is not None + assert pr_title is not None + assert pr_url is not None + assert pr_author is not None + assert repo_name is not None + + return ( + slack_token, + slack_team, + slack_service, + pr_number, + pr_title, + pr_url, + pr_author, + repo_name, + ) + + +def main() -> None: + """Main function to process PR and send Slack notification.""" + + ( + slack_token, + slack_team, + slack_service, + pr_number, + pr_title, + pr_url, + pr_author, + repo_name, + ) = ensure_environment_variables_are_present() + + print(f"Processing PR #{pr_number}") + + # Create Slack message + message = ( + f":mega: Oyez! Oyez! Oyez!\n" + f"Hello Team. Please, review the opened PR #{pr_number} in {repo_name}\n" + f"*{pr_title}* by @{pr_author}\n" + f":pull-request-opened: {pr_url}" + ) + + # Send to Slack + success = send_slack_message(slack_service, slack_team, slack_token, message) + + if not success: + sys.exit(1) + + print("✅ Process completed successfully") + + +if __name__ == "__main__": + main() + +# Made with Bob diff --git a/.github/workflows/opened-pr-notification-on-slack.yml b/.github/workflows/opened-pr-notification-on-slack.yml new file mode 100644 index 00000000..33b23d7f --- /dev/null +++ b/.github/workflows/opened-pr-notification-on-slack.yml @@ -0,0 +1,41 @@ +name: PR Slack Notification + +permissions: + contents: read + pull-requests: read + +on: + pull_request: + types: [opened, reopened, review_requested] + +jobs: + notify-slack: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Fetch all history to access commit messages + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.13' + + - name: Install dependencies + run: | + pip install httpx + + - name: Send Slack notification + env: + SLACK_TOKEN: ${{ secrets.RUPY_PR_ANNOUNCEMENT_TOKEN }} + SLACK_SERVICE: ${{ secrets.RUPY_PR_ANNOUNCEMENT_CHANNEL_ID }} + SLACK_TEAM: ${{ secrets.RUPY_TOWN_CRIER_SERVICE_ID }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ github.event.number }} + PR_TITLE: ${{ github.event.pull_request.title }} + PR_URL: ${{ github.event.pull_request.html_url }} + PR_AUTHOR: ${{ github.event.pull_request.user.login }} + REPO_NAME: ${{ github.repository }} + run: python .github/scripts/announce_pr_on_slack.py From b302623d3475ad8c045c8da4f89a83002b4f1043 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Fri, 8 Aug 2025 15:44:17 +0200 Subject: [PATCH 15/86] chore(ci): Enhance the release announcement message. 
Signed-off-by: Paulo Vital --- .github/scripts/announce_release_on_slack.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/scripts/announce_release_on_slack.py b/.github/scripts/announce_release_on_slack.py index a54ac6de..5d97cb5a 100755 --- a/.github/scripts/announce_release_on_slack.py +++ b/.github/scripts/announce_release_on_slack.py @@ -33,11 +33,10 @@ def get_gh_release_info_text_with_token(release_tag: str, access_token: str) -> msg = ( f":mega: Oyez! Oyez! Oyez!\n" - f":package: A new version of the Python Tracer has been released.\n" - f"Name: Instana Python Tracer {release.title}\n" - f"Tag: {release.tag_name}\n" - f"Created at: {release.created_at}\n" - f"Published at: {release.published_at}\n" + f"The Instana Python Tracer {release_tag} has been released.\n" + f":package: https://pypi.org/project/instana/ \n" + f":github: {release.html_url} \n" + f"**Release Notes:**\n" f"{release.body}\n" ) From ec9be1c9404cf9905941d4c7b1d8fb5b4c1045c9 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Fri, 8 Aug 2025 15:47:35 +0200 Subject: [PATCH 16/86] fix(ci): Arguments sequence to send Slack announcement. Signed-off-by: Paulo Vital --- .github/scripts/announce_pr_on_slack.py | 28 +++++++++---------- .../opened-pr-notification-on-slack.yml | 2 +- 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/.github/scripts/announce_pr_on_slack.py b/.github/scripts/announce_pr_on_slack.py index 71ae75f2..8745988b 100644 --- a/.github/scripts/announce_pr_on_slack.py +++ b/.github/scripts/announce_pr_on_slack.py @@ -25,22 +25,20 @@ def send_slack_message( data = {"text": message} - try: - with httpx.Client() as client: - response = client.post(url, headers=headers, json=data) - response.raise_for_status() + ret = False + with httpx.Client() as client: + response = client.post(url, headers=headers, json=data) + response.raise_for_status() - result = response.text - if "ok" in result: - print("✅ Slack message sent successfully") - return True - else: - print(f"❌ Slack API error: {result}") - return False + result = response.text + if "ok" in result: + print("✅ Slack message sent successfully") + ret = True + else: + print(f"❌ Slack API error: {result}") + ret = False - except httpx.HTTPError as e: - print(f"❌ Request error: {e}") - return False + return ret def ensure_environment_variables_are_present() -> ( @@ -139,7 +137,7 @@ def main() -> None: ) # Send to Slack - success = send_slack_message(slack_service, slack_team, slack_token, message) + success = send_slack_message(slack_team, slack_service, slack_token, message) if not success: sys.exit(1) diff --git a/.github/workflows/opened-pr-notification-on-slack.yml b/.github/workflows/opened-pr-notification-on-slack.yml index 33b23d7f..c4723e2d 100644 --- a/.github/workflows/opened-pr-notification-on-slack.yml +++ b/.github/workflows/opened-pr-notification-on-slack.yml @@ -6,7 +6,7 @@ permissions: on: pull_request: - types: [opened, reopened, review_requested] + types: [opened, reopened] jobs: notify-slack: From eb82e76203fc10b890046edd9f06a60d8a1fd50b Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Mon, 11 Aug 2025 12:08:42 +0530 Subject: [PATCH 17/86] Release new version only on `3.x` tags that does not contain the string `post` - Do not release new version for fedramp specific code - Remove redundant check for v3 tags Signed-off-by: Varsha GS --- .github/workflows/pkg_release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pkg_release.yml 
b/.github/workflows/pkg_release.yml index 7e8a91e7..046dbf61 100644 --- a/.github/workflows/pkg_release.yml +++ b/.github/workflows/pkg_release.yml @@ -11,13 +11,13 @@ name: Release new version on: push: tags: - - v3.* + - 'v3.*' + - '!v3.*post*' jobs: build: name: Build package runs-on: ubuntu-latest - if: ${{ startsWith(github.ref_name, 'v3') }} steps: - uses: actions/checkout@v4 - name: Set up Python From 2d878ad668e4324bae0be5924192eb8ecb5aace4 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Tue, 12 Aug 2025 13:00:55 +0530 Subject: [PATCH 18/86] fix: stop notifying draft PRs Signed-off-by: Varsha GS --- .github/workflows/opened-pr-notification-on-slack.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/opened-pr-notification-on-slack.yml b/.github/workflows/opened-pr-notification-on-slack.yml index c4723e2d..11b392db 100644 --- a/.github/workflows/opened-pr-notification-on-slack.yml +++ b/.github/workflows/opened-pr-notification-on-slack.yml @@ -6,12 +6,13 @@ permissions: on: pull_request: - types: [opened, reopened] + types: [opened, reopened, ready_for_review] jobs: notify-slack: runs-on: ubuntu-latest - + + if: ${{ !github.event.pull_request.draft }} steps: - name: Checkout code uses: actions/checkout@v4 From b0100cc8e14d5224e9cdafd441e2a5ba5255552a Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Thu, 7 Aug 2025 16:28:50 +0530 Subject: [PATCH 19/86] fix uwsgi error with AUTOWRAPT_BOOTSTRAP Signed-off-by: Varsha GS --- src/instana/hooks/hook_uwsgi.py | 5 ++++- src/instana/util/runtime.py | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/instana/hooks/hook_uwsgi.py b/src/instana/hooks/hook_uwsgi.py index 6287a9f9..1995ffeb 100644 --- a/src/instana/hooks/hook_uwsgi.py +++ b/src/instana/hooks/hook_uwsgi.py @@ -44,8 +44,11 @@ def uwsgi_handle_fork() -> None: logger.debug( f"uWSGI --master={opt_master} --lazy-apps={opt_lazy_apps}: postfork hooks not applied" ) + except ImportError: logger.debug( "uwsgi hooks: decorators not available: likely not running under uWSGI" ) - pass + +except AttributeError: + logger.debug("uwsgi hooks: Running under uWSGI but decorators not available") diff --git a/src/instana/util/runtime.py b/src/instana/util/runtime.py index 832d37c2..a49cdfa3 100644 --- a/src/instana/util/runtime.py +++ b/src/instana/util/runtime.py @@ -135,7 +135,7 @@ def determine_service_name() -> str: uwsgi_type = "uWSGI worker%s" app_name = uwsgi_type % app_name - except ImportError: + except (ImportError, AttributeError): pass except Exception: logger.debug("non-fatal get_application_name: ", exc_info=True) From dddb8219b7cc8ae6edb3c73d00c979e7df77c3c3 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Fri, 15 Aug 2025 10:13:50 +0200 Subject: [PATCH 20/86] chore(version): Bump version to 3.7.2 Signed-off-by: Paulo Vital --- src/instana/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/instana/version.py b/src/instana/version.py index 29c92543..62b8e993 100644 --- a/src/instana/version.py +++ b/src/instana/version.py @@ -3,4 +3,4 @@ # Module version file. Used by setup.py and snapshot reporting. -VERSION = "3.7.1" +VERSION = "3.7.2" From b18c004df5fa2af5cb02b722cb428d2675f45286 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Fri, 15 Aug 2025 14:07:20 +0200 Subject: [PATCH 21/86] refactor(ci): Update Release announcement workflow. Make it part of the `Release new version` GH Action workflow, dependent of the `github-release` and `publish-to-pypi` job steps. 
This will avoid announcement of the AWS Lambda releases. Signed-off-by: Paulo Vital --- .github/workflows/pkg_release.yml | 37 +++++++++++ .../release-notification-on-slack.yml | 65 ------------------- 2 files changed, 37 insertions(+), 65 deletions(-) delete mode 100644 .github/workflows/release-notification-on-slack.yml diff --git a/.github/workflows/pkg_release.yml b/.github/workflows/pkg_release.yml index 046dbf61..bd37c54c 100644 --- a/.github/workflows/pkg_release.yml +++ b/.github/workflows/pkg_release.yml @@ -80,3 +80,40 @@ jobs: path: dist/ - name: Publish to PyPI uses: pypa/gh-action-pypi-publish@release/v1 + + notify-slack: + name: Notify on Slack + needs: + - github-release + - publish-to-pypi + runs-on: ubuntu-latest + permissions: + contents: read + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Fetch all history to access commit messages + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.13' + + - name: Install dependencies + run: | + pip install httpx PyGithub + + # Send notification using the safely set environment variables + - name: Send Slack notification + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_RELEASE_TAG: ${{ github.ref_name }} + SLACK_TOKEN: ${{ secrets.RUPY_TRACER_RELEASES_TOKEN }} + SLACK_SERVICE: ${{ secrets.RUPY_TRACER_RELEASES_CHANNEL_ID }} + SLACK_TEAM: ${{ secrets.RUPY_TOWN_CRIER_SERVICE_ID }} + run: | + echo "New release published ${GITHUB_RELEASE_TAG}" + python .github/scripts/announce_release_on_slack.py + \ No newline at end of file diff --git a/.github/workflows/release-notification-on-slack.yml b/.github/workflows/release-notification-on-slack.yml deleted file mode 100644 index 3952dc8e..00000000 --- a/.github/workflows/release-notification-on-slack.yml +++ /dev/null @@ -1,65 +0,0 @@ -name: Slack Post -on: - workflow_dispatch: # Manual trigger - inputs: - github_ref: - description: 'Manually provided value for GITHUB_RELEASE_TAG of a release' - required: true - type: string - - # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#release - release: - types: [published, released] -jobs: - notify-slack: - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Fetch all history to access commit messages - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.13' - - - name: Install dependencies - run: | - pip install httpx PyGithub - - # Set environment variables safely - - name: Set event name - id: set-event-name - env: - EVENT_NAME: ${{ github.event_name }} - run: echo "EVENT_NAME=$EVENT_NAME" >> $GITHUB_ENV - - # Handle workflow_dispatch event - - name: Set GitHub ref for workflow dispatch - if: ${{ github.event_name == 'workflow_dispatch' }} - env: - INPUT_REF: ${{ inputs.github_ref }} - run: echo "GITHUB_RELEASE_TAG=$INPUT_REF" >> $GITHUB_ENV - - # Handle release event - - name: Set GitHub ref for release event - if: ${{ github.event_name != 'workflow_dispatch' }} - env: - GH_REF: ${{ github.ref }} - run: | - REF_NAME=$(basename "$GH_REF") - echo "GITHUB_RELEASE_TAG=$REF_NAME" >> $GITHUB_ENV - - # Send notification using the safely set environment variables - - name: Send Slack notification - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SLACK_TOKEN: ${{ secrets.RUPY_TRACER_RELEASES_TOKEN }} - SLACK_SERVICE: ${{ secrets.RUPY_TRACER_RELEASES_CHANNEL_ID }} - SLACK_TEAM: ${{ secrets.RUPY_TOWN_CRIER_SERVICE_ID }} - run: | - echo "New 
release published ${GITHUB_RELEASE_TAG}" - python .github/scripts/announce_release_on_slack.py - \ No newline at end of file From 4a56ca7756b0e1f31e4c15bd0a49b3b58770301d Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Fri, 15 Aug 2025 14:09:09 +0200 Subject: [PATCH 22/86] refactor(ci): Remove GH Action for Python 3.14.0 Signed-off-by: Paulo Vital --- .github/workflows/py3140_build.yml | 58 ------------------------------ 1 file changed, 58 deletions(-) delete mode 100644 .github/workflows/py3140_build.yml diff --git a/.github/workflows/py3140_build.yml b/.github/workflows/py3140_build.yml deleted file mode 100644 index 1cff4a73..00000000 --- a/.github/workflows/py3140_build.yml +++ /dev/null @@ -1,58 +0,0 @@ -# This workflow builds a container image on top of the Python 3.14.0 RC images -# with all dependencies already compiled and installed to be used in the tests -# CI pipelines. - -name: Build Instana python-sensor-test-py3.14.0 -on: - workflow_dispatch: # Manual trigger. - schedule: - - cron: '1 0 * * 1,3' # Every Monday and Wednesday at midnight and one. -env: - IMAGE_NAME: python-sensor-test-py3.14.0 - IMAGE_TAG: latest - CONTAINER_FILE: ./Dockerfile-py3140 - IMAGE_REGISTRY: ghcr.io/${{ github.repository_owner }} - REGISTRY_USER: ${{ github.actor }} - REGISTRY_PASSWORD: ${{ github.token }} -jobs: - build-and-push: - name: Build container image. - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - steps: - - uses: actions/checkout@v4 - - - name: Build image - id: build_image - uses: redhat-actions/buildah-build@v2 - with: - image: ${{ env.IMAGE_NAME }} - tags: ${{ env.IMAGE_TAG }} - containerfiles: ${{ env.CONTAINER_FILE }} - - - name: Echo Outputs - run: | - echo "Image: ${{ steps.build_image.outputs.image }}" - echo "Tags: ${{ steps.build_image.outputs.tags }}" - echo "Tagged Image: ${{ steps.build_image.outputs.image-with-tag }}" - - - name: Check images created - run: buildah images | grep '${{ env.IMAGE_NAME }}' - - # Push the image to GHCR (Image Registry) - - name: Push To GHCR - uses: redhat-actions/push-to-registry@v2 - id: push-to-ghcr - with: - image: ${{ steps.build_image.outputs.image }} - tags: ${{ steps.build_image.outputs.tags }} - registry: ${{ env.IMAGE_REGISTRY }} - username: ${{ env.REGISTRY_USER }} - password: ${{ env.REGISTRY_PASSWORD }} - extra-args: | - --disable-content-trust - - - name: Print image URL - run: echo "Image pushed to ${{ steps.push-to-ghcr.outputs.registry-paths }}" From 5fd729b5c9d2ed3a95da840e700ca1ec06e7629d Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Tue, 19 Aug 2025 16:41:07 +0200 Subject: [PATCH 23/86] fix: Logging stacklevel for Python >= 3.14.0. Reverting commit e57ab45 as closing the final release of Python 3.14.0. 
Signed-off-by: Paulo Vital --- src/instana/instrumentation/logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/instana/instrumentation/logging.py b/src/instana/instrumentation/logging.py index 8bc9acd6..fdbaaa58 100644 --- a/src/instana/instrumentation/logging.py +++ b/src/instana/instrumentation/logging.py @@ -31,7 +31,7 @@ def log_with_instana( stacklevel_in = kwargs.pop( "stacklevel", 1 if get_runtime_env_info()[0] not in ["ppc64le", "s390x"] else 2 ) - stacklevel = stacklevel_in + 1 + (sys.version_info >= (3, 14)) + stacklevel = stacklevel_in + 1 try: # Only needed if we're tracing and serious log and logging spans are not disabled From 65e106e798869f9a0258a8641391ec567842f537 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Mon, 18 Aug 2025 23:59:09 +0200 Subject: [PATCH 24/86] ci: Update Python versions for Tekton testcases. Signed-off-by: Paulo Vital --- .tekton/pipeline.yaml | 93 +++++++++++++++----------- .tekton/python-tracer-prepuller.yaml | 48 +++++--------- .tekton/task.yaml | 98 +++++++++++++++++++++------- 3 files changed, 145 insertions(+), 94 deletions(-) diff --git a/.tekton/pipeline.yaml b/.tekton/pipeline.yaml index d76916c2..14cb96d4 100644 --- a/.tekton/pipeline.yaml +++ b/.tekton/pipeline.yaml @@ -6,10 +6,32 @@ spec: params: - name: revision type: string + - name: py-38-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.8-bookworm + - name: py-39-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.9-bookworm + - name: py-310-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.10-bookworm + - name: py-311-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.11-bookworm + - name: py-312-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.12-bookworm + - name: py-313-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.13-bookworm + - name: py-314-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.14.0rc2 workspaces: - name: python-tracer-ci-pipeline-pvc tasks: - name: clone + displayName: "clone $(params.revision)" params: - name: revision value: $(params.revision) @@ -19,27 +41,20 @@ spec: - name: task-pvc workspace: python-tracer-ci-pipeline-pvc - name: unittest-default - displayName: "Platforms and Browsers: $(params.platform) and $(params.browser)" + displayName: "Python $(params.imageDigest)" runAfter: - clone matrix: params: - name: imageDigest value: - # public.ecr.aws/docker/library/python:3.8.20-bookworm - - "sha256:7aa279fb41dad2962d3c915aa6f6615134baa412ab5aafa9d4384dcaaa0af15d" - # public.ecr.aws/docker/library/python:3.9.22-bookworm - - "sha256:a847112640804ed2d03bb774d46bb1619bd37862fb2b7e48eebe425a168c153b" - # public.ecr.aws/docker/library/python:3.10.17-bookworm - - "sha256:e2c7fb05741c735679b26eda7dd34575151079f8c615875fbefe401972b14d85" - # public.ecr.aws/docker/library/python:3.11.12-bookworm - - "sha256:a3e280261e448b95d49423532ccd6e5329c39d171c10df1457891ff7c5e2301b" - # public.ecr.aws/docker/library/python:3.12.10-bookworm - - "sha256:4ea730e54e2a87b716ffc58a426bd627baa182a3d4d5696d05c1bca2dde775aa" - # public.ecr.aws/docker/library/python:3.13.3-bookworm - - "sha256:07bf1bd38e191e3ed18b5f3eb0006d5ab260cb8c967f49d3bf947e5c2e44d8a9" - # public.ecr.aws/docker/library/python:3.14.0b2-bookworm - - "sha256:4f8ae0a7847680b269d8ef51528053b2cfc9242377f349cbc3a36eacf579903f" + - $(params.py-38-imageDigest) + - $(params.py-39-imageDigest) + - 
$(params.py-310-imageDigest) + - $(params.py-311-imageDigest) + - $(params.py-312-imageDigest) + - $(params.py-313-imageDigest) + # - $(params.py-314-imageDigest) taskRef: name: python-tracer-unittest-default-task workspaces: @@ -48,12 +63,9 @@ spec: - name: unittest-cassandra runAfter: - clone - matrix: - params: - - name: imageDigest - value: - # public.ecr.aws/docker/library/python:3.9.22-bookworm - - "sha256:a847112640804ed2d03bb774d46bb1619bd37862fb2b7e48eebe425a168c153b" + params: + - name: imageDigest + value: $(params.py-312-imageDigest) taskRef: name: python-tracer-unittest-cassandra-task workspaces: @@ -62,12 +74,9 @@ spec: - name: unittest-gevent-starlette runAfter: - clone - matrix: - params: - - name: imageDigest - value: - # public.ecr.aws/docker/library/python:3.9.22-bookworm - - "sha256:a847112640804ed2d03bb774d46bb1619bd37862fb2b7e48eebe425a168c153b" + params: + - name: imageDigest + value: $(params.py-313-imageDigest) taskRef: name: python-tracer-unittest-gevent-starlette-task workspaces: @@ -76,12 +85,9 @@ spec: - name: unittest-aws runAfter: - clone - matrix: - params: - - name: imageDigest - value: - # public.ecr.aws/docker/library/python:3.12.10-bookworm - - "sha256:4ea730e54e2a87b716ffc58a426bd627baa182a3d4d5696d05c1bca2dde775aa" + params: + - name: imageDigest + value: $(params.py-313-imageDigest) taskRef: name: python-tracer-unittest-aws-task workspaces: @@ -90,14 +96,23 @@ spec: - name: unittest-kafka runAfter: - clone - matrix: - params: - - name: imageDigest - value: - # public.ecr.aws/docker/library/python:3.12.10-bookworm - - "sha256:4ea730e54e2a87b716ffc58a426bd627baa182a3d4d5696d05c1bca2dde775aa" + params: + - name: imageDigest + value: $(params.py-313-imageDigest) taskRef: name: python-tracer-unittest-kafka-task workspaces: - name: task-pvc workspace: python-tracer-ci-pipeline-pvc + - name: unittest-python-next + displayName: "Python next $(params.imageDigest)" + runAfter: + - clone + params: + - name: py-version + value: 3.14.0rc2 + taskRef: + name: python-tracer-unittest-python-next-task + workspaces: + - name: task-pvc + workspace: python-tracer-ci-pipeline-pvc diff --git a/.tekton/python-tracer-prepuller.yaml b/.tekton/python-tracer-prepuller.yaml index db1ab34c..76b0609a 100644 --- a/.tekton/python-tracer-prepuller.yaml +++ b/.tekton/python-tracer-prepuller.yaml @@ -14,68 +14,52 @@ spec: # Configure an init container for each image you want to pull initContainers: - name: prepuller-git - # public.ecr.aws/docker/library/alpine:3.20.3 - image: public.ecr.aws/docker/library/alpine@sha256:029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85 + image: public.ecr.aws/docker/library/alpine:latest command: ["sh", "-c", "'true'"] - name: prepuller-google-cloud-pubsub - # quay.io/thekevjames/gcloud-pubsub-emulator:501.0.0 - image: quay.io/thekevjames/gcloud-pubsub-emulator@sha256:9bad1f28e6a3d6cd5f462c654c736faa4cf49732d9422ddb427ad30f3037c0ff + image: quay.io/thekevjames/gcloud-pubsub-emulator:501.0.0 command: ["sh", "-c", "'true'"] - name: prepuller-cassandra - # public.ecr.aws/docker/library/cassandra:3.11.16-jammy - image: public.ecr.aws/docker/library/cassandra@sha256:b175d99b80f8108594d00c705288fdb3186b9fc07b30b4c292c3592cddb5f0b5 + image: public.ecr.aws/docker/library/cassandra:3.11.16-jammy command: ["sh", "-c", "'true'"] - name: prepuller-rabbitmq - # public.ecr.aws/docker/library/rabbitmq:3.13.0 - image: public.ecr.aws/docker/library/rabbitmq@sha256:39de1a4fc6c72d12bd5dfa23e8576536fd1c0cc8418344cd5a51addfc9a1145d + image: 
public.ecr.aws/docker/library/rabbitmq:3.13.0 command: ["sh", "-c", "'true'"] - name: prepuller-redis - # public.ecr.aws/docker/library/redis:7.2.4-bookworm - image: public.ecr.aws/docker/library/redis@sha256:9341b6548cc35b64a6de0085555264336e2f570e17ecff20190bf62222f2bd64 + image: public.ecr.aws/docker/library/redis:7.2.4-bookworm command: ["sh", "-c", "'true'"] - name: prepuller-mongo - # public.ecr.aws/docker/library/mongo:7.0.6 - image: public.ecr.aws/docker/library/mongo@sha256:3a023748ee30e915dd51642f1ef430c73c4e54937060054ca84c70417f510cc5 + image: public.ecr.aws/docker/library/mongo:7.0.6 command: ["sh", "-c", "'true'"] - name: prepuller-mariadb - # public.ecr.aws/docker/library/mariadb:11.3.2 - image: public.ecr.aws/docker/library/mariadb@sha256:a4a81ab6d190db84b67f286fd0511cdea619a24b63790b3db4fb69d263a5cd37 + image: public.ecr.aws/docker/library/mariadb:11.3.2 command: ["sh", "-c", "'true'"] - name: prepuller-postgres - # public.ecr.aws/docker/library/postgres:16.2-bookworm - image: public.ecr.aws/docker/library/postgres@sha256:07572430dbcd821f9f978899c3ab3a727f5029be9298a41662e1b5404d5b73e0 + image: public.ecr.aws/docker/library/postgres:16.2-bookworm command: ["sh", "-c", "'true'"] - name: prepuller-kafka - # public.ecr.aws/bitnami/kafka:3.9.0 - image: public.ecr.aws/docker/library/kafka@sha256:d2890d68f96b36da3c8413fa94294f018b2f95d87cf108cbf71eab510572d9be + image: public.ecr.aws/bitnami/kafka:3.9.0 command: ["sh", "-c", "'true'"] - name: prepuller-38 - # public.ecr.aws/docker/library/python:3.8.20-bookworm - image: public.ecr.aws/docker/library/python@ + image: public.ecr.aws/docker/library/python:3.8-bookworm command: ["sh", "-c", "'true'"] - name: prepuller-39 - # public.ecr.aws/docker/library/python:3.9.22-bookworm - image: public.ecr.aws/docker/library/python@sha256:a847112640804ed2d03bb774d46bb1619bd37862fb2b7e48eebe425a168c153b + image: public.ecr.aws/docker/library/python:3.9-bookworm command: ["sh", "-c", "'true'"] - name: prepuller-310 - # public.ecr.aws/docker/library/python:3.10.17-bookworm - image: public.ecr.aws/docker/library/python@sha256:e2c7fb05741c735679b26eda7dd34575151079f8c615875fbefe401972b14d85 + image: public.ecr.aws/docker/library/python:3.10-bookworm command: ["sh", "-c", "'true'"] - name: prepuller-311 - # public.ecr.aws/docker/library/python:3.11.12-bookworm - image: public.ecr.aws/docker/library/python@sha256:a3e280261e448b95d49423532ccd6e5329c39d171c10df1457891ff7c5e2301b + image: public.ecr.aws/docker/library/python:3.11-bookworm command: ["sh", "-c", "'true'"] - name: prepuller-312 - # public.ecr.aws/docker/library/python:3.12.10-bookworm - image: public.ecr.aws/docker/library/python@sha256:4ea730e54e2a87b716ffc58a426bd627baa182a3d4d5696d05c1bca2dde775aa + image: public.ecr.aws/docker/library/python:3.12-bookworm command: ["sh", "-c", "'true'"] - name: prepuller-313 - # public.ecr.aws/docker/library/python:3.13.3-bookworm - image: public.ecr.aws/docker/library/python@sha256:07bf1bd38e191e3ed18b5f3eb0006d5ab260cb8c967f49d3bf947e5c2e44d8a9 + image: public.ecr.aws/docker/library/python:3.13-bookworm command: ["sh", "-c", "'true'"] - name: prepuller-314 - # public.ecr.aws/docker/library/python:3.14.0b2-bookworm - image: public.ecr.aws/docker/library/python@sha256:4f8ae0a7847680b269d8ef51528053b2cfc9242377f349cbc3a36eacf579903f + image: public.ecr.aws/docker/library/python:3.14.0rc2 command: ["sh", "-c", "'true'"] # Use the pause container to ensure the Pod goes into a `Running` phase diff --git a/.tekton/task.yaml b/.tekton/task.yaml index 
b68593bf..e5d79c92 100644 --- a/.tekton/task.yaml +++ b/.tekton/task.yaml @@ -12,8 +12,7 @@ spec: mountPath: /workspace steps: - name: clone - # public.ecr.aws/docker/library/alpine:3.20.3 - image: public.ecr.aws/docker/library/alpine@sha256:029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85 + image: public.ecr.aws/docker/library/alpine:latest script: | #!/bin/sh echo "Installing git" @@ -29,8 +28,7 @@ metadata: spec: sidecars: - name: cassandra - # public.ecr.aws/docker/library/cassandra:3.11.16-jammy - image: public.ecr.aws/docker/library/cassandra@sha256:b175d99b80f8108594d00c705288fdb3186b9fc07b30b4c292c3592cddb5f0b5 + image: public.ecr.aws/docker/library/cassandra:3.11.16-jammy env: - name: MAX_HEAP_SIZE value: 2048m @@ -51,7 +49,7 @@ spec: mountPath: /workspace steps: - name: unittest - image: public.ecr.aws/docker/library/python@$(params.imageDigest) + image: $(params.imageDigest) env: - name: TEST_CONFIGURATION value: cassandra @@ -72,7 +70,7 @@ spec: mountPath: /workspace steps: - name: unittest - image: public.ecr.aws/docker/library/python@$(params.imageDigest) + image: $(params.imageDigest) env: - name: TEST_CONFIGURATION value: gevent_starlette @@ -87,8 +85,7 @@ metadata: spec: sidecars: - name: google-cloud-pubsub - # quay.io/thekevjames/gcloud-pubsub-emulator - image: quay.io/thekevjames/gcloud-pubsub-emulator@sha256:9bad1f28e6a3d6cd5f462c654c736faa4cf49732d9422ddb427ad30f3037c0ff + image: quay.io/thekevjames/gcloud-pubsub-emulator:latest env: - name: PUBSUB_EMULATOR_HOST value: 0.0.0.0:8681 @@ -98,19 +95,16 @@ spec: - containerPort: 8681 hostPort: 8681 - name: mariadb - # public.ecr.aws/docker/library/mariadb:11.3.2 - image: public.ecr.aws/docker/library/mariadb@sha256:a4a81ab6d190db84b67f286fd0511cdea619a24b63790b3db4fb69d263a5cd37 + image: public.ecr.aws/docker/library/mariadb:11.3.2 env: - name: MYSQL_ROOT_PASSWORD # or MARIADB_ROOT_PASSWORD value: passw0rd - name: MYSQL_DATABASE # or MARIADB_DATABASE value: instana_test_db - name: mongo - # public.ecr.aws/docker/library/mongo:7.0.6 - image: public.ecr.aws/docker/library/mongo@sha256:3a023748ee30e915dd51642f1ef430c73c4e54937060054ca84c70417f510cc5 + image: public.ecr.aws/docker/library/mongo:7.0.6 - name: postgres - # public.ecr.aws/docker/library/postgres:16.2-bookworm - image: public.ecr.aws/docker/library/postgres@sha256:07572430dbcd821f9f978899c3ab3a727f5029be9298a41662e1b5404d5b73e0 + image: public.ecr.aws/docker/library/postgres:16.2-bookworm env: - name: POSTGRES_USER value: root @@ -126,11 +120,9 @@ spec: - pg_isready --host 127.0.0.1 --port 5432 --dbname=${POSTGRES_DB} timeoutSeconds: 10 - name: redis - # public.ecr.aws/docker/library/redis:7.2.4-bookworm - image: public.ecr.aws/docker/library/redis@sha256:9341b6548cc35b64a6de0085555264336e2f570e17ecff20190bf62222f2bd64 + image: public.ecr.aws/docker/library/redis:7.2.4-bookworm - name: rabbitmq - # public.ecr.aws/docker/library/rabbitmq:3.13.0 - image: public.ecr.aws/docker/library/rabbitmq@sha256:39de1a4fc6c72d12bd5dfa23e8576536fd1c0cc8418344cd5a51addfc9a1145d + image: public.ecr.aws/docker/library/rabbitmq:3.13.0 params: - name: imageDigest type: string @@ -139,7 +131,7 @@ spec: mountPath: /workspace steps: - name: unittest - image: public.ecr.aws/docker/library/python@$(params.imageDigest) + image: $(params.imageDigest) env: - name: TEST_CONFIGURATION value: default @@ -160,7 +152,7 @@ spec: mountPath: /workspace steps: - name: unittest - image: public.ecr.aws/docker/library/python@$(params.imageDigest) + image: $(params.imageDigest) env: - 
name: TEST_CONFIGURATION value: aws @@ -175,8 +167,7 @@ metadata: spec: sidecars: - name: kafka - # public.ecr.aws/bitnami/kafka:3.9.0 - image: public.ecr.aws/bitnami/kafka@sha256:d2890d68f96b36da3c8413fa94294f018b2f95d87cf108cbf71eab510572d9be + image: public.ecr.aws/bitnami/kafka:3.9.0 env: - name: KAFKA_CFG_NODE_ID value: "0" @@ -200,10 +191,71 @@ spec: mountPath: /workspace steps: - name: unittest - image: public.ecr.aws/docker/library/python@$(params.imageDigest) + image: $(params.imageDigest) env: - name: TEST_CONFIGURATION value: kafka workingDir: /workspace/python-sensor/ command: - /workspace/python-sensor/.tekton/run_unittests.sh +--- +apiVersion: tekton.dev/v1 +kind: Task +metadata: + name: python-tracer-unittest-python-next-task +spec: + sidecars: + - name: google-cloud-pubsub + image: quay.io/thekevjames/gcloud-pubsub-emulator:latest + env: + - name: PUBSUB_EMULATOR_HOST + value: 0.0.0.0:8681 + - name: PUBSUB_PROJECT1 + value: test-project,test-topic + ports: + - containerPort: 8681 + hostPort: 8681 + - name: mariadb + image: public.ecr.aws/docker/library/mariadb:11.3.2 + env: + - name: MYSQL_ROOT_PASSWORD # or MARIADB_ROOT_PASSWORD + value: passw0rd + - name: MYSQL_DATABASE # or MARIADB_DATABASE + value: instana_test_db + - name: mongo + image: public.ecr.aws/docker/library/mongo:7.0.6 + - name: postgres + image: public.ecr.aws/docker/library/postgres:16.2-bookworm + env: + - name: POSTGRES_USER + value: root + - name: POSTGRES_PASSWORD + value: passw0rd + - name: POSTGRES_DB + value: instana_test_db + readinessProbe: + exec: + command: + - sh + - -c + - pg_isready --host 127.0.0.1 --port 5432 --dbname=${POSTGRES_DB} + timeoutSeconds: 10 + - name: redis + image: public.ecr.aws/docker/library/redis:7.2.4-bookworm + - name: rabbitmq + image: public.ecr.aws/docker/library/rabbitmq:3.13.0 + params: + - name: py-version + type: string + workspaces: + - name: task-pvc + mountPath: /workspace + steps: + - name: unittest + image: public.ecr.aws/docker/library/python:$(params.py-version) + env: + - name: TEST_CONFIGURATION + value: default + workingDir: /workspace/python-sensor/ + command: + - /workspace/python-sensor/.tekton/run_unittests.sh From 4663e37366eae0fdcc449717ccb663a12e5b0c5e Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Tue, 19 Aug 2025 21:26:14 +0200 Subject: [PATCH 25/86] ci: Update Tekton PR pipeline. 
Signed-off-by: Paulo Vital --- .tekton/github-pr-pipeline.yaml.part | 24 ++++++++++++++++++++++++ .tekton/github-set-status-task.yaml | 3 +-- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/.tekton/github-pr-pipeline.yaml.part b/.tekton/github-pr-pipeline.yaml.part index 5e442b7b..e7c15930 100644 --- a/.tekton/github-pr-pipeline.yaml.part +++ b/.tekton/github-pr-pipeline.yaml.part @@ -8,6 +8,27 @@ spec: type: string - name: git-commit-sha type: string + - name: py-38-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.8-bookworm + - name: py-39-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.9-bookworm + - name: py-310-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.10-bookworm + - name: py-311-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.11-bookworm + - name: py-312-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.12-bookworm + - name: py-313-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.13-bookworm + - name: py-314-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.14.0rc2 workspaces: - name: python-tracer-ci-pipeline-pvc tasks: @@ -28,6 +49,9 @@ spec: - unittest-default - unittest-cassandra - unittest-gevent-starlette + - unittest-aws + - unittest-kafka + - unittest-python-next taskRef: kind: Task name: github-set-status diff --git a/.tekton/github-set-status-task.yaml b/.tekton/github-set-status-task.yaml index 631d234b..f7ea7b4a 100644 --- a/.tekton/github-set-status-task.yaml +++ b/.tekton/github-set-status-task.yaml @@ -14,8 +14,7 @@ spec: secretName: githubtoken steps: - name: set-status - # quay.io/curl/curl:8.11.0 - image: quay.io/curl/curl@sha256:b90c4281fe1a4c6cc2b6a665c531d448bba078d75ffa98187e7d7e530fca5209 + image: quay.io/curl/curl:latest env: - name: SHA value: $(params.SHA) From ea9c383e17a1f59ba39ae758d54c47305ebe0bc3 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Tue, 19 Aug 2025 21:43:32 +0200 Subject: [PATCH 26/86] ci: Update Tekton scheduled pipelines. 
Signed-off-by: Paulo Vital --- .../.currency/currency-scheduled-eventlistener.yaml | 5 ++--- .tekton/.currency/currency-tasks.yaml | 10 ++++------ .tekton/scheduled-eventlistener.yaml | 3 +-- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/.tekton/.currency/currency-scheduled-eventlistener.yaml b/.tekton/.currency/currency-scheduled-eventlistener.yaml index 8bf6e3ed..b410dc94 100644 --- a/.tekton/.currency/currency-scheduled-eventlistener.yaml +++ b/.tekton/.currency/currency-scheduled-eventlistener.yaml @@ -41,15 +41,14 @@ kind: CronJob metadata: name: python-currency-cronjob spec: - schedule: "35 0 * * Mon-Fri" + schedule: "35 1 * * Mon-Fri" jobTemplate: spec: template: spec: containers: - name: http-request-to-el-svc - # quay.io/curl/curl:8.11.0 - image: quay.io/curl/curl@sha256:b90c4281fe1a4c6cc2b6a665c531d448bba078d75ffa98187e7d7e530fca5209 + image: quay.io/curl/curl:latest imagePullPolicy: IfNotPresent args: ["curl", "-X", "POST", "--data", "{}", "el-python-currency-cron-listener.default.svc.cluster.local:8080"] restartPolicy: OnFailure diff --git a/.tekton/.currency/currency-tasks.yaml b/.tekton/.currency/currency-tasks.yaml index 46a41a35..c35a97d2 100644 --- a/.tekton/.currency/currency-tasks.yaml +++ b/.tekton/.currency/currency-tasks.yaml @@ -11,8 +11,7 @@ spec: mountPath: /workspace steps: - name: clone-repo - # public.ecr.aws/docker/library/alpine:3.20.3 - image: public.ecr.aws/docker/library/alpine@sha256:029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85 + image: public.ecr.aws/docker/library/alpine:latest script: | #!/bin/sh echo "Installing git" @@ -33,14 +32,14 @@ spec: mountPath: /workspace steps: - name: generate-currency-report - # public.ecr.aws/docker/library/python:3.12.10-bookworm - image: public.ecr.aws/docker/library/python@sha256:4ea730e54e2a87b716ffc58a426bd627baa182a3d4d5696d05c1bca2dde775aa + image: public.ecr.aws/docker/library/python:3.12-bookworm script: | #!/usr/bin/env bash cd /workspace/python-sensor/.tekton/.currency python -m venv /tmp/venv source /tmp/venv/bin/activate + pip install --upgrade pip pip install -r resources/requirements.txt python scripts/generate_report.py @@ -63,8 +62,7 @@ spec: mountPath: /workspace steps: - name: upload-currency-report - # public.ecr.aws/docker/library/alpine:3.20.3 - image: public.ecr.aws/docker/library/alpine@sha256:029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85 + image: public.ecr.aws/docker/library/alpine:latest env: - name: GH_ENTERPRISE_TOKEN valueFrom: diff --git a/.tekton/scheduled-eventlistener.yaml b/.tekton/scheduled-eventlistener.yaml index 9352fc45..5fdc3129 100644 --- a/.tekton/scheduled-eventlistener.yaml +++ b/.tekton/scheduled-eventlistener.yaml @@ -61,8 +61,7 @@ spec: spec: containers: - name: git - # public.ecr.aws/docker/library/alpine:3.20.3 - image: public.ecr.aws/docker/library/alpine@sha256:029a752048e32e843bd6defe3841186fb8d19a28dae8ec287f433bb9d6d1ad85 + image: public.ecr.aws/docker/library/alpine:latest script: | #!/bin/sh echo "Installing git" From bdc1fd391c48b9f8d382736c03c84ac22180088c Mon Sep 17 00:00:00 2001 From: Cagri Yonca Date: Sat, 23 Aug 2025 16:03:08 +0200 Subject: [PATCH 27/86] fix(urllib3): ignore internal urllib3 span creation Signed-off-by: Cagri Yonca --- src/instana/instrumentation/urllib3.py | 24 ++++++++++++++-- tests/clients/test_urllib3.py | 38 ++++++++++++++++++++++++++ 2 files changed, 60 insertions(+), 2 deletions(-) diff --git a/src/instana/instrumentation/urllib3.py b/src/instana/instrumentation/urllib3.py index 
4536d2be..52d3e9c8 100644 --- a/src/instana/instrumentation/urllib3.py +++ b/src/instana/instrumentation/urllib3.py @@ -11,7 +11,11 @@ from instana.propagators.format import Format from instana.singletons import agent from instana.util.secrets import strip_secrets_from_query -from instana.util.traceutils import get_tracer_tuple, tracing_is_off, extract_custom_headers +from instana.util.traceutils import ( + get_tracer_tuple, + tracing_is_off, + extract_custom_headers, +) if TYPE_CHECKING: from instana.span.span import InstanaSpan @@ -91,7 +95,23 @@ def urlopen_with_instana( tracer, parent_span, span_name = get_tracer_tuple() # If we're not tracing, just return; boto3 has it's own visibility - if tracing_is_off() or (span_name == "boto3"): + # Also, skip creating spans for internal Instana calls when + # 'com.instana' appears in either the full URL, the path argument, + # or the connection host. + request_url_or_path = ( + kwargs.get("request_url") + or kwargs.get("url") + or (args[1] if len(args) >= 2 else "") + or "" + ) + host = getattr(instance, "host", "") or "" + + if ( + tracing_is_off() + or span_name == "boto3" + or "com.instana" in request_url_or_path + or "com.instana" in host + ): return wrapped(*args, **kwargs) parent_context = parent_span.get_span_context() if parent_span else None diff --git a/tests/clients/test_urllib3.py b/tests/clients/test_urllib3.py index 62b07d49..6c5fc318 100644 --- a/tests/clients/test_urllib3.py +++ b/tests/clients/test_urllib3.py @@ -992,3 +992,41 @@ def test_collect_kvs_exception( caplog.set_level(logging.DEBUG, logger="instana") collect_kvs({}, (), {}) assert "urllib3 _collect_kvs error: " in caplog.messages + + def test_internal_span_creation_with_url_in_hostname(self) -> None: + internal_url = "https://com.instana.example.com/api/test" + + with tracer.start_as_current_span("test"): + try: + self.http.request("GET", internal_url, retries=False, timeout=1) + except Exception: + pass + + spans = self.recorder.queued_spans() + + assert len(spans) == 1 + + test_span = spans[0] + assert test_span.data["sdk"]["name"] == "test" + + urllib3_spans = [span for span in spans if span.n == "urllib3"] + assert len(urllib3_spans) == 0 + + def test_internal_span_creation_with_url_in_path(self) -> None: + internal_url_path = "https://example.com/com.instana/api/test" + + with tracer.start_as_current_span("test"): + try: + self.http.request("GET", internal_url_path, retries=False, timeout=1) + except Exception: + pass + + spans = self.recorder.queued_spans() + + assert len(spans) == 1 + + test_span = spans[0] + assert test_span.data["sdk"]["name"] == "test" + + urllib3_spans = [span for span in spans if span.n == "urllib3"] + assert len(urllib3_spans) == 0 From 764896f8f85e48c53c1a6fa9365a02892ed0bc41 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Mon, 21 Jul 2025 15:44:48 +0200 Subject: [PATCH 28/86] style(fsm): format TheMachine and Discovery Added type annotations to the fsm.py file and used ruff (vscode) to: - Black-compatible code formatting. - fix all auto-fixable violations, like unused imports. - isort-compatible import sorting. 
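With the annotations applied, the Discovery container reduces to the
sketch below (not the full module). Note that the new hints need Dict
and List imported from typing alongside Any and Optional, since
class-level annotations are still evaluated at import time on Python
versions before 3.14:

    from typing import Any, Dict, List, Optional

    class Discovery:
        pid: int = 0
        name: Optional[str] = None
        args: Optional[List[str]] = None
        fd: int = -1
        inode: str = ""

        def __init__(self, **kwds: Any) -> None:
            self.__dict__.update(kwds)

        def to_dict(self) -> Dict[str, Any]:
            # Shape expected by the host agent announce payload.
            return {
                "pid": self.pid,
                "name": self.name,
                "args": self.args,
                "fd": self.fd,
                "inode": self.inode,
            }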
Signed-off-by: Paulo Vital --- src/instana/fsm.py | 158 ++++++++++++++++++++++++++------------------- 1 file changed, 92 insertions(+), 66 deletions(-) diff --git a/src/instana/fsm.py b/src/instana/fsm.py index 1897cf30..11eecb8b 100644 --- a/src/instana/fsm.py +++ b/src/instana/fsm.py @@ -8,61 +8,70 @@ import subprocess import sys import threading +from typing import TYPE_CHECKING, Any, Callable, Optional from fysom import Fysom -from .log import logger -from .util import get_default_gateway -from .version import VERSION +from instana.log import logger +from instana.util import get_default_gateway +from instana.version import VERSION +if TYPE_CHECKING: + from instana.agent.host import HostAgent -class Discovery(object): - pid = 0 - name = None - args = None - fd = -1 - inode = "" - def __init__(self, **kwds): +class Discovery: + pid: int = 0 + name: Optional[str] = None + args: Optional[List[str]] = None + fd: int = -1 + inode: str = "" + + def __init__(self, **kwds: Any) -> None: self.__dict__.update(kwds) - def to_dict(self): - kvs = dict() - kvs['pid'] = self.pid - kvs['name'] = self.name - kvs['args'] = self.args - kvs['fd'] = self.fd - kvs['inode'] = self.inode + def to_dict(self) -> Dict[str, Any]: + kvs: Dict[str, Any] = dict() + kvs["pid"] = self.pid + kvs["name"] = self.name + kvs["args"] = self.args + kvs["fd"] = self.fd + kvs["inode"] = self.inode return kvs -class TheMachine(object): +class TheMachine: RETRY_PERIOD = 30 THREAD_NAME = "Instana Machine" - agent = None + agent: Optional["HostAgent"] = None fsm = None timer = None warnedPeriodic = False - def __init__(self, agent): + def __init__(self, agent: "HostAgent") -> None: logger.debug("Initializing host agent state machine") self.agent = agent - self.fsm = Fysom({ - "events": [ - ("lookup", "*", "found"), - ("announce", "found", "announced"), - ("pending", "announced", "wait4init"), - ("ready", "wait4init", "good2go")], - "callbacks": { - # Can add the following to debug - # "onchangestate": self.print_state_change, - "onlookup": self.lookup_agent_host, - "onannounce": self.announce_sensor, - "onpending": self.on_ready, - "ongood2go": self.on_good2go}}) + self.fsm = Fysom( + { + "events": [ + ("lookup", "*", "found"), + ("announce", "found", "announced"), + ("pending", "announced", "wait4init"), + ("ready", "wait4init", "good2go"), + ], + "callbacks": { + # Can add the following to debug + # "onchangestate": self.print_state_change, + "onlookup": self.lookup_agent_host, + "onannounce": self.announce_sensor, + "onpending": self.on_ready, + "ongood2go": self.on_good2go, + }, + } + ) self.timer = threading.Timer(1, self.fsm.lookup) self.timer.daemon = True @@ -70,11 +79,12 @@ def __init__(self, agent): self.timer.start() @staticmethod - def print_state_change(e): - logger.debug('========= (%i#%s) FSM event: %s, src: %s, dst: %s ==========', - os.getpid(), threading.current_thread().name, e.event, e.src, e.dst) + def print_state_change(e: Any) -> None: + logger.debug( + f"========= ({os.getpid()}#{threading.current_thread().name}) FSM event: {e.event}, src: {e.src}, dst: {e.dst} ==========" + ) - def reset(self): + def reset(self) -> None: """ reset is called to start from scratch in a process. It may be called on first boot or after a detected fork. @@ -87,7 +97,7 @@ def reset(self): logger.debug("State machine being reset. 
Will start a new announce cycle.") self.fsm.lookup() - def lookup_agent_host(self, e): + def lookup_agent_host(self, e: Any) -> bool: host = self.agent.options.agent_host port = self.agent.options.agent_port @@ -105,39 +115,43 @@ def lookup_agent_host(self, e): return True if self.warnedPeriodic is False: - logger.info("Instana Host Agent couldn't be found. Will retry periodically...") + logger.info( + "Instana Host Agent couldn't be found. Will retry periodically..." + ) self.warnedPeriodic = True - self.schedule_retry(self.lookup_agent_host, e, self.THREAD_NAME + ": agent_lookup") + self.schedule_retry( + self.lookup_agent_host, e, f"{self.THREAD_NAME}: agent_lookup" + ) return False - def announce_sensor(self, e): - logger.debug("Attempting to make an announcement to the agent on %s:%d", - self.agent.options.agent_host, self.agent.options.agent_port) + def announce_sensor(self, e: Any) -> bool: + logger.debug( + f"Attempting to make an announcement to the agent on {self.agent.options.agent_host}:{self.agent.options.agent_port}" + ) pid = os.getpid() try: if os.path.isfile("/proc/self/cmdline"): with open("/proc/self/cmdline") as cmd: cmdinfo = cmd.read() - cmdline = cmdinfo.split('\x00') + cmdline = cmdinfo.split("\x00") else: # Python doesn't provide a reliable method to determine what # the OS process command line may be. Here we are forced to # rely on ps rather than adding a dependency on something like # psutil which requires dev packages, gcc etc... - proc = subprocess.Popen(["ps", "-p", str(pid), "-o", "command"], - stdout=subprocess.PIPE) + proc = subprocess.Popen( + ["ps", "-p", str(pid), "-o", "command"], stdout=subprocess.PIPE + ) (out, _) = proc.communicate() - parts = out.split(b'\n') + parts = out.split(b"\n") cmdline = [parts[1].decode("utf-8")] except Exception: cmdline = sys.argv logger.debug("announce_sensor", exc_info=True) - d = Discovery(pid=self.__get_real_pid(), - name=cmdline[0], - args=cmdline[1:]) + d = Discovery(pid=self.__get_real_pid(), name=cmdline[0], args=cmdline[1:]) # If we're on a system with a procfs if os.path.exists("/proc/"): @@ -146,47 +160,56 @@ def announce_sensor(self, e): # PermissionError: [Errno 13] Permission denied: '/proc/6/fd/8' # Use a try/except as a safety sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.connect((self.agent.options.agent_host, self.agent.options.agent_port)) - path = "/proc/%d/fd/%d" % (pid, sock.fileno()) + sock.connect( + (self.agent.options.agent_host, self.agent.options.agent_port) + ) + path = f"/proc/{pid}/fd/{sock.fileno()}" d.fd = sock.fileno() d.inode = os.readlink(path) - except: + except: # noqa: E722 logger.debug("Error generating file descriptor: ", exc_info=True) payload = self.agent.announce(d) if not payload: logger.debug("Cannot announce sensor. Scheduling retry.") - self.schedule_retry(self.announce_sensor, e, self.THREAD_NAME + ": announce") + self.schedule_retry( + self.announce_sensor, e, f"{self.THREAD_NAME}: announce" + ) return False - + self.agent.set_from(payload) self.fsm.pending() - logger.debug("Announced pid: %s (true pid: %s). Waiting for Agent Ready...", - str(pid), str(self.agent.announce_data.pid)) + logger.debug( + f"Announced PID: {pid} (true PID: {self.agent.announce_data.pid}). Waiting for Agent Ready..." 
+ ) return True - def schedule_retry(self, fun, e, name): + def schedule_retry(self, fun: Callable, e: Any, name: str) -> None: self.timer = threading.Timer(self.RETRY_PERIOD, fun, [e]) self.timer.daemon = True self.timer.name = name self.timer.start() - def on_ready(self, _): + def on_ready(self, _: Any) -> None: self.agent.start() ns_pid = str(os.getpid()) true_pid = str(self.agent.announce_data.pid) - logger.info("Instana host agent available. We're in business. Announced PID: %s (true pid: %s)", ns_pid, true_pid) + logger.info( + f"Instana host agent available. We're in business. Announced PID: {ns_pid} (true PID: {true_pid})" + ) - def on_good2go(self, _): + def on_good2go(self, _: Any) -> None: ns_pid = str(os.getpid()) true_pid = str(self.agent.announce_data.pid) - self.agent.log_message_to_host_agent("Instana Python Package %s: PID %s (true pid: %s) is now online and reporting" % (VERSION, ns_pid, true_pid)) + self.agent.log_message_to_host_agent( + f"Instana Python Package {VERSION}: PID {ns_pid} (true PID: {true_pid}) is now online and reporting" + ) - def __get_real_pid(self): + def __get_real_pid(self) -> int: """ Attempts to determine the true process ID by querying the /proc//sched file. This works on systems with a proc filesystem. @@ -195,14 +218,14 @@ def __get_real_pid(self): pid = None if os.path.exists("/proc/"): - sched_file = "/proc/%d/sched" % os.getpid() + sched_file = f"/proc/{os.getpid()}/sched" if os.path.isfile(sched_file): try: file = open(sched_file) line = file.readline() - g = re.search(r'\((\d+),', line) - if len(g.groups()) == 1: + g = re.search(r"\((\d+),", line) + if g and len(g.groups()) == 1: pid = int(g.groups()[0]) except Exception: logger.debug("parsing sched file failed", exc_info=True) @@ -211,3 +234,6 @@ def __get_real_pid(self): pid = os.getpid() return pid + + +# Made with Bob From 50ee33cccdeed40a3057fe31db8230249fc94a8d Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Tue, 22 Jul 2025 10:21:22 +0200 Subject: [PATCH 29/86] refactor: Make Discovery a DataClass. And move it out of the fsm.py file. Signed-off-by: Paulo Vital --- src/instana/agent/host.py | 9 ++++++--- src/instana/fsm.py | 27 ++------------------------- src/instana/util/process_discovery.py | 13 +++++++++++++ tests/agent/test_host.py | 4 +++- 4 files changed, 24 insertions(+), 29 deletions(-) create mode 100644 src/instana/util/process_discovery.py diff --git a/src/instana/agent/host.py b/src/instana/agent/host.py index 177ca44c..ad39440c 100644 --- a/src/instana/agent/host.py +++ b/src/instana/agent/host.py @@ -9,7 +9,7 @@ import json import os from datetime import datetime -from typing import Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union import requests import urllib3 @@ -17,7 +17,7 @@ from instana.agent.base import BaseAgent from instana.collector.host import HostCollector -from instana.fsm import Discovery, TheMachine +from instana.fsm import TheMachine from instana.log import logger from instana.options import StandardOptions from instana.util import to_json @@ -25,6 +25,9 @@ from instana.util.span_utils import get_operation_specifiers from instana.version import VERSION +if TYPE_CHECKING: + from instana.util.process_discovery import Discovery + class AnnounceData(object): """The Announce Payload""" @@ -176,7 +179,7 @@ def is_agent_listening( def announce( self, - discovery: Discovery, + discovery: "Discovery", ) -> Optional[Dict[str, Any]]: """ With the passed in Discovery class, attempt to announce to the host agent. 
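
For reference, a minimal usage sketch of the new dataclass (assuming only
the fields shown in process_discovery.py below): keyword construction is
unchanged, and `dataclasses.asdict` reproduces what the old hand-written
`to_dict()` returned:

    from dataclasses import asdict

    from instana.util.process_discovery import Discovery

    d = Discovery(pid=1234, name="python", args=["app.py"])
    # The two unset fields fall back to their declared defaults.
    assert asdict(d) == {
        "pid": 1234,
        "name": "python",
        "args": ["app.py"],
        "fd": -1,
        "inode": "",
    }
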
diff --git a/src/instana/fsm.py b/src/instana/fsm.py index 11eecb8b..7355a0ab 100644 --- a/src/instana/fsm.py +++ b/src/instana/fsm.py @@ -8,46 +8,23 @@ import subprocess import sys import threading -from typing import TYPE_CHECKING, Any, Callable, Optional +from typing import TYPE_CHECKING, Any, Callable from fysom import Fysom from instana.log import logger from instana.util import get_default_gateway +from instana.util.process_discovery import Discovery from instana.version import VERSION if TYPE_CHECKING: from instana.agent.host import HostAgent -class Discovery: - pid: int = 0 - name: Optional[str] = None - args: Optional[List[str]] = None - fd: int = -1 - inode: str = "" - - def __init__(self, **kwds: Any) -> None: - self.__dict__.update(kwds) - - def to_dict(self) -> Dict[str, Any]: - kvs: Dict[str, Any] = dict() - kvs["pid"] = self.pid - kvs["name"] = self.name - kvs["args"] = self.args - kvs["fd"] = self.fd - kvs["inode"] = self.inode - return kvs - - class TheMachine: RETRY_PERIOD = 30 THREAD_NAME = "Instana Machine" - agent: Optional["HostAgent"] = None - fsm = None - timer = None - warnedPeriodic = False def __init__(self, agent: "HostAgent") -> None: diff --git a/src/instana/util/process_discovery.py b/src/instana/util/process_discovery.py new file mode 100644 index 00000000..6a83efe5 --- /dev/null +++ b/src/instana/util/process_discovery.py @@ -0,0 +1,13 @@ +# (c) Copyright IBM Corp. 2025 + +from dataclasses import dataclass +from typing import List, Optional + + +@dataclass +class Discovery: + pid: int = 0 # the PID of this process + name: Optional[str] = None # the name of the executable + args: Optional[List[str]] = None # the command line arguments + fd: int = -1 # the file descriptor of the socket associated with the connection to the agent for this HTTP request + inode: str = "" # the inode of the socket associated with the connection to the agent for this HTTP request diff --git a/tests/agent/test_host.py b/tests/agent/test_host.py index 29b5fd10..058c676c 100644 --- a/tests/agent/test_host.py +++ b/tests/agent/test_host.py @@ -14,12 +14,13 @@ from instana.agent.host import AnnounceData, HostAgent from instana.collector.host import HostCollector -from instana.fsm import Discovery, TheMachine +from instana.fsm import TheMachine from instana.options import StandardOptions from instana.recorder import StanRecorder from instana.singletons import get_agent from instana.span.span import InstanaSpan from instana.span_context import SpanContext +from instana.util.process_discovery import Discovery from instana.util.runtime import is_windows @@ -715,3 +716,4 @@ def test_is_service_or_endpoint_ignored(self) -> None: # don't ignore other services assert not self.agent._HostAgent__is_endpoint_ignored("service3") + assert not self.agent._HostAgent__is_endpoint_ignored("service3") From 1da36f4201ee93080bf0443dd754c1a76161100f Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Thu, 24 Jul 2025 15:26:09 +0200 Subject: [PATCH 30/86] feat(fsm): add support to announce Windows processes. This commit adds cross-platform process announcement to Instana Host Agents. The implementation gracefully handles platform differences, ensuring consistent process information on both Unix and Windows environments: - Created a new `_get_cmdline()` function to return the command line of the current monitored process independently of the running platform. - Created the `_get_cmdline_windows()` function to return the command line on Windows machines. 
- Created `_get_cmdline_unix()` that returns the command line on Unix
  machines. It decides how to collect the information by running either
  `_get_cmdline_linux_proc()` or `_get_cmdline_unix_ps()`.
- Refactored the `_setup_socket_connection()` function.

Signed-off-by: Paulo Vital
---
 src/instana/fsm.py | 141 +++++++++++++++++++++++++++++++--------------
 1 file changed, 97 insertions(+), 44 deletions(-)

diff --git a/src/instana/fsm.py b/src/instana/fsm.py
index 7355a0ab..be355b1a 100644
--- a/src/instana/fsm.py
+++ b/src/instana/fsm.py
@@ -8,13 +8,14 @@
 import subprocess
 import sys
 import threading
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Callable, List
 
 from fysom import Fysom
 
 from instana.log import logger
 from instana.util import get_default_gateway
 from instana.util.process_discovery import Discovery
+from instana.util.runtime import is_windows
 from instana.version import VERSION
 
 if TYPE_CHECKING:
@@ -103,48 +104,16 @@ def lookup_agent_host(self, e: Any) -> bool:
         return False
 
     def announce_sensor(self, e: Any) -> bool:
+        pid: int = os.getpid()
         logger.debug(
-            f"Attempting to make an announcement to the agent on {self.agent.options.agent_host}:{self.agent.options.agent_port}"
+            f"Attempting to announce PID {pid} to the agent on {self.agent.options.agent_host}:{self.agent.options.agent_port}"
         )
-        pid = os.getpid()
 
-        try:
-            if os.path.isfile("/proc/self/cmdline"):
-                with open("/proc/self/cmdline") as cmd:
-                    cmdinfo = cmd.read()
-                cmdline = cmdinfo.split("\x00")
-            else:
-                # Python doesn't provide a reliable method to determine what
-                # the OS process command line may be. Here we are forced to
-                # rely on ps rather than adding a dependency on something like
-                # psutil which requires dev packages, gcc etc...
-                proc = subprocess.Popen(
-                    ["ps", "-p", str(pid), "-o", "command"], stdout=subprocess.PIPE
-                )
-                (out, _) = proc.communicate()
-                parts = out.split(b"\n")
-                cmdline = [parts[1].decode("utf-8")]
-        except Exception:
-            cmdline = sys.argv
-            logger.debug("announce_sensor", exc_info=True)
+        cmdline = self._get_cmdline(pid)
 
         d = Discovery(pid=self.__get_real_pid(), name=cmdline[0], args=cmdline[1:])
 
-        # If we're on a system with a procfs
-        if os.path.exists("/proc/"):
-            try:
-                # In CentOS 7, some odd things can happen such as:
-                # PermissionError: [Errno 13] Permission denied: '/proc/6/fd/8'
-                # Use a try/except as a safety
-                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-                sock.connect(
-                    (self.agent.options.agent_host, self.agent.options.agent_port)
-                )
-                path = f"/proc/{pid}/fd/{sock.fileno()}"
-                d.fd = sock.fileno()
-                d.inode = os.readlink(path)
-            except:  # noqa: E722
-                logger.debug("Error generating file descriptor: ", exc_info=True)
+        self._setup_socket_connection(d, pid)
 
         payload = self.agent.announce(d)
 
@@ -189,28 +158,112 @@ def on_good2go(self, _: Any) -> None:
     def __get_real_pid(self) -> int:
         """
         Attempts to determine the true process ID by querying the
-        /proc/<pid>/sched file. This works on systems with a proc filesystem.
-        Otherwise default to os default.
+        /proc/<pid>/sched file on Linux systems or using the OS default PID.
+        For Windows, we use the standard OS PID as there's no equivalent concept
+        of container PIDs vs host PIDs.
""" pid = None + # For Linux systems with procfs if os.path.exists("/proc/"): sched_file = f"/proc/{os.getpid()}/sched" if os.path.isfile(sched_file): try: - file = open(sched_file) - line = file.readline() - g = re.search(r"\((\d+),", line) - if g and len(g.groups()) == 1: - pid = int(g.groups()[0]) + with open(sched_file) as file: + line = file.readline() + g = re.search(r"\((\d+),", line) + if g and len(g.groups()) == 1: + pid = int(g.groups()[0]) except Exception: logger.debug("parsing sched file failed", exc_info=True) + # For Windows or if Linux method failed if pid is None: pid = os.getpid() return pid + def _get_cmdline_windows(self) -> List[str]: + """ + Get command line using Windows API + """ + import ctypes + from ctypes import wintypes + + GetCommandLineW = ctypes.windll.kernel32.GetCommandLineW + GetCommandLineW.argtypes = [] + GetCommandLineW.restype = wintypes.LPCWSTR + + cmd = GetCommandLineW() + # Simple parsing - this is a basic approach and might need refinement + # for complex command lines with quotes and spaces + return cmd.split() + + def _get_cmdline_linux_proc(self) -> List[str]: + """ + Get command line from Linux /proc filesystem + """ + with open("/proc/self/cmdline") as cmd: + cmdinfo = cmd.read() + return cmdinfo.split("\x00") + + def _get_cmdline_unix_ps(self, pid: int) -> List[str]: + """ + Get command line using ps command (for Unix-like systems without /proc) + """ + proc = subprocess.Popen( + ["ps", "-p", str(pid), "-o", "command"], stdout=subprocess.PIPE + ) + (out, _) = proc.communicate() + parts = out.split(b"\n") + return [parts[1].decode("utf-8")] + + def _get_cmdline_unix(self, pid: int) -> List[str]: + """ + Get command line using Unix + """ + if os.path.isfile("/proc/self/cmdline"): + return self._get_cmdline_linux_proc() + else: + return self._get_cmdline_unix_ps(pid) + + def _get_cmdline(self, pid: int) -> List[str]: + """ + Get command line in a platform-independent way + """ + try: + if is_windows(): + return self._get_cmdline_windows() + else: + return self._get_cmdline_unix(pid) + except Exception: + logger.debug("Error getting command line", exc_info=True) + return sys.argv + + def _setup_socket_connection(self, discovery: Discovery, pid: int) -> None: + """ + Set up socket connection and populate discovery object with socket details + """ + try: + # In CentOS 7, some odd things can happen such as: + # PermissionError: [Errno 13] Permission denied: '/proc/6/fd/8' + # Use a try/except as a safety + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect((self.agent.options.agent_host, self.agent.options.agent_port)) + discovery.fd = sock.fileno() + + # If we're on a system with a procfs (Linux) + if os.path.exists("/proc/"): + try: + path = "/proc/%d/fd/%d" % (pid, sock.fileno()) + discovery.inode = os.readlink(path) + except Exception: + logger.debug( + "Error generating file descriptor inode: ", exc_info=True + ) + except Exception: + logger.debug("Error creating socket connection: ", exc_info=True) + # Made with Bob From d47eccfe071ecd70333b6541f5556c3867a58d36 Mon Sep 17 00:00:00 2001 From: Cagri Yonca Date: Thu, 28 Aug 2025 13:22:11 +0200 Subject: [PATCH 31/86] fix(kafka): adapt python tracer to trace-test-suite Signed-off-by: Cagri Yonca --- .../instrumentation/kafka/kafka_python.py | 158 ++++++++++++------ tests/clients/kafka/test_kafka_python.py | 136 +++++++++++---- 2 files changed, 218 insertions(+), 76 deletions(-) diff --git a/src/instana/instrumentation/kafka/kafka_python.py 
b/src/instana/instrumentation/kafka/kafka_python.py index 278390f9..c11e9355 100644 --- a/src/instana/instrumentation/kafka/kafka_python.py +++ b/src/instana/instrumentation/kafka/kafka_python.py @@ -1,23 +1,31 @@ # (c) Copyright IBM Corp. 2025 + try: + import contextvars import inspect from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple import kafka # noqa: F401 import wrapt + from opentelemetry import context, trace from opentelemetry.trace import SpanKind from instana.log import logger from instana.propagators.format import Format + from instana.singletons import get_tracer from instana.util.traceutils import ( get_tracer_tuple, tracing_is_off, ) + from instana.span.span import InstanaSpan if TYPE_CHECKING: from kafka.producer.future import FutureRecordMetadata + consumer_token = None + consumer_span = contextvars.ContextVar("kafka_python_consumer_span") + @wrapt.patch_function_wrapper("kafka", "KafkaProducer.send") def trace_kafka_send( wrapped: Callable[..., "kafka.KafkaProducer.send"], @@ -59,35 +67,86 @@ def trace_kafka_send( kwargs["headers"] = headers try: res = wrapped(*args, **kwargs) + return res except Exception as exc: span.record_exception(exc) - else: - return res def create_span( span_type: str, topic: Optional[str], headers: Optional[List[Tuple[str, bytes]]] = [], - exception: Optional[str] = None, + exception: Optional[Exception] = None, ) -> None: - tracer, parent_span, _ = get_tracer_tuple() - parent_context = ( - parent_span.get_span_context() - if parent_span - else tracer.extract( - Format.KAFKA_HEADERS, - headers, - disable_w3c_trace_context=True, + try: + span = consumer_span.get(None) + if span is not None: + close_consumer_span(span) + + tracer, parent_span, _ = get_tracer_tuple() + + if not tracer: + tracer = get_tracer() + + is_suppressed = False + if topic: + is_suppressed = tracer.exporter._HostAgent__is_endpoint_ignored( + "kafka", + span_type, + topic, + ) + + if not is_suppressed and headers: + for header_name, header_value in headers: + if header_name == "x_instana_l_s" and header_value == b"0": + is_suppressed = True + break + + if is_suppressed: + return + + parent_context = ( + parent_span.get_span_context() + if parent_span + else tracer.extract( + Format.KAFKA_HEADERS, + headers, + disable_w3c_trace_context=True, + ) + ) + span = tracer.start_span( + "kafka-consumer", span_context=parent_context, kind=SpanKind.CONSUMER ) - ) - with tracer.start_as_current_span( - "kafka-consumer", span_context=parent_context, kind=SpanKind.CONSUMER - ) as span: if topic: span.set_attribute("kafka.service", topic) span.set_attribute("kafka.access", span_type) if exception: span.record_exception(exception) + span.end() + + save_consumer_span_into_context(span) + except Exception: + pass + + def save_consumer_span_into_context(span: "InstanaSpan") -> None: + global consumer_token + ctx = trace.set_span_in_context(span) + consumer_token = context.attach(ctx) + consumer_span.set(span) + + def close_consumer_span(span: "InstanaSpan") -> None: + global consumer_token + if span.is_recording(): + span.end() + consumer_span.set(None) + if consumer_token is not None: + context.detach(consumer_token) + consumer_token = None + + def clear_context() -> None: + global consumer_token + context.attach(trace.set_span_in_context(None)) + consumer_token = None + consumer_span.set(None) @wrapt.patch_function_wrapper("kafka", "KafkaConsumer.__next__") def trace_kafka_consume( @@ -96,29 +155,41 @@ def trace_kafka_consume( args: Tuple[int, str, Tuple[Any, ...]], 
kwargs: Dict[str, Any], ) -> "FutureRecordMetadata": - if tracing_is_off(): - return wrapped(*args, **kwargs) - exception = None res = None try: res = wrapped(*args, **kwargs) + create_span( + "consume", + res.topic if res else list(instance.subscription())[0], + res.headers, + ) + return res + except StopIteration: + pass except Exception as exc: exception = exc - finally: - if res: - create_span( - "consume", - res.topic if res else list(instance.subscription())[0], - res.headers, - ) - else: - create_span( - "consume", list(instance.subscription())[0], exception=exception - ) + create_span( + "consume", list(instance.subscription())[0], exception=exception + ) - return res + @wrapt.patch_function_wrapper("kafka", "KafkaConsumer.close") + def trace_kafka_close( + wrapped: Callable[..., None], + instance: "kafka.KafkaConsumer", + args: Tuple[Any, ...], + kwargs: Dict[str, Any], + ) -> None: + try: + span = consumer_span.get(None) + if span is not None: + close_consumer_span(span) + except Exception as e: + logger.debug( + f"Error while closing kafka-consumer span: {e}" + ) # pragma: no cover + return wrapped(*args, **kwargs) @wrapt.patch_function_wrapper("kafka", "KafkaConsumer.poll") def trace_kafka_poll( @@ -127,9 +198,6 @@ def trace_kafka_poll( args: Tuple[int, str, Tuple[Any, ...]], kwargs: Dict[str, Any], ) -> Optional[Dict[str, Any]]: - if tracing_is_off(): - return wrapped(*args, **kwargs) - # The KafkaConsumer.consume() from the kafka-python-ng call the # KafkaConsumer.poll() internally, so we do not consider it here. if any( @@ -143,23 +211,17 @@ def trace_kafka_poll( try: res = wrapped(*args, **kwargs) + for partition, consumer_records in res.items(): + for message in consumer_records: + create_span( + "poll", + partition.topic, + message.headers if hasattr(message, "headers") else [], + ) + return res except Exception as exc: exception = exc - finally: - if res: - for partition, consumer_records in res.items(): - for message in consumer_records: - create_span( - "poll", - partition.topic, - message.headers if hasattr(message, "headers") else [], - ) - else: - create_span( - "poll", list(instance.subscription())[0], exception=exception - ) - - return res + create_span("poll", list(instance.subscription())[0], exception=exception) logger.debug("Instrumenting Kafka (kafka-python)") except ImportError: diff --git a/tests/clients/kafka/test_kafka_python.py b/tests/clients/kafka/test_kafka_python.py index dd568583..eb3723e3 100644 --- a/tests/clients/kafka/test_kafka_python.py +++ b/tests/clients/kafka/test_kafka_python.py @@ -17,6 +17,15 @@ from instana.util.config import parse_ignored_endpoints_from_yaml from tests.helpers import get_first_span_by_filter, testenv +from instana.instrumentation.kafka import kafka_python +from instana.instrumentation.kafka.kafka_python import ( + clear_context, + save_consumer_span_into_context, + close_consumer_span, + consumer_span, +) +from instana.span.span import InstanaSpan + class TestKafkaPython: @pytest.fixture(autouse=True) @@ -72,6 +81,10 @@ def _resource(self) -> Generator[None, None, None]: agent.options.allow_exit_as_root = False # Close connections self.producer.close() + + # Clear context + clear_context() + self.kafka_client.delete_topics( [ testenv["kafka_topic"], @@ -132,10 +145,17 @@ def test_trace_kafka_python_consume(self) -> None: consumer.close() spans = self.recorder.queued_spans() - assert len(spans) == 4 + assert len(spans) == 3 - kafka_span = spans[0] - test_span = spans[len(spans) - 1] + def filter(span): + return span.n 
== "kafka" and span.data["kafka"]["access"] == "consume" + + kafka_span = get_first_span_by_filter(spans, filter) + + def filter(span): + return span.n == "sdk" and span.data["sdk"]["name"] == "test" + + test_span = get_first_span_by_filter(spans, filter) # Same traceId assert test_span.t == kafka_span.t @@ -168,15 +188,22 @@ def test_trace_kafka_python_poll(self) -> None: ) with tracer.start_as_current_span("test"): - msg = consumer.poll() # noqa: F841 + msg = consumer.poll(timeout_ms=3000) # noqa: F841 consumer.close() spans = self.recorder.queued_spans() - assert len(spans) == 2 + assert len(spans) == 3 - kafka_span = spans[0] - test_span = spans[1] + def filter(span): + return span.n == "kafka" and span.data["kafka"]["access"] == "poll" + + kafka_span = get_first_span_by_filter(spans, filter) + + def filter(span): + return span.n == "sdk" and span.data["sdk"]["name"] == "test" + + test_span = get_first_span_by_filter(spans, filter) # Same traceId assert test_span.t == kafka_span.t @@ -194,27 +221,36 @@ def test_trace_kafka_python_poll(self) -> None: assert kafka_span.data["kafka"]["access"] == "poll" def test_trace_kafka_python_error(self) -> None: - # Consume the events consumer = KafkaConsumer( "inexistent_kafka_topic", bootstrap_servers=testenv["kafka_bootstrap_servers"], - auto_offset_reset="earliest", # consume earliest available messages - enable_auto_commit=False, # do not auto-commit offsets + auto_offset_reset="earliest", + enable_auto_commit=False, consumer_timeout_ms=1000, ) with tracer.start_as_current_span("test"): - for msg in consumer: - if msg is None: - break + consumer._client = None - consumer.close() + try: + for msg in consumer: + if msg is None: + break + except Exception: + pass spans = self.recorder.queued_spans() assert len(spans) == 2 - kafka_span = spans[0] - test_span = spans[1] + def filter(span): + return span.n == "kafka" and span.data["kafka"]["access"] == "consume" + + kafka_span = get_first_span_by_filter(spans, filter) + + def filter(span): + return span.n == "sdk" and span.data["sdk"]["name"] == "test" + + test_span = get_first_span_by_filter(spans, filter) # Same traceId assert test_span.t == kafka_span.t @@ -230,7 +266,10 @@ def test_trace_kafka_python_error(self) -> None: assert kafka_span.k == SpanKind.SERVER assert kafka_span.data["kafka"]["service"] == "inexistent_kafka_topic" assert kafka_span.data["kafka"]["access"] == "consume" - assert kafka_span.data["kafka"]["error"] == "StopIteration()" + assert ( + kafka_span.data["kafka"]["error"] + == "'NoneType' object has no attribute 'poll'" + ) def consume_from_topic(self, topic_name: str) -> None: consumer = KafkaConsumer( @@ -302,10 +341,7 @@ def test_ignore_kafka_consumer(self) -> None: self.consume_from_topic(testenv["kafka_topic"]) spans = self.recorder.queued_spans() - assert len(spans) == 4 - - filtered_spans = agent.filter_spans(spans) - assert len(filtered_spans) == 1 + assert len(spans) == 1 @patch.dict( os.environ, @@ -326,10 +362,10 @@ def test_ignore_specific_topic(self) -> None: self.consume_from_topic(testenv["kafka_topic"] + "_1") spans = self.recorder.queued_spans() - assert len(spans) == 11 + assert len(spans) == 7 filtered_spans = agent.filter_spans(spans) - assert len(filtered_spans) == 8 + assert len(filtered_spans) == 6 span_to_be_filtered = get_first_span_by_filter( spans, @@ -351,10 +387,7 @@ def test_ignore_specific_topic_with_config_file(self) -> None: self.consume_from_topic(testenv["kafka_topic"]) spans = self.recorder.queued_spans() - assert len(spans) == 3 - - 
filtered_spans = agent.filter_spans(spans) - assert len(filtered_spans) == 1 + assert len(spans) == 1 def test_kafka_consumer_root_exit(self) -> None: agent.options.allow_exit_as_root = True @@ -378,7 +411,7 @@ def test_kafka_consumer_root_exit(self) -> None: consumer.close() spans = self.recorder.queued_spans() - assert len(spans) == 4 + assert len(spans) == 3 producer_span = spans[0] consumer_span = spans[1] @@ -713,3 +746,50 @@ def test_kafka_downstream_suppression(self) -> None: format_span_id(producer_span_2.s).encode("utf-8"), ), ] + + def test_save_consumer_span_into_context(self, span: "InstanaSpan") -> None: + """Test save_consumer_span_into_context function.""" + # Verify initial state + assert consumer_span.get(None) is None + assert kafka_python.consumer_token is None + + # Save span into context + save_consumer_span_into_context(span) + + # Verify span is saved in context variable + assert consumer_span.get(None) == span + # Verify token is stored + assert kafka_python.consumer_token is not None + + def test_close_consumer_span_recording_span(self, span: "InstanaSpan") -> None: + """Test close_consumer_span with a recording span.""" + # Save span into context first + save_consumer_span_into_context(span) + assert kafka_python.consumer_token is not None + + # Verify span is recording + assert span.is_recording() + + # Close the span + close_consumer_span(span) + + # Verify span was ended and context cleared + assert not span.is_recording() + assert consumer_span.get(None) is None + assert kafka_python.consumer_token is None + + def test_clear_context(self, span: "InstanaSpan") -> None: + """Test clear_context function.""" + # Save span into context + save_consumer_span_into_context(span) + + # Verify context has data + assert consumer_span.get(None) == span + assert kafka_python.consumer_token is not None + + # Clear context + clear_context() + + # Verify all context is cleared + assert consumer_span.get(None) is None + assert kafka_python.consumer_token is None From 685e2ee90c64e911f1e6b831a6a73481481690b6 Mon Sep 17 00:00:00 2001 From: Cagri Yonca Date: Thu, 28 Aug 2025 13:22:19 +0200 Subject: [PATCH 32/86] fix(confluent-kafka): adapt python tracer to trace-test-suite Signed-off-by: Cagri Yonca --- .../kafka/confluent_kafka_python.py | 155 +++++++++++++----- tests/clients/kafka/test_confluent_kafka.py | 146 ++++++++++++----- 2 files changed, 225 insertions(+), 76 deletions(-) diff --git a/src/instana/instrumentation/kafka/confluent_kafka_python.py b/src/instana/instrumentation/kafka/confluent_kafka_python.py index 04b1164c..e5d991d2 100644 --- a/src/instana/instrumentation/kafka/confluent_kafka_python.py +++ b/src/instana/instrumentation/kafka/confluent_kafka_python.py @@ -1,19 +1,27 @@ # (c) Copyright IBM Corp. 
2025 + try: + import contextvars from typing import Any, Callable, Dict, List, Optional, Tuple import confluent_kafka # noqa: F401 import wrapt from confluent_kafka import Consumer, Producer + from opentelemetry import context, trace from opentelemetry.trace import SpanKind from instana.log import logger from instana.propagators.format import Format + from instana.singletons import get_tracer from instana.util.traceutils import ( get_tracer_tuple, tracing_is_off, ) + from instana.span.span import InstanaSpan + + consumer_token = None + consumer_span = contextvars.ContextVar("confluent_kafka_consumer_span") # As confluent_kafka is a wrapper around the C-developed librdkafka # (provided automatically via binary wheels), we have to create new classes @@ -47,6 +55,9 @@ def poll( ) -> Optional[confluent_kafka.Message]: return super().poll(timeout) + def close(self) -> None: + return super().close() + def trace_kafka_produce( wrapped: Callable[..., InstanaConfluentKafkaProducer.produce], instance: InstanaConfluentKafkaProducer, @@ -105,25 +116,82 @@ def create_span( headers: Optional[List[Tuple[str, bytes]]] = [], exception: Optional[str] = None, ) -> None: - tracer, parent_span, _ = get_tracer_tuple() - parent_context = ( - parent_span.get_span_context() - if parent_span - else tracer.extract( - Format.KAFKA_HEADERS, - headers, - disable_w3c_trace_context=True, + try: + span = consumer_span.get(None) + if span is not None: + close_consumer_span(span) + + tracer, parent_span, _ = get_tracer_tuple() + + if not tracer: + tracer = get_tracer() + is_suppressed = False + + if topic: + is_suppressed = tracer.exporter._HostAgent__is_endpoint_ignored( + "kafka", + span_type, + topic, + ) + + if not is_suppressed and headers: + for header_name, header_value in headers: + if header_name == "x_instana_l_s" and header_value == b"0": + is_suppressed = True + break + + if is_suppressed: + return + + parent_context = ( + parent_span.get_span_context() + if parent_span + else ( + tracer.extract( + Format.KAFKA_HEADERS, + headers, + disable_w3c_trace_context=True, + ) + if tracer.exporter.options.kafka_trace_correlation + else None + ) + ) + span = tracer.start_span( + "kafka-consumer", span_context=parent_context, kind=SpanKind.CONSUMER ) - ) - with tracer.start_as_current_span( - "kafka-consumer", span_context=parent_context, kind=SpanKind.CONSUMER - ) as span: if topic: span.set_attribute("kafka.service", topic) span.set_attribute("kafka.access", span_type) - if exception: span.record_exception(exception) + span.end() + + save_consumer_span_into_context(span) + except Exception as e: + logger.debug( + f"Error while creating kafka-consumer span: {e}" + ) # pragma: no cover + + def save_consumer_span_into_context(span: "InstanaSpan") -> None: + global consumer_token + ctx = trace.set_span_in_context(span) + consumer_token = context.attach(ctx) + consumer_span.set(span) + + def close_consumer_span(span: "InstanaSpan") -> None: + global consumer_token + if span.is_recording(): + span.end() + consumer_span.set(None) + if consumer_token is not None: + context.detach(consumer_token) + consumer_token = None + + def clear_context() -> None: + global consumer_token + context.attach(trace.set_span_in_context(None)) + consumer_token = None + consumer_span.set(None) def trace_kafka_consume( wrapped: Callable[..., InstanaConfluentKafkaConsumer.consume], @@ -131,24 +199,41 @@ def trace_kafka_consume( args: Tuple[int, str, Tuple[Any, ...]], kwargs: Dict[str, Any], ) -> List[confluent_kafka.Message]: - if tracing_is_off(): - 
return wrapped(*args, **kwargs) - res = None exception = None try: res = wrapped(*args, **kwargs) + for message in res: + create_span("consume", message.topic(), message.headers()) + return res except Exception as exc: exception = exc - finally: - if res: - for message in res: - create_span("consume", message.topic(), message.headers()) - else: - create_span("consume", exception=exception) + create_span("consume", exception=exception) - return res + def trace_kafka_close( + wrapped: Callable[..., InstanaConfluentKafkaConsumer.close], + instance: InstanaConfluentKafkaConsumer, + args: Tuple[Any, ...], + kwargs: Dict[str, Any], + ) -> None: + try: + # Close any existing consumer span before closing the consumer + span = consumer_span.get(None) + if span is not None: + close_consumer_span(span) + + # Execute the actual close operation + res = wrapped(*args, **kwargs) + + logger.debug("Kafka consumer closed and spans cleaned up") + return res + + except Exception: + # Still try to clean up the span even if close fails + span = consumer_span.get(None) + if span is not None: + close_consumer_span(span) def trace_kafka_poll( wrapped: Callable[..., InstanaConfluentKafkaConsumer.poll], @@ -156,27 +241,20 @@ def trace_kafka_poll( args: Tuple[int, str, Tuple[Any, ...]], kwargs: Dict[str, Any], ) -> Optional[confluent_kafka.Message]: - if tracing_is_off(): - return wrapped(*args, **kwargs) - res = None exception = None try: res = wrapped(*args, **kwargs) + create_span("poll", res.topic(), res.headers()) + return res except Exception as exc: exception = exc - finally: - if res: - create_span("poll", res.topic(), res.headers()) - else: - create_span( - "poll", - next(iter(instance.list_topics().topics)), - exception=exception, - ) - - return res + create_span( + "poll", + next(iter(instance.list_topics().topics)), + exception=exception, + ) # Apply the monkey patch confluent_kafka.Producer = InstanaConfluentKafkaProducer @@ -189,6 +267,9 @@ def trace_kafka_poll( InstanaConfluentKafkaConsumer, "consume", trace_kafka_consume ) wrapt.wrap_function_wrapper(InstanaConfluentKafkaConsumer, "poll", trace_kafka_poll) + wrapt.wrap_function_wrapper( + InstanaConfluentKafkaConsumer, "close", trace_kafka_close + ) logger.debug("Instrumenting Kafka (confluent_kafka)") except ImportError: diff --git a/tests/clients/kafka/test_confluent_kafka.py b/tests/clients/kafka/test_confluent_kafka.py index fb9ab4c8..61f31bce 100644 --- a/tests/clients/kafka/test_confluent_kafka.py +++ b/tests/clients/kafka/test_confluent_kafka.py @@ -11,7 +11,7 @@ Producer, ) from confluent_kafka.admin import AdminClient, NewTopic -from mock import patch +from mock import patch, Mock from opentelemetry.trace import SpanKind from opentelemetry.trace.span import format_span_id @@ -20,6 +20,15 @@ from instana.singletons import agent, tracer from instana.util.config import parse_ignored_endpoints_from_yaml from tests.helpers import get_first_span_by_filter, testenv +from instana.instrumentation.kafka import confluent_kafka_python +from instana.instrumentation.kafka.confluent_kafka_python import ( + clear_context, + save_consumer_span_into_context, + close_consumer_span, + trace_kafka_close, + consumer_span, +) +from instana.span.span import InstanaSpan class TestConfluentKafka: @@ -68,8 +77,12 @@ def _resource(self) -> Generator[None, None, None]: agent.options = StandardOptions() yield # teardown - # Ensure that allow_exit_as_root has the default value""" - agent.options.allow_exit_as_root = False + # Clear spans before resetting options + 
self.recorder.clear_spans() + + # Clear context + clear_context() + # Close connections self.kafka_client.delete_topics( [ @@ -129,24 +142,6 @@ def test_trace_confluent_kafka_consume(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 2 - kafka_span = spans[0] - test_span = spans[1] - - # Same traceId - assert test_span.t == kafka_span.t - - # Parent relationships - assert kafka_span.p == test_span.s - - # Error logging - assert not test_span.ec - assert not kafka_span.ec - - assert kafka_span.n == "kafka" - assert kafka_span.k == SpanKind.SERVER - assert kafka_span.data["kafka"]["service"] == testenv["kafka_topic"] - assert kafka_span.data["kafka"]["access"] == "consume" - def test_trace_confluent_kafka_poll(self) -> None: # Produce some events self.producer.produce(testenv["kafka_topic"], b"raw_bytes1") @@ -162,15 +157,22 @@ def test_trace_confluent_kafka_poll(self) -> None: consumer.subscribe([testenv["kafka_topic"]]) with tracer.start_as_current_span("test"): - msg = consumer.poll(timeout=30) # noqa: F841 + msg = consumer.poll(timeout=3) # noqa: F841 consumer.close() spans = self.recorder.queued_spans() assert len(spans) == 2 - kafka_span = spans[0] - test_span = spans[1] + def filter(span): + return span.n == "kafka" and span.data["kafka"]["access"] == "poll" + + kafka_span = get_first_span_by_filter(spans, filter) + + def filter(span): + return span.n == "sdk" and span.data["sdk"]["name"] == "test" + + test_span = get_first_span_by_filter(spans, filter) # Same traceId assert test_span.t == kafka_span.t @@ -282,10 +284,7 @@ def test_ignore_confluent_kafka_consumer(self) -> None: consumer.close() spans = self.recorder.queued_spans() - assert len(spans) == 3 - - filtered_spans = agent.filter_spans(spans) - assert len(filtered_spans) == 1 + assert len(spans) == 1 @patch.dict( os.environ, @@ -323,7 +322,7 @@ def test_ignore_confluent_specific_topic(self) -> None: consumer.close() spans = self.recorder.queued_spans() - assert len(spans) == 5 + assert len(spans) == 4 filtered_spans = agent.filter_spans(spans) assert len(filtered_spans) == 3 @@ -362,7 +361,7 @@ def test_ignore_confluent_specific_topic_with_config_file(self) -> None: consumer.close() spans = self.recorder.queued_spans() - assert len(spans) == 3 + assert len(spans) == 2 filtered_spans = agent.filter_spans(spans) assert len(filtered_spans) == 1 @@ -482,7 +481,7 @@ def test_confluent_kafka_poll_root_exit_without_trace_correlation(self) -> None: agent.options.kafka_trace_correlation = False # Produce some events - self.producer.produce(testenv["kafka_topic"], b"raw_bytes1") + self.producer.produce(f'{testenv["kafka_topic"]}-wo-tc', b"raw_bytes1") self.producer.flush() # Consume the events @@ -491,7 +490,7 @@ def test_confluent_kafka_poll_root_exit_without_trace_correlation(self) -> None: consumer_config["auto.offset.reset"] = "earliest" consumer = Consumer(consumer_config) - consumer.subscribe([testenv["kafka_topic"]]) + consumer.subscribe([f'{testenv["kafka_topic"]}-wo-tc']) msg = consumer.poll(timeout=30) # noqa: F841 @@ -504,14 +503,14 @@ def test_confluent_kafka_poll_root_exit_without_trace_correlation(self) -> None: spans, lambda span: span.n == "kafka" and span.data["kafka"]["access"] == "produce" - and span.data["kafka"]["service"] == "span-topic", + and span.data["kafka"]["service"] == f'{testenv["kafka_topic"]}-wo-tc', ) poll_span = get_first_span_by_filter( spans, lambda span: span.n == "kafka" and span.data["kafka"]["access"] == "poll" - and span.data["kafka"]["service"] == "span-topic", + and 
span.data["kafka"]["service"] == f'{testenv["kafka_topic"]}-wo-tc', ) # Different traceId @@ -598,7 +597,7 @@ def test_confluent_kafka_downstream_suppression(self) -> None: consumer.close() spans = self.recorder.queued_spans() - assert len(spans) == 3 + assert len(spans) == 2 producer_span_1 = get_first_span_by_filter( spans, @@ -628,10 +627,7 @@ def test_confluent_kafka_downstream_suppression(self) -> None: assert producer_span_1 # consumer has been suppressed assert not consumer_span_1 - - assert producer_span_2.t == consumer_span_2.t - assert producer_span_2.s == consumer_span_2.p - assert producer_span_2.s != consumer_span_2.s + assert not consumer_span_2 for message in messages: if message.topic() == "span-topic_1": @@ -649,3 +645,75 @@ def test_confluent_kafka_downstream_suppression(self) -> None: testenv["kafka_topic"] + "_2", ] ) + + def test_save_consumer_span_into_context(self, span: "InstanaSpan") -> None: + """Test save_consumer_span_into_context function.""" + # Verify initial state + assert consumer_span.get(None) is None + assert confluent_kafka_python.consumer_token is None + + # Save span into context + save_consumer_span_into_context(span) + + # Verify token is stored + assert confluent_kafka_python.consumer_token is not None + + def test_close_consumer_span_recording_span(self, span: "InstanaSpan") -> None: + """Test close_consumer_span with a recording span.""" + # Save span into context first + save_consumer_span_into_context(span) + assert confluent_kafka_python.consumer_token is not None + + # Verify span is recording + assert span.is_recording() + + # Close the span + close_consumer_span(span) + + # Verify span was ended and context cleared + assert not span.is_recording() + assert consumer_span.get(None) is None + assert confluent_kafka_python.consumer_token is None + + def test_clear_context(self, span: "InstanaSpan") -> None: + """Test clear_context function.""" + # Save span into context + save_consumer_span_into_context(span) + + # Verify context has data + assert consumer_span.get(None) == span + assert confluent_kafka_python.consumer_token is not None + + # Clear context + clear_context() + + # Verify all context is cleared + assert consumer_span.get(None) is None + assert confluent_kafka_python.consumer_token is None + + def test_trace_kafka_close_exception_handling(self, span: "InstanaSpan") -> None: + """Test trace_kafka_close handles exceptions and still cleans up spans.""" + # Save span into context + save_consumer_span_into_context(span) + + # Verify span is in context + assert consumer_span.get(None) == span + assert confluent_kafka_python.consumer_token is not None + + # Mock a wrapped function that raises an exception + mock_wrapped = Mock(side_effect=Exception("Close operation failed")) + mock_instance = Mock() + + # Call trace_kafka_close - it should handle the exception gracefully + # and still clean up the span + trace_kafka_close(mock_wrapped, mock_instance, (), {}) + + # Verify the wrapped function was called + mock_wrapped.assert_called_once_with() + + # Verify that despite the exception, the span was cleaned up + assert consumer_span.get(None) is None + assert confluent_kafka_python.consumer_token is None + + # Verify span was ended + assert not span.is_recording() From 44a3828547638684c0c51ea70f7d37a0c05e84ea Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Thu, 28 Aug 2025 13:15:29 +0530 Subject: [PATCH 33/86] feat: Add GEvent instrumentation support with OTel Signed-off-by: Varsha GS --- src/instana/__init__.py | 3 +-- 
src/instana/instrumentation/gevent.py | 36 ++++++++------------------- 2 files changed, 12 insertions(+), 27 deletions(-) diff --git a/src/instana/__init__.py b/src/instana/__init__.py index 7add8c29..16c7fcc6 100644 --- a/src/instana/__init__.py +++ b/src/instana/__init__.py @@ -182,6 +182,7 @@ def boot_agent() -> None: sqlalchemy, # noqa: F401 starlette, # noqa: F401 urllib3, # noqa: F401 + gevent, # noqa: F401 ) from instana.instrumentation.aiohttp import ( client as aiohttp_client, # noqa: F401 @@ -209,8 +210,6 @@ def boot_agent() -> None: server as tornado_server, # noqa: F401 ) - # from instana.instrumentation import gevent_inst # noqa: F401 - # Hooks from instana.hooks import ( hook_gunicorn, # noqa: F401 diff --git a/src/instana/instrumentation/gevent.py b/src/instana/instrumentation/gevent.py index c083fb84..41ba057e 100644 --- a/src/instana/instrumentation/gevent.py +++ b/src/instana/instrumentation/gevent.py @@ -6,8 +6,11 @@ """ import sys -from ..log import logger -from ..singletons import tracer + +from opentelemetry import context +import contextvars + +from instana.log import logger def instrument_gevent(): @@ -16,26 +19,15 @@ def instrument_gevent(): logger.debug("Instrumenting gevent") import gevent - from opentracing.scope_managers.gevent import GeventScopeManager - from opentracing.scope_managers.gevent import _GeventScope def spawn_callback(new_greenlet): """Handles context propagation for newly spawning greenlets""" - parent_scope = tracer.scope_manager.active - if parent_scope is not None: - # New greenlet, new clean slate. Clone and make active in this new greenlet - # the currently active scope (but don't finish() the span on close - it's a - # clone/not the original and we don't want to close it prematurely) - # TODO: Change to our own ScopeManagers - parent_scope_clone = _GeventScope( - parent_scope.manager, parent_scope.span, finish_on_close=False - ) - tracer._scope_manager._set_greenlet_scope( - parent_scope_clone, new_greenlet - ) - - logger.debug(" -> Updating tracer to use gevent based context management") - tracer._scope_manager = GeventScopeManager() + parent_context = context.get_current() + new_context = contextvars.Context() + + new_context.run(lambda: context.attach(parent_context)) + new_greenlet.gr_context = new_context + gevent.Greenlet.add_spawn_callback(spawn_callback) except Exception: logger.debug("instrument_gevent: ", exc_info=True) @@ -43,11 +35,5 @@ def spawn_callback(new_greenlet): if "gevent" not in sys.modules: logger.debug("Instrumenting gevent: gevent not detected or loaded. Nothing done.") -elif not hasattr(sys.modules["gevent"], "version_info"): - logger.debug("gevent module has no 'version_info'. Skipping instrumentation.") -elif sys.modules["gevent"].version_info < (1, 4): - logger.debug( - "gevent < 1.4 detected. The Instana package supports gevent versions 1.4 and greater." - ) else: instrument_gevent() From f2a22cba43e31899e71174d3ed01988606385c47 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Thu, 28 Aug 2025 13:16:46 +0530 Subject: [PATCH 34/86] tests: Adapt GEvent tests to OTel spec Signed-off-by: Varsha GS --- tests/frameworks/test_gevent.py | 125 ++++++++++++++++---------------- tests/helpers.py | 2 +- 2 files changed, 62 insertions(+), 65 deletions(-) diff --git a/tests/frameworks/test_gevent.py b/tests/frameworks/test_gevent.py index 69a9a6c8..1dee37b2 100644 --- a/tests/frameworks/test_gevent.py +++ b/tests/frameworks/test_gevent.py @@ -2,36 +2,41 @@ # (c) Copyright Instana Inc. 
2020 import os -import unittest +import pytest +import urllib3 import gevent from gevent.pool import Group -import urllib3 -from opentracing.scope_managers.gevent import GeventScopeManager +from typing import Generator import tests.apps.flask_app -from instana.span import SDKSpan from instana.singletons import tracer -from ..helpers import testenv, get_spans_by_filter +from tests.helpers import testenv, get_spans_by_filter, filter_test_span -@unittest.skipIf(not os.environ.get("GEVENT_STARLETTE_TEST"), reason="") -class TestGEvent(unittest.TestCase): - def setUp(self): - self.http = urllib3.HTTPConnectionPool('127.0.0.1', port=testenv["flask_port"], maxsize=20) - self.recorder = tracer.recorder - self.recorder.clear_spans() - tracer._scope_manager = GeventScopeManager() +# Skip the tests if the environment variable `GEVENT_STARLETTE_TEST` is not set +pytestmark = pytest.mark.skipif(not os.environ.get("GEVENT_STARLETTE_TEST"), reason="GEVENT_STARLETTE_TEST not set") + - def tearDown(self): - """ Do nothing for now """ - pass +class TestGEvent: + @classmethod + def setup_class(cls) -> None: + """Setup that runs once before all tests in the class""" + cls.http = urllib3.HTTPConnectionPool('127.0.0.1', port=testenv["flask_port"], maxsize=20) + cls.recorder = tracer.span_processor + + @pytest.fixture(autouse=True) + def setUp(self) -> Generator[None, None, None]: + """Clear all spans before each test run""" + self.recorder.clear_spans() def make_http_call(self, n=None): + """Helper function to make HTTP calls""" return self.http.request('GET', testenv["flask_server"] + '/') def spawn_calls(self): - with tracer.start_active_span('spawn_calls'): + """Helper function to spawn multiple HTTP calls""" + with tracer.start_as_current_span('spawn_calls'): jobs = [] jobs.append(gevent.spawn(self.make_http_call)) jobs.append(gevent.spawn(self.make_http_call)) @@ -39,86 +44,78 @@ def spawn_calls(self): gevent.joinall(jobs, timeout=2) def spawn_imap_unordered(self): + """Helper function to test imap_unordered""" igroup = Group() result = [] - with tracer.start_active_span('test'): + with tracer.start_as_current_span('test'): for i in igroup.imap_unordered(self.make_http_call, range(3)): result.append(i) def launch_gevent_chain(self): - with tracer.start_active_span('test'): + """Helper function to launch a chain of gevent calls""" + with tracer.start_as_current_span('test'): gevent.spawn(self.spawn_calls).join() def test_spawning(self): gevent.spawn(self.launch_gevent_chain) - gevent.sleep(2) - + spans = self.recorder.queued_spans() - - self.assertEqual(8, len(spans)) - - span_filter = lambda span: span.n == "sdk" \ - and span.data['sdk']['name'] == 'test' and span.p == None - test_spans = get_spans_by_filter(spans, span_filter) - self.assertIsNotNone(test_spans) - self.assertEqual(len(test_spans), 1) - + + assert len(spans) == 8 + + test_spans = get_spans_by_filter(spans, filter_test_span) + assert test_spans + assert len(test_spans) == 1 + test_span = test_spans[0] - self.assertTrue(type(test_spans[0]) is SDKSpan) - + span_filter = lambda span: span.n == "sdk" \ - and span.data['sdk']['name'] == 'spawn_calls' and span.p == test_span.s + and span.data['sdk']['name'] == 'spawn_calls' and span.p == test_span.s spawn_spans = get_spans_by_filter(spans, span_filter) - self.assertIsNotNone(spawn_spans) - self.assertEqual(len(spawn_spans), 1) - + assert spawn_spans + assert len(spawn_spans) == 1 + spawn_span = spawn_spans[0] - self.assertTrue(type(spawn_spans[0]) is SDKSpan) - + span_filter = lambda span: span.n 
== "urllib3" urllib3_spans = get_spans_by_filter(spans, span_filter) - + for urllib3_span in urllib3_spans: # spans should all have the same test span parent - self.assertEqual(urllib3_span.t, spawn_span.t) - self.assertEqual(urllib3_span.p, spawn_span.s) - + assert urllib3_span.t == spawn_span.t + assert urllib3_span.p == spawn_span.s + # find the wsgi span generated from this urllib3 request span_filter = lambda span: span.n == "wsgi" and span.p == urllib3_span.s wsgi_spans = get_spans_by_filter(spans, span_filter) - self.assertIsNotNone(wsgi_spans) - self.assertEqual(len(wsgi_spans), 1) + assert wsgi_spans is not None + assert len(wsgi_spans) == 1 def test_imap_unordered(self): - gevent.spawn(self.spawn_imap_unordered()) - + gevent.spawn(self.spawn_imap_unordered) gevent.sleep(2) - + spans = self.recorder.queued_spans() - self.assertEqual(7, len(spans)) - - span_filter = lambda span: span.n == "sdk" \ - and span.data['sdk']['name'] == 'test' and span.p == None - test_spans = get_spans_by_filter(spans, span_filter) - self.assertIsNotNone(test_spans) - self.assertEqual(len(test_spans), 1) - + assert len(spans) == 7 + + test_spans = get_spans_by_filter(spans, filter_test_span) + assert test_spans is not None + assert len(test_spans) == 1 + test_span = test_spans[0] - self.assertTrue(type(test_spans[0]) is SDKSpan) - + span_filter = lambda span: span.n == "urllib3" urllib3_spans = get_spans_by_filter(spans, span_filter) - self.assertEqual(len(urllib3_spans), 3) - + assert len(urllib3_spans) == 3 + for urllib3_span in urllib3_spans: # spans should all have the same test span parent - self.assertEqual(urllib3_span.t, test_span.t) - self.assertEqual(urllib3_span.p, test_span.s) - + assert urllib3_span.t == test_span.t + assert urllib3_span.p == test_span.s + # find the wsgi span generated from this urllib3 request span_filter = lambda span: span.n == "wsgi" and span.p == urllib3_span.s wsgi_spans = get_spans_by_filter(spans, span_filter) - self.assertIsNotNone(wsgi_spans) - self.assertEqual(len(wsgi_spans), 1) - + assert wsgi_spans is not None + assert len(wsgi_spans) == 1 diff --git a/tests/helpers.py b/tests/helpers.py index 850ba59b..f7c2efc4 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -108,7 +108,7 @@ def fail_with_message_and_span_dump(msg, spans): pytest.fail(msg + span_dump, True) -def is_test_span(span): +def filter_test_span(span): """ return the filter for test span """ From afa3f1d69adf62d39011c6529237b29bb5f90689 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Fri, 29 Aug 2025 12:14:27 +0530 Subject: [PATCH 35/86] ci: Run gevent tests after support Signed-off-by: Varsha GS --- .circleci/config.yml | 6 ++---- tests/conftest.py | 1 - tests/frameworks/test_sanic.py | 22 +++++++++++----------- tests/requirements-gevent-starlette.txt | 2 +- tests/requirements-pre314.txt | 2 +- tests/requirements.txt | 2 +- 6 files changed, 16 insertions(+), 19 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 57495e70..4f4403a8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -219,10 +219,8 @@ jobs: - pip-install-tests-deps: requirements: "tests/requirements-gevent-starlette.txt" - run-tests-with-coverage-report: - # TODO: uncomment once gevent instrumentation is done - # gevent: "true" - # tests: "tests/frameworks/test_gevent.py tests/frameworks/test_starlette.py" - tests: "tests/frameworks/test_starlette.py" + gevent: "true" + tests: "tests/frameworks/test_gevent.py tests/frameworks/test_starlette.py" - store-pytest-results - store-coverage-report 
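
The gevent tests enabled here exercise the context propagation added earlier
in this series: the spawn callback copies the active OpenTelemetry context
into each newly spawned greenlet. A minimal sketch of the intended behavior
(assumes the instrumentation is loaded; uses only public opentelemetry-api
calls):

    import gevent
    from opentelemetry import baggage, context

    token = context.attach(baggage.set_baggage("request.id", "42"))
    try:
        def child():
            # The spawn callback copied the parent's context, so the
            # baggage entry set above is visible inside this greenlet.
            assert baggage.get_baggage("request.id") == "42"

        gevent.spawn(child).join()
    finally:
        context.detach(token)
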
diff --git a/tests/conftest.py b/tests/conftest.py index 93f89221..651c3995 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -26,7 +26,6 @@ from instana.util.runtime import is_ppc64, is_s390x collect_ignore_glob = [ - "*test_gevent*", "*collector/test_gcr*", "*agent/test_google*", ] diff --git a/tests/frameworks/test_sanic.py b/tests/frameworks/test_sanic.py index 31b98a49..7aa08e21 100644 --- a/tests/frameworks/test_sanic.py +++ b/tests/frameworks/test_sanic.py @@ -7,7 +7,7 @@ from instana.singletons import tracer, agent from instana.util.ids import hex_id -from tests.helpers import get_first_span_by_filter, get_first_span_by_name, is_test_span +from tests.helpers import get_first_span_by_filter, get_first_span_by_name, filter_test_span from tests.test_utils import _TraceContextMixin from tests.apps.sanic_app.server import app @@ -57,7 +57,7 @@ def test_basic_get(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 3 - test_span = get_first_span_by_filter(spans, is_test_span) + test_span = get_first_span_by_filter(spans, filter_test_span) assert test_span httpx_span = get_first_span_by_name(spans, "http") @@ -108,7 +108,7 @@ def test_404(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 3 - test_span = get_first_span_by_filter(spans, is_test_span) + test_span = get_first_span_by_filter(spans, filter_test_span) assert test_span httpx_span = get_first_span_by_name(spans, "http") @@ -159,7 +159,7 @@ def test_sanic_exception(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 4 - test_span = get_first_span_by_filter(spans, is_test_span) + test_span = get_first_span_by_filter(spans, filter_test_span) assert test_span httpx_span = get_first_span_by_name(spans, "http") @@ -210,7 +210,7 @@ def test_500_instana_exception(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 4 - test_span = get_first_span_by_filter(spans, is_test_span) + test_span = get_first_span_by_filter(spans, filter_test_span) assert test_span httpx_span = get_first_span_by_name(spans, "http") @@ -261,7 +261,7 @@ def test_500(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 4 - test_span = get_first_span_by_filter(spans, is_test_span) + test_span = get_first_span_by_filter(spans, filter_test_span) assert test_span httpx_span = get_first_span_by_name(spans, "http") @@ -312,7 +312,7 @@ def test_path_templates(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 3 - test_span = get_first_span_by_filter(spans, is_test_span) + test_span = get_first_span_by_filter(spans, filter_test_span) assert test_span httpx_span = get_first_span_by_name(spans, "http") @@ -363,7 +363,7 @@ def test_secret_scrubbing(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 3 - test_span = get_first_span_by_filter(spans, is_test_span) + test_span = get_first_span_by_filter(spans, filter_test_span) assert test_span httpx_span = get_first_span_by_name(spans, "http") @@ -417,7 +417,7 @@ def test_synthetic_request(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 3 - test_span = get_first_span_by_filter(spans, is_test_span) + test_span = get_first_span_by_filter(spans, filter_test_span) assert test_span httpx_span = get_first_span_by_name(spans, "http") @@ -476,7 +476,7 @@ def test_request_header_capture(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 3 - test_span = get_first_span_by_filter(spans, is_test_span) + test_span = get_first_span_by_filter(spans, 
filter_test_span) assert test_span httpx_span = get_first_span_by_name(spans, "http") @@ -523,7 +523,7 @@ def test_response_header_capture(self) -> None: spans = self.recorder.queued_spans() assert len(spans) == 3 - test_span = get_first_span_by_filter(spans, is_test_span) + test_span = get_first_span_by_filter(spans, filter_test_span) assert test_span httpx_span = get_first_span_by_name(spans, "http") diff --git a/tests/requirements-gevent-starlette.txt b/tests/requirements-gevent-starlette.txt index 86da4f49..17465bd6 100644 --- a/tests/requirements-gevent-starlette.txt +++ b/tests/requirements-gevent-starlette.txt @@ -1,6 +1,6 @@ -r requirements-minimal.txt flask>=0.12.2 -gevent>=1.4.0 +gevent>=23.9.0.post1 mock>=2.0.0 pyramid>=2.0.1 starlette>=0.12.13 diff --git a/tests/requirements-pre314.txt b/tests/requirements-pre314.txt index 0a025d53..2ad1e026 100644 --- a/tests/requirements-pre314.txt +++ b/tests/requirements-pre314.txt @@ -12,7 +12,7 @@ Django>=4.2.16 # fastapi>=0.115.0; python_version >= "3.13" flask>=2.3.2 # gevent is taking more than 20min to build on 3.14 -# gevent>=1.4.0 +# gevent>=23.9.0.post1 grpcio>=1.14.1 google-cloud-pubsub>=2.0.0 google-cloud-storage>=1.24.0 diff --git a/tests/requirements.txt b/tests/requirements.txt index 48afb6a9..b8a40793 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -10,7 +10,7 @@ Django>=4.2.16 fastapi>=0.92.0; python_version < "3.13" fastapi>=0.115.0; python_version >= "3.13" flask>=2.3.2 -gevent>=1.4.0 +gevent>=23.9.0.post1 grpcio>=1.14.1 google-cloud-pubsub>=2.0.0 google-cloud-storage>=1.24.0 From de1e93958d141a843959f31231e3748b08db6535 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Fri, 29 Aug 2025 12:35:52 +0530 Subject: [PATCH 36/86] fix: Corrected bug where .replace() was treated as an in-place method Signed-off-by: Varsha GS --- .circleci/config.yml | 2 +- src/instana/__init__.py | 2 +- tests/conftest.py | 3 ++- tests/frameworks/test_starlette.py | 1 - 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4f4403a8..b8e18097 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -220,7 +220,7 @@ jobs: requirements: "tests/requirements-gevent-starlette.txt" - run-tests-with-coverage-report: gevent: "true" - tests: "tests/frameworks/test_gevent.py tests/frameworks/test_starlette.py" + tests: "tests/frameworks/test_starlette.py tests/frameworks/test_gevent.py" - store-pytest-results - store-coverage-report diff --git a/src/instana/__init__.py b/src/instana/__init__.py index 16c7fcc6..f9511537 100644 --- a/src/instana/__init__.py +++ b/src/instana/__init__.py @@ -83,7 +83,7 @@ def key_to_bool(k: str) -> bool: import inspect all_accepted_patch_all_args = inspect.getfullargspec(monkey.patch_all)[0] - provided_options.replace(" ", "").replace("--", "").split(",") + provided_options = provided_options.replace(" ", "").replace("--", "").split(",") provided_options = [ k for k in provided_options if short_key(k) in all_accepted_patch_all_args diff --git a/tests/conftest.py b/tests/conftest.py index 651c3995..7c17023a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -28,6 +28,7 @@ collect_ignore_glob = [ "*collector/test_gcr*", "*agent/test_google*", + "*test_gevent_autotrace*" ] # ppc64le and s390x have limitations with some supported libraries. 
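# --- Editor's note (not part of PATCH 36): a minimal sketch, with an
# illustrative variable name, of the bug fixed above in
# src/instana/__init__.py -- str.replace() and str.split() return new
# objects instead of mutating the receiver, so the result must be
# reassigned:
#
#     opts = "--socket, --no-time"
#     opts.replace(" ", "").replace("--", "").split(",")  # result discarded
#     print(opts)  # --socket, --no-time  (unchanged)
#     opts = opts.replace(" ", "").replace("--", "").split(",")
#     print(opts)  # ['socket', 'no-time']
# ---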
@@ -53,7 +54,7 @@ if not os.environ.get("GEVENT_STARLETTE_TEST"): collect_ignore_glob.extend( [ - "*test_gevent*", + "*test_gevent.py", "*test_starlette*", ] ) diff --git a/tests/frameworks/test_starlette.py b/tests/frameworks/test_starlette.py index e332e024..d44f39d8 100644 --- a/tests/frameworks/test_starlette.py +++ b/tests/frameworks/test_starlette.py @@ -29,7 +29,6 @@ def _resource(self) -> Generator[None, None, None]: # Clear all spans before a test run. self.recorder = tracer.span_processor self.recorder.clear_spans() - yield def test_vanilla_get(self) -> None: result = self.client.get("/") From a795f246264e0cc64834c57d91f885e7733dbcca Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Fri, 29 Aug 2025 12:50:11 +0530 Subject: [PATCH 37/86] ci: separate jobs for gevent and starlette Signed-off-by: Varsha GS --- .circleci/config.yml | 25 +++++++++++++++++++++---- tests/conftest.py | 1 - 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b8e18097..329755f5 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -208,7 +208,22 @@ jobs: - store-pytest-results - store-coverage-report - py39gevent_starlette: + py39starlette: + docker: + - image: public.ecr.aws/docker/library/python:3.9 + working_directory: ~/repo + steps: + - checkout + - check-if-tests-needed + - pip-install-deps + - pip-install-tests-deps: + requirements: "tests/requirements-gevent-starlette.txt" + - run-tests-with-coverage-report: + tests: "tests/frameworks/test_starlette.py" + - store-pytest-results + - store-coverage-report + + py39gevent: docker: - image: public.ecr.aws/docker/library/python:3.9 working_directory: ~/repo @@ -220,7 +235,7 @@ jobs: requirements: "tests/requirements-gevent-starlette.txt" - run-tests-with-coverage-report: gevent: "true" - tests: "tests/frameworks/test_starlette.py tests/frameworks/test_gevent.py" + tests: "tests/frameworks/test_gevent.py" - store-pytest-results - store-coverage-report @@ -305,7 +320,8 @@ workflows: py-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] - python314 - py39cassandra - - py39gevent_starlette + - py39gevent + - py39starlette - py312aws - py312kafka - autowrapt: @@ -318,7 +334,8 @@ workflows: # Uncomment the following when giving real support to 3.14 # - python314 - py39cassandra - - py39gevent_starlette + - py39gevent + - py39starlette - py312aws - py312kafka - autowrapt diff --git a/tests/conftest.py b/tests/conftest.py index 7c17023a..9e2df527 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -55,7 +55,6 @@ collect_ignore_glob.extend( [ "*test_gevent.py", - "*test_starlette*", ] ) From e1c620b26e160406253fd1b0a400ddc6d64ad7a6 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Fri, 29 Aug 2025 13:17:55 +0530 Subject: [PATCH 38/86] tests: Modify gevent_autotrace Signed-off-by: Varsha GS --- tests/conftest.py | 3 +- tests/frameworks/test_gevent_autotrace.py | 45 ++++++++++++----------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9e2df527..142c1d5f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -28,7 +28,6 @@ collect_ignore_glob = [ "*collector/test_gcr*", "*agent/test_google*", - "*test_gevent_autotrace*" ] # ppc64le and s390x have limitations with some supported libraries. 
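# --- Editor's note (not part of PATCH 38): the test module rewritten below
# migrates from unittest to pytest. A minimal, hypothetical sketch of the
# pattern -- setUp() and tearDown() collapse into a single autouse fixture,
# and everything after `yield` runs as teardown:
#
#     import pytest
#
#     class TestExample:
#         @pytest.fixture(autouse=True)
#         def setup_environment(self):
#             self.value = 42   # setUp equivalent
#             yield             # the test body runs here
#             del self.value    # tearDown equivalent
#
#         def test_value(self):
#             assert self.value == 42
# ---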
@@ -54,7 +53,7 @@ if not os.environ.get("GEVENT_STARLETTE_TEST"): collect_ignore_glob.extend( [ - "*test_gevent.py", + "*test_gevent*", ] ) diff --git a/tests/frameworks/test_gevent_autotrace.py b/tests/frameworks/test_gevent_autotrace.py index 41bf5f03..7a7a2b8b 100644 --- a/tests/frameworks/test_gevent_autotrace.py +++ b/tests/frameworks/test_gevent_autotrace.py @@ -3,27 +3,31 @@ import importlib import os -import unittest -import socket +import pytest import gevent from gevent import monkey from instana import apply_gevent_monkey_patch +# Teardown not working as expected, run each testcase separately +class TestGEventAutoTrace: -class TestGEventAutoTrace(unittest.TestCase): - def setUp(self): + @pytest.fixture(autouse=True) + def setup_environment(self): + """Setup test environment before each test""" # Ensure that the test suite is operational even when Django is installed # but not running or configured os.environ['DJANGO_SETTINGS_MODULE'] = '' - + self.default_patched_modules = ('socket', 'time', 'select', 'os', 'threading', 'ssl', 'subprocess', 'signal', 'queue',) - - def tearDown(self): + + yield + + # Teardown if os.environ.get('INSTANA_GEVENT_MONKEY_OPTIONS'): os.environ.pop('INSTANA_GEVENT_MONKEY_OPTIONS') - + # Clean up after gevent monkey patches, by restore from the saved dict for modname in monkey.saved.keys(): try: @@ -35,37 +39,34 @@ def tearDown(self): pass monkey.saved = {} - def test_default_patch_all(self): apply_gevent_monkey_patch() for module_name in self.default_patched_modules: - self.assertTrue(monkey.is_module_patched(module_name), - f"{module_name} is not patched") + assert monkey.is_module_patched(module_name), f"{module_name} is not patched" def test_instana_monkey_options_only_time(self): os.environ['INSTANA_GEVENT_MONKEY_OPTIONS'] = ( 'time,no-socket,no-select,no-os,no-select,no-threading,no-os,' 'no-ssl,no-subprocess,''no-signal,no-queue') apply_gevent_monkey_patch() - - self.assertTrue(monkey.is_module_patched('time'), "time module is not patched") + + assert monkey.is_module_patched('time'), "time module is not patched" not_patched_modules = (m for m in self.default_patched_modules if m not in ('time', 'threading')) - + for module_name in not_patched_modules: - self.assertFalse(monkey.is_module_patched(module_name), - f"{module_name} is patched, when it shouldn't be") - + assert not monkey.is_module_patched(module_name), \ + f"{module_name} is patched, when it shouldn't be" def test_instana_monkey_options_only_socket(self): os.environ['INSTANA_GEVENT_MONKEY_OPTIONS'] = ( '--socket, --no-time, --no-select, --no-os, --no-queue, --no-threading,' '--no-os, --no-ssl, no-subprocess, --no-signal, --no-select,') apply_gevent_monkey_patch() - - self.assertTrue(monkey.is_module_patched('socket'), "socket module is not patched") + + assert monkey.is_module_patched('socket'), "socket module is not patched" not_patched_modules = (m for m in self.default_patched_modules if m not in ('socket', 'threading')) - + for module_name in not_patched_modules: - self.assertFalse(monkey.is_module_patched(module_name), - f"{module_name} is patched, when it shouldn't be") + assert not monkey.is_module_patched(module_name), \ + f"{module_name} is patched, when it shouldn't be" From 580a4169ddc834d03d1f17d84a640a90b3b97890 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Fri, 29 Aug 2025 13:57:35 +0530 Subject: [PATCH 39/86] chore: rename `GEVENT_STARLETTE_TEST` - remove `py39starlette` job - remove `gevent` from `tests/requirements` file Signed-off-by: Varsha GS --- 
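Editor's note (not part of PATCH 39): the conftest.py hunk below keeps gating
gevent test collection on the renamed GEVENT_TEST variable. A minimal,
hypothetical sketch of the mechanism -- pytest reads the module-level
collect_ignore_glob list from conftest.py at collection time, so matching
files are excluded from collection unless the variable is set:

    # conftest.py (illustrative only)
    import os

    collect_ignore_glob = []

    if not os.environ.get("GEVENT_TEST"):
        # Skip every gevent test module at collection time.
        collect_ignore_glob.extend(["*test_gevent*"])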
.circleci/config.yml | 19 +------------------ tests/__init__.py | 2 +- tests/apps/aiohttp_app/__init__.py | 2 +- tests/apps/aiohttp_app2/__init__.py | 2 +- tests/apps/grpc_server/__init__.py | 2 +- tests/apps/tornado_server/__init__.py | 2 +- tests/conftest.py | 2 +- tests/frameworks/test_gevent.py | 4 ++-- tests/requirements.txt | 1 - 9 files changed, 9 insertions(+), 27 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 329755f5..e6711151 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -66,7 +66,7 @@ commands: name: Run Tests With Coverage Report environment: CASSANDRA_TEST: "<>" - GEVENT_STARLETTE_TEST: "<>" + GEVENT_TEST: "<>" KAFKA_TEST: "<>" command: | . venv/bin/activate @@ -208,21 +208,6 @@ jobs: - store-pytest-results - store-coverage-report - py39starlette: - docker: - - image: public.ecr.aws/docker/library/python:3.9 - working_directory: ~/repo - steps: - - checkout - - check-if-tests-needed - - pip-install-deps - - pip-install-tests-deps: - requirements: "tests/requirements-gevent-starlette.txt" - - run-tests-with-coverage-report: - tests: "tests/frameworks/test_starlette.py" - - store-pytest-results - - store-coverage-report - py39gevent: docker: - image: public.ecr.aws/docker/library/python:3.9 @@ -321,7 +306,6 @@ workflows: - python314 - py39cassandra - py39gevent - - py39starlette - py312aws - py312kafka - autowrapt: @@ -335,7 +319,6 @@ workflows: # - python314 - py39cassandra - py39gevent - - py39starlette - py312aws - py312kafka - autowrapt diff --git a/tests/__init__.py b/tests/__init__.py index 39799ddb..a38754a7 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -3,7 +3,7 @@ import os -if os.environ.get('GEVENT_STARLETTE_TEST'): +if os.environ.get('GEVENT_TEST'): from gevent import monkey monkey.patch_all() diff --git a/tests/apps/aiohttp_app/__init__.py b/tests/apps/aiohttp_app/__init__.py index 7429a949..b9cf68a2 100644 --- a/tests/apps/aiohttp_app/__init__.py +++ b/tests/apps/aiohttp_app/__init__.py @@ -8,7 +8,7 @@ APP_THREAD = None -if not any((os.environ.get('GEVENT_STARLETTE_TEST'), +if not any((os.environ.get('GEVENT_TEST'), os.environ.get('CASSANDRA_TEST'), sys.version_info < (3, 5, 3))): APP_THREAD = launch_background_thread(server, "AIOHTTP") diff --git a/tests/apps/aiohttp_app2/__init__.py b/tests/apps/aiohttp_app2/__init__.py index e382343a..96ce3f82 100644 --- a/tests/apps/aiohttp_app2/__init__.py +++ b/tests/apps/aiohttp_app2/__init__.py @@ -7,7 +7,7 @@ APP_THREAD = None -if not any((os.environ.get('GEVENT_STARLETTE_TEST'), +if not any((os.environ.get('GEVENT_TEST'), os.environ.get('CASSANDRA_TEST'), sys.version_info < (3, 5, 3))): APP_THREAD = launch_background_thread(server, "AIOHTTP") diff --git a/tests/apps/grpc_server/__init__.py b/tests/apps/grpc_server/__init__.py index 5a222deb..78439e5e 100644 --- a/tests/apps/grpc_server/__init__.py +++ b/tests/apps/grpc_server/__init__.py @@ -6,7 +6,7 @@ import time import threading -if not any((os.environ.get('GEVENT_STARLETTE_TEST'), +if not any((os.environ.get('GEVENT_TEST'), os.environ.get('CASSANDRA_TEST'), sys.version_info < (3, 5, 3))): # Background RPC application diff --git a/tests/apps/tornado_server/__init__.py b/tests/apps/tornado_server/__init__.py index 20a27361..7b0d6c76 100644 --- a/tests/apps/tornado_server/__init__.py +++ b/tests/apps/tornado_server/__init__.py @@ -8,7 +8,7 @@ app_thread = None -if not any((app_thread, os.environ.get('GEVENT_STARLETTE_TEST'), os.environ.get('CASSANDRA_TEST'))): +if not any((app_thread, 
os.environ.get('GEVENT_TEST'), os.environ.get('CASSANDRA_TEST'))): testenv["tornado_port"] = 10813 testenv["tornado_server"] = ("http://127.0.0.1/" + str(testenv["tornado_port"])) diff --git a/tests/conftest.py b/tests/conftest.py index 142c1d5f..44088c85 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -50,7 +50,7 @@ if not os.environ.get("COUCHBASE_TEST"): collect_ignore_glob.append("*test_couchbase*") -if not os.environ.get("GEVENT_STARLETTE_TEST"): +if not os.environ.get("GEVENT_TEST"): collect_ignore_glob.extend( [ "*test_gevent*", diff --git a/tests/frameworks/test_gevent.py b/tests/frameworks/test_gevent.py index 1dee37b2..31847024 100644 --- a/tests/frameworks/test_gevent.py +++ b/tests/frameworks/test_gevent.py @@ -14,8 +14,8 @@ from tests.helpers import testenv, get_spans_by_filter, filter_test_span -# Skip the tests if the environment variable `GEVENT_STARLETTE_TEST` is not set -pytestmark = pytest.mark.skipif(not os.environ.get("GEVENT_STARLETTE_TEST"), reason="GEVENT_STARLETTE_TEST not set") +# Skip the tests if the environment variable `GEVENT_TEST` is not set +pytestmark = pytest.mark.skipif(not os.environ.get("GEVENT_TEST"), reason="GEVENT_TEST not set") class TestGEvent: diff --git a/tests/requirements.txt b/tests/requirements.txt index b8a40793..6e8fc6ca 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -10,7 +10,6 @@ Django>=4.2.16 fastapi>=0.92.0; python_version < "3.13" fastapi>=0.115.0; python_version >= "3.13" flask>=2.3.2 -gevent>=23.9.0.post1 grpcio>=1.14.1 google-cloud-pubsub>=2.0.0 google-cloud-storage>=1.24.0 From 3c6edd1c0b323af11f7892a86112c45359740f12 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Fri, 29 Aug 2025 14:50:05 +0530 Subject: [PATCH 40/86] chore(version): Bump version to 3.8.0 Signed-off-by: Varsha GS --- src/instana/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/instana/version.py b/src/instana/version.py index 62b8e993..1354ffa7 100644 --- a/src/instana/version.py +++ b/src/instana/version.py @@ -3,4 +3,4 @@ # Module version file. Used by setup.py and snapshot reporting. -VERSION = "3.7.2" +VERSION = "3.8.0" From d53fdd8dda397fd6cf82d7361b00a5575a00e1b1 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Fri, 29 Aug 2025 14:27:17 +0200 Subject: [PATCH 41/86] Revert "feat(fsm): add support to announce Windows processes." This reverts commit 1da36f4201ee93080bf0443dd754c1a76161100f. 
Signed-off-by: Paulo Vital --- src/instana/fsm.py | 141 ++++++++++++++------------------------------- 1 file changed, 44 insertions(+), 97 deletions(-) diff --git a/src/instana/fsm.py b/src/instana/fsm.py index be355b1a..7355a0ab 100644 --- a/src/instana/fsm.py +++ b/src/instana/fsm.py @@ -8,14 +8,13 @@ import subprocess import sys import threading -from typing import TYPE_CHECKING, Any, Callable, List +from typing import TYPE_CHECKING, Any, Callable from fysom import Fysom from instana.log import logger from instana.util import get_default_gateway from instana.util.process_discovery import Discovery -from instana.util.runtime import is_windows from instana.version import VERSION if TYPE_CHECKING: @@ -104,16 +103,48 @@ def lookup_agent_host(self, e: Any) -> bool: return False def announce_sensor(self, e: Any) -> bool: - pid: int = os.getpid() logger.debug( - f"Attempting to announce PID {pid} to the agent on {self.agent.options.agent_host}:{self.agent.options.agent_port}" + f"Attempting to make an announcement to the agent on {self.agent.options.agent_host}:{self.agent.options.agent_port}" ) + pid = os.getpid() - cmdline = self._get_cmdline(pid) + try: + if os.path.isfile("/proc/self/cmdline"): + with open("/proc/self/cmdline") as cmd: + cmdinfo = cmd.read() + cmdline = cmdinfo.split("\x00") + else: + # Python doesn't provide a reliable method to determine what + # the OS process command line may be. Here we are forced to + # rely on ps rather than adding a dependency on something like + # psutil which requires dev packages, gcc etc... + proc = subprocess.Popen( + ["ps", "-p", str(pid), "-o", "command"], stdout=subprocess.PIPE + ) + (out, _) = proc.communicate() + parts = out.split(b"\n") + cmdline = [parts[1].decode("utf-8")] + except Exception: + cmdline = sys.argv + logger.debug("announce_sensor", exc_info=True) d = Discovery(pid=self.__get_real_pid(), name=cmdline[0], args=cmdline[1:]) - self._setup_socket_connection(d, pid) + # If we're on a system with a procfs + if os.path.exists("/proc/"): + try: + # In CentOS 7, some odd things can happen such as: + # PermissionError: [Errno 13] Permission denied: '/proc/6/fd/8' + # Use a try/except as a safety + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect( + (self.agent.options.agent_host, self.agent.options.agent_port) + ) + path = f"/proc/{pid}/fd/{sock.fileno()}" + d.fd = sock.fileno() + d.inode = os.readlink(path) + except: # noqa: E722 + logger.debug("Error generating file descriptor: ", exc_info=True) payload = self.agent.announce(d) @@ -158,112 +189,28 @@ def on_good2go(self, _: Any) -> None: def __get_real_pid(self) -> int: """ Attempts to determine the true process ID by querying the - /proc//sched file on Linux systems or using the OS default PID. - For Windows, we use the standard OS PID as there's no equivalent concept - of container PIDs vs host PIDs. + /proc//sched file. This works on systems with a proc filesystem. + Otherwise default to os default. 
""" pid = None - # For Linux systems with procfs if os.path.exists("/proc/"): sched_file = f"/proc/{os.getpid()}/sched" if os.path.isfile(sched_file): try: - with open(sched_file) as file: - line = file.readline() - g = re.search(r"\((\d+),", line) - if g and len(g.groups()) == 1: - pid = int(g.groups()[0]) + file = open(sched_file) + line = file.readline() + g = re.search(r"\((\d+),", line) + if g and len(g.groups()) == 1: + pid = int(g.groups()[0]) except Exception: logger.debug("parsing sched file failed", exc_info=True) - # For Windows or if Linux method failed if pid is None: pid = os.getpid() return pid - def _get_cmdline_windows(self) -> List[str]: - """ - Get command line using Windows API - """ - import ctypes - from ctypes import wintypes - - GetCommandLineW = ctypes.windll.kernel32.GetCommandLineW - GetCommandLineW.argtypes = [] - GetCommandLineW.restype = wintypes.LPCWSTR - - cmd = GetCommandLineW() - # Simple parsing - this is a basic approach and might need refinement - # for complex command lines with quotes and spaces - return cmd.split() - - def _get_cmdline_linux_proc(self) -> List[str]: - """ - Get command line from Linux /proc filesystem - """ - with open("/proc/self/cmdline") as cmd: - cmdinfo = cmd.read() - return cmdinfo.split("\x00") - - def _get_cmdline_unix_ps(self, pid: int) -> List[str]: - """ - Get command line using ps command (for Unix-like systems without /proc) - """ - proc = subprocess.Popen( - ["ps", "-p", str(pid), "-o", "command"], stdout=subprocess.PIPE - ) - (out, _) = proc.communicate() - parts = out.split(b"\n") - return [parts[1].decode("utf-8")] - - def _get_cmdline_unix(self, pid: int) -> List[str]: - """ - Get command line using Unix - """ - if os.path.isfile("/proc/self/cmdline"): - return self._get_cmdline_linux_proc() - else: - return self._get_cmdline_unix_ps(pid) - - def _get_cmdline(self, pid: int) -> List[str]: - """ - Get command line in a platform-independent way - """ - try: - if is_windows(): - return self._get_cmdline_windows() - else: - return self._get_cmdline_unix(pid) - except Exception: - logger.debug("Error getting command line", exc_info=True) - return sys.argv - - def _setup_socket_connection(self, discovery: Discovery, pid: int) -> None: - """ - Set up socket connection and populate discovery object with socket details - """ - try: - # In CentOS 7, some odd things can happen such as: - # PermissionError: [Errno 13] Permission denied: '/proc/6/fd/8' - # Use a try/except as a safety - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.connect((self.agent.options.agent_host, self.agent.options.agent_port)) - discovery.fd = sock.fileno() - - # If we're on a system with a procfs (Linux) - if os.path.exists("/proc/"): - try: - path = "/proc/%d/fd/%d" % (pid, sock.fileno()) - discovery.inode = os.readlink(path) - except Exception: - logger.debug( - "Error generating file descriptor inode: ", exc_info=True - ) - except Exception: - logger.debug("Error creating socket connection: ", exc_info=True) - # Made with Bob From 44d0decc40990940e86008ecbc7c8b31e64b8f4b Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Fri, 29 Aug 2025 14:40:33 +0200 Subject: [PATCH 42/86] chore(version): Bump version to 3.8.1 Signed-off-by: Paulo Vital --- src/instana/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/instana/version.py b/src/instana/version.py index 1354ffa7..975d3186 100644 --- a/src/instana/version.py +++ b/src/instana/version.py @@ -3,4 +3,4 @@ # Module version file. 
Used by setup.py and snapshot reporting.
-VERSION = "3.8.0"
+VERSION = "3.8.1"

From 2d775b763b99d93bb012c36f364a3120d34fd9e2 Mon Sep 17 00:00:00 2001
From: Arjun Rajappa
Date: Thu, 21 Aug 2025 18:31:34 +0530
Subject: [PATCH 43/86] ci: enable sonar scanning and report results to sonarcloud.io

Signed-off-by: Arjun Rajappa
---
 .circleci/config.yml     | 39 +++++++++++++++++++++++----------------
 sonar-project.properties |  6 +++---
 2 files changed, 26 insertions(+), 19 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index e6711151..e3e63306 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -90,29 +90,36 @@ commands:
     steps:
       - attach_workspace:
          at: .
-      - run:
-          name: Install Java
-          command: |
-            sudo apt-get update
-            sudo apt-get install openjdk-11-jdk
       - run:
           name: Run SonarQube to report the coverage
           command: |
             . venv/bin/activate
             coverage combine ./coverage_results
             coverage xml -i
-            wget -O /tmp/sonar-scanner-cli.zip https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-4.8.1.3023.zip
-            unzip -d /tmp /tmp/sonar-scanner-cli.zip
-            if [[ -n "${CIRCLE_PR_NUMBER}" ]]; then
-              /tmp/sonar-scanner-4.8.1.3023/bin/sonar-scanner \
-                -Dsonar.host.url=${SONARQUBE_URL} \
-                -Dsonar.login="${SONARQUBE_LOGIN}" \
-                -Dsonar.pullrequest.key="${CIRCLE_PR_NUMBER}" \
+
+            PR_NUMBER=$(echo ${CIRCLE_PULL_REQUEST} | sed 's/.*\///')
+            SONAR_SCANNER_VERSION=7.2.0.5079
+            export SONAR_SCANNER_HOME=$HOME/.sonar/sonar-scanner-$SONAR_SCANNER_VERSION-linux-x64
+            SONAR_TOKEN=${SONAR_TOKEN}
+
+            curl --create-dirs -sSLo $HOME/.sonar/sonar-scanner.zip https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-$SONAR_SCANNER_VERSION-linux-x64.zip
+            unzip -o $HOME/.sonar/sonar-scanner.zip -d $HOME/.sonar/
+            export PATH=$SONAR_SCANNER_HOME/bin:$PATH
+            export SONAR_SCANNER_OPTS="-server"
+            if [[ -n "${PR_NUMBER}" ]]; then
+              sonar-scanner \
+                -Dsonar.organization=instana \
+                -Dsonar.projectKey=instana_python-sensor \
+                -Dsonar.sources=. \
+                -Dsonar.host.url="${SONARQUBE_URL}" \
+                -Dsonar.pullrequest.key="${PR_NUMBER}" \
                 -Dsonar.pullrequest.branch="${CIRCLE_BRANCH}"
             else
-              /tmp/sonar-scanner-4.8.1.3023/bin/sonar-scanner \
-                -Dsonar.host.url=${SONARQUBE_URL} \
-                -Dsonar.login="${SONARQUBE_LOGIN}" \
+              sonar-scanner \
+                -Dsonar.organization=instana \
+                -Dsonar.projectKey=instana_python-sensor \
+                -Dsonar.sources=. \
+                -Dsonar.host.url="${SONARQUBE_URL}" \
                 -Dsonar.branch.name="${CIRCLE_BRANCH}"
             fi
       - store_artifacts:
@@ -294,7 +301,7 @@ jobs:
       - pip-install-deps
       - pip-install-tests-deps
       - store-pytest-results
-      # - run_sonarqube
+      - run_sonarqube

 workflows:
   tests:
diff --git a/sonar-project.properties b/sonar-project.properties
index 56b3d211..fcb6f56d 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -1,5 +1,6 @@
-sonar.projectKey=Python-Tracer
-sonar.projectName=Python Tracer
+sonar.projectKey=instana_python-sensor
+sonar.organization=instana
+sonar.projectName=python-sensor
 sonar.sourceEncoding=utf-8
 sonar.sources=src/instana/
 sonar.tests=tests/
@@ -8,4 +9,3 @@ sonar.python.coverage.reportPaths=coverage.xml
 sonar.python.version=3
 sonar.links.homepage=https://github.com/instana/python-sensor/
 sonar.links.ci=https://circleci.com/gh/instana/python-sensor
 sonar.links.issue=https://github.com/instana/python-sensor/issues
-sonar.links.scm=https://github.com/instana/python-sensor/

From d0a093d3b4ec618b630fab07f1f2671aae5f2ec4 Mon Sep 17 00:00:00 2001
From: Arjun Rajappa
Date: Thu, 21 Aug 2025 18:42:08 +0530
Subject: [PATCH 44/86] ci: update test inclusions in sonar scan

Signed-off-by: Arjun Rajappa
---
 sonar-project.properties | 1 +
 1 file changed, 1 insertion(+)

diff --git a/sonar-project.properties b/sonar-project.properties
index fcb6f56d..217abbee 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -4,6 +4,7 @@ sonar.projectName=python-sensor
 sonar.sourceEncoding=utf-8
 sonar.sources=src/instana/
 sonar.tests=tests/
+sonar.test.inclusions=test/**/*
 sonar.python.coverage.reportPaths=coverage.xml
 sonar.python.version=3
 sonar.links.homepage=https://github.com/instana/python-sensor/

From 092998f7d13f65bef0237cf0255779de43f38dd4 Mon Sep 17 00:00:00 2001
From: Arjun Rajappa
Date: Fri, 22 Aug 2025 12:53:37 +0530
Subject: [PATCH 45/86] ci: update python dep installation

Signed-off-by: Arjun Rajappa
---
 .circleci/config.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index e3e63306..6f85749c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -93,9 +93,9 @@ commands:
       - run:
           name: Run SonarQube to report the coverage
          command: |
+            python -m venv venv
             . venv/bin/activate
-            coverage combine ./coverage_results
-            coverage xml -i
+            pip install --upgrade pip coverage

             PR_NUMBER=$(echo ${CIRCLE_PULL_REQUEST} | sed 's/.*\///')
             SONAR_SCANNER_VERSION=7.2.0.5079

From 0f4fc1a25efba14f3d9e3f830e7b334e2d330d04 Mon Sep 17 00:00:00 2001
From: Arjun Rajappa
Date: Fri, 22 Aug 2025 13:09:08 +0530
Subject: [PATCH 46/86] ci: use python 3.13 for final_job

Signed-off-by: Arjun Rajappa
---
 .circleci/config.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 6f85749c..a64b36b6 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -293,7 +293,7 @@ jobs:

   final_job:
     docker:
-      - image: public.ecr.aws/docker/library/python:3.9
+      - image: public.ecr.aws/docker/library/python:3.13
     working_directory: ~/repo
     steps:
       - checkout

From 569cfc9906e262664017b5c252b2d746e932d4cb Mon Sep 17 00:00:00 2001
From: Arjun Rajappa
Date: Mon, 25 Aug 2025 09:44:13 +0530
Subject: [PATCH 47/86] ci: use pysonar-scanner, a Python library, to scan the repo

Signed-off-by: Arjun Rajappa
---
 .circleci/config.yml | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index a64b36b6..067896d4 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -96,18 +96,17 @@ commands:
             python -m venv venv
             . venv/bin/activate
             pip install --upgrade pip coverage
+            coverage combine ./coverage_results
+            coverage xml -i

             PR_NUMBER=$(echo ${CIRCLE_PULL_REQUEST} | sed 's/.*\///')
-            SONAR_SCANNER_VERSION=7.2.0.5079
-            export SONAR_SCANNER_HOME=$HOME/.sonar/sonar-scanner-$SONAR_SCANNER_VERSION-linux-x64
             SONAR_TOKEN=${SONAR_TOKEN}

-            curl --create-dirs -sSLo $HOME/.sonar/sonar-scanner.zip https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-$SONAR_SCANNER_VERSION-linux-x64.zip
-            unzip -o $HOME/.sonar/sonar-scanner.zip -d $HOME/.sonar/
-            export PATH=$SONAR_SCANNER_HOME/bin:$PATH
+            pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ pysonar-scanner
             export SONAR_SCANNER_OPTS="-server"
+
             if [[ -n "${PR_NUMBER}" ]]; then
-              sonar-scanner \
+              pysonar-scanner \
                 -Dsonar.organization=instana \
                 -Dsonar.projectKey=instana_python-sensor \
                 -Dsonar.sources=. \
                 -Dsonar.host.url="${SONARQUBE_URL}" \
                 -Dsonar.pullrequest.key="${PR_NUMBER}" \
                 -Dsonar.pullrequest.branch="${CIRCLE_BRANCH}"
             else
-              sonar-scanner \
+              pysonar-scanner \
                 -Dsonar.organization=instana \
                 -Dsonar.projectKey=instana_python-sensor \
                 -Dsonar.sources=. 
\ -Dsonar.host.url="${SONARQUBE_URL}" \ -Dsonar.pullrequest.key="${PR_NUMBER}" \ -Dsonar.pullrequest.branch="${CIRCLE_BRANCH}" @@ -117,7 +114,6 @@ commands: pysonar-scanner \ -Dsonar.organization=instana \ -Dsonar.projectKey=instana_python-sensor \ - -Dsonar.sources=. \ -Dsonar.host.url="${SONARQUBE_URL}" \ -Dsonar.branch.name="${CIRCLE_BRANCH}" fi From ad0fb4b5d9adaf273a2a9f719bf916b2ccc3e5eb Mon Sep 17 00:00:00 2001 From: Arjun Rajappa Date: Mon, 25 Aug 2025 14:25:51 +0530 Subject: [PATCH 49/86] ci: remove unwanted steps from final job Signed-off-by: Arjun Rajappa --- .circleci/config.yml | 6 +++--- sonar-project.properties | 2 -- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 06574fb5..5fef25d9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -93,7 +93,10 @@ commands: - run: name: Run SonarQube to report the coverage command: | + python -m venv venv . venv/bin/activate + + pip install --upgrade pip coverage coverage combine ./coverage_results coverage xml -i @@ -293,9 +296,6 @@ jobs: steps: - checkout - check-if-tests-needed - - pip-install-deps - - pip-install-tests-deps - - store-pytest-results - run_sonarqube workflows: diff --git a/sonar-project.properties b/sonar-project.properties index 217abbee..b373d6be 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -3,8 +3,6 @@ sonar.organization=instana sonar.projectName=python-sensor sonar.sourceEncoding=utf-8 sonar.sources=src/instana/ -sonar.tests=tests/ -sonar.test.inclusions=test/**/* sonar.python.coverage.reportPaths=coverage.xml sonar.python.version=3 sonar.links.homepage=https://github.com/instana/python-sensor/ From b48f5fd10d6e4f822dc864120f4573843f52dde1 Mon Sep 17 00:00:00 2001 From: Tobias Michels <66688058+tobmi1@users.noreply.github.com> Date: Tue, 2 Sep 2025 20:36:10 +0200 Subject: [PATCH 50/86] Add test to check if errors are recorded in aio-pika consumer Signed-off-by: Tobias Michels <66688058+tobmi1@users.noreply.github.com> --- tests/clients/test_aio_pika.py | 58 ++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/tests/clients/test_aio_pika.py b/tests/clients/test_aio_pika.py index 75c1afff..4071e568 100644 --- a/tests/clients/test_aio_pika.py +++ b/tests/clients/test_aio_pika.py @@ -86,6 +86,22 @@ async def consume_message(self, connect_method) -> None: if queue.name in message.body.decode(): break + async def consume_with_exception(self, connect_method) -> None: + connection = await connect_method() + + async def on_message(msg): + raise RuntimeError("Simulated Exception") + + async with connection: + # Creating channel + channel = await connection.channel() + + # Declaring queue + queue = await channel.declare_queue(self.queue_name) + + await queue.consume(on_message) + await asyncio.sleep(1) # Wait to ensure the message is processed + @pytest.mark.parametrize( "params_combination", ["both_args", "both_kwargs", "arg_kwarg"], @@ -184,3 +200,45 @@ def assert_span_info(rabbitmq_span: "ReadableSpan", sort: str) -> None: assert_span_info(rabbitmq_publisher_span, "publish") assert_span_info(rabbitmq_consumer_span, "consume") + + @pytest.mark.parametrize( + "connect_method", + [connect, connect_robust], + ) + def test_consume_with_exception(self, connect_method) -> None: + with tracer.start_as_current_span("test"): + self.loop.run_until_complete(self.publish_message()) + self.loop.run_until_complete(self.consume_with_exception(connect_method)) + + spans = self.recorder.queued_spans() + assert 
len(spans) == 3 + + rabbitmq_publisher_span = spans[0] + rabbitmq_consumer_span = spans[1] + test_span = spans[2] + + # Same traceId + assert test_span.t == rabbitmq_publisher_span.t + assert rabbitmq_publisher_span.t == rabbitmq_consumer_span.t + + # Parent relationships + assert rabbitmq_publisher_span.p == test_span.s + assert rabbitmq_consumer_span.p == rabbitmq_publisher_span.s + + # Error logging + assert not rabbitmq_publisher_span.ec + assert rabbitmq_consumer_span.ec == 1 + assert not test_span.ec + + # Span attributes + def assert_span_info(rabbitmq_span: "ReadableSpan", sort: str) -> None: + assert rabbitmq_span.data["rabbitmq"]["exchange"] == "test.exchange" + assert rabbitmq_span.data["rabbitmq"]["sort"] == sort + assert rabbitmq_span.data["rabbitmq"]["address"] + assert rabbitmq_span.data["rabbitmq"]["key"] == "test.queue" + assert rabbitmq_span.stack + assert isinstance(rabbitmq_span.stack, list) + assert len(rabbitmq_span.stack) > 0 + + assert_span_info(rabbitmq_publisher_span, "publish") + assert_span_info(rabbitmq_consumer_span, "consume") From d412bd78eed7c6812fdb7ef1c0ce4a6ca9a248d6 Mon Sep 17 00:00:00 2001 From: Tobias Michels <66688058+tobmi1@users.noreply.github.com> Date: Tue, 2 Sep 2025 20:37:29 +0200 Subject: [PATCH 51/86] Fix aio-pika instrumentation bug causing the consumer callback to not be included in the trace Signed-off-by: Tobias Michels <66688058+tobmi1@users.noreply.github.com> --- src/instana/instrumentation/aio_pika.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/instana/instrumentation/aio_pika.py b/src/instana/instrumentation/aio_pika.py index 5e3f58d0..ef16dfa9 100644 --- a/src/instana/instrumentation/aio_pika.py +++ b/src/instana/instrumentation/aio_pika.py @@ -100,12 +100,12 @@ async def callback_wrapper( _extract_span_attributes( span, connection, "consume", message.routing_key, message.exchange ) - try: - response = await wrapped(*args, **kwargs) - except Exception as exc: - span.record_exception(exc) - else: - return response + try: + response = await wrapped(*args, **kwargs) + except Exception as exc: + span.record_exception(exc) + else: + return response wrapped_callback = callback_wrapper(callback) if kwargs.get("callback"): From c245b1c5362f74babed8d13b7a1efd2c7271fcb8 Mon Sep 17 00:00:00 2001 From: Cagri Yonca Date: Mon, 8 Sep 2025 16:52:14 +0200 Subject: [PATCH 52/86] fix: fixed reading suppression header from message's headers Signed-off-by: Cagri Yonca --- src/instana/instrumentation/kafka/kafka_python.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/instana/instrumentation/kafka/kafka_python.py b/src/instana/instrumentation/kafka/kafka_python.py index c11e9355..3b1423d3 100644 --- a/src/instana/instrumentation/kafka/kafka_python.py +++ b/src/instana/instrumentation/kafka/kafka_python.py @@ -51,6 +51,9 @@ def trace_kafka_send( # context propagation headers = kwargs.get("headers", []) + if not is_suppressed and ("x_instana_l_s", b"0") in headers: + is_suppressed = True + suppression_header = {"x_instana_l_s": "0" if is_suppressed else "1"} headers.append(suppression_header) @@ -96,10 +99,8 @@ def create_span( ) if not is_suppressed and headers: - for header_name, header_value in headers: - if header_name == "x_instana_l_s" and header_value == b"0": - is_suppressed = True - break + if ("x_instana_l_s", b"0") in headers: + is_suppressed = True if is_suppressed: return From 433d94c0310d73c58d2d40db96e8319be001cdc0 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Tue, 9 Sep 
2025 16:52:07 +0530 Subject: [PATCH 53/86] fix: `Immutable type, ignoring call to set attribute` on span_context Signed-off-by: Varsha GS --- src/instana/propagators/http_propagator.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/src/instana/propagators/http_propagator.py b/src/instana/propagators/http_propagator.py index 76ca3114..c6491076 100644 --- a/src/instana/propagators/http_propagator.py +++ b/src/instana/propagators/http_propagator.py @@ -5,6 +5,7 @@ from instana.log import logger from instana.propagators.base_propagator import BasePropagator from instana.util.ids import define_server_timing, hex_id_limited +from instana.span_context import SpanContext from opentelemetry.trace.span import format_span_id @@ -27,7 +28,26 @@ def inject(self, span_context, carrier, disable_w3c_trace_context=False): # Suppression `level` made in the child context or in the parent context # has priority over any non-suppressed `level` setting child_level = int(self.extract_instana_headers(dictionary_carrier)[2] or "1") - span_context.level = min(child_level, span_context.level) + new_level = min(child_level, span_context.level) + + if new_level != span_context.level: + # Create a new span context with the updated level + span_context = SpanContext( + trace_id=span_context.trace_id, + span_id=span_context.span_id, + is_remote=span_context.is_remote, + trace_flags=span_context.trace_flags, + trace_state=span_context.trace_state, + level=new_level, + synthetic=span_context.synthetic, + trace_parent=span_context.trace_parent, + instana_ancestor=span_context.instana_ancestor, + long_trace_id=span_context.long_trace_id, + correlation_type=span_context.correlation_type, + correlation_id=span_context.correlation_id, + traceparent=span_context.traceparent, + tracestate=span_context.tracestate + ) serializable_level = str(span_context.level) From 1c6f5ecd5cd5c20e8c5a19e90667af7205408d81 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Wed, 10 Sep 2025 16:42:20 +0530 Subject: [PATCH 54/86] tests: Add testcase to verify suppression Signed-off-by: Varsha GS --- tests/propagators/test_http_propagator.py | 42 +++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/tests/propagators/test_http_propagator.py b/tests/propagators/test_http_propagator.py index 25b36635..bac0a173 100644 --- a/tests/propagators/test_http_propagator.py +++ b/tests/propagators/test_http_propagator.py @@ -340,3 +340,45 @@ def test_w3c_off_x_instana_l_0( if "tracestate" in carrier_header.keys(): assert "tracestate" in downstream_carrier assert carrier_header["tracestate"] == downstream_carrier["tracestate"] + + def test_suppression_when_child_level_is_lower( + self, + _trace_id: int, + _span_id: int, + ) -> None: + """ + Test that span_context.level is updated when the child level (extracted from carrier) is lower than the current span_context.level. 
+ """ + # Create a span context with level=1 + original_span_context = SpanContext( + trace_id=_trace_id, + span_id=_span_id, + is_remote=False, + level=1, + ) + + # Create a carrier with level=0 (suppression) + carrier_header = {"x-instana-l": "0"} + + # Inject the span context into the carrier + self.hptc.inject(original_span_context, carrier_header) + + # Extract the span context from the carrier to verify the level was updated + extracted_context = self.hptc.extract(carrier_header) + + # Verify that the level is 0 (suppressed) + assert extracted_context.level == 0 + assert extracted_context.suppression + + # Create a new carrier to test the propagation + downstream_carrier = {} + + # Inject the extracted context into the downstream carrier + self.hptc.inject(extracted_context, downstream_carrier) + + # Verify that the downstream carrier has the correct level + assert downstream_carrier.get("X-INSTANA-L") == "0" + + # Verify that no trace or span IDs are injected when suppressed + assert "X-INSTANA-T" not in downstream_carrier + assert "X-INSTANA-S" not in downstream_carrier From 6f6d35c756545374b7274666dbd35aa531c5b132 Mon Sep 17 00:00:00 2001 From: Cagri Yonca Date: Wed, 10 Sep 2025 16:26:51 +0200 Subject: [PATCH 55/86] chore(version): Bump version to 3.8.2 Signed-off-by: Cagri Yonca --- src/instana/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/instana/version.py b/src/instana/version.py index 975d3186..adb951cb 100644 --- a/src/instana/version.py +++ b/src/instana/version.py @@ -3,4 +3,4 @@ # Module version file. Used by setup.py and snapshot reporting. -VERSION = "3.8.1" +VERSION = "3.8.2" From 51baea85aa24e4c60811c94f2aaf604f36b04cfe Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Thu, 11 Sep 2025 14:16:56 +0530 Subject: [PATCH 56/86] fix: suppression propagation in kafka Signed-off-by: Varsha GS --- src/instana/propagators/kafka_propagator.py | 38 +++++++++++++++------ 1 file changed, 27 insertions(+), 11 deletions(-) diff --git a/src/instana/propagators/kafka_propagator.py b/src/instana/propagators/kafka_propagator.py index 9ba27940..97bae58c 100644 --- a/src/instana/propagators/kafka_propagator.py +++ b/src/instana/propagators/kafka_propagator.py @@ -1,15 +1,12 @@ # (c) Copyright IBM Corp. 2025 -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import Any, Dict, Optional from opentelemetry.trace.span import format_span_id from instana.log import logger from instana.propagators.base_propagator import BasePropagator, CarrierT from instana.util.ids import hex_id_limited - -if TYPE_CHECKING: - from instana.span_context import SpanContext - +from instana.span_context import SpanContext class KafkaPropagator(BasePropagator): """ @@ -53,7 +50,7 @@ def extract_carrier_headers(self, carrier: CarrierT) -> Dict[str, Any]: def extract( self, carrier: CarrierT, disable_w3c_trace_context: bool = False - ) -> Optional["SpanContext"]: + ) -> Optional[SpanContext]: """ This method overrides one of the Base classes as with the introduction of W3C trace context for the Kafka requests more extracting steps and @@ -64,7 +61,7 @@ def extract( disable_w3c_trace_context (bool): A flag to disable the W3C trace context. Returns: - Optional["SpanContext"]: The extracted span context or None. + Optional[SpanContext]: The extracted span context or None. 
""" try: headers = self.extract_carrier_headers(carrier=carrier) @@ -79,7 +76,7 @@ def extract( # Assisted by watsonx Code Assistant def inject( self, - span_context: "SpanContext", + span_context: SpanContext, carrier: CarrierT, disable_w3c_trace_context: bool = True, ) -> None: @@ -103,7 +100,26 @@ def inject( # Suppression `level` made in the child context or in the parent context # has priority over any non-suppressed `level` setting suppression_level = int(self.extract_instana_headers(dictionary_carrier)[2]) - span_context.level = min(suppression_level, span_context.level) + new_level = min(suppression_level, span_context.level) + + if new_level != span_context.level: + # Create a new span context with the updated level + span_context = SpanContext( + trace_id=span_context.trace_id, + span_id=span_context.span_id, + is_remote=span_context.is_remote, + trace_flags=span_context.trace_flags, + trace_state=span_context.trace_state, + level=new_level, + synthetic=span_context.synthetic, + trace_parent=span_context.trace_parent, + instana_ancestor=span_context.instana_ancestor, + long_trace_id=span_context.long_trace_id, + correlation_type=span_context.correlation_type, + correlation_id=span_context.correlation_id, + traceparent=span_context.traceparent, + tracestate=span_context.tracestate + ) def inject_key_value(carrier, key, value): if isinstance(carrier, list): @@ -119,9 +135,9 @@ def inject_key_value(carrier, key, value): inject_key_value( carrier, self.KAFKA_HEADER_KEY_L_S, - str(suppression_level).encode("utf-8"), + str(span_context.level).encode("utf-8"), ) - if suppression_level == 1: + if span_context.level == 1: inject_key_value( carrier, self.KAFKA_HEADER_KEY_T, From 1465089bd4699ec314709197f7f5f21dca744c57 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Mon, 22 Sep 2025 13:48:01 +0200 Subject: [PATCH 57/86] chore: Update compatible runtimes and arch for AWS Lambda layer publishing script. Signed-off-by: Paulo Vital --- bin/aws-lambda/build_and_publish_lambda_layer.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bin/aws-lambda/build_and_publish_lambda_layer.py b/bin/aws-lambda/build_and_publish_lambda_layer.py index 22fd7ad6..bd3de221 100755 --- a/bin/aws-lambda/build_and_publish_lambda_layer.py +++ b/bin/aws-lambda/build_and_publish_lambda_layer.py @@ -170,12 +170,14 @@ "--zip-file", aws_zip_filename, "--compatible-runtimes", - "python3.8", "python3.9", "python3.10", "python3.11", "python3.12", "python3.13", + "--compatible-architectures", + "x86_64", + "arm64", "--region", region, "--profile", From 7c76bdbdb99988afa19ee88ee8f7aebd64077f43 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Mon, 22 Sep 2025 13:50:09 +0200 Subject: [PATCH 58/86] chore: Update dev region for AWS Lambda layer publishing script. Signed-off-by: Paulo Vital --- bin/aws-lambda/build_and_publish_lambda_layer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/aws-lambda/build_and_publish_lambda_layer.py b/bin/aws-lambda/build_and_publish_lambda_layer.py index bd3de221..e57fe386 100755 --- a/bin/aws-lambda/build_and_publish_lambda_layer.py +++ b/bin/aws-lambda/build_and_publish_lambda_layer.py @@ -107,7 +107,7 @@ ] if dev_mode: - target_regions = ["us-west-1"] + target_regions = ["us-east-1"] LAYER_NAME = "instana-py-dev" else: target_regions = [ From 72b77916d82b3a07fa6a7eff90e132c6bf512efa Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Mon, 22 Sep 2025 14:17:30 +0200 Subject: [PATCH 59/86] chore: Print the AWS Lambda layer list as MD. 
Signed-off-by: Paulo Vital --- bin/aws-lambda/build_and_publish_lambda_layer.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bin/aws-lambda/build_and_publish_lambda_layer.py b/bin/aws-lambda/build_and_publish_lambda_layer.py index e57fe386..0c8dad2d 100755 --- a/bin/aws-lambda/build_and_publish_lambda_layer.py +++ b/bin/aws-lambda/build_and_publish_lambda_layer.py @@ -149,6 +149,7 @@ LAYER_NAME = "instana-python" published = dict() +version = 0 for region in target_regions: print(f"===> Uploading layer to AWS {region} ") @@ -219,5 +220,8 @@ print("===> Published list:") +print(f"AWS Lambda Layer v{version}") +print("| AWS Region | ARN |") +print("| :-- | :-- |") for key in published.keys(): - print(f"{key}\t{published[key]}") + print(f"| {key} | {published[key]} |") From 588958e8774d947ce7c66579cf310d1f8482fd1a Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Mon, 22 Sep 2025 11:39:49 +0530 Subject: [PATCH 60/86] fix(aio-pika): implement `_bind_args` method to fetch values from both args and kwargs Signed-off-by: Varsha GS --- src/instana/instrumentation/aio_pika.py | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/src/instana/instrumentation/aio_pika.py b/src/instana/instrumentation/aio_pika.py index ef16dfa9..2dcb6ad3 100644 --- a/src/instana/instrumentation/aio_pika.py +++ b/src/instana/instrumentation/aio_pika.py @@ -43,18 +43,26 @@ async def publish_with_instana( ) -> Optional["ConfirmationFrameType"]: if tracing_is_off(): return await wrapped(*args, **kwargs) - + tracer, parent_span, _ = get_tracer_tuple() parent_context = parent_span.get_span_context() if parent_span else None + def _bind_args( + message: Type["AbstractMessage"], + routing_key: str, + *args: object, + **kwargs: object, + ) -> Tuple[object, ...]: + return (message, routing_key, args, kwargs) + + (message, routing_key, args, kwargs) = _bind_args( + *args, **kwargs + ) + with tracer.start_as_current_span( "rabbitmq", span_context=parent_context ) as span: connection = instance.channel._connection - message = kwargs["message"] if kwargs.get("message") else args[0] - routing_key = ( - kwargs["routing_key"] if kwargs.get("routing_key") else args[1] - ) _extract_span_attributes( span, connection, "publish", routing_key, instance.name @@ -66,6 +74,9 @@ async def publish_with_instana( message.properties.headers, disable_w3c_trace_context=True, ) + + args = (message, routing_key) + args + try: response = await wrapped(*args, **kwargs) except Exception as exc: From c28a94ea719cc085eea798a2c334aed49f07c235 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Wed, 24 Sep 2025 14:45:58 +0530 Subject: [PATCH 61/86] test(aio-pika): verify publish works with an empty `routing_key` Signed-off-by: Varsha GS --- src/instana/instrumentation/aio_pika.py | 2 +- tests/clients/test_aio_pika.py | 55 +++++++++---------------- 2 files changed, 20 insertions(+), 37 deletions(-) diff --git a/src/instana/instrumentation/aio_pika.py b/src/instana/instrumentation/aio_pika.py index 2dcb6ad3..a47e09f7 100644 --- a/src/instana/instrumentation/aio_pika.py +++ b/src/instana/instrumentation/aio_pika.py @@ -43,7 +43,7 @@ async def publish_with_instana( ) -> Optional["ConfirmationFrameType"]: if tracing_is_off(): return await wrapped(*args, **kwargs) - + tracer, parent_span, _ = get_tracer_tuple() parent_context = parent_span.get_span_context() if parent_span else None diff --git a/tests/clients/test_aio_pika.py b/tests/clients/test_aio_pika.py index 4071e568..20e97618 100644 --- 
a/tests/clients/test_aio_pika.py +++ b/tests/clients/test_aio_pika.py @@ -56,6 +56,9 @@ async def publish_message(self, params_combination: str = "both_args") -> None: elif params_combination == "arg_kwarg": args = (message,) kwargs = {"routing_key": queue_name} + elif params_combination == "arg_kwarg_empty_key": + args = (message,) + kwargs = {"routing_key": ""} else: # params_combination == "both_args" args = (message, queue_name) @@ -102,6 +105,15 @@ async def on_message(msg): await queue.consume(on_message) await asyncio.sleep(1) # Wait to ensure the message is processed + def assert_span_info(self, rabbitmq_span: "ReadableSpan", sort: str, key: str = "test.queue") -> None: + assert rabbitmq_span.data["rabbitmq"]["exchange"] == "test.exchange" + assert rabbitmq_span.data["rabbitmq"]["sort"] == sort + assert rabbitmq_span.data["rabbitmq"]["address"] + assert rabbitmq_span.data["rabbitmq"]["key"] == key + assert rabbitmq_span.stack + assert isinstance(rabbitmq_span.stack, list) + assert len(rabbitmq_span.stack) > 0 + @pytest.mark.parametrize( "params_combination", ["both_args", "both_kwargs", "arg_kwarg"], @@ -127,13 +139,8 @@ def test_basic_publish(self, params_combination) -> None: assert not rabbitmq_span.ec # Span attributes - assert rabbitmq_span.data["rabbitmq"]["exchange"] == "test.exchange" - assert rabbitmq_span.data["rabbitmq"]["sort"] == "publish" - assert rabbitmq_span.data["rabbitmq"]["address"] - assert rabbitmq_span.data["rabbitmq"]["key"] == "test.queue" - assert rabbitmq_span.stack - assert isinstance(rabbitmq_span.stack, list) - assert len(rabbitmq_span.stack) > 0 + key = "" if params_combination == "arg_kwarg_empty_key" else self.queue_name + self.assert_span_info(rabbitmq_span, "publish", key) def test_basic_publish_as_root_exit_span(self) -> None: agent.options.allow_exit_as_root = True @@ -151,13 +158,7 @@ def test_basic_publish_as_root_exit_span(self) -> None: assert not rabbitmq_span.ec # Span attributes - assert rabbitmq_span.data["rabbitmq"]["exchange"] == "test.exchange" - assert rabbitmq_span.data["rabbitmq"]["sort"] == "publish" - assert rabbitmq_span.data["rabbitmq"]["address"] - assert rabbitmq_span.data["rabbitmq"]["key"] == "test.queue" - assert rabbitmq_span.stack - assert isinstance(rabbitmq_span.stack, list) - assert len(rabbitmq_span.stack) > 0 + self.assert_span_info(rabbitmq_span, "publish") @pytest.mark.parametrize( "connect_method", @@ -189,17 +190,8 @@ def test_basic_consume(self, connect_method) -> None: assert not test_span.ec # Span attributes - def assert_span_info(rabbitmq_span: "ReadableSpan", sort: str) -> None: - assert rabbitmq_span.data["rabbitmq"]["exchange"] == "test.exchange" - assert rabbitmq_span.data["rabbitmq"]["sort"] == sort - assert rabbitmq_span.data["rabbitmq"]["address"] - assert rabbitmq_span.data["rabbitmq"]["key"] == "test.queue" - assert rabbitmq_span.stack - assert isinstance(rabbitmq_span.stack, list) - assert len(rabbitmq_span.stack) > 0 - - assert_span_info(rabbitmq_publisher_span, "publish") - assert_span_info(rabbitmq_consumer_span, "consume") + self.assert_span_info(rabbitmq_publisher_span, "publish") + self.assert_span_info(rabbitmq_consumer_span, "consume") @pytest.mark.parametrize( "connect_method", @@ -231,14 +223,5 @@ def test_consume_with_exception(self, connect_method) -> None: assert not test_span.ec # Span attributes - def assert_span_info(rabbitmq_span: "ReadableSpan", sort: str) -> None: - assert rabbitmq_span.data["rabbitmq"]["exchange"] == "test.exchange" - assert 
rabbitmq_span.data["rabbitmq"]["sort"] == sort - assert rabbitmq_span.data["rabbitmq"]["address"] - assert rabbitmq_span.data["rabbitmq"]["key"] == "test.queue" - assert rabbitmq_span.stack - assert isinstance(rabbitmq_span.stack, list) - assert len(rabbitmq_span.stack) > 0 - - assert_span_info(rabbitmq_publisher_span, "publish") - assert_span_info(rabbitmq_consumer_span, "consume") + self.assert_span_info(rabbitmq_publisher_span, "publish") + self.assert_span_info(rabbitmq_consumer_span, "consume") From 0167611c97fb5ba11e26142191f21782391545c4 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Wed, 17 Sep 2025 16:35:19 +0200 Subject: [PATCH 62/86] ci: Add support to test Python 3.14.0rc3. Signed-off-by: Paulo Vital --- .tekton/github-pr-pipeline.yaml.part | 2 +- .tekton/pipeline.yaml | 4 ++-- .tekton/python-tracer-prepuller.yaml | 2 +- Dockerfile-py3140 | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.tekton/github-pr-pipeline.yaml.part b/.tekton/github-pr-pipeline.yaml.part index e7c15930..2e0aac50 100644 --- a/.tekton/github-pr-pipeline.yaml.part +++ b/.tekton/github-pr-pipeline.yaml.part @@ -28,7 +28,7 @@ spec: default: public.ecr.aws/docker/library/python:3.13-bookworm - name: py-314-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.14.0rc2 + default: public.ecr.aws/docker/library/python:3.14.0rc3 workspaces: - name: python-tracer-ci-pipeline-pvc tasks: diff --git a/.tekton/pipeline.yaml b/.tekton/pipeline.yaml index 14cb96d4..a87ff532 100644 --- a/.tekton/pipeline.yaml +++ b/.tekton/pipeline.yaml @@ -26,7 +26,7 @@ spec: default: public.ecr.aws/docker/library/python:3.13-bookworm - name: py-314-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.14.0rc2 + default: public.ecr.aws/docker/library/python:3.14.0rc3 workspaces: - name: python-tracer-ci-pipeline-pvc tasks: @@ -110,7 +110,7 @@ spec: - clone params: - name: py-version - value: 3.14.0rc2 + value: 3.14.0rc3 taskRef: name: python-tracer-unittest-python-next-task workspaces: diff --git a/.tekton/python-tracer-prepuller.yaml b/.tekton/python-tracer-prepuller.yaml index 76b0609a..b6a6f8a0 100644 --- a/.tekton/python-tracer-prepuller.yaml +++ b/.tekton/python-tracer-prepuller.yaml @@ -59,7 +59,7 @@ spec: image: public.ecr.aws/docker/library/python:3.13-bookworm command: ["sh", "-c", "'true'"] - name: prepuller-314 - image: public.ecr.aws/docker/library/python:3.14.0rc2 + image: public.ecr.aws/docker/library/python:3.14.0rc3 command: ["sh", "-c", "'true'"] # Use the pause container to ensure the Pod goes into a `Running` phase diff --git a/Dockerfile-py3140 b/Dockerfile-py3140 index a8aa2331..9a39e30d 100644 --- a/Dockerfile-py3140 +++ b/Dockerfile-py3140 @@ -1,4 +1,4 @@ -FROM public.ecr.aws/docker/library/python:3.14.0b2 +FROM public.ecr.aws/docker/library/python:3.14.0rc3 RUN apt-get update \ && apt-get install -y --no-install-recommends \ From a311e04e21c4311d07992cc70c16a19da5f1051d Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Wed, 17 Sep 2025 16:43:20 +0200 Subject: [PATCH 63/86] chore: remove unnecessary files. 
Signed-off-by: Paulo Vital
---
 Dockerfile-py3140 | 21 ---------------------
 1 file changed, 21 deletions(-)
 delete mode 100644 Dockerfile-py3140

diff --git a/Dockerfile-py3140 b/Dockerfile-py3140
deleted file mode 100644
index 9a39e30d..00000000
--- a/Dockerfile-py3140
+++ /dev/null
@@ -1,21 +0,0 @@
-FROM public.ecr.aws/docker/library/python:3.14.0rc3
-
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends \
-    build-essential python3-dev \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*
-
-ENV WORKDIR_=/root/base
-
-WORKDIR $WORKDIR_
-COPY ./tests/requirements-minimal.txt .
-COPY ./tests/requirements-pre314.txt .
-
-ENV VIRTUAL_ENV="$WORKDIR_/venv"
-RUN python -m venv $VIRTUAL_ENV
-
-ENV PATH="$VIRTUAL_ENV/bin:$PATH"
-
-RUN python -m pip install --upgrade pip \
-    && python -m pip install -r requirements-pre314.txt

From 8eae90ec651a2d7cab03b54cbfbb17ac22bfe8cc Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Fri, 26 Sep 2025 10:42:23 +0200
Subject: [PATCH 64/86] chore(version): Bump version to 3.8.3

Signed-off-by: Paulo Vital
---
 src/instana/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/instana/version.py b/src/instana/version.py
index adb951cb..034f31c6 100644
--- a/src/instana/version.py
+++ b/src/instana/version.py
@@ -3,4 +3,4 @@

 # Module version file. Used by setup.py and snapshot reporting.

-VERSION = "3.8.2"
+VERSION = "3.8.3"

From f02e3b7d6daeb9a3bae02cdce6dc06bc01d74c8f Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Wed, 20 Aug 2025 14:12:57 +0200
Subject: [PATCH 65/86] ci: Update Kafka container.

Moving to use the `ubuntu/kafka` container image, as it supports
`amd64(x86_64)`, `arm64`, `ppc64le`, and `s390x`.

Signed-off-by: Paulo Vital
---
 .circleci/config.yml | 22 ++++++++++------------
 .tekton/task.yaml    | 25 ++++++++++---------------
 docker-compose.yml   | 20 +++++++++++---------
 3 files changed, 31 insertions(+), 36 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 5fef25d9..d4451306 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -244,18 +244,16 @@ jobs:
       - store-pytest-results
       - store-coverage-report

-  py312kafka:
+  py313kafka:
     docker:
-      - image: public.ecr.aws/docker/library/python:3.12
-      - image: public.ecr.aws/bitnami/kafka:3.9.0
+      - image: public.ecr.aws/docker/library/python:3.13
+      - image: public.ecr.aws/ubuntu/zookeeper:latest
+        environment:
+          TZ: UTC
+      - image: public.ecr.aws/ubuntu/kafka:latest
         environment:
-          KAFKA_CFG_NODE_ID: 0
-          KAFKA_CFG_PROCESS_ROLES: controller,broker
-          KAFKA_CFG_LISTENERS: PLAINTEXT://:9092,CONTROLLER://:9093,EXTERNAL://:9094
-          KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT
-          KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: 0@localhost:9093
-          KAFKA_CFG_CONTROLLER_LISTENER_NAMES: CONTROLLER
-          KAFKA_CFG_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092,EXTERNAL://localhost:9094
+          TZ: UTC
+          ZOOKEEPER_HOST: localhost
     working_directory: ~/repo
     steps:
       - checkout
@@ -309,7 +307,7 @@ workflows:
       - py39cassandra
       - py39gevent
      - py312aws
-      - py312kafka
+      - py313kafka
       - autowrapt:
           matrix:
             parameters:
@@ -322,5 +320,5 @@
       - py39cassandra
       - py39gevent
       - py312aws
-      - py312kafka
+      - py313kafka
       - autowrapt

diff --git a/.tekton/task.yaml b/.tekton/task.yaml
index e5d79c92..3f3d98ec 100644
--- a/.tekton/task.yaml
+++ b/.tekton/task.yaml
@@ -166,23 +166,18 @@ metadata:
   name: python-tracer-unittest-kafka-task
spec:
   sidecars:
+    - name: zookeeper
+      image: public.ecr.aws/ubuntu/zookeeper:latest
+      env:
+        - name: TZ
+
value: "UTC" - name: kafka - image: public.ecr.aws/bitnami/kafka:3.9.0 + image: public.ecr.aws/ubuntu/kafka:latest env: - - name: KAFKA_CFG_NODE_ID - value: "0" - - name: KAFKA_CFG_PROCESS_ROLES - value: "controller,broker" - - name: KAFKA_CFG_LISTENERS - value: "PLAINTEXT://:9092,CONTROLLER://:9093,EXTERNAL://:9094" - - name: KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP - value: "CONTROLLER:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT" - - name: KAFKA_CFG_CONTROLLER_QUORUM_VOTERS - value: "0@kafka:9093" - - name: KAFKA_CFG_CONTROLLER_LISTENER_NAMES - value: "CONTROLLER" - - name: KAFKA_CFG_ADVERTISED_LISTENERS - value: "PLAINTEXT://kafka:9092,EXTERNAL://localhost:9094" + - name: TZ + value: "UTC" + - name: ZOOKEEPER_HOST + value: zookeeper params: - name: imageDigest type: string diff --git a/docker-compose.yml b/docker-compose.yml index 45393b76..35dea903 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -61,16 +61,18 @@ services: - "8681:8681" - "8682:8682" + # Sidecar container for Kafka + zookeeper: + image: public.ecr.aws/ubuntu/zookeeper:latest + ports: + - 2181:2181 + environment: + - TZ=UTC + kafka: - image: public.ecr.aws/bitnami/kafka:latest + image: public.ecr.aws/ubuntu/kafka:latest # works on amd64, arm64, ppc64le and s390x ports: - '9092:9092' - - '9094:9094' environment: - - KAFKA_CFG_NODE_ID=0 - - KAFKA_CFG_PROCESS_ROLES=controller,broker - - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093,EXTERNAL://:9094 - - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT - - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@kafka:9093 - - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER - - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092,EXTERNAL://localhost:9094 + - TZ=UTC + - ZOOKEEPER_HOST=zookeeper From 2ab738479f5953d8665f15db67ffb7e4c88fc840 Mon Sep 17 00:00:00 2001 From: Cagri Yonca Date: Thu, 2 Oct 2025 15:32:25 +0200 Subject: [PATCH 66/86] ci: Fix Kafka connection issues to ubuntu/kafka Signed-off-by: Cagri Yonca --- .circleci/config.yml | 25 +++++++++++++++++++++++-- .tekton/task.yaml | 34 +++++++++++++++++++++++++++++++--- docker-compose.yml | 37 +++++++++++++++++++++++++++++-------- 3 files changed, 83 insertions(+), 13 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d4451306..1bba6674 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -247,13 +247,34 @@ jobs: py313kafka: docker: - image: public.ecr.aws/docker/library/python:3.13 - - image: public.ecr.aws/ubuntu/zookeeper:latest + - image: public.ecr.aws/ubuntu/zookeeper:3.1-22.04_edge environment: TZ: UTC - - image: public.ecr.aws/ubuntu/kafka:latest + - image: public.ecr.aws/ubuntu/kafka:3.1-22.04_edge environment: TZ: UTC ZOOKEEPER_HOST: localhost + ZOOKEEPER_PORT: 2181 + command: + - /opt/kafka/config/server.properties + - --override + - listeners=INTERNAL://0.0.0.0:9093,EXTERNAL://0.0.0.0:9094 + - --override + - advertised.listeners=INTERNAL://localhost:9093,EXTERNAL://localhost:9094 + - --override + - listener.security.protocol.map=INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT + - --override + - inter.broker.listener.name=INTERNAL + - --override + - broker.id=1 + - --override + - offsets.topic.replication.factor=1 + - --override + - transaction.state.log.replication.factor=1 + - --override + - transaction.state.log.min.isr=1 + - --override + - auto.create.topics.enable=true working_directory: ~/repo steps: - checkout diff --git a/.tekton/task.yaml b/.tekton/task.yaml index 3f3d98ec..0a9a6d05 100644 --- a/.tekton/task.yaml 
+++ b/.tekton/task.yaml
@@ -167,17 +167,45 @@ metadata:
 spec:
   sidecars:
     - name: zookeeper
-      image: public.ecr.aws/ubuntu/zookeeper:latest
+      image: public.ecr.aws/ubuntu/zookeeper:3.1-22.04_edge
+      ports:
+        - containerPort: 9093
       env:
         - name: TZ
           value: "UTC"
     - name: kafka
-      image: public.ecr.aws/ubuntu/kafka:latest
+      image: public.ecr.aws/ubuntu/kafka:3.1-22.04_edge
       env:
         - name: TZ
           value: "UTC"
         - name: ZOOKEEPER_HOST
-          value: zookeeper
+          value: localhost
+        - name: ZOOKEEPER_PORT
+          value: "2181"
+      ports:
+        - containerPort: 9093
+        - containerPort: 9094
+      command:
+        - /opt/kafka/bin/kafka-server-start.sh
+        - /opt/kafka/config/server.properties
+        - --override
+        - listeners=INTERNAL://0.0.0.0:9093,EXTERNAL://0.0.0.0:9094
+        - --override
+        - advertised.listeners=INTERNAL://localhost:9093,EXTERNAL://localhost:9094
+        - --override
+        - listener.security.protocol.map=INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT
+        - --override
+        - inter.broker.listener.name=INTERNAL
+        - --override
+        - broker.id=1
+        - --override
+        - offsets.topic.replication.factor=1
+        - --override
+        - transaction.state.log.replication.factor=1
+        - --override
+        - transaction.state.log.min.isr=1
+        - --override
+        - auto.create.topics.enable=true
   params:
     - name: imageDigest
       type: string

diff --git a/docker-compose.yml b/docker-compose.yml
index 35dea903..299806a5 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -63,16 +63,37 @@ services:

   # Sidecar container for Kafka
   zookeeper:
-    image: public.ecr.aws/ubuntu/zookeeper:latest
-    ports:
-      - 2181:2181
-    environment:
-      - TZ=UTC
+    image: public.ecr.aws/ubuntu/zookeeper:3.1-22.04_edge
+    ports: ["2181:2181"]
+    environment: [ "TZ=UTC" ]

   kafka:
-    image: public.ecr.aws/ubuntu/kafka:latest # works on amd64, arm64, ppc64le and s390x
-    ports:
-      - '9092:9092'
+    image: public.ecr.aws/ubuntu/kafka:3.1-22.04_edge
+    depends_on: [zookeeper]
+    ports:
+      - "9094:9094"
+      - "9093:9093"
     environment:
       - TZ=UTC
       - ZOOKEEPER_HOST=zookeeper
+      - ZOOKEEPER_PORT=2181
+    command:
+      - /opt/kafka/config/server.properties
+      - --override
+      - listeners=INTERNAL://0.0.0.0:9093,EXTERNAL://0.0.0.0:9094
+      - --override
+      - advertised.listeners=INTERNAL://kafka:9093,EXTERNAL://127.0.0.1:9094
+      - --override
+      - listener.security.protocol.map=INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT
+      - --override
+      - inter.broker.listener.name=INTERNAL
+      - --override
+      - broker.id=1
+      - --override
+      - offsets.topic.replication.factor=1
+      - --override
+      - transaction.state.log.replication.factor=1
+      - --override
+      - transaction.state.log.min.isr=1
+      - --override
+      - auto.create.topics.enable=true

From 02219a3aaf8e38d64cbc2da2693deadba59defd2 Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Tue, 7 Oct 2025 07:12:29 +0200
Subject: [PATCH 67/86] chore(ci): Enable CircleCI automatic workflow reruns.

Added the `max_auto_reruns` config to reduce the impact of temporary
workflow failures due to transient issues. The automatic workflow reruns
function similarly to manually selecting `Rerun workflow from failed` in
the CircleCI web app.
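For context, a minimal sketch of where this option lands in
`.circleci/config.yml` (illustrative only; the retry count of 2 mirrors
the one-line change in the diff below):

    workflows:
      tests:
        # Automatically rerun this workflow's failed jobs, up to 2 times.
        max_auto_reruns: 2
        jobs:
          - python3x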
More info at https://circleci.com/docs/guides/orchestrate/automatic-reruns/ Signed-off-by: Paulo Vital --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1bba6674..a06d8287 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -319,6 +319,7 @@ jobs: workflows: tests: + max_auto_reruns: 2 jobs: - python3x: matrix: From a9c7a9179be0ee1b652cf0d87b2fbf7fff216f64 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Wed, 8 Oct 2025 11:42:30 +0530 Subject: [PATCH 68/86] currency: Add Cassandra and newly supported libraries Signed-off-by: Varsha GS --- .tekton/.currency/resources/table.json | 80 +++++++++++++++----------- 1 file changed, 45 insertions(+), 35 deletions(-) diff --git a/.tekton/.currency/resources/table.json b/.tekton/.currency/resources/table.json index 4de524a9..238526f8 100644 --- a/.tekton/.currency/resources/table.json +++ b/.tekton/.currency/resources/table.json @@ -8,9 +8,10 @@ "Cloud Native": "No" }, { - "Package name": "Celery", - "Support Policy": "45-days", - "Beta version": "No", + "Package name": "WSGI", + "Support Policy": "0-day", + "Beta version": "Yes", + "Last Supported Version": "1.0.1", "Cloud Native": "No" }, { @@ -56,124 +57,133 @@ "Cloud Native": "No" }, { - "Package name": "Webapp2", - "Support Policy": "On demand", + "Package name": "Aiohttp", + "Support Policy": "45-days", "Beta version": "No", - "Last Supported Version": "2.5.2", "Cloud Native": "No" }, { - "Package name": "WSGI", - "Support Policy": "0-day", - "Beta version": "Yes", - "Last Supported Version": "1.0.1", + "Package name": "Httpx", + "Support Policy": "45-days", + "Beta version": "No", "Cloud Native": "No" }, { - "Package name": "Aiohttp", + "Package name": "Requests", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "No" }, { - "Package name": "Asynqp", - "Support Policy": "Deprecated", + "Package name": "Urllib3", + "Support Policy": "45-days", "Beta version": "No", - "Last Supported Version": "0.6", "Cloud Native": "No" }, { - "Package name": "Boto3", + "Package name": "Grpcio", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "Yes" }, { - "Package name": "Google-cloud-pubsub", + "Package name": "Cassandra-driver", "Support Policy": "45-days", "Beta version": "No", - "Cloud Native": "Yes" + "Cloud Native": "No" }, { - "Package name": "Google-cloud-storage", + "Package name": "Mysqlclient", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "Yes" }, { - "Package name": "Grpcio", + "Package name": "PyMySQL", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "Yes" }, { - "Package name": "Mysqlclient", + "Package name": "Pymongo", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "Yes" }, { - "Package name": "Pika", + "Package name": "Psycopg2", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "No" }, { - "Package name": "PyMySQL", + "Package name": "Redis", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "Yes" }, { - "Package name": "Pymongo", + "Package name": "SQLAlchemy", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "Yes" }, { - "Package name": "Psycopg2", + "Package name": "Aioamqp", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "No" }, { - "Package name": "Redis", + "Package name": "Aio-pika", "Support Policy": "45-days", "Beta version": "No", - "Cloud Native": "Yes" + "Cloud Native": "No" }, { - "Package name": "Requests", + "Package name": 
"Confluent-kafka", "Support Policy": "45-days", "Beta version": "No", - "Cloud Native": "Yes" + "Cloud Native": "No" }, { - "Package name": "SQLAlchemy", + "Package name": "Kafka-python-ng", "Support Policy": "45-days", "Beta version": "No", - "Cloud Native": "Yes" + "Cloud Native": "No" }, { - "Package name": "Urllib3", + "Package name": "Pika", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "No" }, { - "Package name": "Spyne", + "Package name": "Boto3", + "Support Policy": "45-days", + "Beta version": "No", + "Cloud Native": "Yes" + }, + { + "Package name": "Google-cloud-pubsub", "Support Policy": "45-days", "Beta version": "No", - "Cloud Native": "No" + "Cloud Native": "Yes" }, { - "Package name": "Aio-pika", + "Package name": "Google-cloud-storage", "Support Policy": "45-days", "Beta version": "No", + "Cloud Native": "Yes" + }, + { + "Package name": "Gevent", + "Support Policy": "On demand", + "Beta version": "No", "Cloud Native": "No" }, { - "Package name": "Aioamqp", + "Package name": "Celery", "Support Policy": "45-days", "Beta version": "No", "Cloud Native": "No" From ccffdb505e4baf319845e666cb7468b9734dada9 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Wed, 8 Oct 2025 12:15:36 +0530 Subject: [PATCH 69/86] currency: Add new libraries to report - fix warnings Signed-off-by: Varsha GS --- .tekton/.currency/docs/report.md | 48 ++++++++++---------- .tekton/.currency/scripts/generate_report.py | 45 +++++++++--------- 2 files changed, 49 insertions(+), 44 deletions(-) diff --git a/.tekton/.currency/docs/report.md b/.tekton/.currency/docs/report.md index c3a8fa22..a739efe1 100644 --- a/.tekton/.currency/docs/report.md +++ b/.tekton/.currency/docs/report.md @@ -3,31 +3,33 @@ | Package name | Support Policy | Beta version | Last Supported Version | Latest version | Up-to-date | Release date | Latest Version Published At | Days behind | Cloud Native | |:---------------------|:-----------------|:---------------|:-------------------------|:-----------------|:-------------|:---------------|:------------------------------|:--------------|:---------------| | ASGI | 45-days | No | 3.0 | 3.0 | Yes | 2019-03-04 | 2019-03-04 | 0 day/s | No | -| Celery | 45-days | No | 5.5.3 | 5.5.3 | Yes | 2025-06-01 | 2025-06-01 | 0 day/s | No | -| Django | 45-days | No | 5.2.3 | 5.2.3 | Yes | 2025-06-10 | 2025-06-10 | 0 day/s | No | -| FastAPI | 45-days | No | 0.115.12 | 0.115.12 | Yes | 2025-03-23 | 2025-03-23 | 0 day/s | No | -| Flask | 45-days | No | 3.1.1 | 3.1.1 | Yes | 2025-05-13 | 2025-05-13 | 0 day/s | No | +| WSGI | 0-day | Yes | 1.0.1 | 1.0.1 | Yes | 2010-09-26 | 2010-09-26 | 0 day/s | No | +| Django | 45-days | No | 5.2.7 | 5.2.7 | Yes | 2025-10-01 | 2025-10-01 | 0 day/s | No | +| FastAPI | 45-days | No | 0.118.0 | 0.118.0 | Yes | 2025-09-29 | 2025-09-29 | 0 day/s | No | +| Flask | 45-days | No | 3.1.2 | 3.1.2 | Yes | 2025-08-19 | 2025-08-19 | 0 day/s | No | | Pyramid | 45-days | No | 2.0.2 | 2.0.2 | Yes | 2023-08-25 | 2023-08-25 | 0 day/s | No | | Sanic | On demand | No | 25.3.0 | 25.3.0 | Yes | 2025-03-31 | 2025-03-31 | 0 day/s | No | -| Starlette | 45-days | No | 0.47.0 | 0.47.0 | Yes | 2025-05-29 | 2025-05-29 | 0 day/s | No | -| Tornado | 45-days | No | 6.5.1 | 6.5.1 | Yes | 2025-05-22 | 2025-05-22 | 0 day/s | No | -| Webapp2 | On demand | No | 2.5.2 | 2.5.2 | Yes | 2012-09-28 | 2012-09-28 | 0 day/s | No | -| WSGI | 0-day | Yes | 1.0.1 | 1.0.1 | Yes | 2010-09-26 | 2010-09-26 | 0 day/s | No | -| Aiohttp | 45-days | No | 3.12.13 | 3.12.13 | Yes | 2025-06-14 | 2025-06-14 | 0 day/s | 
No | -| Asynqp | Deprecated | No | 0.6 | 0.6 | Yes | 2019-01-20 | 2019-01-20 | 0 day/s | No | -| Boto3 | 45-days | No | 1.38.36 | 1.38.36 | Yes | 2025-06-12 | 2025-06-12 | 0 day/s | Yes | -| Google-cloud-pubsub | 45-days | No | 2.30.0 | 2.30.0 | Yes | 2025-06-09 | 2025-06-09 | 0 day/s | Yes | -| Google-cloud-storage | 45-days | No | 3.1.0 | 3.1.0 | Yes | 2025-02-28 | 2025-02-28 | 0 day/s | Yes | -| Grpcio | 45-days | No | 1.73.0 | 1.73.0 | Yes | 2025-06-09 | 2025-06-09 | 0 day/s | Yes | +| Starlette | 45-days | No | 0.48.0 | 0.48.0 | Yes | 2025-09-13 | 2025-09-13 | 0 day/s | No | +| Tornado | 45-days | No | 6.5.2 | 6.5.2 | Yes | 2025-08-08 | 2025-08-08 | 0 day/s | No | +| Aiohttp | 45-days | No | 3.13.0 | 3.13.0 | Yes | 2025-10-06 | 2025-10-06 | 0 day/s | No | +| Httpx | 45-days | No | 0.28.1 | 0.28.1 | Yes | 2024-12-06 | 2024-12-06 | 0 day/s | No | +| Requests | 45-days | No | 2.32.5 | 2.32.5 | Yes | 2025-08-18 | 2025-08-18 | 0 day/s | No | +| Urllib3 | 45-days | No | 2.5.0 | 2.5.0 | Yes | 2025-06-18 | 2025-06-18 | 0 day/s | No | +| Grpcio | 45-days | No | 1.75.1 | 1.75.1 | Yes | 2025-09-26 | 2025-09-26 | 0 day/s | Yes | +| Cassandra-driver | 45-days | No | 3.29.2 | 3.29.2 | Yes | 2024-09-10 | 2024-09-10 | 0 day/s | No | | Mysqlclient | 45-days | No | 2.2.7 | 2.2.7 | Yes | 2025-01-10 | 2025-01-10 | 0 day/s | Yes | -| Pika | 45-days | No | 1.3.2 | 1.3.2 | Yes | 2023-05-05 | 2023-05-05 | 0 day/s | No | -| PyMySQL | 45-days | No | 1.1.1 | 1.1.1 | Yes | 2024-05-21 | 2024-05-21 | 0 day/s | Yes | -| Pymongo | 45-days | No | 4.13.1 | 4.13.1 | Yes | 2025-06-11 | 2025-06-11 | 0 day/s | Yes | +| PyMySQL | 45-days | No | 1.1.2 | 1.1.2 | Yes | 2025-08-24 | 2025-08-24 | 0 day/s | Yes | +| Pymongo | 45-days | No | 4.15.3 | 4.15.3 | Yes | 2025-10-07 | 2025-10-07 | 0 day/s | Yes | | Psycopg2 | 45-days | No | 2.9.10 | 2.9.10 | Yes | 2024-10-16 | 2024-10-16 | 0 day/s | No | -| Redis | 45-days | No | 6.2.0 | 6.2.0 | Yes | 2025-05-28 | 2025-05-28 | 0 day/s | Yes | -| Requests | 45-days | No | 2.32.4 | 2.32.4 | Yes | 2025-06-09 | 2025-06-09 | 0 day/s | Yes | -| SQLAlchemy | 45-days | No | 2.0.41 | 2.0.41 | Yes | 2025-05-14 | 2025-05-14 | 0 day/s | Yes | -| Urllib3 | 45-days | No | 2.4.0 | 2.4.0 | Yes | 2025-04-10 | 2025-04-10 | 0 day/s | No | -| Spyne | 45-days | No | 2.14.0 | 2.14.0 | Yes | 2022-02-03 | 2022-02-03 | 0 day/s | No | -| Aio-pika | 45-days | No | 9.5.5 | 9.5.5 | Yes | 2025-02-26 | 2025-02-26 | 0 day/s | No | +| Redis | 45-days | No | 6.4.0 | 6.4.0 | Yes | 2025-08-07 | 2025-08-07 | 0 day/s | Yes | +| SQLAlchemy | 45-days | No | 2.0.43 | 2.0.43 | Yes | 2025-08-11 | 2025-08-11 | 0 day/s | Yes | | Aioamqp | 45-days | No | 0.15.0 | 0.15.0 | Yes | 2022-04-05 | 2022-04-05 | 0 day/s | No | +| Aio-pika | 45-days | No | 9.5.7 | 9.5.7 | Yes | 2025-08-05 | 2025-08-05 | 0 day/s | No | +| Confluent-kafka | 45-days | No | 2.11.1 | 2.11.1 | Yes | 2025-08-18 | 2025-08-18 | 0 day/s | No | +| Kafka-python-ng | 45-days | No | 2.2.3 | 2.2.3 | Yes | 2024-10-02 | 2024-10-02 | 0 day/s | No | +| Pika | 45-days | No | 1.3.2 | 1.3.2 | Yes | 2023-05-05 | 2023-05-05 | 0 day/s | No | +| Boto3 | 45-days | No | 1.40.47 | 1.40.47 | Yes | 2025-10-07 | 2025-10-07 | 0 day/s | Yes | +| Google-cloud-pubsub | 45-days | No | 2.31.1 | 2.31.1 | Yes | 2025-07-28 | 2025-07-28 | 0 day/s | Yes | +| Google-cloud-storage | 45-days | No | 3.4.0 | 3.4.0 | Yes | 2025-09-15 | 2025-09-15 | 0 day/s | Yes | +| Gevent | On demand | No | 25.9.1 | 25.9.1 | Yes | 2025-09-17 | 2025-09-17 | 0 day/s | No | +| Celery | 45-days | No | 5.5.3 | 5.5.3 | Yes | 
2025-06-01 | 2025-06-01 | 0 day/s | No | diff --git a/.tekton/.currency/scripts/generate_report.py b/.tekton/.currency/scripts/generate_report.py index 0d4ca056..64055b9c 100644 --- a/.tekton/.currency/scripts/generate_report.py +++ b/.tekton/.currency/scripts/generate_report.py @@ -31,7 +31,7 @@ def get_upstream_version(dependency, last_supported_version): last_supported_version_release_date = "Not found" if dependency in SPEC_MAP: # webscrape info from official website - version_pattern = "(\d+\.\d+\.?\d*)" + version_pattern = r"(\d+\.\d+\.?\d*)" latest_version_release_date = "" url = SPEC_MAP[dependency] @@ -181,17 +181,17 @@ def process_taskrun_logs( f"Retrieving container logs from the successful taskrun pod {pod_name} of taskrun {taskrun_name}.." ) if task_name == "python-tracer-unittest-gevent-starlette-task": - match = re.search("Successfully installed .* (starlette-[^\s]+)", logs) - tekton_ci_output += f"{match[1]}\n" - elif task_name == "python-tracer-unittest-googlecloud-task": - match = re.search( - "Successfully installed .* (google-cloud-storage-[^\s]+)", logs - ) + match = re.search(r"Successfully installed .*(gevent-[^\s]+) .* (starlette-[^\s]+)", logs) + tekton_ci_output += f"{match[1]}\n{match[2]}\n" + elif task_name == "python-tracer-unittest-kafka-task": + match = re.search(r"Successfully installed .*(confluent-kafka-[^\s]+) .* (kafka-python-ng-[^\s]+)", logs) + tekton_ci_output += f"{match[1]}\n{match[2]}\n" + elif task_name == "python-tracer-unittest-cassandra-task": + match = re.search(r"Successfully installed .*(cassandra-driver-[^\s]+)", logs) tekton_ci_output += f"{match[1]}\n" elif task_name == "python-tracer-unittest-default-task": - for line in logs.splitlines(): - if "Successfully installed" in line: - tekton_ci_output += line + lines = re.findall(r"^Successfully installed .*", logs, re.M) + tekton_ci_output += "\n".join(lines) break else: print( @@ -202,36 +202,39 @@ def process_taskrun_logs( def get_tekton_ci_output(): """Get the latest successful scheduled tekton pipeline output""" + # # To run locally # config.load_kube_config() + + ## To run inside the tekton kubernetes cluster config.load_incluster_config() namespace = "default" core_v1_client = client.CoreV1Api() - task_name = "python-tracer-unittest-gevent-starlette-task" taskrun_filter = lambda tr: tr["status"]["conditions"][0]["type"] == "Succeeded" # noqa: E731 + + task_name = "python-tracer-unittest-gevent-starlette-task" starlette_taskruns = get_taskruns(namespace, task_name, taskrun_filter) tekton_ci_output = process_taskrun_logs( starlette_taskruns, core_v1_client, namespace, task_name, "" ) - task_name = "python-tracer-unittest-googlecloud-task" - taskrun_filter = ( # noqa: E731 - lambda tr: tr["metadata"]["name"].endswith("unittest-googlecloud-0") - and tr["status"]["conditions"][0]["type"] == "Succeeded" + task_name = "python-tracer-unittest-kafka-task" + kafka_taskruns = get_taskruns(namespace, task_name, taskrun_filter) + + tekton_ci_output = process_taskrun_logs( + kafka_taskruns, core_v1_client, namespace, task_name, tekton_ci_output ) - googlecloud_taskruns = get_taskruns(namespace, task_name, taskrun_filter) + + task_name = "python-tracer-unittest-cassandra-task" + cassandra_taskruns = get_taskruns(namespace, task_name, taskrun_filter) tekton_ci_output = process_taskrun_logs( - googlecloud_taskruns, core_v1_client, namespace, task_name, tekton_ci_output + cassandra_taskruns, core_v1_client, namespace, task_name, tekton_ci_output ) task_name = "python-tracer-unittest-default-task" - 
taskrun_filter = ( # noqa: E731 - lambda tr: tr["metadata"]["name"].endswith("unittest-default-3") - and tr["status"]["conditions"][0]["type"] == "Succeeded" - ) default_taskruns = get_taskruns(namespace, task_name, taskrun_filter) tekton_ci_output = process_taskrun_logs( From 8fd76d8a0640fa230891a85432c7d0f5246492cb Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Wed, 8 Oct 2025 12:24:30 +0530 Subject: [PATCH 70/86] currency: Enhance `get_tekton_ci_output()` Signed-off-by: Varsha GS --- .tekton/.currency/scripts/generate_report.py | 57 +++++++++----------- 1 file changed, 25 insertions(+), 32 deletions(-) diff --git a/.tekton/.currency/scripts/generate_report.py b/.tekton/.currency/scripts/generate_report.py index 64055b9c..9ae08d11 100644 --- a/.tekton/.currency/scripts/generate_report.py +++ b/.tekton/.currency/scripts/generate_report.py @@ -202,44 +202,37 @@ def process_taskrun_logs( def get_tekton_ci_output(): """Get the latest successful scheduled tekton pipeline output""" - # # To run locally - # config.load_kube_config() - - ## To run inside the tekton kubernetes cluster - config.load_incluster_config() + try: + config.load_incluster_config() + print("Using in-cluster Kubernetes configuration...") + except config.config_exception.ConfigException: + # Fall back to local config if running locally and not inside cluster + config.load_kube_config() + print("Using local Kubernetes configuration...") namespace = "default" core_v1_client = client.CoreV1Api() taskrun_filter = lambda tr: tr["status"]["conditions"][0]["type"] == "Succeeded" # noqa: E731 - task_name = "python-tracer-unittest-gevent-starlette-task" - starlette_taskruns = get_taskruns(namespace, task_name, taskrun_filter) - - tekton_ci_output = process_taskrun_logs( - starlette_taskruns, core_v1_client, namespace, task_name, "" - ) - - task_name = "python-tracer-unittest-kafka-task" - kafka_taskruns = get_taskruns(namespace, task_name, taskrun_filter) - - tekton_ci_output = process_taskrun_logs( - kafka_taskruns, core_v1_client, namespace, task_name, tekton_ci_output - ) - - task_name = "python-tracer-unittest-cassandra-task" - cassandra_taskruns = get_taskruns(namespace, task_name, taskrun_filter) - - tekton_ci_output = process_taskrun_logs( - cassandra_taskruns, core_v1_client, namespace, task_name, tekton_ci_output - ) - - task_name = "python-tracer-unittest-default-task" - default_taskruns = get_taskruns(namespace, task_name, taskrun_filter) - - tekton_ci_output = process_taskrun_logs( - default_taskruns, core_v1_client, namespace, task_name, tekton_ci_output - ) + tasks = [ + "python-tracer-unittest-gevent-starlette-task", + "python-tracer-unittest-kafka-task", + "python-tracer-unittest-cassandra-task", + "python-tracer-unittest-default-task" + ] + + tekton_ci_output = "" + + for task_name in tasks: + try: + taskruns = get_taskruns(namespace, task_name, taskrun_filter) + + tekton_ci_output = process_taskrun_logs( + taskruns, core_v1_client, namespace, task_name, tekton_ci_output + ) + except Exception as exc: + print(f"Error processing task {task_name}: {str(exc)}") return tekton_ci_output From 081c01f5d7ebe3b71000ec8406d550351189ffd7 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Wed, 8 Oct 2025 12:25:45 +0530 Subject: [PATCH 71/86] ci(tekton): Run only pipelines/tasks required for currency Signed-off-by: Varsha GS --- .tekton/pipeline.yaml | 44 ---------------------------- .tekton/scheduled-eventlistener.yaml | 2 +- 2 files changed, 1 insertion(+), 45 deletions(-) diff --git a/.tekton/pipeline.yaml b/.tekton/pipeline.yaml 
index a87ff532..efbd6fe5 100644 --- a/.tekton/pipeline.yaml +++ b/.tekton/pipeline.yaml @@ -6,27 +6,12 @@ spec: params: - name: revision type: string - - name: py-38-imageDigest - type: string - default: public.ecr.aws/docker/library/python:3.8-bookworm - - name: py-39-imageDigest - type: string - default: public.ecr.aws/docker/library/python:3.9-bookworm - - name: py-310-imageDigest - type: string - default: public.ecr.aws/docker/library/python:3.10-bookworm - - name: py-311-imageDigest - type: string - default: public.ecr.aws/docker/library/python:3.11-bookworm - name: py-312-imageDigest type: string default: public.ecr.aws/docker/library/python:3.12-bookworm - name: py-313-imageDigest type: string default: public.ecr.aws/docker/library/python:3.13-bookworm - - name: py-314-imageDigest - type: string - default: public.ecr.aws/docker/library/python:3.14.0rc3 workspaces: - name: python-tracer-ci-pipeline-pvc tasks: @@ -48,13 +33,7 @@ spec: params: - name: imageDigest value: - - $(params.py-38-imageDigest) - - $(params.py-39-imageDigest) - - $(params.py-310-imageDigest) - - $(params.py-311-imageDigest) - - $(params.py-312-imageDigest) - $(params.py-313-imageDigest) - # - $(params.py-314-imageDigest) taskRef: name: python-tracer-unittest-default-task workspaces: @@ -82,17 +61,6 @@ spec: workspaces: - name: task-pvc workspace: python-tracer-ci-pipeline-pvc - - name: unittest-aws - runAfter: - - clone - params: - - name: imageDigest - value: $(params.py-313-imageDigest) - taskRef: - name: python-tracer-unittest-aws-task - workspaces: - - name: task-pvc - workspace: python-tracer-ci-pipeline-pvc - name: unittest-kafka runAfter: - clone @@ -104,15 +72,3 @@ spec: workspaces: - name: task-pvc workspace: python-tracer-ci-pipeline-pvc - - name: unittest-python-next - displayName: "Python next $(params.imageDigest)" - runAfter: - - clone - params: - - name: py-version - value: 3.14.0rc3 - taskRef: - name: python-tracer-unittest-python-next-task - workspaces: - - name: task-pvc - workspace: python-tracer-ci-pipeline-pvc diff --git a/.tekton/scheduled-eventlistener.yaml b/.tekton/scheduled-eventlistener.yaml index 5fdc3129..f9b8e2a6 100644 --- a/.tekton/scheduled-eventlistener.yaml +++ b/.tekton/scheduled-eventlistener.yaml @@ -25,7 +25,7 @@ spec: - name: git-commit-sha value: $(tt.params.git-commit-sha) pipelineRef: - name: github-pr-python-tracer-ci-pipeline + name: python-tracer-ci-pipeline workspaces: - name: python-tracer-ci-pipeline-pvc volumeClaimTemplate: From 9183bcbaf94ee2d2543db4f1016c56484d505475 Mon Sep 17 00:00:00 2001 From: minatooni Date: Fri, 26 Sep 2025 11:51:51 +0900 Subject: [PATCH 72/86] fix: tracing fastapi app Signed-off-by: minatooni --- src/instana/instrumentation/fastapi.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/instana/instrumentation/fastapi.py b/src/instana/instrumentation/fastapi.py index b2e9b018..68b19f6a 100644 --- a/src/instana/instrumentation/fastapi.py +++ b/src/instana/instrumentation/fastapi.py @@ -71,6 +71,10 @@ def init_with_instana( kwargs["middleware"] = [Middleware(InstanaASGIMiddleware)] elif isinstance(middleware, list): middleware.append(Middleware(InstanaASGIMiddleware)) + elif isinstance(middleware, tuple): + kwargs["middleware"] = (*middleware, Middleware(InstanaASGIMiddleware)) + else: + logger.warning("Unsupported FastAPI middleware sequence type.") exception_handlers = kwargs.get("exception_handlers") if exception_handlers is None: From 548a72be2d9c52512d22c0c832c86661f4d67edd Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Thu, 9 
Oct 2025 10:08:12 +0530
Subject: [PATCH 73/86] fix: command used to run the Python process
 - adapt to legacy systems like IBM i

Signed-off-by: Varsha GS
---
 src/instana/fsm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/instana/fsm.py b/src/instana/fsm.py
index 7355a0ab..f9473907 100644
--- a/src/instana/fsm.py
+++ b/src/instana/fsm.py
@@ -119,7 +119,7 @@ def announce_sensor(self, e: Any) -> bool:
         # rely on ps rather than adding a dependency on something like
         # psutil which requires dev packages, gcc etc...
         proc = subprocess.Popen(
-            ["ps", "-p", str(pid), "-o", "command"], stdout=subprocess.PIPE
+            ["ps", "-p", str(pid), "-o", "args"], stdout=subprocess.PIPE
         )
         (out, _) = proc.communicate()
         parts = out.split(b"\n")

From 0f2cc575e4ef6b1d1789f2e40af9f22784b4fe03 Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Tue, 7 Oct 2025 17:23:12 +0200
Subject: [PATCH 74/86] chore: Update README.md file.

Signed-off-by: Paulo Vital
---
 README.md | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)

diff --git a/README.md b/README.md
index 514530c7..20e08ff5 100644
--- a/README.md
+++ b/README.md
@@ -15,9 +15,13 @@ Any feedback is welcome. Happy Python visibility.

 ## Installation

-Instana remotely instruments your Python web servers automatically via [Instana AutoTrace™️]. To configure which Python processes this applies to, see the [configuration page].
+You can use automatic installation or manual installation as described in the following sections:

-## Manual Installation
+### Automatic installation
+
+Instana remotely instruments your Python applications automatically via the [Instana AutoTrace webhook] in Kubernetes and Red Hat OpenShift clusters. However, if you prefer to install the package manually, see [Manual Installation](#manual-installation) below.
+
+### Manual Installation

 If you wish to instrument your applications manually, you can install the package with the following into the `virtualenv`, `pipenv`, or container (hosted on [PyPI]):

@@ -27,7 +31,7 @@
 or to alternatively update an existing installation:

     pip install -U instana

-### Activating Without Code Changes
+#### Activating Without Code Changes

 The Instana package can then be activated _without any code changes required_ by setting the following environment variable for your Python application:

@@ -35,7 +39,7 @@ The Instana package can then be activated _without any code changes required_ by

 This will cause the Instana Python package to instrument your Python application automatically. Once it finds the Instana host agent, it will report Python metrics and distributed traces.

-### Activating via Import
+#### Activating With Code Changes

 Alternatively, if you prefer the manual method, import the `instana` package inside of your Python application:

@@ -57,11 +61,11 @@ Want to instrument other languages? 
See our [Node.js], [Go], [Ruby] instrumenta

 [Instana]: https://www.instana.com/ "IBM Instana Observability"
-[Instana AutoTrace™️]: https://www.ibm.com/docs/en/instana-observability/current?topic=kubernetes-instana-autotrace-webhook "Instana AutoTrace"
+[Instana AutoTrace webhook]: https://www.ibm.com/docs/en/instana-observability/current?topic=kubernetes-instana-autotrace-webhook "Instana AutoTrace webhook"
 [configuration page]: https://www.ibm.com/docs/en/instana-observability/current?topic=package-python-configuration-configuring-instana#general "Instana Python package configuration"
 [PyPI]: https://pypi.python.org/pypi/instana "Instana package at PyPI"
 [installation document]: https://www.ibm.com/docs/en/instana-observability/current?topic=technologies-monitoring-python-instana-python-package#installation-methods "Instana Python package installation methods"
-[documentation portal]: https://www.ibm.com/docs/en/instana-observability/current?topic=technologies-monitoring-python-instana-python-package "Instana Python package documentation"
+[documentation portal]: https://ibm.biz/monitoring-python "Monitoring Python - IBM documentation"
 [Node.js]: https://github.com/instana/nodejs "Instana Node.JS Tracer"
 [Go]: https://github.com/instana/golang-sensor "Instana Go Tracer"
 [Ruby]: https://github.com/instana/ruby-sensor "Instana Ruby Tracer"

From 83996ff0c288a124a4eea608f8560eae7bfc5931 Mon Sep 17 00:00:00 2001
From: Paulo Vital
Date: Tue, 7 Oct 2025 17:25:13 +0200
Subject: [PATCH 75/86] feat: Add support for Python 3.14.0

... and drop support for Python 3.8.

Signed-off-by: Paulo Vital
---
 .circleci/config.yml                 | 43 +++-------------------------
 .tekton/github-pr-pipeline.yaml.part |  7 ++---
 .tekton/pipeline.yaml                |  1 +
 .tekton/python-tracer-prepuller.yaml |  5 +---
 Dockerfile                           |  2 +-
 pyproject.toml                       |  8 ++++--
 src/instana/autoprofile/profiler.py  |  7 +++--
 7 files changed, 18 insertions(+), 55 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index a06d8287..83007df7 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -19,8 +19,8 @@ commands:
           CHANGED_FILES=$(git diff --name-only origin/main...HEAD)

           # Check if any relevant files changed
-          echo "$CHANGED_FILES" | grep -q -E "^(src/|tests/|tests_aws/|.circleci/)" || {
-            echo "No changes in src/, tests/, tests_aws/, or .circleci directories. Skipping tests."
+          echo "$CHANGED_FILES" | grep -q -E "^(src/|tests/|tests_autowrapt/|tests_aws/|.circleci/|pyproject.toml)" || {
+            echo "No changes in src/, tests/, tests_autowrapt/, tests_aws/, .circleci directories or pyproject.toml file. Skipping tests."
             circleci step halt
           }

@@ -161,38 +161,6 @@ jobs:
       - store-pytest-results
       - store-coverage-report

-  python314:
-    docker:
-      - image: ghcr.io/pvital/pvital-py3.14.0:latest
-      - image: public.ecr.aws/docker/library/postgres:16.2-bookworm
-        environment:
-          POSTGRES_USER: root
-          POSTGRES_PASSWORD: passw0rd
-          POSTGRES_DB: instana_test_db
-      - image: public.ecr.aws/docker/library/mariadb:11.3.2
-        environment:
-          MYSQL_ROOT_PASSWORD: passw0rd
-          MYSQL_DATABASE: instana_test_db
-      - image: public.ecr.aws/docker/library/redis:7.2.4-bookworm
-      - image: public.ecr.aws/docker/library/rabbitmq:3.13.0
-      - image: public.ecr.aws/docker/library/mongo:7.0.6
-      - image: quay.io/thekevjames/gcloud-pubsub-emulator:latest
-        environment:
-          PUBSUB_EMULATOR_HOST: 0.0.0.0:8681
-          PUBSUB_PROJECT1: test-project,test-topic
-    working_directory: ~/repo
-    steps:
-      - checkout
-      - check-if-tests-needed
-      - run: |
-          cp -a /root/base/venv ./venv
-          .
venv/bin/activate - pip install 'wheel==0.45.1' - pip install -r requirements.txt - - run-tests-with-coverage-report - - store-pytest-results - - store-coverage-report - py39cassandra: docker: - image: public.ecr.aws/docker/library/python:3.9 @@ -324,8 +292,7 @@ workflows: - python3x: matrix: parameters: - py-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] - - python314 + py-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] - py39cassandra - py39gevent - py312aws @@ -333,12 +300,10 @@ workflows: - autowrapt: matrix: parameters: - py-version: ["3.11", "3.12", "3.13"] + py-version: ["3.11", "3.12", "3.13", "3.14"] - final_job: requires: - python3x - # Uncomment the following when giving real support to 3.14 - # - python314 - py39cassandra - py39gevent - py312aws diff --git a/.tekton/github-pr-pipeline.yaml.part b/.tekton/github-pr-pipeline.yaml.part index 2e0aac50..5a4782c1 100644 --- a/.tekton/github-pr-pipeline.yaml.part +++ b/.tekton/github-pr-pipeline.yaml.part @@ -8,9 +8,6 @@ spec: type: string - name: git-commit-sha type: string - - name: py-38-imageDigest - type: string - default: public.ecr.aws/docker/library/python:3.8-bookworm - name: py-39-imageDigest type: string default: public.ecr.aws/docker/library/python:3.9-bookworm @@ -28,7 +25,7 @@ spec: default: public.ecr.aws/docker/library/python:3.13-bookworm - name: py-314-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.14.0rc3 + default: public.ecr.aws/docker/library/python:3.14-bookworm workspaces: - name: python-tracer-ci-pipeline-pvc tasks: @@ -51,7 +48,7 @@ spec: - unittest-gevent-starlette - unittest-aws - unittest-kafka - - unittest-python-next +# - unittest-python-next taskRef: kind: Task name: github-set-status diff --git a/.tekton/pipeline.yaml b/.tekton/pipeline.yaml index efbd6fe5..ba2fbef0 100644 --- a/.tekton/pipeline.yaml +++ b/.tekton/pipeline.yaml @@ -34,6 +34,7 @@ spec: - name: imageDigest value: - $(params.py-313-imageDigest) + - $(params.py-314-imageDigest) taskRef: name: python-tracer-unittest-default-task workspaces: diff --git a/.tekton/python-tracer-prepuller.yaml b/.tekton/python-tracer-prepuller.yaml index b6a6f8a0..0ef3ec41 100644 --- a/.tekton/python-tracer-prepuller.yaml +++ b/.tekton/python-tracer-prepuller.yaml @@ -40,9 +40,6 @@ spec: - name: prepuller-kafka image: public.ecr.aws/bitnami/kafka:3.9.0 command: ["sh", "-c", "'true'"] - - name: prepuller-38 - image: public.ecr.aws/docker/library/python:3.8-bookworm - command: ["sh", "-c", "'true'"] - name: prepuller-39 image: public.ecr.aws/docker/library/python:3.9-bookworm command: ["sh", "-c", "'true'"] @@ -59,7 +56,7 @@ spec: image: public.ecr.aws/docker/library/python:3.13-bookworm command: ["sh", "-c", "'true'"] - name: prepuller-314 - image: public.ecr.aws/docker/library/python:3.14.0rc3 + image: public.ecr.aws/docker/library/python:3.14-bookworm command: ["sh", "-c", "'true'"] # Use the pause container to ensure the Pod goes into a `Running` phase diff --git a/Dockerfile b/Dockerfile index a193d6d1..ba04c9c6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Development Container -FROM public.ecr.aws/docker/library/python:3.12-slim-bookworm +FROM public.ecr.aws/docker/library/python:3.14-slim RUN apt-get -y -qq update && \ apt-get -y -qq upgrade && \ diff --git a/pyproject.toml b/pyproject.toml index bcd86863..5edc3a6f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ dynamic = [ ] description = "Python Distributed Tracing & Metrics Sensor for Instana." 
readme = "README.md" -requires-python = ">=3.8" +requires-python = ">=3.9" license = "MIT" keywords = [ "performance", @@ -31,12 +31,12 @@ classifiers = [ "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware", "Topic :: System :: Monitoring", @@ -69,7 +69,7 @@ dev = [ ] [project.urls] -Documentation = "https://www.ibm.com/docs/en/instana-observability/current?topic=technologies-monitoring-python-instana-python-package" +Documentation = "https://ibm.biz/monitoring-python" Issues = "https://github.com/instana/python-sensor/issues" Source = "https://github.com/instana/python-sensor" @@ -80,6 +80,8 @@ path = "src/instana/version.py" include = [ "/src", "/tests", + "/tests_autowrapt", + "/tests_aws", ] [tool.hatch.build.targets.wheel] diff --git a/src/instana/autoprofile/profiler.py b/src/instana/autoprofile/profiler.py index 2e685a0e..dc417c46 100644 --- a/src/instana/autoprofile/profiler.py +++ b/src/instana/autoprofile/profiler.py @@ -17,6 +17,7 @@ if TYPE_CHECKING: from types import FrameType + from instana.agent.host import HostAgent @@ -52,11 +53,11 @@ def start(self, **kwargs: Dict[str, Any]) -> None: return try: - if not min_version(3, 8): - raise Exception("Supported Python versions 3.8 or higher.") + if not min_version(3, 9): + raise EnvironmentError("Supported Python versions: 3.9 or higher.") if platform.python_implementation() != "CPython": - raise Exception("Supported Python interpreter is CPython.") + raise EnvironmentError("Supported Python interpreter: CPython.") if self.profiler_destroyed: logger.warning("Destroyed profiler cannot be started.") From 7cab540af4b9fea8d694b2c771458b8828eefe76 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Tue, 7 Oct 2025 17:32:59 +0200 Subject: [PATCH 76/86] chore(ci): Update to trixie container images... when possible, as redis doesn't have it. 
Signed-off-by: Paulo Vital --- .circleci/config.yml | 2 +- .tekton/.currency/currency-tasks.yaml | 2 +- .tekton/github-pr-pipeline.yaml.part | 12 ++++++------ .tekton/pipeline.yaml | 7 +++++-- .tekton/python-tracer-prepuller.yaml | 14 +++++++------- .tekton/task.yaml | 4 ++-- 6 files changed, 22 insertions(+), 19 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 83007df7..b55277ca 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -135,7 +135,7 @@ jobs: type: string docker: - image: public.ecr.aws/docker/library/python:<> - - image: public.ecr.aws/docker/library/postgres:16.2-bookworm + - image: public.ecr.aws/docker/library/postgres:16.10-trixie environment: POSTGRES_USER: root POSTGRES_PASSWORD: passw0rd diff --git a/.tekton/.currency/currency-tasks.yaml b/.tekton/.currency/currency-tasks.yaml index c35a97d2..7f5ead15 100644 --- a/.tekton/.currency/currency-tasks.yaml +++ b/.tekton/.currency/currency-tasks.yaml @@ -32,7 +32,7 @@ spec: mountPath: /workspace steps: - name: generate-currency-report - image: public.ecr.aws/docker/library/python:3.12-bookworm + image: public.ecr.aws/docker/library/python:3.12-trixie script: | #!/usr/bin/env bash cd /workspace/python-sensor/.tekton/.currency diff --git a/.tekton/github-pr-pipeline.yaml.part b/.tekton/github-pr-pipeline.yaml.part index 5a4782c1..db2319ab 100644 --- a/.tekton/github-pr-pipeline.yaml.part +++ b/.tekton/github-pr-pipeline.yaml.part @@ -10,22 +10,22 @@ spec: type: string - name: py-39-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.9-bookworm + default: public.ecr.aws/docker/library/python:3.9-trixie - name: py-310-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.10-bookworm + default: public.ecr.aws/docker/library/python:3.10-trixie - name: py-311-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.11-bookworm + default: public.ecr.aws/docker/library/python:3.11-trixie - name: py-312-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.12-bookworm + default: public.ecr.aws/docker/library/python:3.12-trixie - name: py-313-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.13-bookworm + default: public.ecr.aws/docker/library/python:3.13-trixie - name: py-314-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.14-bookworm + default: public.ecr.aws/docker/library/python:3.14-trixie workspaces: - name: python-tracer-ci-pipeline-pvc tasks: diff --git a/.tekton/pipeline.yaml b/.tekton/pipeline.yaml index ba2fbef0..a74ef6be 100644 --- a/.tekton/pipeline.yaml +++ b/.tekton/pipeline.yaml @@ -8,10 +8,13 @@ spec: type: string - name: py-312-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.12-bookworm + default: public.ecr.aws/docker/library/python:3.12-trixie - name: py-313-imageDigest type: string - default: public.ecr.aws/docker/library/python:3.13-bookworm + default: public.ecr.aws/docker/library/python:3.13-trixie + - name: py-314-imageDigest + type: string + default: public.ecr.aws/docker/library/python:3.14-trixie workspaces: - name: python-tracer-ci-pipeline-pvc tasks: diff --git a/.tekton/python-tracer-prepuller.yaml b/.tekton/python-tracer-prepuller.yaml index 0ef3ec41..3d711dab 100644 --- a/.tekton/python-tracer-prepuller.yaml +++ b/.tekton/python-tracer-prepuller.yaml @@ -35,28 +35,28 @@ spec: image: public.ecr.aws/docker/library/mariadb:11.3.2 command: ["sh", "-c", "'true'"] - name: prepuller-postgres - image: 
public.ecr.aws/docker/library/postgres:16.2-bookworm + image: public.ecr.aws/docker/library/postgres:16.10-trixie command: ["sh", "-c", "'true'"] - name: prepuller-kafka image: public.ecr.aws/bitnami/kafka:3.9.0 command: ["sh", "-c", "'true'"] - name: prepuller-39 - image: public.ecr.aws/docker/library/python:3.9-bookworm + image: public.ecr.aws/docker/library/python:3.9-trixie command: ["sh", "-c", "'true'"] - name: prepuller-310 - image: public.ecr.aws/docker/library/python:3.10-bookworm + image: public.ecr.aws/docker/library/python:3.10-trixie command: ["sh", "-c", "'true'"] - name: prepuller-311 - image: public.ecr.aws/docker/library/python:3.11-bookworm + image: public.ecr.aws/docker/library/python:3.11-trixie command: ["sh", "-c", "'true'"] - name: prepuller-312 - image: public.ecr.aws/docker/library/python:3.12-bookworm + image: public.ecr.aws/docker/library/python:3.12-trixie command: ["sh", "-c", "'true'"] - name: prepuller-313 - image: public.ecr.aws/docker/library/python:3.13-bookworm + image: public.ecr.aws/docker/library/python:3.13-trixie command: ["sh", "-c", "'true'"] - name: prepuller-314 - image: public.ecr.aws/docker/library/python:3.14-bookworm + image: public.ecr.aws/docker/library/python:3.14-trixie command: ["sh", "-c", "'true'"] # Use the pause container to ensure the Pod goes into a `Running` phase diff --git a/.tekton/task.yaml b/.tekton/task.yaml index 0a9a6d05..f6b21a05 100644 --- a/.tekton/task.yaml +++ b/.tekton/task.yaml @@ -104,7 +104,7 @@ spec: - name: mongo image: public.ecr.aws/docker/library/mongo:7.0.6 - name: postgres - image: public.ecr.aws/docker/library/postgres:16.2-bookworm + image: public.ecr.aws/docker/library/postgres:16.10-trixie env: - name: POSTGRES_USER value: root @@ -248,7 +248,7 @@ spec: - name: mongo image: public.ecr.aws/docker/library/mongo:7.0.6 - name: postgres - image: public.ecr.aws/docker/library/postgres:16.2-bookworm + image: public.ecr.aws/docker/library/postgres:16.10-trixie env: - name: POSTGRES_USER value: root From 1a05aba2de05d7d800596059ab4ec8b8a4d039b7 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Tue, 7 Oct 2025 17:34:23 +0200 Subject: [PATCH 77/86] chore(version): Bump version to 3.9.0 Signed-off-by: Paulo Vital --- src/instana/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/instana/version.py b/src/instana/version.py index 034f31c6..4884162b 100644 --- a/src/instana/version.py +++ b/src/instana/version.py @@ -3,4 +3,4 @@ # Module version file. Used by setup.py and snapshot reporting. 
-VERSION = "3.8.3" +VERSION = "3.9.0" From 0bde1d8d323f5488563e8653c9bd87da25d470e7 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Wed, 15 Oct 2025 15:31:24 +0200 Subject: [PATCH 78/86] fix: TypeError in agent announcement process - Add type check in fsm.py to ensure payload is a dictionary before passing to set_from - Add defensive check in host.py to verify required keys exist in announce response - Prevents "'bool' object is not subscriptable" error in confluent_kafka tests Signed-off-by: Paulo Vital --- src/instana/agent/host.py | 13 +++++++++---- src/instana/fsm.py | 2 +- tests/agent/test_host.py | 35 ++++++++++++++++++++++++++++++++++- 3 files changed, 44 insertions(+), 6 deletions(-) diff --git a/src/instana/agent/host.py b/src/instana/agent/host.py index ad39440c..9ecc74ca 100644 --- a/src/instana/agent/host.py +++ b/src/instana/agent/host.py @@ -138,10 +138,15 @@ def set_from( @return: None """ self.options.set_from(res_data) - self.announce_data = AnnounceData( - pid=res_data["pid"], - agentUuid=res_data["agentUuid"], - ) + + # Ensure required keys are present + if "pid" in res_data and "agentUuid" in res_data: + self.announce_data = AnnounceData( + pid=res_data["pid"], + agentUuid=res_data["agentUuid"], + ) + else: + logger.debug(f"Missing required keys in announce response: {res_data}") def get_from_structure(self) -> Dict[str, str]: """ diff --git a/src/instana/fsm.py b/src/instana/fsm.py index f9473907..c4145a5f 100644 --- a/src/instana/fsm.py +++ b/src/instana/fsm.py @@ -148,7 +148,7 @@ def announce_sensor(self, e: Any) -> bool: payload = self.agent.announce(d) - if not payload: + if not payload or not isinstance(payload, dict): logger.debug("Cannot announce sensor. Scheduling retry.") self.schedule_retry( self.announce_sensor, e, f"{self.THREAD_NAME}: announce" diff --git a/tests/agent/test_host.py b/tests/agent/test_host.py index 058c676c..613d4478 100644 --- a/tests/agent/test_host.py +++ b/tests/agent/test_host.py @@ -5,7 +5,7 @@ import json import logging import os -from typing import Generator +from typing import Any, Dict, Generator from unittest.mock import Mock import pytest @@ -717,3 +717,36 @@ def test_is_service_or_endpoint_ignored(self) -> None: # don't ignore other services assert not self.agent._HostAgent__is_endpoint_ignored("service3") assert not self.agent._HostAgent__is_endpoint_ignored("service3") + + @pytest.mark.parametrize( + "input_data", + [ + { + "agentUuid": "test-uuid", + }, + { + "pid": 1234, + }, + { + "extraHeaders": ["value-3"], + }, + ], + ids=["missing_pid", "missing_agent_uuid", "missing_both_required_keys"], + ) + def test_set_from_missing_required_keys( + self, input_data: Dict[str, Any], caplog: pytest.LogCaptureFixture + ) -> None: + """Test set_from when required keys are missing in res_data.""" + agent = HostAgent() + caplog.set_level(logging.DEBUG, logger="instana") + + res_data = { + "secrets": {"matcher": "value-1", "list": ["value-2"]}, + } + res_data.update(input_data) + + agent.set_from(res_data) + + assert agent.announce_data is None + assert "Missing required keys in announce response" in caplog.messages[-1] + assert str(res_data) in caplog.messages[-1] From ebab26eba510cf97b704e4676e51a4e626b3dc7e Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Fri, 24 Oct 2025 09:58:34 +0530 Subject: [PATCH 79/86] chore(version): Bump version to `3.9.1` Signed-off-by: Varsha GS --- src/instana/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/instana/version.py b/src/instana/version.py index 4884162b..9c3e1cdd 
100644 --- a/src/instana/version.py +++ b/src/instana/version.py @@ -3,4 +3,4 @@ # Module version file. Used by setup.py and snapshot reporting. -VERSION = "3.9.0" +VERSION = "3.9.1" From e1641c03f0f87bd118fbce5fe9b658fbfe97f198 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Tue, 28 Oct 2025 14:12:06 +0530 Subject: [PATCH 80/86] chore: remove `setuptools` from project dependency - dependency on `pkg_resources` gone after `wrapt-2.0.0` Signed-off-by: Varsha GS --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5edc3a6f..c934a36c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,6 @@ dependencies = [ "opentelemetry-semantic-conventions>=0.48b0", "typing_extensions>=4.12.2", "pyyaml>=6.0.2", - "setuptools>=69.0.0; python_version >= \"3.12\"", "psutil>=5.9.0; sys_platform == \"win32\"", ] From e0f17a59b431755684d2f56442fedd3449cfaeaf Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Tue, 28 Oct 2025 16:37:25 +0100 Subject: [PATCH 81/86] fix: IndexError in the `confluent_kafka_python.py` Fixed IndexError in `confluent_kafka_python.py` by handling both positional and keyword arguments for the topic parameter in the `trace_kafka_produce` function. The issue occurred when the topic was passed as a keyword argument, resulting in an empty args tuple and causing an IndexError when trying to access `args[0]`. The solution: 1. Modified the `trace_kafka_produce` function to get the topic from either `args` or `kwargs` 2. Added safety checks to handle edge cases 3. Added two new test methods to verify the fix works with different argument patterns: - `test_trace_confluent_kafka_produce_with_keyword_topic` - `test_trace_confluent_kafka_produce_with_keyword_args` This fix ensures that the Kafka instrumentation works correctly regardless of how the `produce` method is called, improving the robustness of the Python sensor. Signed-off-by: Paulo Vital --- .../kafka/confluent_kafka_python.py | 17 ++-- tests/clients/kafka/test_confluent_kafka.py | 80 ++++++++++++++++--- 2 files changed, 79 insertions(+), 18 deletions(-) diff --git a/src/instana/instrumentation/kafka/confluent_kafka_python.py b/src/instana/instrumentation/kafka/confluent_kafka_python.py index e5d991d2..f2f327f1 100644 --- a/src/instana/instrumentation/kafka/confluent_kafka_python.py +++ b/src/instana/instrumentation/kafka/confluent_kafka_python.py @@ -14,11 +14,8 @@ from instana.log import logger from instana.propagators.format import Format from instana.singletons import get_tracer - from instana.util.traceutils import ( - get_tracer_tuple, - tracing_is_off, - ) from instana.span.span import InstanaSpan + from instana.util.traceutils import get_tracer_tuple, tracing_is_off consumer_token = None consumer_span = contextvars.ContextVar("confluent_kafka_consumer_span") @@ -69,16 +66,20 @@ def trace_kafka_produce( tracer, parent_span, _ = get_tracer_tuple() parent_context = parent_span.get_span_context() if parent_span else None + + # Get the topic from either args or kwargs + topic = args[0] if args else kwargs.get("topic", "") + is_suppressed = tracer.exporter._HostAgent__is_endpoint_ignored( "kafka", "produce", - args[0], + topic, ) with tracer.start_as_current_span( "kafka-producer", span_context=parent_context, kind=SpanKind.PRODUCER ) as span: - span.set_attribute("kafka.service", args[0]) + span.set_attribute("kafka.service", topic) span.set_attribute("kafka.access", "produce") # context propagation @@ -89,6 +90,10 @@ def trace_kafka_produce( # dictionary. 
To maintain compatibility with the headers for the # Kafka Python library, we will use a list of tuples. headers = args[6] if len(args) > 6 else kwargs.get("headers", []) + + # Initialize headers if it's None + if headers is None: + headers = [] suppression_header = {"x_instana_l_s": "0" if is_suppressed else "1"} headers.append(suppression_header) diff --git a/tests/clients/kafka/test_confluent_kafka.py b/tests/clients/kafka/test_confluent_kafka.py index 61f31bce..a5c9b334 100644 --- a/tests/clients/kafka/test_confluent_kafka.py +++ b/tests/clients/kafka/test_confluent_kafka.py @@ -5,30 +5,26 @@ from typing import Generator import pytest -from confluent_kafka import ( - Consumer, - KafkaException, - Producer, -) +from confluent_kafka import Consumer, KafkaException, Producer from confluent_kafka.admin import AdminClient, NewTopic -from mock import patch, Mock +from mock import Mock, patch from opentelemetry.trace import SpanKind from opentelemetry.trace.span import format_span_id from instana.configurator import config -from instana.options import StandardOptions -from instana.singletons import agent, tracer -from instana.util.config import parse_ignored_endpoints_from_yaml -from tests.helpers import get_first_span_by_filter, testenv from instana.instrumentation.kafka import confluent_kafka_python from instana.instrumentation.kafka.confluent_kafka_python import ( clear_context, - save_consumer_span_into_context, close_consumer_span, - trace_kafka_close, consumer_span, + save_consumer_span_into_context, + trace_kafka_close, ) +from instana.options import StandardOptions +from instana.singletons import agent, tracer from instana.span.span import InstanaSpan +from instana.util.config import parse_ignored_endpoints_from_yaml +from tests.helpers import get_first_span_by_filter, testenv class TestConfluentKafka: @@ -120,6 +116,66 @@ def test_trace_confluent_kafka_produce(self) -> None: assert kafka_span.data["kafka"]["service"] == testenv["kafka_topic"] assert kafka_span.data["kafka"]["access"] == "produce" + def test_trace_confluent_kafka_produce_with_keyword_topic(self) -> None: + """Test that tracing works when topic is passed as a keyword argument.""" + with tracer.start_as_current_span("test"): + # Pass topic as a keyword argument + self.producer.produce(topic=testenv["kafka_topic"], value=b"raw_bytes") + self.producer.flush(timeout=10) + + spans = self.recorder.queued_spans() + assert len(spans) == 2 + + kafka_span = spans[0] + test_span = spans[1] + + # Same traceId + assert test_span.t == kafka_span.t + + # Parent relationships + assert kafka_span.p == test_span.s + + # Error logging + assert not test_span.ec + assert not kafka_span.ec + + assert kafka_span.n == "kafka" + assert kafka_span.k == SpanKind.CLIENT + assert kafka_span.data["kafka"]["service"] == testenv["kafka_topic"] + assert kafka_span.data["kafka"]["access"] == "produce" + + def test_trace_confluent_kafka_produce_with_keyword_args(self) -> None: + """Test that tracing works when both topic and headers are passed as keyword arguments.""" + with tracer.start_as_current_span("test"): + # Pass both topic and headers as keyword arguments + self.producer.produce( + topic=testenv["kafka_topic"], + value=b"raw_bytes", + headers=[("custom-header", b"header-value")], + ) + self.producer.flush(timeout=10) + + spans = self.recorder.queued_spans() + assert len(spans) == 2 + + kafka_span = spans[0] + test_span = spans[1] + + # Same traceId + assert test_span.t == kafka_span.t + + # Parent relationships + assert kafka_span.p == 
test_span.s + + # Error logging + assert not test_span.ec + assert not kafka_span.ec + + assert kafka_span.n == "kafka" + assert kafka_span.k == SpanKind.CLIENT + assert kafka_span.data["kafka"]["service"] == testenv["kafka_topic"] + assert kafka_span.data["kafka"]["access"] == "produce" + def test_trace_confluent_kafka_consume(self) -> None: agent.options.set_trace_configurations() # Produce some events From b4263fcda375c056569daa3692a9a5f3c1eb1cfa Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Tue, 28 Oct 2025 16:49:16 +0100 Subject: [PATCH 82/86] fix: IndexError in the `kafka_python.py` Fixed potential IndexError in `kafka_python.py` by handling both positional and keyword arguments for the topic parameter in the `trace_kafka_send` function. The issue is similar to the one fixed in `confluent_kafka_python.py`, where an IndexError could occur when the topic was passed as a keyword argument, resulting in an empty `args` tuple. The solution: 1. Modified the `trace_kafka_send` function to get the topic from either `args` or `kwargs` 2. Added safety checks to handle edge cases 3. Added two new test methods to verify the fix works with different argument patterns: - `test_trace_kafka_python_send_with_keyword_topic` - `test_trace_kafka_python_send_with_keyword_args` This fix ensures that the Kafka instrumentation works correctly regardless of how the `send` method is called, improving the robustness of the Python sensor. Signed-off-by: Paulo Vital --- .../instrumentation/kafka/kafka_python.py | 13 ++-- tests/clients/kafka/test_kafka_python.py | 75 +++++++++++++++++-- 2 files changed, 76 insertions(+), 12 deletions(-) diff --git a/src/instana/instrumentation/kafka/kafka_python.py b/src/instana/instrumentation/kafka/kafka_python.py index 3b1423d3..307b7d52 100644 --- a/src/instana/instrumentation/kafka/kafka_python.py +++ b/src/instana/instrumentation/kafka/kafka_python.py @@ -14,11 +14,8 @@ from instana.log import logger from instana.propagators.format import Format from instana.singletons import get_tracer - from instana.util.traceutils import ( - get_tracer_tuple, - tracing_is_off, - ) from instana.span.span import InstanaSpan + from instana.util.traceutils import get_tracer_tuple, tracing_is_off if TYPE_CHECKING: from kafka.producer.future import FutureRecordMetadata @@ -38,15 +35,19 @@ def trace_kafka_send( tracer, parent_span, _ = get_tracer_tuple() parent_context = parent_span.get_span_context() if parent_span else None + + # Get the topic from either args or kwargs + topic = args[0] if args else kwargs.get("topic", "") + is_suppressed = tracer.exporter._HostAgent__is_endpoint_ignored( "kafka", "send", - args[0], + topic, ) with tracer.start_as_current_span( "kafka-producer", span_context=parent_context, kind=SpanKind.PRODUCER ) as span: - span.set_attribute("kafka.service", args[0]) + span.set_attribute("kafka.service", topic) span.set_attribute("kafka.access", "send") # context propagation diff --git a/tests/clients/kafka/test_kafka_python.py b/tests/clients/kafka/test_kafka_python.py index eb3723e3..a1d0ccbb 100644 --- a/tests/clients/kafka/test_kafka_python.py +++ b/tests/clients/kafka/test_kafka_python.py @@ -12,19 +12,18 @@ from opentelemetry.trace.span import format_span_id from instana.configurator import config -from instana.options import StandardOptions -from instana.singletons import agent, tracer -from instana.util.config import parse_ignored_endpoints_from_yaml -from tests.helpers import get_first_span_by_filter, testenv - from instana.instrumentation.kafka import 
kafka_python from instana.instrumentation.kafka.kafka_python import ( clear_context, - save_consumer_span_into_context, close_consumer_span, consumer_span, + save_consumer_span_into_context, ) +from instana.options import StandardOptions +from instana.singletons import agent, tracer from instana.span.span import InstanaSpan +from instana.util.config import parse_ignored_endpoints_from_yaml +from tests.helpers import get_first_span_by_filter, testenv class TestKafkaPython: @@ -122,6 +121,70 @@ def test_trace_kafka_python_send(self) -> None: assert kafka_span.data["kafka"]["service"] == testenv["kafka_topic"] assert kafka_span.data["kafka"]["access"] == "send" + def test_trace_kafka_python_send_with_keyword_topic(self) -> None: + """Test that tracing works when topic is passed as a keyword argument.""" + with tracer.start_as_current_span("test"): + # Pass topic as a keyword argument + future = self.producer.send( + topic=testenv["kafka_topic"], value=b"raw_bytes" + ) + + _ = future.get(timeout=10) # noqa: F841 + + spans = self.recorder.queued_spans() + assert len(spans) == 2 + + kafka_span = spans[0] + test_span = spans[1] + + # Same traceId + assert test_span.t == kafka_span.t + + # Parent relationships + assert kafka_span.p == test_span.s + + # Error logging + assert not test_span.ec + assert not kafka_span.ec + + assert kafka_span.n == "kafka" + assert kafka_span.k == SpanKind.CLIENT + assert kafka_span.data["kafka"]["service"] == testenv["kafka_topic"] + assert kafka_span.data["kafka"]["access"] == "send" + + def test_trace_kafka_python_send_with_keyword_args(self) -> None: + """Test that tracing works when both topic and headers are passed as keyword arguments.""" + with tracer.start_as_current_span("test"): + # Pass both topic and headers as keyword arguments + future = self.producer.send( + topic=testenv["kafka_topic"], + value=b"raw_bytes", + headers=[("custom-header", b"header-value")], + ) + + _ = future.get(timeout=10) # noqa: F841 + + spans = self.recorder.queued_spans() + assert len(spans) == 2 + + kafka_span = spans[0] + test_span = spans[1] + + # Same traceId + assert test_span.t == kafka_span.t + + # Parent relationships + assert kafka_span.p == test_span.s + + # Error logging + assert not test_span.ec + assert not kafka_span.ec + + assert kafka_span.n == "kafka" + assert kafka_span.k == SpanKind.CLIENT + assert kafka_span.data["kafka"]["service"] == testenv["kafka_topic"] + assert kafka_span.data["kafka"]["access"] == "send" + def test_trace_kafka_python_consume(self) -> None: # Produce some events self.producer.send(testenv["kafka_topic"], b"raw_bytes1") From 4121d8d621d794dccd922fe4ad7de98f79905e66 Mon Sep 17 00:00:00 2001 From: Paulo Vital Date: Wed, 29 Oct 2025 11:42:28 +0100 Subject: [PATCH 83/86] chore(version): Bump version to `3.9.2` Signed-off-by: Paulo Vital --- src/instana/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/instana/version.py b/src/instana/version.py index 9c3e1cdd..1419967c 100644 --- a/src/instana/version.py +++ b/src/instana/version.py @@ -3,4 +3,4 @@ # Module version file. Used by setup.py and snapshot reporting. 
-VERSION = "3.9.1" +VERSION = "3.9.2" From 5bd20fe640b395761b40949b50386a304f5a5d0f Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Thu, 13 Nov 2025 16:23:18 +0530 Subject: [PATCH 84/86] wsgi: Ensure span stays active throughout the response iteration Signed-off-by: Varsha GS --- src/instana/instrumentation/wsgi.py | 105 ++++++++++++++++++---------- 1 file changed, 69 insertions(+), 36 deletions(-) diff --git a/src/instana/instrumentation/wsgi.py b/src/instana/instrumentation/wsgi.py index 5ab7a2f7..5f039c15 100644 --- a/src/instana/instrumentation/wsgi.py +++ b/src/instana/instrumentation/wsgi.py @@ -5,7 +5,7 @@ Instana WSGI Middleware """ -from typing import Dict, Any, Callable, List, Tuple, Optional +from typing import Dict, Any, Callable, List, Tuple, Optional, Iterable, TYPE_CHECKING from opentelemetry.semconv.trace import SpanAttributes from opentelemetry import context, trace @@ -15,6 +15,8 @@ from instana.util.secrets import strip_secrets_from_query from instana.util.traceutils import extract_custom_headers +if TYPE_CHECKING: + from instana.span.span import InstanaSpan class InstanaWSGIMiddleware(object): """Instana WSGI middleware""" @@ -25,15 +27,41 @@ def __init__(self, app: object) -> None: def __call__(self, environ: Dict[str, Any], start_response: Callable) -> object: env = environ + # Extract context and start span + span_context = tracer.extract(Format.HTTP_HEADERS, env) + span = tracer.start_span("wsgi", span_context=span_context) + + # Attach context - this makes the span current + ctx = trace.set_span_in_context(span) + token = context.attach(ctx) + + # Extract custom headers from request + extract_custom_headers(span, env, format=True) + + # Set request attributes + if "PATH_INFO" in env: + span.set_attribute("http.path", env["PATH_INFO"]) + if "QUERY_STRING" in env and len(env["QUERY_STRING"]): + scrubbed_params = strip_secrets_from_query( + env["QUERY_STRING"], + agent.options.secrets_matcher, + agent.options.secrets_list, + ) + span.set_attribute("http.params", scrubbed_params) + if "REQUEST_METHOD" in env: + span.set_attribute(SpanAttributes.HTTP_METHOD, env["REQUEST_METHOD"]) + if "HTTP_HOST" in env: + span.set_attribute("http.host", env["HTTP_HOST"]) + def new_start_response( status: str, headers: List[Tuple[object, ...]], exc_info: Optional[Exception] = None, ) -> object: """Modified start response with additional headers.""" - extract_custom_headers(self.span, headers) + extract_custom_headers(span, headers) - tracer.inject(self.span.context, Format.HTTP_HEADERS, headers) + tracer.inject(span.context, Format.HTTP_HEADERS, headers) headers_str = [ (header[0], str(header[1])) @@ -41,39 +69,44 @@ def new_start_response( else header for header in headers ] - res = start_response(status, headers_str, exc_info) + # Set status code attribute sc = status.split(" ")[0] if 500 <= int(sc): - self.span.mark_as_errored() - - self.span.set_attribute(SpanAttributes.HTTP_STATUS_CODE, sc) - if self.span and self.span.is_recording(): - self.span.end() - if self.token: - context.detach(self.token) - return res - - span_context = tracer.extract(Format.HTTP_HEADERS, env) - self.span = tracer.start_span("wsgi", span_context=span_context) - - ctx = trace.set_span_in_context(self.span) - self.token = context.attach(ctx) - - extract_custom_headers(self.span, env, format=True) - - if "PATH_INFO" in env: - self.span.set_attribute("http.path", env["PATH_INFO"]) - if "QUERY_STRING" in env and len(env["QUERY_STRING"]): - scrubbed_params = strip_secrets_from_query( - env["QUERY_STRING"], - 
agent.options.secrets_matcher, - agent.options.secrets_list, - ) - self.span.set_attribute("http.params", scrubbed_params) - if "REQUEST_METHOD" in env: - self.span.set_attribute(SpanAttributes.HTTP_METHOD, env["REQUEST_METHOD"]) - if "HTTP_HOST" in env: - self.span.set_attribute("http.host", env["HTTP_HOST"]) - - return self.app(environ, new_start_response) + span.mark_as_errored() + + span.set_attribute(SpanAttributes.HTTP_STATUS_CODE, sc) + + return start_response(status, headers_str, exc_info) + + try: + iterable = self.app(environ, new_start_response) + + # Wrap the iterable to ensure span ends after iteration completes + return _end_span_after_iterating(iterable, span, token) + + except Exception as exc: + # If exception occurs before iteration completes, end span and detach token + if span and span.is_recording(): + span.record_exception(exc) + span.end() + if token: + context.detach(token) + raise exc + + +def _end_span_after_iterating( + iterable: Iterable[object], span: "InstanaSpan", token: object +) -> Iterable[object]: + try: + yield from iterable + finally: + # Ensure iterable cleanup (important for generators) + if hasattr(iterable, "close"): + iterable.close() + + # End span and detach token after iteration completes + if span and span.is_recording(): + span.end() + if token: + context.detach(token) From fea91b7170dbc7f59624094630815f99facb466d Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Thu, 13 Nov 2025 16:43:32 +0530 Subject: [PATCH 85/86] chore(wsgi): move setting request attributes to a separate method Signed-off-by: Varsha GS --- src/instana/instrumentation/wsgi.py | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/src/instana/instrumentation/wsgi.py b/src/instana/instrumentation/wsgi.py index 5f039c15..ea020495 100644 --- a/src/instana/instrumentation/wsgi.py +++ b/src/instana/instrumentation/wsgi.py @@ -39,19 +39,7 @@ def __call__(self, environ: Dict[str, Any], start_response: Callable) -> object: extract_custom_headers(span, env, format=True) # Set request attributes - if "PATH_INFO" in env: - span.set_attribute("http.path", env["PATH_INFO"]) - if "QUERY_STRING" in env and len(env["QUERY_STRING"]): - scrubbed_params = strip_secrets_from_query( - env["QUERY_STRING"], - agent.options.secrets_matcher, - agent.options.secrets_list, - ) - span.set_attribute("http.params", scrubbed_params) - if "REQUEST_METHOD" in env: - span.set_attribute(SpanAttributes.HTTP_METHOD, env["REQUEST_METHOD"]) - if "HTTP_HOST" in env: - span.set_attribute("http.host", env["HTTP_HOST"]) + _set_request_attributes(span, env) def new_start_response( status: str, @@ -110,3 +98,18 @@ def _end_span_after_iterating( span.end() if token: context.detach(token) + +def _set_request_attributes(span: "InstanaSpan", env: Dict[str, Any]) -> None: + if "PATH_INFO" in env: + span.set_attribute("http.path", env["PATH_INFO"]) + if "QUERY_STRING" in env and len(env["QUERY_STRING"]): + scrubbed_params = strip_secrets_from_query( + env["QUERY_STRING"], + agent.options.secrets_matcher, + agent.options.secrets_list, + ) + span.set_attribute("http.params", scrubbed_params) + if "REQUEST_METHOD" in env: + span.set_attribute(SpanAttributes.HTTP_METHOD, env["REQUEST_METHOD"]) + if "HTTP_HOST" in env: + span.set_attribute(SpanAttributes.HTTP_HOST, env["HTTP_HOST"]) From 6bc03da7cf5b099fc6565cb391398607c2fb7312 Mon Sep 17 00:00:00 2001 From: Varsha GS Date: Thu, 13 Nov 2025 17:03:02 +0530 Subject: [PATCH 86/86] chore(version): Bump version to 3.9.3 Signed-off-by: Varsha GS 
--- src/instana/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/instana/version.py b/src/instana/version.py index 1419967c..6db3016f 100644 --- a/src/instana/version.py +++ b/src/instana/version.py @@ -3,4 +3,4 @@ # Module version file. Used by setup.py and snapshot reporting. -VERSION = "3.9.2" +VERSION = "3.9.3"
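
A note on the shared pattern behind patches 81 and 82: both fixes replace a bare `args[0]` lookup with a positional-or-keyword resolution, because a wrapped method invoked as `produce(topic="my-topic")` arrives at the wrapper with an empty `args` tuple. The sketch below distills that pattern outside the sensor; the `produce` signature is a toy stand-in for illustration, not the real confluent-kafka or kafka-python API.

    # Minimal sketch of the positional-or-keyword lookup applied in
    # patches 81 and 82. `produce` is a stand-in, not a real Kafka client.

    def produce(topic, value=None, headers=None):
        return (topic, value, headers)


    def traced_produce(wrapped, args, kwargs):
        # A bare `args[0]` raises IndexError for produce(topic="t"),
        # since args is then (). Fall back to kwargs, then to "", as
        # both patches do.
        topic = args[0] if args else kwargs.get("topic", "")

        # Headers may arrive positionally, as a keyword, or explicitly
        # as None; normalize to a list before appending tracing metadata.
        headers = args[2] if len(args) > 2 else kwargs.get("headers", [])
        if headers is None:
            headers = []

        print(f"tracing produce to topic={topic!r} with {len(headers)} header(s)")
        return wrapped(*args, **kwargs)


    # Both call styles now resolve the same topic:
    traced_produce(produce, ("my-topic", b"payload"), {})
    traced_produce(produce, (), {"topic": "my-topic", "value": b"payload"})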
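
Also worth noting in both Kafka diffs: the suppression check is reached through `tracer.exporter._HostAgent__is_endpoint_ignored(...)`. That spelling is Python's name mangling for double-underscore attributes, not a typo; a method named `__is_endpoint_ignored` on a `HostAgent` class is only reachable externally under its mangled name. A toy illustration, with the class and method names taken from the call site and the body invented for the demo:

    # Double-underscore attributes are rewritten to _ClassName__attr at
    # class-definition time, so outside callers must use the mangled form.

    class HostAgent:
        def __is_endpoint_ignored(self, service: str) -> bool:
            return service.startswith("ignored-")


    agent = HostAgent()
    print(agent._HostAgent__is_endpoint_ignored("ignored-topic"))  # True
    print(agent._HostAgent__is_endpoint_ignored("orders"))         # False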
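
Finally, the heart of patch 84 is a lifecycle change: the span must not end inside `start_response`, because a WSGI server consumes the response body only afterwards, so a streaming body would run outside the span and the attached context would be detached too early. The `_end_span_after_iterating` generator defers both teardown steps until iteration finishes. A reduced, self-contained sketch of that idea, with a plain callback standing in for the `span.end()` plus `context.detach(token)` pair:

    # Defer cleanup until a WSGI response iterable is fully consumed.
    # `cleanup` stands in for the span/token teardown in the real patch.

    from typing import Callable, Iterable, Iterator


    def end_after_iterating(
        iterable: Iterable[bytes], cleanup: Callable[[], None]
    ) -> Iterator[bytes]:
        try:
            yield from iterable
        finally:
            # PEP 3333 obliges the server to call close() on bodies that
            # define it; mirror that so generator bodies are finalized,
            # then run the deferred teardown exactly once, even when the
            # server abandons the body early.
            if hasattr(iterable, "close"):
                iterable.close()
            cleanup()


    def body() -> Iterator[bytes]:
        yield b"chunk-1"
        yield b"chunk-2"


    for chunk in end_after_iterating(body(), lambda: print("span ended")):
        print(chunk)
    # "span ended" prints only after the final chunk is consumed, not
    # when the response headers are sent.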