From 48c5a749a0c60a11ece26cbfe9dd1ad3789ae1d3 Mon Sep 17 00:00:00 2001 From: Danju Visvanathan Date: Wed, 18 Feb 2026 01:55:58 +1100 Subject: [PATCH 1/7] feat: add metrics hook Signed-off-by: Danju Visvanathan --- .../contrib/hook/opentelemetry/__init__.py | 79 +--------- .../contrib/hook/opentelemetry/constants.py | 19 +++ .../contrib/hook/opentelemetry/metric.py | 78 +++++++++ .../contrib/hook/opentelemetry/trace.py | 65 ++++++++ .../tests/test_metric.py | 149 ++++++++++++++++++ .../tests/{test_otel.py => test_trace.py} | 10 +- 6 files changed, 319 insertions(+), 81 deletions(-) create mode 100644 hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py create mode 100644 hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py create mode 100644 hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py create mode 100644 hooks/openfeature-hooks-opentelemetry/tests/test_metric.py rename hooks/openfeature-hooks-opentelemetry/tests/{test_otel.py => test_trace.py} (93%) diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/__init__.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/__init__.py index 56799f6c..361d2a5a 100644 --- a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/__init__.py +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/__init__.py @@ -1,77 +1,4 @@ -import json +from .metric import MetricsHook +from .trace import TracingHook -from openfeature.exception import ErrorCode -from openfeature.flag_evaluation import FlagEvaluationDetails, Reason -from openfeature.hook import Hook, HookContext, HookHints -from opentelemetry import trace -from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE - -OTEL_EVENT_NAME = "feature_flag.evaluation" - - -class EventAttributes: - KEY = "feature_flag.key" - RESULT_VALUE = "feature_flag.result.value" - RESULT_VARIANT = "feature_flag.result.variant" - CONTEXT_ID = "feature_flag.context.id" - PROVIDER_NAME = "feature_flag.provider.name" - RESULT_REASON = "feature_flag.result.reason" - SET_ID = "feature_flag.set.id" - VERSION = "feature_flag.version" - - -class TracingHook(Hook): - def __init__(self, exclude_exceptions: bool = False): - self.exclude_exceptions = exclude_exceptions - - def finally_after( - self, - hook_context: HookContext, - details: FlagEvaluationDetails, - hints: HookHints, - ) -> None: - current_span = trace.get_current_span() - - event_attributes = { - EventAttributes.KEY: details.flag_key, - EventAttributes.RESULT_VALUE: json.dumps(details.value), - EventAttributes.RESULT_REASON: str( - details.reason or Reason.UNKNOWN - ).lower(), - } - - if details.variant: - event_attributes[EventAttributes.RESULT_VARIANT] = details.variant - - if details.reason == Reason.ERROR: - error_type = str(details.error_code or ErrorCode.GENERAL).lower() - event_attributes[ERROR_TYPE] = error_type - if details.error_message: - event_attributes["error.message"] = details.error_message - - context = hook_context.evaluation_context - if context.targeting_key: - event_attributes[EventAttributes.CONTEXT_ID] = context.targeting_key - - if hook_context.provider_metadata: - event_attributes[EventAttributes.PROVIDER_NAME] = ( - hook_context.provider_metadata.name - ) - - current_span.add_event(OTEL_EVENT_NAME, event_attributes) - - def error( - self, hook_context: HookContext, exception: 
Exception, hints: HookHints - ) -> None: - if self.exclude_exceptions: - return - attributes = { - EventAttributes.KEY: hook_context.flag_key, - EventAttributes.RESULT_VALUE: json.dumps(hook_context.default_value), - } - if hook_context.provider_metadata: - attributes[EventAttributes.PROVIDER_NAME] = ( - hook_context.provider_metadata.name - ) - current_span = trace.get_current_span() - current_span.record_exception(exception, attributes) +__all__ = ["MetricsHook", "TracingHook"] diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py new file mode 100644 index 00000000..c7c91180 --- /dev/null +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py @@ -0,0 +1,19 @@ +class Attributes: + OTEL_CONTEXT_ID = "feature_flag.context.id" + OTEL_EVENT_NAME = "feature_flag.evaluation" + OTEL_ERROR_TYPE = "error.type" + OTEL_ERROR_MESSAGE = "error.message" + OTEL_FLAG_KEY = "feature_flag.key" + OTEL_FLAG_VARIANT = "feature_flag.result.variant" + OTEL_PROVIDER_NAME = "feature_flag.provider.name" + OTEL_RESULT_VALUE = "feature_flag.result.value" + OTEL_RESULT_REASON = "feature_flag.result.reason" + OTEL_SET_ID = "feature_flag.set.id" + OTEL_VERSION = "feature_flag.version" + + +class Metrics: + ACTIVE_TOTAL = "feature_flag.evaluation.active_total" + SUCCESS_TOTAL = "feature_flag.evaluation.success_total" + REQUEST_TOTAL = "feature_flag.evaluation.request_total" + ERROR_TOTAL = "feature_flag.evaluation.error_total" diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py new file mode 100644 index 00000000..a8f55ee9 --- /dev/null +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py @@ -0,0 +1,78 @@ +from openfeature.flag_evaluation import FlagEvaluationDetails +from openfeature.hook import Hook, HookContext, HookHints +from opentelemetry import metrics + +from .constants import Attributes, Metrics + + +class MetricsHook(Hook): + def __init__(self) -> None: + meter: metrics.Meter = metrics.get_meter("openfeature.hooks.opentelemetry") + self.evaluation_active_total = meter.create_up_down_counter( + Metrics.ACTIVE_TOTAL, "active flag evaluations" + ) + self.evaluation_error_total = meter.create_counter( + Metrics.ERROR_TOTAL, "error flag evaluations" + ) + self.evaluation_success_total = meter.create_counter( + Metrics.SUCCESS_TOTAL, "success flag evaluations" + ) + self.evaluation_request_total = meter.create_counter( + Metrics.REQUEST_TOTAL, "request flag evaluations" + ) + + def before(self, hook_context: HookContext, hints: HookHints) -> None: + attributes = { + Attributes.OTEL_FLAG_KEY: hook_context.flag_key, + } + if hook_context.provider_metadata: + attributes[Attributes.OTEL_PROVIDER_NAME] = ( + hook_context.provider_metadata.name + ) + self.evaluation_active_total.add(1, attributes) + self.evaluation_request_total.add(1, attributes) + + def after( + self, + hook_context: HookContext, + details: FlagEvaluationDetails, + hints: HookHints, + ) -> None: + attributes = { + Attributes.OTEL_FLAG_KEY: details.flag_key, + } + if details.variant: + attributes[Attributes.OTEL_FLAG_VARIANT] = details.variant + if hook_context.provider_metadata: + attributes[Attributes.OTEL_PROVIDER_NAME] = ( + hook_context.provider_metadata.name + ) + 
self.evaluation_success_total.add(1, attributes) + + def error( + self, hook_context: HookContext, exception: Exception, hints: HookHints + ) -> None: + attributes = { + Attributes.OTEL_FLAG_KEY: hook_context.flag_key, + "exception": str(exception).lower(), + } + if hook_context.provider_metadata: + attributes[Attributes.OTEL_PROVIDER_NAME] = ( + hook_context.provider_metadata.name + ) + self.evaluation_error_total.add(1, attributes) + + def finally_after( + self, + hook_context: HookContext, + details: FlagEvaluationDetails, + hints: HookHints, + ) -> None: + attributes = { + Attributes.OTEL_FLAG_KEY: hook_context.flag_key, + } + if hook_context.provider_metadata: + attributes[Attributes.OTEL_PROVIDER_NAME] = ( + hook_context.provider_metadata.name + ) + self.evaluation_active_total.add(-1, attributes) diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py new file mode 100644 index 00000000..1fac4a92 --- /dev/null +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py @@ -0,0 +1,65 @@ +import json + +from openfeature.exception import ErrorCode +from openfeature.flag_evaluation import FlagEvaluationDetails, Reason +from openfeature.hook import Hook, HookContext, HookHints +from opentelemetry import trace + +from .constants import Attributes + + +class TracingHook(Hook): + def __init__(self, exclude_exceptions: bool = False): + self.exclude_exceptions = exclude_exceptions + + def finally_after( + self, + hook_context: HookContext, + details: FlagEvaluationDetails, + hints: HookHints, + ) -> None: + current_span = trace.get_current_span() + + event_attributes = { + Attributes.OTEL_FLAG_KEY: details.flag_key, + Attributes.OTEL_RESULT_VALUE: json.dumps(details.value), + Attributes.OTEL_RESULT_REASON: str( + details.reason or Reason.UNKNOWN + ).lower(), + } + + if details.variant: + event_attributes[Attributes.OTEL_FLAG_VARIANT] = details.variant + + if details.reason == Reason.ERROR: + error_type = str(details.error_code or ErrorCode.GENERAL).lower() + event_attributes[Attributes.OTEL_ERROR_TYPE] = error_type + if details.error_message: + event_attributes["error.message"] = details.error_message + + context = hook_context.evaluation_context + if context.targeting_key: + event_attributes[Attributes.OTEL_CONTEXT_ID] = context.targeting_key + + if hook_context.provider_metadata: + event_attributes[Attributes.OTEL_PROVIDER_NAME] = ( + hook_context.provider_metadata.name + ) + + current_span.add_event(Attributes.OTEL_EVENT_NAME, event_attributes) + + def error( + self, hook_context: HookContext, exception: Exception, hints: HookHints + ) -> None: + if self.exclude_exceptions: + return + attributes = { + Attributes.OTEL_FLAG_KEY: hook_context.flag_key, + Attributes.OTEL_RESULT_VALUE: json.dumps(hook_context.default_value), + } + if hook_context.provider_metadata: + attributes[Attributes.OTEL_PROVIDER_NAME] = ( + hook_context.provider_metadata.name + ) + current_span = trace.get_current_span() + current_span.record_exception(exception, attributes) diff --git a/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py new file mode 100644 index 00000000..e4d11023 --- /dev/null +++ b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py @@ -0,0 +1,149 @@ +from unittest.mock import Mock + +import pytest +from opentelemetry import metrics + +from 
openfeature.contrib.hook.opentelemetry import MetricsHook +from openfeature.evaluation_context import EvaluationContext +from openfeature.flag_evaluation import Reason +from openfeature.hook import FlagEvaluationDetails, FlagType, HookContext +from openfeature.provider.metadata import Metadata + + +@pytest.fixture +def mock_get_meter(monkeypatch): + mock_counters = { + "feature_flag.evaluation.active_total": Mock(spec=metrics.UpDownCounter), + "feature_flag.evaluation.error_total": Mock(spec=metrics.Counter), + "feature_flag.evaluation.success_total": Mock(spec=metrics.Counter), + "feature_flag.evaluation.request_total": Mock(spec=metrics.Counter), + } + + def side_effect(*args, **kwargs): + return mock_counters[args[0]] + + mock_meter = Mock( + spec=metrics.Meter, + create_up_down_counter=side_effect, + create_counter=side_effect, + ) + monkeypatch.setattr(metrics, "get_meter", lambda name: mock_meter) + + return mock_meter, mock_counters + + +def test_metric_before(mock_get_meter): + _, mock_counters = mock_get_meter + hook = MetricsHook() + hook_context = HookContext( + flag_key="flag_key", + flag_type=FlagType.BOOLEAN, + default_value=False, + evaluation_context=EvaluationContext(), + provider_metadata=Metadata(name="test-provider"), + ) + + hook.before(hook_context, hints={}) + mock_counters["feature_flag.evaluation.active_total"].add.assert_called_once_with( + 1, + { + "feature_flag.key": "flag_key", + "feature_flag.provider.name": "test-provider", + }, + ) + mock_counters["feature_flag.evaluation.request_total"].add.assert_called_once_with( + 1, + { + "feature_flag.key": "flag_key", + "feature_flag.provider.name": "test-provider", + }, + ) + mock_counters["feature_flag.evaluation.error_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called() + + +def test_metric_after(mock_get_meter): + _, mock_counters = mock_get_meter + hook = MetricsHook() + hook_context = HookContext( + flag_key="flag_key", + flag_type=FlagType.BOOLEAN, + default_value=False, + evaluation_context=EvaluationContext(), + provider_metadata=Metadata(name="test-provider"), + ) + details = FlagEvaluationDetails( + flag_key="flag_key", + value=True, + variant="enabled", + reason=Reason.TARGETING_MATCH, + error_code=None, + error_message=None, + ) + hook.after(hook_context, details, hints={}) + mock_counters["feature_flag.evaluation.success_total"].add.assert_called_once_with( + 1, + { + "feature_flag.key": "flag_key", + "feature_flag.result.variant": "enabled", + "feature_flag.provider.name": "test-provider", + }, + ) + mock_counters["feature_flag.evaluation.error_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.active_total"].add.assert_not_called() + + +def test_metric_error(mock_get_meter): + _, mock_counters = mock_get_meter + hook = MetricsHook() + hook_context = HookContext( + flag_key="flag_key", + flag_type=FlagType.BOOLEAN, + default_value=False, + evaluation_context=EvaluationContext(), + provider_metadata=Metadata(name="test-provider"), + ) + hook.error(hook_context, Exception("test error"), hints={}) + mock_counters["feature_flag.evaluation.error_total"].add.assert_called_once_with( + 1, + { + "feature_flag.key": "flag_key", + "feature_flag.provider.name": "test-provider", + "exception": "test error", + }, + ) + mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called() + 
mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.active_total"].add.assert_not_called() + + +def test_metric_finally_after(mock_get_meter): + _, mock_counters = mock_get_meter + hook = MetricsHook() + hook_context = HookContext( + flag_key="flag_key", + flag_type=FlagType.BOOLEAN, + default_value=False, + evaluation_context=EvaluationContext(), + provider_metadata=Metadata(name="test-provider"), + ) + details = FlagEvaluationDetails( + flag_key="flag_key", + value=True, + variant="enabled", + reason=Reason.TARGETING_MATCH, + error_code=None, + error_message=None, + ) + hook.finally_after(hook_context, details, hints={}) + mock_counters["feature_flag.evaluation.active_total"].add.assert_called_once_with( + -1, + { + "feature_flag.key": "flag_key", + "feature_flag.provider.name": "test-provider", + }, + ) + mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.error_total"].add.assert_not_called() diff --git a/hooks/openfeature-hooks-opentelemetry/tests/test_otel.py b/hooks/openfeature-hooks-opentelemetry/tests/test_trace.py similarity index 93% rename from hooks/openfeature-hooks-opentelemetry/tests/test_otel.py rename to hooks/openfeature-hooks-opentelemetry/tests/test_trace.py index b0279f47..82b9ccbf 100644 --- a/hooks/openfeature-hooks-opentelemetry/tests/test_otel.py +++ b/hooks/openfeature-hooks-opentelemetry/tests/test_trace.py @@ -17,7 +17,7 @@ def mock_get_current_span(monkeypatch): monkeypatch.setattr(trace, "get_current_span", Mock()) -def test_finally_after(mock_get_current_span): +def test_trace_finally_after(mock_get_current_span): # Given hook = TracingHook() hook_context = HookContext( @@ -56,7 +56,7 @@ def test_finally_after(mock_get_current_span): ) -def test_after_evaluation_error(mock_get_current_span): +def test_trace_after_evaluation_error(mock_get_current_span): # Given hook = TracingHook() hook_context = HookContext( @@ -94,7 +94,7 @@ def test_after_evaluation_error(mock_get_current_span): ) -def test_error(mock_get_current_span): +def test_trace_error(mock_get_current_span): # Given hook = TracingHook() hook_context = HookContext( @@ -121,7 +121,7 @@ def test_error(mock_get_current_span): mock_span.record_exception.assert_called_once_with(exception, attributes) -def test_error_exclude_exceptions(mock_get_current_span): +def test_trace_error_exclude_exceptions(mock_get_current_span): # Given hook = TracingHook(exclude_exceptions=True) hook_context = HookContext( @@ -141,7 +141,7 @@ def test_error_exclude_exceptions(mock_get_current_span): mock_span.record_exception.assert_not_called() -def test_error_no_provider_metadata(mock_get_current_span): +def test_trace_error_no_provider_metadata(mock_get_current_span): # Given hook = TracingHook() hook_context = HookContext( From f10820cfbe28092c2d6838abd65c65d31a1e7825 Mon Sep 17 00:00:00 2001 From: Danju Visvanathan Date: Thu, 19 Feb 2026 01:26:00 +1100 Subject: [PATCH 2/7] fix: use attribute name from constants Signed-off-by: Danju Visvanathan --- .../src/openfeature/contrib/hook/opentelemetry/trace.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py index 1fac4a92..af2faa26 100644 --- 
a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py @@ -35,7 +35,7 @@ def finally_after( error_type = str(details.error_code or ErrorCode.GENERAL).lower() event_attributes[Attributes.OTEL_ERROR_TYPE] = error_type if details.error_message: - event_attributes["error.message"] = details.error_message + event_attributes[Attributes.OTEL_ERROR_MESSAGE] = details.error_message context = hook_context.evaluation_context if context.targeting_key: From a6bd8c677af886fa2e0397be12d28a86c8ddff1d Mon Sep 17 00:00:00 2001 From: Danju Visvanathan Date: Thu, 19 Feb 2026 02:30:21 +1100 Subject: [PATCH 3/7] fix: add reason attribute to feature_flag.evaluation_success metric Signed-off-by: Danju Visvanathan --- .../src/openfeature/contrib/hook/opentelemetry/metric.py | 5 ++++- hooks/openfeature-hooks-opentelemetry/tests/test_metric.py | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py index a8f55ee9..2c54973a 100644 --- a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py @@ -1,4 +1,4 @@ -from openfeature.flag_evaluation import FlagEvaluationDetails +from openfeature.flag_evaluation import FlagEvaluationDetails, Reason from openfeature.hook import Hook, HookContext, HookHints from opentelemetry import metrics @@ -40,6 +40,9 @@ def after( ) -> None: attributes = { Attributes.OTEL_FLAG_KEY: details.flag_key, + Attributes.OTEL_RESULT_REASON: str( + details.reason or Reason.UNKNOWN + ).lower(), } if details.variant: attributes[Attributes.OTEL_FLAG_VARIANT] = details.variant diff --git a/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py index e4d11023..ede4671c 100644 --- a/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py +++ b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py @@ -85,6 +85,7 @@ def test_metric_after(mock_get_meter): 1, { "feature_flag.key": "flag_key", + "feature_flag.result.reason": "targeting_match", "feature_flag.result.variant": "enabled", "feature_flag.provider.name": "test-provider", }, From 6e657af9d4972c332b6cae04f002590e9c90b445 Mon Sep 17 00:00:00 2001 From: Danju Visvanathan Date: Fri, 20 Feb 2026 22:45:38 +1100 Subject: [PATCH 4/7] fix: rename active_total -> active_count Signed-off-by: Danju Visvanathan --- .../contrib/hook/opentelemetry/constants.py | 2 +- .../contrib/hook/opentelemetry/metric.py | 17 +++++++++-------- .../tests/test_metric.py | 10 +++++----- 3 files changed, 15 insertions(+), 14 deletions(-) diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py index c7c91180..4ebbe852 100644 --- a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py @@ -13,7 +13,7 @@ class Attributes: class Metrics: - ACTIVE_TOTAL = "feature_flag.evaluation.active_total" + ACTIVE_COUNT = "feature_flag.evaluation.active_count" SUCCESS_TOTAL = 
"feature_flag.evaluation.success_total" REQUEST_TOTAL = "feature_flag.evaluation.request_total" ERROR_TOTAL = "feature_flag.evaluation.error_total" diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py index 2c54973a..26486f1e 100644 --- a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py @@ -1,6 +1,7 @@ from openfeature.flag_evaluation import FlagEvaluationDetails, Reason from openfeature.hook import Hook, HookContext, HookHints from opentelemetry import metrics +from opentelemetry.util.types import AttributeValue from .constants import Attributes, Metrics @@ -8,8 +9,8 @@ class MetricsHook(Hook): def __init__(self) -> None: meter: metrics.Meter = metrics.get_meter("openfeature.hooks.opentelemetry") - self.evaluation_active_total = meter.create_up_down_counter( - Metrics.ACTIVE_TOTAL, "active flag evaluations" + self.evaluation_active_count = meter.create_up_down_counter( + Metrics.ACTIVE_COUNT, "active flag evaluations" ) self.evaluation_error_total = meter.create_counter( Metrics.ERROR_TOTAL, "error flag evaluations" @@ -22,14 +23,14 @@ def __init__(self) -> None: ) def before(self, hook_context: HookContext, hints: HookHints) -> None: - attributes = { + attributes: dict[str, AttributeValue] = { Attributes.OTEL_FLAG_KEY: hook_context.flag_key, } if hook_context.provider_metadata: attributes[Attributes.OTEL_PROVIDER_NAME] = ( hook_context.provider_metadata.name ) - self.evaluation_active_total.add(1, attributes) + self.evaluation_active_count.add(1, attributes) self.evaluation_request_total.add(1, attributes) def after( @@ -38,7 +39,7 @@ def after( details: FlagEvaluationDetails, hints: HookHints, ) -> None: - attributes = { + attributes: dict[str, AttributeValue] = { Attributes.OTEL_FLAG_KEY: details.flag_key, Attributes.OTEL_RESULT_REASON: str( details.reason or Reason.UNKNOWN @@ -55,7 +56,7 @@ def after( def error( self, hook_context: HookContext, exception: Exception, hints: HookHints ) -> None: - attributes = { + attributes: dict[str, AttributeValue] = { Attributes.OTEL_FLAG_KEY: hook_context.flag_key, "exception": str(exception).lower(), } @@ -71,11 +72,11 @@ def finally_after( details: FlagEvaluationDetails, hints: HookHints, ) -> None: - attributes = { + attributes: dict[str, AttributeValue] = { Attributes.OTEL_FLAG_KEY: hook_context.flag_key, } if hook_context.provider_metadata: attributes[Attributes.OTEL_PROVIDER_NAME] = ( hook_context.provider_metadata.name ) - self.evaluation_active_total.add(-1, attributes) + self.evaluation_active_count.add(-1, attributes) diff --git a/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py index ede4671c..5b9f8d92 100644 --- a/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py +++ b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py @@ -13,7 +13,7 @@ @pytest.fixture def mock_get_meter(monkeypatch): mock_counters = { - "feature_flag.evaluation.active_total": Mock(spec=metrics.UpDownCounter), + "feature_flag.evaluation.active_count": Mock(spec=metrics.UpDownCounter), "feature_flag.evaluation.error_total": Mock(spec=metrics.Counter), "feature_flag.evaluation.success_total": Mock(spec=metrics.Counter), "feature_flag.evaluation.request_total": Mock(spec=metrics.Counter), @@ -44,7 +44,7 @@ 
def test_metric_before(mock_get_meter): ) hook.before(hook_context, hints={}) - mock_counters["feature_flag.evaluation.active_total"].add.assert_called_once_with( + mock_counters["feature_flag.evaluation.active_count"].add.assert_called_once_with( 1, { "feature_flag.key": "flag_key", @@ -92,7 +92,7 @@ def test_metric_after(mock_get_meter): ) mock_counters["feature_flag.evaluation.error_total"].add.assert_not_called() mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called() - mock_counters["feature_flag.evaluation.active_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.active_count"].add.assert_not_called() def test_metric_error(mock_get_meter): @@ -116,7 +116,7 @@ def test_metric_error(mock_get_meter): ) mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called() mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called() - mock_counters["feature_flag.evaluation.active_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.active_count"].add.assert_not_called() def test_metric_finally_after(mock_get_meter): @@ -138,7 +138,7 @@ def test_metric_finally_after(mock_get_meter): error_message=None, ) hook.finally_after(hook_context, details, hints={}) - mock_counters["feature_flag.evaluation.active_total"].add.assert_called_once_with( + mock_counters["feature_flag.evaluation.active_count"].add.assert_called_once_with( -1, { "feature_flag.key": "flag_key", From 5f330c76fbb250b465c5e0fe3722e9bcf327e885 Mon Sep 17 00:00:00 2001 From: Danju Visvanathan Date: Fri, 20 Feb 2026 23:30:02 +1100 Subject: [PATCH 5/7] feat: add parameter to extract extra attributes from FlagMetadata Signed-off-by: Danju Visvanathan --- .../contrib/hook/opentelemetry/metric.py | 20 ++++++- .../tests/test_metric.py | 57 +++++++++++++++++++ 2 files changed, 75 insertions(+), 2 deletions(-) diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py index 26486f1e..e604849b 100644 --- a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py @@ -1,4 +1,6 @@ -from openfeature.flag_evaluation import FlagEvaluationDetails, Reason +import typing + +from openfeature.flag_evaluation import FlagEvaluationDetails, FlagMetadata, Reason from openfeature.hook import Hook, HookContext, HookHints from opentelemetry import metrics from opentelemetry.util.types import AttributeValue @@ -7,7 +9,8 @@ class MetricsHook(Hook): - def __init__(self) -> None: + def __init__(self, extra_attributes: typing.Optional[list[str]] = None) -> None: + self.extra_attributes = extra_attributes or [] meter: metrics.Meter = metrics.get_meter("openfeature.hooks.opentelemetry") self.evaluation_active_count = meter.create_up_down_counter( Metrics.ACTIVE_COUNT, "active flag evaluations" @@ -79,4 +82,17 @@ def finally_after( attributes[Attributes.OTEL_PROVIDER_NAME] = ( hook_context.provider_metadata.name ) + attributes = attributes | attributes_from_dimensions( + self.extra_attributes, details.flag_metadata + ) self.evaluation_active_count.add(-1, attributes) + + +def attributes_from_dimensions( + extra_attributes: list[str], metadata: FlagMetadata +) -> dict[str, AttributeValue]: + attributes: dict[str, AttributeValue] = {} + for attribute in extra_attributes: + if (attr := metadata.get(attribute)) is 
not None: + attributes[attribute] = attr + return attributes diff --git a/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py index 5b9f8d92..bd26bcbf 100644 --- a/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py +++ b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py @@ -148,3 +148,60 @@ def test_metric_finally_after(mock_get_meter): mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called() mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called() mock_counters["feature_flag.evaluation.error_total"].add.assert_not_called() + + +def test_metric_finally_after_with_extra_dimensions(mock_get_meter): + _, mock_counters = mock_get_meter + hook = MetricsHook(extra_attributes=["scope", "test"]) + hook_context = HookContext( + flag_key="flag_key", + flag_type=FlagType.BOOLEAN, + default_value=False, + evaluation_context=EvaluationContext(), + provider_metadata=Metadata(name="test-provider"), + ) + details = FlagEvaluationDetails( + flag_key="flag_key", + value=True, + variant="enabled", + reason=Reason.TARGETING_MATCH, + flag_metadata={"scope": "application", "test": True}, + ) + hook.finally_after(hook_context, details, hints={}) + mock_counters["feature_flag.evaluation.active_count"].add.assert_called_once_with( + -1, + { + "feature_flag.key": "flag_key", + "feature_flag.provider.name": "test-provider", + "scope": "application", + "test": True, + }, + ) + + +def test_metric_finally_after_with_extra_dimensions_missing_attribute(mock_get_meter): + _, mock_counters = mock_get_meter + hook = MetricsHook(extra_attributes=["scope", "test"]) + hook_context = HookContext( + flag_key="flag_key", + flag_type=FlagType.BOOLEAN, + default_value=False, + evaluation_context=EvaluationContext(), + provider_metadata=Metadata(name="test-provider"), + ) + details = FlagEvaluationDetails( + flag_key="flag_key", + value=True, + variant="enabled", + reason=Reason.TARGETING_MATCH, + flag_metadata={"test": True}, + ) + hook.finally_after(hook_context, details, hints={}) + mock_counters["feature_flag.evaluation.active_count"].add.assert_called_once_with( + -1, + { + "feature_flag.key": "flag_key", + "feature_flag.provider.name": "test-provider", + "test": True, + }, + ) From f9fb3581df2214a37a8d5331cb9bfaeed4ec87b5 Mon Sep 17 00:00:00 2001 From: Danju Visvanathan Date: Sat, 21 Feb 2026 00:06:33 +1100 Subject: [PATCH 6/7] fix: move extra attribute retrieval to after hook Signed-off-by: Danju Visvanathan --- .../contrib/hook/opentelemetry/metric.py | 6 +- .../tests/test_metric.py | 78 ++++++++++--------- 2 files changed, 45 insertions(+), 39 deletions(-) diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py index e604849b..ff4fbbfb 100644 --- a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py @@ -54,6 +54,9 @@ def after( attributes[Attributes.OTEL_PROVIDER_NAME] = ( hook_context.provider_metadata.name ) + attributes = attributes | attributes_from_dimensions( + self.extra_attributes, details.flag_metadata + ) self.evaluation_success_total.add(1, attributes) def error( @@ -82,9 +85,6 @@ def finally_after( attributes[Attributes.OTEL_PROVIDER_NAME] = ( hook_context.provider_metadata.name ) - attributes = attributes | 
attributes_from_dimensions( - self.extra_attributes, details.flag_metadata - ) self.evaluation_active_count.add(-1, attributes) diff --git a/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py index bd26bcbf..f649773f 100644 --- a/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py +++ b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py @@ -95,9 +95,9 @@ def test_metric_after(mock_get_meter): mock_counters["feature_flag.evaluation.active_count"].add.assert_not_called() -def test_metric_error(mock_get_meter): +def test_metric_after_with_extra_dimensions(mock_get_meter): _, mock_counters = mock_get_meter - hook = MetricsHook() + hook = MetricsHook(extra_attributes=["scope", "test"]) hook_context = HookContext( flag_key="flag_key", flag_type=FlagType.BOOLEAN, @@ -105,23 +105,32 @@ def test_metric_error(mock_get_meter): evaluation_context=EvaluationContext(), provider_metadata=Metadata(name="test-provider"), ) - hook.error(hook_context, Exception("test error"), hints={}) - mock_counters["feature_flag.evaluation.error_total"].add.assert_called_once_with( + details = FlagEvaluationDetails( + flag_key="flag_key", + value=True, + variant="enabled", + reason=Reason.TARGETING_MATCH, + flag_metadata={"scope": "application", "test": True}, + error_code=None, + error_message=None, + ) + hook.after(hook_context, details, hints={}) + mock_counters["feature_flag.evaluation.success_total"].add.assert_called_once_with( 1, { "feature_flag.key": "flag_key", + "feature_flag.result.reason": "targeting_match", + "feature_flag.result.variant": "enabled", "feature_flag.provider.name": "test-provider", - "exception": "test error", + "scope": "application", + "test": True, }, ) - mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called() - mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called() - mock_counters["feature_flag.evaluation.active_count"].add.assert_not_called() -def test_metric_finally_after(mock_get_meter): +def test_metric_after_with_extra_dimensions_missing_attribute(mock_get_meter): _, mock_counters = mock_get_meter - hook = MetricsHook() + hook = MetricsHook(extra_attributes=["scope", "test"]) hook_context = HookContext( flag_key="flag_key", flag_type=FlagType.BOOLEAN, @@ -134,25 +143,24 @@ def test_metric_finally_after(mock_get_meter): value=True, variant="enabled", reason=Reason.TARGETING_MATCH, - error_code=None, - error_message=None, + flag_metadata={"test": True}, ) - hook.finally_after(hook_context, details, hints={}) - mock_counters["feature_flag.evaluation.active_count"].add.assert_called_once_with( - -1, + hook.after(hook_context, details, hints={}) + mock_counters["feature_flag.evaluation.success_total"].add.assert_called_once_with( + 1, { "feature_flag.key": "flag_key", + "feature_flag.result.reason": "targeting_match", + "feature_flag.result.variant": "enabled", "feature_flag.provider.name": "test-provider", + "test": True, }, ) - mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called() - mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called() - mock_counters["feature_flag.evaluation.error_total"].add.assert_not_called() -def test_metric_finally_after_with_extra_dimensions(mock_get_meter): +def test_metric_error(mock_get_meter): _, mock_counters = mock_get_meter - hook = MetricsHook(extra_attributes=["scope", "test"]) + hook = MetricsHook() hook_context = HookContext( flag_key="flag_key", flag_type=FlagType.BOOLEAN, @@ 
-160,28 +168,23 @@ def test_metric_finally_after_with_extra_dimensions(mock_get_meter): evaluation_context=EvaluationContext(), provider_metadata=Metadata(name="test-provider"), ) - details = FlagEvaluationDetails( - flag_key="flag_key", - value=True, - variant="enabled", - reason=Reason.TARGETING_MATCH, - flag_metadata={"scope": "application", "test": True}, - ) - hook.finally_after(hook_context, details, hints={}) - mock_counters["feature_flag.evaluation.active_count"].add.assert_called_once_with( - -1, + hook.error(hook_context, Exception("test error"), hints={}) + mock_counters["feature_flag.evaluation.error_total"].add.assert_called_once_with( + 1, { "feature_flag.key": "flag_key", "feature_flag.provider.name": "test-provider", - "scope": "application", - "test": True, + "exception": "test error", }, ) + mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.active_count"].add.assert_not_called() -def test_metric_finally_after_with_extra_dimensions_missing_attribute(mock_get_meter): +def test_metric_finally_after(mock_get_meter): _, mock_counters = mock_get_meter - hook = MetricsHook(extra_attributes=["scope", "test"]) + hook = MetricsHook() hook_context = HookContext( flag_key="flag_key", flag_type=FlagType.BOOLEAN, @@ -194,7 +197,8 @@ def test_metric_finally_after_with_extra_dimensions_missing_attribute(mock_get_m value=True, variant="enabled", reason=Reason.TARGETING_MATCH, - flag_metadata={"test": True}, + error_code=None, + error_message=None, ) hook.finally_after(hook_context, details, hints={}) mock_counters["feature_flag.evaluation.active_count"].add.assert_called_once_with( @@ -202,6 +206,8 @@ def test_metric_finally_after_with_extra_dimensions_missing_attribute(mock_get_m { "feature_flag.key": "flag_key", "feature_flag.provider.name": "test-provider", - "test": True, }, ) + mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called() + mock_counters["feature_flag.evaluation.error_total"].add.assert_not_called() From 1a86d277f992507d5921f0ab57e2089ea948768e Mon Sep 17 00:00:00 2001 From: Danju Visvanathan Date: Sat, 21 Feb 2026 00:13:51 +1100 Subject: [PATCH 7/7] chore: rename extra attributes method Signed-off-by: Danju Visvanathan --- .../src/openfeature/contrib/hook/opentelemetry/metric.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py index ff4fbbfb..5823b38c 100644 --- a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py +++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py @@ -54,7 +54,7 @@ def after( attributes[Attributes.OTEL_PROVIDER_NAME] = ( hook_context.provider_metadata.name ) - attributes = attributes | attributes_from_dimensions( + attributes = attributes | get_extra_attributes( self.extra_attributes, details.flag_metadata ) self.evaluation_success_total.add(1, attributes) @@ -88,7 +88,7 @@ def finally_after( self.evaluation_active_count.add(-1, attributes) -def attributes_from_dimensions( +def get_extra_attributes( extra_attributes: list[str], metadata: FlagMetadata ) -> dict[str, AttributeValue]: attributes: dict[str, AttributeValue] 
= {}
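
Usage sketch (not part of the patch series above): a minimal registration example for the two hooks introduced in these patches. It assumes the OpenFeature Python SDK's `api.add_hooks` entry point and that an OpenTelemetry meter/tracer provider is configured separately; the `"scope"` key passed to `extra_attributes` is illustrative, mirroring the value used in the tests.

# Illustrative only — not part of the committed diffs.
# Assumes openfeature-sdk plus an OpenTelemetry SDK are installed and that
# a MeterProvider/TracerProvider have already been set up elsewhere.
from openfeature import api
from openfeature.contrib.hook.opentelemetry import MetricsHook, TracingHook

# Register both hooks globally. Every flag evaluation then emits the
# feature_flag.evaluation span event (TracingHook) and increments the
# request/success/error counters plus the active up-down counter (MetricsHook).
api.add_hooks(
    [
        TracingHook(exclude_exceptions=False),
        # extra_attributes copies the named FlagMetadata keys onto the
        # success metric's attributes (moved to the after hook in patch 6/7).
        MetricsHook(extra_attributes=["scope"]),
    ]
)

Because both hooks fall back to the OpenTelemetry no-op meter and span implementations when no provider is configured, registering them in an application without telemetry wiring is safe but produces no data.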